# This file was auto-generated by Fern from our API Definition.

import typing
import urllib.parse
from json.decoder import JSONDecodeError

from ...core.api_error import ApiError
from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ...core.jsonable_encoder import jsonable_encoder
from ...core.remove_none_from_dict import remove_none_from_dict
from ...errors.unprocessable_entity_error import UnprocessableEntityError
from ...types.configurable_data_source_names import ConfigurableDataSourceNames
from ...types.data_source import DataSource
from ...types.data_source_create import DataSourceCreate
from ...types.http_validation_error import HttpValidationError
from .types.data_source_update_component import DataSourceUpdateComponent
from .types.data_source_update_custom_metadata_value import DataSourceUpdateCustomMetadataValue

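# Prefer the pydantic v1 compatibility layer when pydantic v2 is installed;
# fall back to importing pydantic directly on installations that only have v1.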
try:
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)
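# Because OMIT is a distinct sentinel (Ellipsis) rather than None, request-building
# code can tell "argument not provided" apart from an explicit None and leave
# unset fields out of the request body entirely.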


class DataSourcesClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def list_data_sources(
        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
    ) -> typing.List[DataSource]:
        """
        List data sources for a given project.
        If project_id is not provided, the default project is used.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].
        ---
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.list_data_sources()
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
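        # Response handling follows the same pattern in every method of this module:
        # a 2xx response is parsed into the declared return type, a 422 raises
        # UnprocessableEntityError carrying the validation error, and any other status
        # raises ApiError with the JSON body when it can be decoded, else the raw text.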
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.List[DataSource], _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def create_data_source(
        self,
        *,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
        request: DataSourceCreate,
    ) -> DataSource:
        """
        Create a new data source.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].

            - request: DataSourceCreate.
        ---
        from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.create_data_source(
            request=DataSourceCreate(
                name="string",
                source_type=ConfigurableDataSourceNames.S_3,
            ),
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            json=jsonable_encoder(request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def upsert_data_source(
        self,
        *,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
        request: DataSourceCreate,
    ) -> DataSource:
        """
        Upsert a data source.
        If a data source with the same name and project_id already exists, it is updated; otherwise, a new data source is created.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].

            - request: DataSourceCreate.
        ---
        from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.upsert_data_source(
            request=DataSourceCreate(
                name="string",
                source_type=ConfigurableDataSourceNames.S_3,
            ),
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            json=jsonable_encoder(request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def get_data_source(self, data_source_id: str) -> DataSource:
        """
        Get a data source by ID.

        Parameters:
            - data_source_id: str.
        ---
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.get_data_source(
            data_source_id="string",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def update_data_source(
        self,
        data_source_id: str,
        *,
        name: typing.Optional[str] = OMIT,
        source_type: ConfigurableDataSourceNames,
        custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceUpdateCustomMetadataValue]]] = OMIT,
        component: typing.Optional[DataSourceUpdateComponent] = OMIT,
    ) -> DataSource:
        """
        Update a data source by ID.

        Parameters:
            - data_source_id: str.

            - name: typing.Optional[str].

            - source_type: ConfigurableDataSourceNames.

            - custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceUpdateCustomMetadataValue]]].

            - component: typing.Optional[DataSourceUpdateComponent]. Component that implements the data source.
        ---
        from llama_cloud import ConfigurableDataSourceNames
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.update_data_source(
            data_source_id="string",
            source_type=ConfigurableDataSourceNames.S_3,
        )
        """
        _request: typing.Dict[str, typing.Any] = {"source_type": source_type}
        if name is not OMIT:
            _request["name"] = name
        if custom_metadata is not OMIT:
            _request["custom_metadata"] = custom_metadata
        if component is not OMIT:
            _request["component"] = component
        _response = self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            json=jsonable_encoder(_request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    def delete_data_source(self, data_source_id: str) -> None:
        """
        Delete a data source by ID.

        Parameters:
            - data_source_id: str.
        ---
        from llama_cloud.client import LlamaCloud

        client = LlamaCloud(
            token="YOUR_TOKEN",
        )
        client.data_sources.delete_data_source(
            data_source_id="string",
        )
        """
        _response = self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)


class AsyncDataSourcesClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def list_data_sources(
        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
    ) -> typing.List[DataSource]:
        """
        List data sources for a given project.
        If project_id is not provided, the default project is used.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].
        ---
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.list_data_sources()
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(typing.List[DataSource], _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def create_data_source(
        self,
        *,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
        request: DataSourceCreate,
    ) -> DataSource:
        """
        Create a new data source.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].

            - request: DataSourceCreate.
        ---
        from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.create_data_source(
            request=DataSourceCreate(
                name="string",
                source_type=ConfigurableDataSourceNames.S_3,
            ),
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "POST",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            json=jsonable_encoder(request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def upsert_data_source(
        self,
        *,
        project_id: typing.Optional[str] = None,
        organization_id: typing.Optional[str] = None,
        request: DataSourceCreate,
    ) -> DataSource:
        """
        Upsert a data source.
        If a data source with the same name and project_id already exists, it is updated; otherwise, a new data source is created.

        Parameters:
            - project_id: typing.Optional[str].

            - organization_id: typing.Optional[str].

            - request: DataSourceCreate.
        ---
        from llama_cloud import ConfigurableDataSourceNames, DataSourceCreate
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.upsert_data_source(
            request=DataSourceCreate(
                name="string",
                source_type=ConfigurableDataSourceNames.S_3,
            ),
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
            json=jsonable_encoder(request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def get_data_source(self, data_source_id: str) -> DataSource:
        """
        Get a data source by ID.

        Parameters:
            - data_source_id: str.
        ---
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.get_data_source(
            data_source_id="string",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "GET",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def update_data_source(
        self,
        data_source_id: str,
        *,
        name: typing.Optional[str] = OMIT,
        source_type: ConfigurableDataSourceNames,
        custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceUpdateCustomMetadataValue]]] = OMIT,
        component: typing.Optional[DataSourceUpdateComponent] = OMIT,
    ) -> DataSource:
        """
        Update a data source by ID.

        Parameters:
            - data_source_id: str.

            - name: typing.Optional[str].

            - source_type: ConfigurableDataSourceNames.

            - custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceUpdateCustomMetadataValue]]].

            - component: typing.Optional[DataSourceUpdateComponent]. Component that implements the data source.
        ---
        from llama_cloud import ConfigurableDataSourceNames
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.update_data_source(
            data_source_id="string",
            source_type=ConfigurableDataSourceNames.S_3,
        )
        """
        _request: typing.Dict[str, typing.Any] = {"source_type": source_type}
        if name is not OMIT:
            _request["name"] = name
        if custom_metadata is not OMIT:
            _request["custom_metadata"] = custom_metadata
        if component is not OMIT:
            _request["component"] = component
        _response = await self._client_wrapper.httpx_client.request(
            "PUT",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            json=jsonable_encoder(_request),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return pydantic.parse_obj_as(DataSource, _response.json())  # type: ignore
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)

    async def delete_data_source(self, data_source_id: str) -> None:
        """
        Delete a data source by ID.

        Parameters:
            - data_source_id: str.
        ---
        from llama_cloud.client import AsyncLlamaCloud

        client = AsyncLlamaCloud(
            token="YOUR_TOKEN",
        )
        await client.data_sources.delete_data_source(
            data_source_id="string",
        )
        """
        _response = await self._client_wrapper.httpx_client.request(
            "DELETE",
            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
            headers=self._client_wrapper.get_headers(),
            timeout=60,
        )
        if 200 <= _response.status_code < 300:
            return
        if _response.status_code == 422:
            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
        try:
            _response_json = _response.json()
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
