From 5a4e732451386df8c2d333b32737d1755327dfae Mon Sep 17 00:00:00 2001
From: "fern-api[bot]" <115122769+fern-api[bot]@users.noreply.github.com>
Date: Mon, 8 Apr 2024 11:18:29 -0500
Subject: [PATCH] SDK regeneration (#465)

Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
---
 poetry.lock | 12 +-
 pyproject.toml | 2 +-
 src/cohere/base_client.py | 232 ++++++++---
 src/cohere/connectors/client.py | 226 ++++++++---
 src/cohere/core/__init__.py | 4 +
 src/cohere/core/client_wrapper.py | 2 +-
 src/cohere/core/unchecked_base_model.py | 191 +++++++++
 src/cohere/datasets/client.py | 62 ++-
 .../types/datasets_create_response.py | 3 +-
 .../datasets/types/datasets_get_response.py | 3 +-
 .../types/datasets_get_usage_response.py | 3 +-
 .../datasets/types/datasets_list_response.py | 3 +-
 src/cohere/embed_jobs/client.py | 126 ++++--
 .../create_embed_job_request_truncate.py | 2 +-
 src/cohere/finetuning/client.py | 366 +++++++++++++-----
 .../finetuning/finetuning/types/base_model.py | 3 +-
 .../finetuning/finetuning/types/base_type.py | 2 +-
 .../types/create_finetuned_model_response.py | 3 +-
 .../finetuning/finetuning/types/error.py | 3 +-
 .../finetuning/finetuning/types/event.py | 3 +-
 .../finetuning/types/finetuned_model.py | 3 +-
 .../types/get_finetuned_model_response.py | 3 +-
 .../finetuning/types/hyperparameters.py | 3 +-
 .../finetuning/types/list_events_response.py | 3 +-
 .../types/list_finetuned_models_response.py | 3 +-
 .../list_training_step_metrics_response.py | 3 +-
 .../finetuning/finetuning/types/settings.py | 3 +-
 .../finetuning/finetuning/types/status.py | 2 +-
 .../finetuning/finetuning/types/strategy.py | 2 +-
 .../finetuning/types/training_step_metrics.py | 3 +-
 .../types/update_finetuned_model_response.py | 3 +-
 src/cohere/models/client.py | 42 +-
 src/cohere/types/api_meta.py | 3 +-
 src/cohere/types/api_meta_api_version.py | 3 +-
 src/cohere/types/api_meta_billed_units.py | 3 +-
 src/cohere/types/auth_token_type.py | 2 +-
 src/cohere/types/chat_citation.py | 3 +-
 src/cohere/types/chat_connector.py | 3 +-
 src/cohere/types/chat_data_metrics.py | 9 +-
 src/cohere/types/chat_message.py | 3 +-
 src/cohere/types/chat_message_role.py | 2 +-
 .../types/chat_request_citation_quality.py | 2 +-
 .../chat_request_connectors_search_options.py | 3 +-
 .../types/chat_request_prompt_truncation.py | 2 +-
 .../types/chat_request_tool_results_item.py | 3 +-
 src/cohere/types/chat_search_query.py | 3 +-
 src/cohere/types/chat_search_result.py | 3 +-
 .../types/chat_search_result_connector.py | 3 +-
 .../chat_stream_end_event_finish_reason.py | 2 +-
 src/cohere/types/chat_stream_event.py | 3 +-
 .../chat_stream_request_citation_quality.py | 2 +-
 ...tream_request_connectors_search_options.py | 3 +-
 .../chat_stream_request_prompt_truncation.py | 2 +-
 .../chat_stream_request_tool_results_item.py | 3 +-
 src/cohere/types/classify_data_metrics.py | 7 +-
 src/cohere/types/classify_example.py | 3 +-
 src/cohere/types/classify_request_truncate.py | 2 +-
 src/cohere/types/classify_response.py | 3 +-
 .../classify_response_classifications_item.py | 3 +-
 ...lassifications_item_classification_type.py | 2 +-
 ...ponse_classifications_item_labels_value.py | 3 +-
 src/cohere/types/compatible_endpoint.py | 2 +-
 src/cohere/types/connector.py | 3 +-
 src/cohere/types/connector_auth_status.py | 2 +-
 src/cohere/types/connector_o_auth.py | 3 +-
 src/cohere/types/create_connector_o_auth.py | 3 +-
 src/cohere/types/create_connector_response.py | 3 +-
 .../types/create_connector_service_auth.py | 3 +-
src/cohere/types/create_embed_job_response.py | 3 +- src/cohere/types/dataset.py | 3 +- src/cohere/types/dataset_part.py | 3 +- src/cohere/types/dataset_type.py | 2 +- src/cohere/types/dataset_validation_status.py | 2 +- src/cohere/types/detokenize_response.py | 3 +- src/cohere/types/embed_by_type_response.py | 3 +- .../embed_by_type_response_embeddings.py | 3 +- src/cohere/types/embed_floats_response.py | 3 +- src/cohere/types/embed_input_type.py | 2 +- src/cohere/types/embed_job.py | 3 +- src/cohere/types/embed_job_status.py | 4 +- src/cohere/types/embed_job_truncate.py | 2 +- src/cohere/types/embed_request_truncate.py | 2 +- src/cohere/types/embed_response.py | 8 +- src/cohere/types/embedding_type.py | 2 +- src/cohere/types/finetune_dataset_metrics.py | 17 +- src/cohere/types/finish_reason.py | 2 +- .../generate_request_return_likelihoods.py | 2 +- src/cohere/types/generate_request_truncate.py | 2 +- .../types/generate_stream_end_response.py | 3 +- src/cohere/types/generate_stream_event.py | 3 +- ...erate_stream_request_return_likelihoods.py | 2 +- .../types/generate_stream_request_truncate.py | 2 +- .../types/generate_streamed_response.py | 12 +- src/cohere/types/generation.py | 3 +- src/cohere/types/get_connector_response.py | 3 +- src/cohere/types/get_model_response.py | 8 +- src/cohere/types/label_metric.py | 7 +- src/cohere/types/list_connectors_response.py | 3 +- src/cohere/types/list_embed_job_response.py | 3 +- src/cohere/types/list_models_response.py | 3 +- src/cohere/types/metrics.py | 3 +- .../types/non_streamed_chat_response.py | 3 +- src/cohere/types/o_auth_authorize_response.py | 3 +- src/cohere/types/parse_info.py | 3 +- .../rerank_request_documents_item_text.py | 3 +- src/cohere/types/rerank_response.py | 3 +- .../types/rerank_response_results_item.py | 3 +- .../rerank_response_results_item_document.py | 3 +- src/cohere/types/reranker_data_metrics.py | 19 +- src/cohere/types/single_generation.py | 3 +- .../types/single_generation_in_stream.py | 3 +- ...ingle_generation_token_likelihoods_item.py | 3 +- src/cohere/types/streamed_chat_response.py | 22 +- .../types/summarize_request_extractiveness.py | 2 +- src/cohere/types/summarize_request_format.py | 2 +- src/cohere/types/summarize_request_length.py | 2 +- src/cohere/types/summarize_response.py | 3 +- src/cohere/types/tokenize_response.py | 3 +- src/cohere/types/tool.py | 3 +- src/cohere/types/tool_call.py | 3 +- .../types/tool_parameter_definitions_value.py | 3 +- src/cohere/types/update_connector_response.py | 3 +- 122 files changed, 1198 insertions(+), 454 deletions(-) create mode 100644 src/cohere/core/unchecked_base_model.py diff --git a/poetry.lock b/poetry.lock index 97f643052..df37a25c7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -891,13 +891,13 @@ telegram = ["requests"] [[package]] name = "types-requests" -version = "2.31.0.20240403" +version = "2.31.0.20240406" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240403.tar.gz", hash = "sha256:e1e0cd0b655334f39d9f872b68a1310f0e343647688bf2cee932ec4c2b04de59"}, - {file = "types_requests-2.31.0.20240403-py3-none-any.whl", hash = "sha256:06abf6a68f5c4f2a62f6bb006672dfb26ed50ccbfddb281e1ee6f09a65707d5d"}, + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, ] 
[package.dependencies] @@ -905,13 +905,13 @@ urllib3 = ">=2" [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index e903ed669..3d18127b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cohere" -version = "5.2.2" +version = "5.2.3" description = "" readme = "README.md" authors = [] diff --git a/src/cohere/base_client.py b/src/cohere/base_client.py index 3a18df675..94b64354f 100644 --- a/src/cohere/base_client.py +++ b/src/cohere/base_client.py @@ -12,9 +12,9 @@ from .core.api_error import ApiError from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .core.jsonable_encoder import jsonable_encoder -from .core.pydantic_utilities import pydantic_v1 from .core.remove_none_from_dict import remove_none_from_dict from .core.request_options import RequestOptions +from .core.unchecked_base_model import construct_type from .datasets.client import AsyncDatasetsClient, DatasetsClient from .embed_jobs.client import AsyncEmbedJobsClient, EmbedJobsClient from .environment import ClientEnvironment @@ -133,6 +133,7 @@ def chat_stream( documents: typing.Optional[typing.Sequence[ChatDocument]] = OMIT, temperature: typing.Optional[float] = OMIT, max_tokens: typing.Optional[int] = OMIT, + max_input_tokens: typing.Optional[int] = OMIT, k: typing.Optional[int] = OMIT, p: typing.Optional[float] = OMIT, seed: typing.Optional[float] = OMIT, @@ -213,6 +214,10 @@ def chat_stream( - max_tokens: typing.Optional[int]. The maximum number of tokens the model will generate as part of the response. Note: Setting a low value may result in incomplete generations. + - max_input_tokens: typing.Optional[int]. The maximum number of input tokens to send to the model. If not specified, `max_input_tokens` is the model's context length limit minus a small buffer. + + Input will be truncated according to the `prompt_truncation` parameter. + - k: typing.Optional[int]. Ensures only the top `k` most likely tokens are considered for generation at each step. Defaults to `0`, min value of `0`, max value of `500`. 
@@ -301,6 +306,7 @@ def chat_stream( citation_quality="fast", temperature=1.1, max_tokens=1, + max_input_tokens=1, k=1, p=1.1, seed=1.1, @@ -357,6 +363,8 @@ def chat_stream( _request["temperature"] = temperature if max_tokens is not OMIT: _request["max_tokens"] = max_tokens + if max_input_tokens is not OMIT: + _request["max_input_tokens"] = max_input_tokens if k is not OMIT: _request["k"] = k if p is not OMIT: @@ -405,11 +413,13 @@ def chat_stream( for _text in _response.iter_lines(): if len(_text) == 0: continue - yield pydantic_v1.parse_obj_as(StreamedChatResponse, json.loads(_text)) # type: ignore + yield typing.cast(StreamedChatResponse, construct_type(type_=StreamedChatResponse, object_=json.loads(_text))) # type: ignore return _response.read() if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -430,6 +440,7 @@ def chat( documents: typing.Optional[typing.Sequence[ChatDocument]] = OMIT, temperature: typing.Optional[float] = OMIT, max_tokens: typing.Optional[int] = OMIT, + max_input_tokens: typing.Optional[int] = OMIT, k: typing.Optional[int] = OMIT, p: typing.Optional[float] = OMIT, seed: typing.Optional[float] = OMIT, @@ -510,6 +521,10 @@ def chat( - max_tokens: typing.Optional[int]. The maximum number of tokens the model will generate as part of the response. Note: Setting a low value may result in incomplete generations. + - max_input_tokens: typing.Optional[int]. The maximum number of input tokens to send to the model. If not specified, `max_input_tokens` is the model's context length limit minus a small buffer. + + Input will be truncated according to the `prompt_truncation` parameter. + - k: typing.Optional[int]. Ensures only the top `k` most likely tokens are considered for generation at each step. Defaults to `0`, min value of `0`, max value of `500`. 
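The regenerated `chat` and `chat_stream` methods both gain the optional `max_input_tokens` parameter documented above. A minimal sketch of the synchronous, non-streaming call, assuming a `cohere.Client` built with a placeholder API key (the values here are illustrative, not SDK defaults):

```python
import cohere

co = cohere.Client("YOUR_API_KEY")  # placeholder key, assumed client setup

# Cap the prompt at 2048 input tokens; longer input is truncated according to
# `prompt_truncation`, while the generated reply is capped by `max_tokens`.
response = co.chat(
    message="Summarize the latest SDK changes for me.",
    max_input_tokens=2048,
    max_tokens=256,
)
print(response.text)
```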
@@ -602,6 +617,8 @@ def chat( _request["temperature"] = temperature if max_tokens is not OMIT: _request["max_tokens"] = max_tokens + if max_input_tokens is not OMIT: + _request["max_input_tokens"] = max_input_tokens if k is not OMIT: _request["k"] = k if p is not OMIT: @@ -647,9 +664,11 @@ def chat( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(NonStreamedChatResponse, _response.json()) # type: ignore + return typing.cast(NonStreamedChatResponse, construct_type(type_=NonStreamedChatResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -826,15 +845,21 @@ def generate_stream( for _text in _response.iter_lines(): if len(_text) == 0: continue - yield pydantic_v1.parse_obj_as(GenerateStreamedResponse, json.loads(_text)) # type: ignore + yield typing.cast(GenerateStreamedResponse, construct_type(type_=GenerateStreamedResponse, object_=json.loads(_text))) # type: ignore return _response.read() if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -994,13 +1019,19 @@ def generate( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Generation, _response.json()) # type: ignore + return typing.cast(Generation, construct_type(type_=Generation, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1107,13 +1138,19 @@ def embed( max_retries=request_options.get("max_retries") if request_options is not None 
else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmbedResponse, _response.json()) # type: ignore + return typing.cast(EmbedResponse, construct_type(type_=EmbedResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1206,9 +1243,11 @@ def rerank( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RerankResponse, _response.json()) # type: ignore + return typing.cast(RerankResponse, construct_type(type_=RerankResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1332,13 +1371,19 @@ def classify( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ClassifyResponse, _response.json()) # type: ignore + return typing.cast(ClassifyResponse, construct_type(type_=ClassifyResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1431,9 +1476,11 @@ def summarize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SummarizeResponse, _response.json()) # type: ignore + return typing.cast(SummarizeResponse, construct_type(type_=SummarizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, 
_response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1491,13 +1538,19 @@ def tokenize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TokenizeResponse, _response.json()) # type: ignore + return typing.cast(TokenizeResponse, construct_type(type_=TokenizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1555,9 +1608,11 @@ def detokenize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DetokenizeResponse, _response.json()) # type: ignore + return typing.cast(DetokenizeResponse, construct_type(type_=DetokenizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1639,6 +1694,7 @@ async def chat_stream( documents: typing.Optional[typing.Sequence[ChatDocument]] = OMIT, temperature: typing.Optional[float] = OMIT, max_tokens: typing.Optional[int] = OMIT, + max_input_tokens: typing.Optional[int] = OMIT, k: typing.Optional[int] = OMIT, p: typing.Optional[float] = OMIT, seed: typing.Optional[float] = OMIT, @@ -1719,6 +1775,10 @@ async def chat_stream( - max_tokens: typing.Optional[int]. The maximum number of tokens the model will generate as part of the response. Note: Setting a low value may result in incomplete generations. + - max_input_tokens: typing.Optional[int]. The maximum number of input tokens to send to the model. If not specified, `max_input_tokens` is the model's context length limit minus a small buffer. + + Input will be truncated according to the `prompt_truncation` parameter. + - k: typing.Optional[int]. Ensures only the top `k` most likely tokens are considered for generation at each step. Defaults to `0`, min value of `0`, max value of `500`. 
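Streamed lines are now materialized with `construct_type` into the `StreamedChatResponse` union instead of being strictly validated, for the synchronous and asynchronous clients alike. A hedged sketch of consuming the synchronous stream (the event type strings are assumed from the chat stream event types shipped with this SDK):

```python
import cohere

co = cohere.Client("YOUR_API_KEY")  # placeholder key, assumed client setup

# chat_stream yields members of the StreamedChatResponse union as they arrive.
for event in co.chat_stream(message="Stream one short sentence, please."):
    if event.event_type == "text-generation":
        print(event.text, end="", flush=True)
    elif event.event_type == "stream-end":
        print()  # final event; carries the finish reason and aggregated response
```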
@@ -1807,6 +1867,7 @@ async def chat_stream( citation_quality="fast", temperature=1.1, max_tokens=1, + max_input_tokens=1, k=1, p=1.1, seed=1.1, @@ -1863,6 +1924,8 @@ async def chat_stream( _request["temperature"] = temperature if max_tokens is not OMIT: _request["max_tokens"] = max_tokens + if max_input_tokens is not OMIT: + _request["max_input_tokens"] = max_input_tokens if k is not OMIT: _request["k"] = k if p is not OMIT: @@ -1911,11 +1974,13 @@ async def chat_stream( async for _text in _response.aiter_lines(): if len(_text) == 0: continue - yield pydantic_v1.parse_obj_as(StreamedChatResponse, json.loads(_text)) # type: ignore + yield typing.cast(StreamedChatResponse, construct_type(type_=StreamedChatResponse, object_=json.loads(_text))) # type: ignore return await _response.aread() if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1936,6 +2001,7 @@ async def chat( documents: typing.Optional[typing.Sequence[ChatDocument]] = OMIT, temperature: typing.Optional[float] = OMIT, max_tokens: typing.Optional[int] = OMIT, + max_input_tokens: typing.Optional[int] = OMIT, k: typing.Optional[int] = OMIT, p: typing.Optional[float] = OMIT, seed: typing.Optional[float] = OMIT, @@ -2016,6 +2082,10 @@ async def chat( - max_tokens: typing.Optional[int]. The maximum number of tokens the model will generate as part of the response. Note: Setting a low value may result in incomplete generations. + - max_input_tokens: typing.Optional[int]. The maximum number of input tokens to send to the model. If not specified, `max_input_tokens` is the model's context length limit minus a small buffer. + + Input will be truncated according to the `prompt_truncation` parameter. + - k: typing.Optional[int]. Ensures only the top `k` most likely tokens are considered for generation at each step. Defaults to `0`, min value of `0`, max value of `500`. 
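The asynchronous client mirrors the same addition. A minimal async sketch, assuming `cohere.AsyncClient` as the user-facing counterpart of the `AsyncClientWrapper` used in this module:

```python
import asyncio

import cohere


async def main() -> None:
    co = cohere.AsyncClient("YOUR_API_KEY")  # placeholder key, assumed client setup
    response = await co.chat(
        message="Give me one sentence on lenient response parsing.",
        max_input_tokens=1024,
        max_tokens=128,
    )
    print(response.text)


asyncio.run(main())
```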
@@ -2108,6 +2178,8 @@ async def chat( _request["temperature"] = temperature if max_tokens is not OMIT: _request["max_tokens"] = max_tokens + if max_input_tokens is not OMIT: + _request["max_input_tokens"] = max_input_tokens if k is not OMIT: _request["k"] = k if p is not OMIT: @@ -2153,9 +2225,11 @@ async def chat( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(NonStreamedChatResponse, _response.json()) # type: ignore + return typing.cast(NonStreamedChatResponse, construct_type(type_=NonStreamedChatResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2332,15 +2406,21 @@ async def generate_stream( async for _text in _response.aiter_lines(): if len(_text) == 0: continue - yield pydantic_v1.parse_obj_as(GenerateStreamedResponse, json.loads(_text)) # type: ignore + yield typing.cast(GenerateStreamedResponse, construct_type(type_=GenerateStreamedResponse, object_=json.loads(_text))) # type: ignore return await _response.aread() if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2500,13 +2580,19 @@ async def generate( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Generation, _response.json()) # type: ignore + return typing.cast(Generation, construct_type(type_=Generation, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2613,13 +2699,19 @@ async def embed( 
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmbedResponse, _response.json()) # type: ignore + return typing.cast(EmbedResponse, construct_type(type_=EmbedResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2712,9 +2804,11 @@ async def rerank( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RerankResponse, _response.json()) # type: ignore + return typing.cast(RerankResponse, construct_type(type_=RerankResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2838,13 +2932,19 @@ async def classify( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ClassifyResponse, _response.json()) # type: ignore + return typing.cast(ClassifyResponse, construct_type(type_=ClassifyResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2937,9 +3037,11 @@ async def summarize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(SummarizeResponse, _response.json()) # type: ignore + return typing.cast(SummarizeResponse, construct_type(type_=SummarizeResponse, object_=_response.json())) # type: ignore if 
_response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -2997,13 +3099,19 @@ async def tokenize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TokenizeResponse, _response.json()) # type: ignore + return typing.cast(TokenizeResponse, construct_type(type_=TokenizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -3061,9 +3169,11 @@ async def detokenize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DetokenizeResponse, _response.json()) # type: ignore + return typing.cast(DetokenizeResponse, construct_type(type_=DetokenizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/connectors/client.py b/src/cohere/connectors/client.py index 4c896cd62..f41892062 100644 --- a/src/cohere/connectors/client.py +++ b/src/cohere/connectors/client.py @@ -7,9 +7,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 from ..core.remove_none_from_dict import remove_none_from_dict from ..core.request_options import RequestOptions +from ..core.unchecked_base_model import construct_type from ..errors.bad_request_error import BadRequestError from ..errors.forbidden_error import ForbiddenError from ..errors.internal_server_error import InternalServerError @@ -88,13 +88,19 @@ def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListConnectorsResponse, _response.json()) # type: ignore + return typing.cast(ListConnectorsResponse, construct_type(type_=ListConnectorsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, 
_response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -187,15 +193,23 @@ def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateConnectorResponse, _response.json()) # type: ignore + return typing.cast(CreateConnectorResponse, construct_type(type_=CreateConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -242,15 +256,23 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetConnectorResponse, _response.json()) # type: ignore + return typing.cast(GetConnectorResponse, construct_type(type_=GetConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - 
raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -297,17 +319,27 @@ def delete(self, id: str, *, request_options: typing.Optional[RequestOptions] = max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteConnectorResponse, _response.json()) # type: ignore + return typing.cast(DeleteConnectorResponse, construct_type(type_=DeleteConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -401,17 +433,27 @@ def update( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UpdateConnectorResponse, _response.json()) # type: ignore + return typing.cast(UpdateConnectorResponse, construct_type(type_=UpdateConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # 
type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -480,15 +522,23 @@ def o_auth_authorize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OAuthAuthorizeResponse, _response.json()) # type: ignore + return typing.cast(OAuthAuthorizeResponse, construct_type(type_=OAuthAuthorizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -556,13 +606,19 @@ async def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListConnectorsResponse, _response.json()) # type: ignore + return typing.cast(ListConnectorsResponse, construct_type(type_=ListConnectorsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -655,15 +711,23 @@ async def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateConnectorResponse, _response.json()) # type: ignore + return typing.cast(CreateConnectorResponse, construct_type(type_=CreateConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code 
== 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -710,15 +774,23 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetConnectorResponse, _response.json()) # type: ignore + return typing.cast(GetConnectorResponse, construct_type(type_=GetConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -767,17 +839,27 @@ async def delete( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteConnectorResponse, _response.json()) # type: ignore + return typing.cast(DeleteConnectorResponse, construct_type(type_=DeleteConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, 
object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -871,17 +953,27 @@ async def update( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UpdateConnectorResponse, _response.json()) # type: ignore + return typing.cast(UpdateConnectorResponse, construct_type(type_=UpdateConnectorResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -950,15 +1042,23 @@ async def o_auth_authorize( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OAuthAuthorizeResponse, _response.json()) # type: ignore + return typing.cast(OAuthAuthorizeResponse, construct_type(type_=OAuthAuthorizeResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, 
object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/core/__init__.py b/src/cohere/core/__init__.py index 78a7f80fb..c30fc6789 100644 --- a/src/cohere/core/__init__.py +++ b/src/cohere/core/__init__.py @@ -9,6 +9,7 @@ from .pydantic_utilities import pydantic_v1 from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions +from .unchecked_base_model import UncheckedBaseModel, UnionMetadata, construct_type __all__ = [ "ApiError", @@ -19,6 +20,9 @@ "HttpClient", "RequestOptions", "SyncClientWrapper", + "UncheckedBaseModel", + "UnionMetadata", + "construct_type", "convert_file_dict_to_httpx_tuples", "jsonable_encoder", "pydantic_v1", diff --git a/src/cohere/core/client_wrapper.py b/src/cohere/core/client_wrapper.py index 5d0dd6958..071ab6621 100644 --- a/src/cohere/core/client_wrapper.py +++ b/src/cohere/core/client_wrapper.py @@ -25,7 +25,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "cohere", - "X-Fern-SDK-Version": "5.2.2", + "X-Fern-SDK-Version": "5.2.3", } if self._client_name is not None: headers["X-Client-Name"] = self._client_name diff --git a/src/cohere/core/unchecked_base_model.py b/src/cohere/core/unchecked_base_model.py new file mode 100644 index 000000000..6b984ba8d --- /dev/null +++ b/src/cohere/core/unchecked_base_model.py @@ -0,0 +1,191 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import inspect +import typing +import uuid + +import typing_extensions + +from .datetime_utils import serialize_datetime +from .pydantic_utilities import pydantic_v1 + + +class UnionMetadata: + discriminant: str + + def __init__(self, *, discriminant: str) -> None: + self.discriminant = discriminant + + +Model = typing.TypeVar("Model", bound=pydantic_v1.BaseModel) + + +class UncheckedBaseModel(pydantic_v1.BaseModel): + # Allow extra fields + class Config: + extra = pydantic_v1.Extra.allow + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + # Allow construct to not validate model + # Implementation taken from: https://github.com/pydantic/pydantic/issues/1168#issuecomment-817742836 + @classmethod + def construct( + cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any + ) -> "Model": + m = cls.__new__(cls) # type: ignore + fields_values = {} + + config = cls.__config__ + + if _fields_set is None: + _fields_set = set(values.keys()) + + for name, field in cls.__fields__.items(): + key = field.alias + if ( + key not in values and config.allow_population_by_field_name + ): # Added this to allow population by field name + key = name + + if key in values: + if ( + values[key] is None and not field.required + ): # Moved this check since None value can be passed for Optional nested field + fields_values[name] = field.get_default() + else: + type_ = typing.cast(typing.Type, field.outer_type_) # type: ignore + fields_values[name] = construct_type(object_=values[key], type_=type_) + elif not field.required: + default = field.get_default() + fields_values[name] = default + + # If the default values are non-null act like they've been set + # This effectively allows exclude_unset to work like exclude_none where + # the latter passes through intentionally set none values. 
+ if default != None: + _fields_set.add(key) + + # Add extras back in + for key, value in values.items(): + if key not in cls.__fields__: + _fields_set.add(key) + fields_values[key] = value + + object.__setattr__(m, "__dict__", fields_values) + object.__setattr__(m, "__fields_set__", _fields_set) + m._init_private_attributes() + return m + + +def _convert_undiscriminated_union_type(union_type: typing.Type[typing.Any], object_: typing.Any) -> typing.Any: + inner_types = pydantic_v1.typing.get_args(union_type) + if typing.Any in inner_types: + return object_ + + for inner_type in inner_types: + try: + if inspect.isclass(inner_type) and issubclass(inner_type, pydantic_v1.BaseModel): + # Attempt a validated parse until one works + return pydantic_v1.parse_obj_as(inner_type, object_) + except Exception: + continue + + # If none of the types work, just return the first successful cast + for inner_type in inner_types: + try: + return construct_type(object_=object_, type_=inner_type) + except Exception: + continue + + +def _convert_union_type(type_: typing.Type[typing.Any], object_: typing.Any) -> typing.Any: + base_type = pydantic_v1.typing.get_origin(type_) or type_ + union_type = type_ + if base_type == typing_extensions.Annotated: + union_type = pydantic_v1.typing.get_args(type_)[0] + annotated_metadata = pydantic_v1.typing.get_args(type_)[1:] + for metadata in annotated_metadata: + if isinstance(metadata, UnionMetadata): + try: + # Cast to the correct type, based on the discriminant + for inner_type in pydantic_v1.typing.get_args(union_type): + if inner_type.__fields__[metadata.discriminant].default == getattr( + object_, metadata.discriminant + ): + return construct_type(object_=object_, type_=inner_type) + except Exception: + # Allow to fall through to our regular union handling + pass + return _convert_undiscriminated_union_type(union_type, object_) + + +def construct_type(*, type_: typing.Type[typing.Any], object_: typing.Any) -> typing.Any: + """ + Here we are essentially creating the same `construct` method in spirit as the above, but for all types, not just + Pydantic models. 
+ The idea is to essentially attempt to coerce object_ to type_ (recursively) + """ + base_type = pydantic_v1.typing.get_origin(type_) or type_ + is_annotated = base_type == typing_extensions.Annotated + maybe_annotation_members = pydantic_v1.typing.get_args(type_) + is_annotated_union = is_annotated and pydantic_v1.typing.is_union( + pydantic_v1.typing.get_origin(maybe_annotation_members[0]) + ) + + if base_type == typing.Any: + return object_ + + if base_type == dict: + if not isinstance(object_, typing.Mapping): + return object_ + + _, items_type = pydantic_v1.typing.get_args(type_) + return {key: construct_type(object_=item, type_=items_type) for key, item in object_.items()} + + if base_type == list: + if not isinstance(object_, list): + return object_ + + inner_type = pydantic_v1.typing.get_args(type_)[0] + return [construct_type(object_=entry, type_=inner_type) for entry in object_] + + if base_type == set: + if not isinstance(object_, set) and not isinstance(object_, list): + return object_ + + inner_type = pydantic_v1.typing.get_args(type_)[0] + return {construct_type(object_=entry, type_=inner_type) for entry in object_} + + if pydantic_v1.typing.is_union(base_type) or is_annotated_union: + return _convert_union_type(type_, object_) + + # Cannot do an `issubclass` with a literal type, let's also just confirm we have a class before this call + if not pydantic_v1.typing.is_literal_type(type_) and ( + inspect.isclass(base_type) and issubclass(base_type, pydantic_v1.BaseModel) + ): + return type_.construct(**object_) + + if base_type == dt.datetime: + try: + return pydantic_v1.datetime_parse.parse_datetime(object_) + except Exception: + return object_ + + if base_type == dt.date: + try: + return pydantic_v1.datetime_parse.parse_date(object_) + except Exception: + return object_ + + if base_type == uuid.UUID: + try: + return uuid.UUID(object_) + except Exception: + return object_ + + return object_ diff --git a/src/cohere/datasets/client.py b/src/cohere/datasets/client.py index 051141f66..b627ef8e1 100644 --- a/src/cohere/datasets/client.py +++ b/src/cohere/datasets/client.py @@ -10,9 +10,9 @@ from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.datetime_utils import serialize_datetime from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 from ..core.remove_none_from_dict import remove_none_from_dict from ..core.request_options import RequestOptions +from ..core.unchecked_base_model import construct_type from ..errors.too_many_requests_error import TooManyRequestsError from ..types.dataset_type import DatasetType from .types.datasets_create_response import DatasetsCreateResponse @@ -96,9 +96,11 @@ def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore + return typing.cast(DatasetsListResponse, construct_type(type_=DatasetsListResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -207,9 +209,11 @@ def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # 
type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore + return typing.cast(DatasetsCreateResponse, construct_type(type_=DatasetsCreateResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -252,9 +256,11 @@ def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None) max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore + return typing.cast(DatasetsGetUsageResponse, construct_type(type_=DatasetsGetUsageResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -301,9 +307,11 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore + return typing.cast(DatasetsGetResponse, construct_type(type_=DatasetsGetResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -352,9 +360,11 @@ def delete( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore + return typing.cast(typing.Dict[str, typing.Any], construct_type(type_=typing.Dict[str, typing.Any], object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -434,9 +444,11 @@ async def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore + return typing.cast(DatasetsListResponse, construct_type(type_=DatasetsListResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, 
construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -545,9 +557,11 @@ async def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore + return typing.cast(DatasetsCreateResponse, construct_type(type_=DatasetsCreateResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -590,9 +604,11 @@ async def get_usage(self, *, request_options: typing.Optional[RequestOptions] = max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore + return typing.cast(DatasetsGetUsageResponse, construct_type(type_=DatasetsGetUsageResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -639,9 +655,11 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore + return typing.cast(DatasetsGetResponse, construct_type(type_=DatasetsGetResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -690,9 +708,11 @@ async def delete( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore + return typing.cast(typing.Dict[str, typing.Any], construct_type(type_=typing.Dict[str, typing.Any], object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/datasets/types/datasets_create_response.py b/src/cohere/datasets/types/datasets_create_response.py index 62a93a655..47ed8073d 100644 --- a/src/cohere/datasets/types/datasets_create_response.py +++ b/src/cohere/datasets/types/datasets_create_response.py @@ -5,9 +5,10 @@ from ...core.datetime_utils import 
serialize_datetime from ...core.pydantic_utilities import pydantic_v1 +from ...core.unchecked_base_model import UncheckedBaseModel -class DatasetsCreateResponse(pydantic_v1.BaseModel): +class DatasetsCreateResponse(UncheckedBaseModel): id: typing.Optional[str] = pydantic_v1.Field(default=None) """ The dataset ID diff --git a/src/cohere/datasets/types/datasets_get_response.py b/src/cohere/datasets/types/datasets_get_response.py index dc7d9e9ff..8afc2eaa7 100644 --- a/src/cohere/datasets/types/datasets_get_response.py +++ b/src/cohere/datasets/types/datasets_get_response.py @@ -5,10 +5,11 @@ from ...core.datetime_utils import serialize_datetime from ...core.pydantic_utilities import pydantic_v1 +from ...core.unchecked_base_model import UncheckedBaseModel from ...types.dataset import Dataset -class DatasetsGetResponse(pydantic_v1.BaseModel): +class DatasetsGetResponse(UncheckedBaseModel): dataset: Dataset def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/datasets/types/datasets_get_usage_response.py b/src/cohere/datasets/types/datasets_get_usage_response.py index 792968e92..8b9faebbd 100644 --- a/src/cohere/datasets/types/datasets_get_usage_response.py +++ b/src/cohere/datasets/types/datasets_get_usage_response.py @@ -5,9 +5,10 @@ from ...core.datetime_utils import serialize_datetime from ...core.pydantic_utilities import pydantic_v1 +from ...core.unchecked_base_model import UncheckedBaseModel -class DatasetsGetUsageResponse(pydantic_v1.BaseModel): +class DatasetsGetUsageResponse(UncheckedBaseModel): organization_usage: typing.Optional[str] = pydantic_v1.Field(default=None) """ The total number of bytes used by the organization. diff --git a/src/cohere/datasets/types/datasets_list_response.py b/src/cohere/datasets/types/datasets_list_response.py index 20039ee49..f6bf827c1 100644 --- a/src/cohere/datasets/types/datasets_list_response.py +++ b/src/cohere/datasets/types/datasets_list_response.py @@ -5,10 +5,11 @@ from ...core.datetime_utils import serialize_datetime from ...core.pydantic_utilities import pydantic_v1 +from ...core.unchecked_base_model import UncheckedBaseModel from ...types.dataset import Dataset -class DatasetsListResponse(pydantic_v1.BaseModel): +class DatasetsListResponse(UncheckedBaseModel): datasets: typing.Optional[typing.List[Dataset]] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/embed_jobs/client.py b/src/cohere/embed_jobs/client.py index 112357880..d0671d040 100644 --- a/src/cohere/embed_jobs/client.py +++ b/src/cohere/embed_jobs/client.py @@ -7,9 +7,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 from ..core.remove_none_from_dict import remove_none_from_dict from ..core.request_options import RequestOptions +from ..core.unchecked_base_model import construct_type from ..errors.bad_request_error import BadRequestError from ..errors.internal_server_error import InternalServerError from ..errors.not_found_error import NotFoundError @@ -65,13 +65,19 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> Li max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListEmbedJobResponse, _response.json()) # type: ignore + return typing.cast(ListEmbedJobResponse, construct_type(type_=ListEmbedJobResponse, 
object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -167,13 +173,19 @@ def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateEmbedJobResponse, _response.json()) # type: ignore + return typing.cast(CreateEmbedJobResponse, construct_type(type_=CreateEmbedJobResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -220,15 +232,23 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmbedJob, _response.json()) # type: ignore + return typing.cast(EmbedJob, construct_type(type_=EmbedJob, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, 
construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -282,13 +302,21 @@ def cancel(self, id: str, *, request_options: typing.Optional[RequestOptions] = if 200 <= _response.status_code < 300: return if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -336,13 +364,19 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListEmbedJobResponse, _response.json()) # type: ignore + return typing.cast(ListEmbedJobResponse, construct_type(type_=ListEmbedJobResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -438,13 +472,19 @@ async def create( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateEmbedJobResponse, _response.json()) # type: ignore + return typing.cast(CreateEmbedJobResponse, construct_type(type_=CreateEmbedJobResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + 
typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -491,15 +531,23 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmbedJob, _response.json()) # type: ignore + return typing.cast(EmbedJob, construct_type(type_=EmbedJob, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -553,13 +601,21 @@ async def cancel(self, id: str, *, request_options: typing.Optional[RequestOptio if 200 <= _response.status_code < 300: return if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/embed_jobs/types/create_embed_job_request_truncate.py b/src/cohere/embed_jobs/types/create_embed_job_request_truncate.py index 27510ac55..30c12cb05 100644 --- a/src/cohere/embed_jobs/types/create_embed_job_request_truncate.py +++ b/src/cohere/embed_jobs/types/create_embed_job_request_truncate.py @@ -2,4 
+2,4 @@ import typing -CreateEmbedJobRequestTruncate = typing.Union[typing.AnyStr, typing.Literal["START", "END"]] +CreateEmbedJobRequestTruncate = typing.Union[typing.Literal["START", "END"], typing.Any] diff --git a/src/cohere/finetuning/client.py b/src/cohere/finetuning/client.py index 036fe47c8..31124e99d 100644 --- a/src/cohere/finetuning/client.py +++ b/src/cohere/finetuning/client.py @@ -8,9 +8,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 from ..core.remove_none_from_dict import remove_none_from_dict from ..core.request_options import RequestOptions +from ..core.unchecked_base_model import construct_type from ..errors.bad_request_error import BadRequestError from ..errors.forbidden_error import ForbiddenError from ..errors.internal_server_error import InternalServerError @@ -100,19 +100,31 @@ def list_finetuned_models( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListFinetunedModelsResponse, _response.json()) # type: ignore + return typing.cast(ListFinetunedModelsResponse, construct_type(type_=ListFinetunedModelsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -174,19 +186,31 @@ def create_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(CreateFinetunedModelResponse, construct_type(type_=CreateFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: 
ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -235,19 +259,31 @@ def get_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(GetFinetunedModelResponse, construct_type(type_=GetFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -296,19 +332,31 @@ def 
delete_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(DeleteFinetunedModelResponse, construct_type(type_=DeleteFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -416,19 +464,31 @@ def update_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UpdateFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(UpdateFinetunedModelResponse, construct_type(type_=UpdateFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise 
InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -506,19 +566,31 @@ def list_events( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListEventsResponse, _response.json()) # type: ignore + return typing.cast(ListEventsResponse, construct_type(type_=ListEventsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -587,19 +659,31 @@ def list_training_step_metrics( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListTrainingStepMetricsResponse, _response.json()) # type: ignore + return typing.cast(ListTrainingStepMetricsResponse, construct_type(type_=ListTrainingStepMetricsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise 
ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -674,19 +758,31 @@ async def list_finetuned_models( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListFinetunedModelsResponse, _response.json()) # type: ignore + return typing.cast(ListFinetunedModelsResponse, construct_type(type_=ListFinetunedModelsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -748,19 +844,31 @@ async def create_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreateFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(CreateFinetunedModelResponse, construct_type(type_=CreateFinetunedModelResponse, object_=_response.json())) # type: ignore if 
_response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -809,19 +917,31 @@ async def get_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(GetFinetunedModelResponse, construct_type(type_=GetFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # 
type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -870,19 +990,31 @@ async def delete_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DeleteFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(DeleteFinetunedModelResponse, construct_type(type_=DeleteFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -990,19 +1122,31 @@ async def update_finetuned_model( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UpdateFinetunedModelResponse, _response.json()) # type: ignore + return typing.cast(UpdateFinetunedModelResponse, construct_type(type_=UpdateFinetunedModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, 
object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1080,19 +1224,31 @@ async def list_events( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListEventsResponse, _response.json()) # type: ignore + return typing.cast(ListEventsResponse, construct_type(type_=ListEventsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -1161,19 +1317,31 @@ async def list_training_step_metrics( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListTrainingStepMetricsResponse, _response.json()) # type: ignore + return typing.cast(ListTrainingStepMetricsResponse, construct_type(type_=ListTrainingStepMetricsResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise UnauthorizedError( + typing.cast(Error, construct_type(type_=Error, 
object_=_response.json())) # type: ignore + ) if _response.status_code == 403: - raise ForbiddenError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise ForbiddenError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 503: - raise ServiceUnavailableError(pydantic_v1.parse_obj_as(Error, _response.json())) # type: ignore + raise ServiceUnavailableError( + typing.cast(Error, construct_type(type_=Error, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/finetuning/finetuning/types/base_model.py b/src/cohere/finetuning/finetuning/types/base_model.py index bdd499a17..8aaa8f1ca 100644 --- a/src/cohere/finetuning/finetuning/types/base_model.py +++ b/src/cohere/finetuning/finetuning/types/base_model.py @@ -5,11 +5,12 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .base_type import BaseType from .strategy import Strategy -class BaseModel(pydantic_v1.BaseModel): +class BaseModel(UncheckedBaseModel): """ The base model used for fine-tuning. """ diff --git a/src/cohere/finetuning/finetuning/types/base_type.py b/src/cohere/finetuning/finetuning/types/base_type.py index d5878bacf..b9c9711ad 100644 --- a/src/cohere/finetuning/finetuning/types/base_type.py +++ b/src/cohere/finetuning/finetuning/types/base_type.py @@ -3,7 +3,6 @@ import typing BaseType = typing.Union[ - typing.AnyStr, typing.Literal[ "BASE_TYPE_UNSPECIFIED", "BASE_TYPE_GENERATIVE", @@ -11,4 +10,5 @@ "BASE_TYPE_RERANK", "BASE_TYPE_CHAT", ], + typing.Any, ] diff --git a/src/cohere/finetuning/finetuning/types/create_finetuned_model_response.py b/src/cohere/finetuning/finetuning/types/create_finetuned_model_response.py index e285431f8..dddf56502 100644 --- a/src/cohere/finetuning/finetuning/types/create_finetuned_model_response.py +++ b/src/cohere/finetuning/finetuning/types/create_finetuned_model_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .finetuned_model import FinetunedModel -class CreateFinetunedModelResponse(pydantic_v1.BaseModel): +class CreateFinetunedModelResponse(UncheckedBaseModel): """ Response to request to create a fine-tuned model. 
""" diff --git a/src/cohere/finetuning/finetuning/types/error.py b/src/cohere/finetuning/finetuning/types/error.py index 63a10cc0f..4fcbeea44 100644 --- a/src/cohere/finetuning/finetuning/types/error.py +++ b/src/cohere/finetuning/finetuning/types/error.py @@ -5,9 +5,10 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel -class Error(pydantic_v1.BaseModel): +class Error(UncheckedBaseModel): """ Error is the response for any unsuccessful event. """ diff --git a/src/cohere/finetuning/finetuning/types/event.py b/src/cohere/finetuning/finetuning/types/event.py index f442c530b..20aa360e2 100644 --- a/src/cohere/finetuning/finetuning/types/event.py +++ b/src/cohere/finetuning/finetuning/types/event.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .status import Status -class Event(pydantic_v1.BaseModel): +class Event(UncheckedBaseModel): """ A change in status of a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/finetuned_model.py b/src/cohere/finetuning/finetuning/types/finetuned_model.py index 3f57c6f85..16204c3fe 100644 --- a/src/cohere/finetuning/finetuning/types/finetuned_model.py +++ b/src/cohere/finetuning/finetuning/types/finetuned_model.py @@ -5,11 +5,12 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .settings import Settings from .status import Status -class FinetunedModel(pydantic_v1.BaseModel): +class FinetunedModel(UncheckedBaseModel): """ This resource represents a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/get_finetuned_model_response.py b/src/cohere/finetuning/finetuning/types/get_finetuned_model_response.py index e820ca795..706a7d7ce 100644 --- a/src/cohere/finetuning/finetuning/types/get_finetuned_model_response.py +++ b/src/cohere/finetuning/finetuning/types/get_finetuned_model_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .finetuned_model import FinetunedModel -class GetFinetunedModelResponse(pydantic_v1.BaseModel): +class GetFinetunedModelResponse(UncheckedBaseModel): """ Response to a request to get a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/hyperparameters.py b/src/cohere/finetuning/finetuning/types/hyperparameters.py index f39f20101..5147ae56a 100644 --- a/src/cohere/finetuning/finetuning/types/hyperparameters.py +++ b/src/cohere/finetuning/finetuning/types/hyperparameters.py @@ -5,9 +5,10 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel -class Hyperparameters(pydantic_v1.BaseModel): +class Hyperparameters(UncheckedBaseModel): """ The fine-tuning hyperparameters. 
""" diff --git a/src/cohere/finetuning/finetuning/types/list_events_response.py b/src/cohere/finetuning/finetuning/types/list_events_response.py index df5d30fc4..844eeb5fe 100644 --- a/src/cohere/finetuning/finetuning/types/list_events_response.py +++ b/src/cohere/finetuning/finetuning/types/list_events_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .event import Event -class ListEventsResponse(pydantic_v1.BaseModel): +class ListEventsResponse(UncheckedBaseModel): """ Response to a request to list events of a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/list_finetuned_models_response.py b/src/cohere/finetuning/finetuning/types/list_finetuned_models_response.py index 7a69524e2..33bd499e5 100644 --- a/src/cohere/finetuning/finetuning/types/list_finetuned_models_response.py +++ b/src/cohere/finetuning/finetuning/types/list_finetuned_models_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .finetuned_model import FinetunedModel -class ListFinetunedModelsResponse(pydantic_v1.BaseModel): +class ListFinetunedModelsResponse(UncheckedBaseModel): """ Response to a request to list fine-tuned models. """ diff --git a/src/cohere/finetuning/finetuning/types/list_training_step_metrics_response.py b/src/cohere/finetuning/finetuning/types/list_training_step_metrics_response.py index 485c2d3e7..87e3172ba 100644 --- a/src/cohere/finetuning/finetuning/types/list_training_step_metrics_response.py +++ b/src/cohere/finetuning/finetuning/types/list_training_step_metrics_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .training_step_metrics import TrainingStepMetrics -class ListTrainingStepMetricsResponse(pydantic_v1.BaseModel): +class ListTrainingStepMetricsResponse(UncheckedBaseModel): """ Response to a request to list training-step metrics of a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/settings.py b/src/cohere/finetuning/finetuning/types/settings.py index 040ae09a2..271e39bbd 100644 --- a/src/cohere/finetuning/finetuning/types/settings.py +++ b/src/cohere/finetuning/finetuning/types/settings.py @@ -5,11 +5,12 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .base_model import BaseModel from .hyperparameters import Hyperparameters -class Settings(pydantic_v1.BaseModel): +class Settings(UncheckedBaseModel): """ The configuration used for fine-tuning. 
""" diff --git a/src/cohere/finetuning/finetuning/types/status.py b/src/cohere/finetuning/finetuning/types/status.py index 966a256e8..c2b1af1a2 100644 --- a/src/cohere/finetuning/finetuning/types/status.py +++ b/src/cohere/finetuning/finetuning/types/status.py @@ -3,7 +3,6 @@ import typing Status = typing.Union[ - typing.AnyStr, typing.Literal[ "STATUS_UNSPECIFIED", "STATUS_FINETUNING", @@ -15,4 +14,5 @@ "STATUS_PAUSED", "STATUS_QUEUED", ], + typing.Any, ] diff --git a/src/cohere/finetuning/finetuning/types/strategy.py b/src/cohere/finetuning/finetuning/types/strategy.py index 0e2d7ff94..8f0e0b5e6 100644 --- a/src/cohere/finetuning/finetuning/types/strategy.py +++ b/src/cohere/finetuning/finetuning/types/strategy.py @@ -2,4 +2,4 @@ import typing -Strategy = typing.Union[typing.AnyStr, typing.Literal["STRATEGY_UNSPECIFIED", "STRATEGY_VANILLA", "STRATEGY_TFEW"]] +Strategy = typing.Union[typing.Literal["STRATEGY_UNSPECIFIED", "STRATEGY_VANILLA", "STRATEGY_TFEW"], typing.Any] diff --git a/src/cohere/finetuning/finetuning/types/training_step_metrics.py b/src/cohere/finetuning/finetuning/types/training_step_metrics.py index 8ac8f4364..c20f1c271 100644 --- a/src/cohere/finetuning/finetuning/types/training_step_metrics.py +++ b/src/cohere/finetuning/finetuning/types/training_step_metrics.py @@ -5,9 +5,10 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel -class TrainingStepMetrics(pydantic_v1.BaseModel): +class TrainingStepMetrics(UncheckedBaseModel): """ The evaluation metrics at a given step of the training of a fine-tuned model. """ diff --git a/src/cohere/finetuning/finetuning/types/update_finetuned_model_response.py b/src/cohere/finetuning/finetuning/types/update_finetuned_model_response.py index b640403e6..0a7c57c98 100644 --- a/src/cohere/finetuning/finetuning/types/update_finetuned_model_response.py +++ b/src/cohere/finetuning/finetuning/types/update_finetuned_model_response.py @@ -5,10 +5,11 @@ from ....core.datetime_utils import serialize_datetime from ....core.pydantic_utilities import pydantic_v1 +from ....core.unchecked_base_model import UncheckedBaseModel from .finetuned_model import FinetunedModel -class UpdateFinetunedModelResponse(pydantic_v1.BaseModel): +class UpdateFinetunedModelResponse(UncheckedBaseModel): """ Response to a request to update a fine-tuned model. 
""" diff --git a/src/cohere/models/client.py b/src/cohere/models/client.py index 72b4117ce..b474684d1 100644 --- a/src/cohere/models/client.py +++ b/src/cohere/models/client.py @@ -7,9 +7,9 @@ from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.jsonable_encoder import jsonable_encoder -from ..core.pydantic_utilities import pydantic_v1 from ..core.remove_none_from_dict import remove_none_from_dict from ..core.request_options import RequestOptions +from ..core.unchecked_base_model import construct_type from ..errors.bad_request_error import BadRequestError from ..errors.internal_server_error import InternalServerError from ..errors.too_many_requests_error import TooManyRequestsError @@ -62,13 +62,19 @@ def get(self, model: str, *, request_options: typing.Optional[RequestOptions] = max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetModelResponse, _response.json()) # type: ignore + return typing.cast(GetModelResponse, construct_type(type_=GetModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -135,9 +141,11 @@ def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListModelsResponse, _response.json()) # type: ignore + return typing.cast(ListModelsResponse, construct_type(type_=ListModelsResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -189,13 +197,19 @@ async def get(self, model: str, *, request_options: typing.Optional[RequestOptio max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(GetModelResponse, _response.json()) # type: ignore + return typing.cast(GetModelResponse, construct_type(type_=GetModelResponse, object_=_response.json())) # type: ignore if _response.status_code == 400: - raise BadRequestError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise BadRequestError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 429: - raise 
TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) if _response.status_code == 500: - raise InternalServerError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise InternalServerError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: @@ -262,9 +276,11 @@ async def list( max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore ) if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ListModelsResponse, _response.json()) # type: ignore + return typing.cast(ListModelsResponse, construct_type(type_=ListModelsResponse, object_=_response.json())) # type: ignore if _response.status_code == 429: - raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise TooManyRequestsError( + typing.cast(typing.Any, construct_type(type_=typing.Any, object_=_response.json())) # type: ignore + ) try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/cohere/types/api_meta.py b/src/cohere/types/api_meta.py index 8cc07bcb1..9099d77ff 100644 --- a/src/cohere/types/api_meta.py +++ b/src/cohere/types/api_meta.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta_api_version import ApiMetaApiVersion from .api_meta_billed_units import ApiMetaBilledUnits -class ApiMeta(pydantic_v1.BaseModel): +class ApiMeta(UncheckedBaseModel): api_version: typing.Optional[ApiMetaApiVersion] = None billed_units: typing.Optional[ApiMetaBilledUnits] = None warnings: typing.Optional[typing.List[str]] = None diff --git a/src/cohere/types/api_meta_api_version.py b/src/cohere/types/api_meta_api_version.py index 0593c8552..673e820fe 100644 --- a/src/cohere/types/api_meta_api_version.py +++ b/src/cohere/types/api_meta_api_version.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ApiMetaApiVersion(pydantic_v1.BaseModel): +class ApiMetaApiVersion(UncheckedBaseModel): version: str is_deprecated: typing.Optional[bool] = None is_experimental: typing.Optional[bool] = None diff --git a/src/cohere/types/api_meta_billed_units.py b/src/cohere/types/api_meta_billed_units.py index cb5c17387..6908fcd6c 100644 --- a/src/cohere/types/api_meta_billed_units.py +++ b/src/cohere/types/api_meta_billed_units.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ApiMetaBilledUnits(pydantic_v1.BaseModel): +class ApiMetaBilledUnits(UncheckedBaseModel): input_tokens: typing.Optional[float] = pydantic_v1.Field(default=None) """ The number of billed input tokens. 
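The `ApiMeta` family in the hunks above is plain data: each model now extends `UncheckedBaseModel`, so unrecognized response fields are carried along instead of rejected. A minimal sketch of building the objects by hand, using only the fields visible in this patch (the values are invented for illustration; normally they arrive inside an API response):

    from cohere.types.api_meta import ApiMeta
    from cohere.types.api_meta_api_version import ApiMetaApiVersion
    from cohere.types.api_meta_billed_units import ApiMetaBilledUnits

    # Construct the metadata much as the client would after parsing a response.
    meta = ApiMeta(
        api_version=ApiMetaApiVersion(version="1", is_deprecated=False),
        billed_units=ApiMetaBilledUnits(input_tokens=42.0),
        warnings=[],
    )

    # Everything except ApiMetaApiVersion.version is optional, so guard with None checks.
    if meta.billed_units is not None and meta.billed_units.input_tokens is not None:
        print(f"Billed input tokens: {meta.billed_units.input_tokens}")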
diff --git a/src/cohere/types/auth_token_type.py b/src/cohere/types/auth_token_type.py index 8370971b2..d46d0f726 100644 --- a/src/cohere/types/auth_token_type.py +++ b/src/cohere/types/auth_token_type.py @@ -2,4 +2,4 @@ import typing -AuthTokenType = typing.Union[typing.AnyStr, typing.Literal["bearer", "basic", "noscheme"]] +AuthTokenType = typing.Union[typing.Literal["bearer", "basic", "noscheme"], typing.Any] diff --git a/src/cohere/types/chat_citation.py b/src/cohere/types/chat_citation.py index bf444c36f..568fa2b41 100644 --- a/src/cohere/types/chat_citation.py +++ b/src/cohere/types/chat_citation.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatCitation(pydantic_v1.BaseModel): +class ChatCitation(UncheckedBaseModel): """ A section of the generated reply which cites external knowledge. """ diff --git a/src/cohere/types/chat_connector.py b/src/cohere/types/chat_connector.py index 8cc97cbdb..7e286e2af 100644 --- a/src/cohere/types/chat_connector.py +++ b/src/cohere/types/chat_connector.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatConnector(pydantic_v1.BaseModel): +class ChatConnector(UncheckedBaseModel): """ The connector used for fetching documents. """ diff --git a/src/cohere/types/chat_data_metrics.py b/src/cohere/types/chat_data_metrics.py index 184812d6a..7e0864898 100644 --- a/src/cohere/types/chat_data_metrics.py +++ b/src/cohere/types/chat_data_metrics.py @@ -5,15 +5,16 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatDataMetrics(pydantic_v1.BaseModel): - num_train_turns: typing.Optional[str] = pydantic_v1.Field(alias="numTrainTurns", default=None) +class ChatDataMetrics(UncheckedBaseModel): + num_train_turns: typing.Optional[str] = pydantic_v1.Field(default=None) """ The sum of all turns of valid train examples. """ - num_eval_turns: typing.Optional[str] = pydantic_v1.Field(alias="numEvalTurns", default=None) + num_eval_turns: typing.Optional[str] = pydantic_v1.Field(default=None) """ The sum of all turns of valid eval examples. """ @@ -34,7 +35,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/cohere/types/chat_message.py b/src/cohere/types/chat_message.py index b96c58d0a..2859cfc61 100644 --- a/src/cohere/types/chat_message.py +++ b/src/cohere/types/chat_message.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .chat_message_role import ChatMessageRole -class ChatMessage(pydantic_v1.BaseModel): +class ChatMessage(UncheckedBaseModel): """ Represents a single message in the chat history, excluding the current user turn. It has two properties: `role` and `message`. The `role` identifies the sender (`CHATBOT`, `SYSTEM`, or `USER`), while the `message` contains the text content. 
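The `ChatMessage` docstring above describes a two-field structure, and `ChatMessageRole` (see the alias just below) is now widened so unknown role strings no longer fail validation. A small sketch, assuming `role` and `message` are the only required fields, as the docstring suggests:

    from cohere.types.chat_message import ChatMessage

    # A short chat history. The roles come from the documented set
    # ("CHATBOT", "SYSTEM", "USER"); an unrecognized role would still parse
    # because ChatMessageRole now admits typing.Any.
    chat_history = [
        ChatMessage(role="USER", message="Where is the Eiffel Tower?"),
        ChatMessage(role="CHATBOT", message="The Eiffel Tower is in Paris, France."),
    ]

    for turn in chat_history:
        print(f"{turn.role}: {turn.message}")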
diff --git a/src/cohere/types/chat_message_role.py b/src/cohere/types/chat_message_role.py index 025e44181..e68c05c8b 100644 --- a/src/cohere/types/chat_message_role.py +++ b/src/cohere/types/chat_message_role.py @@ -2,4 +2,4 @@ import typing -ChatMessageRole = typing.Union[typing.AnyStr, typing.Literal["CHATBOT", "SYSTEM", "USER"]] +ChatMessageRole = typing.Union[typing.Literal["CHATBOT", "SYSTEM", "USER"], typing.Any] diff --git a/src/cohere/types/chat_request_citation_quality.py b/src/cohere/types/chat_request_citation_quality.py index b7099863c..f44b9e0ca 100644 --- a/src/cohere/types/chat_request_citation_quality.py +++ b/src/cohere/types/chat_request_citation_quality.py @@ -2,4 +2,4 @@ import typing -ChatRequestCitationQuality = typing.Union[typing.AnyStr, typing.Literal["fast", "accurate"]] +ChatRequestCitationQuality = typing.Union[typing.Literal["fast", "accurate"], typing.Any] diff --git a/src/cohere/types/chat_request_connectors_search_options.py b/src/cohere/types/chat_request_connectors_search_options.py index dba7976f4..8b0ac3108 100644 --- a/src/cohere/types/chat_request_connectors_search_options.py +++ b/src/cohere/types/chat_request_connectors_search_options.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatRequestConnectorsSearchOptions(pydantic_v1.BaseModel): +class ChatRequestConnectorsSearchOptions(UncheckedBaseModel): """ (internal) Sets inference and model options for RAG search query and tool use generations. Defaults are used when options are not specified here, meaning that other parameters outside of connectors_search_options are ignored (such as model= or temperature=). """ diff --git a/src/cohere/types/chat_request_prompt_truncation.py b/src/cohere/types/chat_request_prompt_truncation.py index cc55c03af..be4e1b255 100644 --- a/src/cohere/types/chat_request_prompt_truncation.py +++ b/src/cohere/types/chat_request_prompt_truncation.py @@ -2,4 +2,4 @@ import typing -ChatRequestPromptTruncation = typing.Union[typing.AnyStr, typing.Literal["OFF", "AUTO", "AUTO_PRESERVE_ORDER"]] +ChatRequestPromptTruncation = typing.Union[typing.Literal["OFF", "AUTO", "AUTO_PRESERVE_ORDER"], typing.Any] diff --git a/src/cohere/types/chat_request_tool_results_item.py b/src/cohere/types/chat_request_tool_results_item.py index 5d9d3f372..6b363a802 100644 --- a/src/cohere/types/chat_request_tool_results_item.py +++ b/src/cohere/types/chat_request_tool_results_item.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .tool_call import ToolCall -class ChatRequestToolResultsItem(pydantic_v1.BaseModel): +class ChatRequestToolResultsItem(UncheckedBaseModel): call: ToolCall outputs: typing.List[typing.Dict[str, typing.Any]] diff --git a/src/cohere/types/chat_search_query.py b/src/cohere/types/chat_search_query.py index b904ef0a0..46c68f923 100644 --- a/src/cohere/types/chat_search_query.py +++ b/src/cohere/types/chat_search_query.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatSearchQuery(pydantic_v1.BaseModel): +class ChatSearchQuery(UncheckedBaseModel): """ The generated search query. Contains the text of the query and a unique identifier for the query. 
""" diff --git a/src/cohere/types/chat_search_result.py b/src/cohere/types/chat_search_result.py index f06a5b7fd..da34b5ab7 100644 --- a/src/cohere/types/chat_search_result.py +++ b/src/cohere/types/chat_search_result.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .chat_search_query import ChatSearchQuery from .chat_search_result_connector import ChatSearchResultConnector -class ChatSearchResult(pydantic_v1.BaseModel): +class ChatSearchResult(UncheckedBaseModel): search_query: typing.Optional[ChatSearchQuery] = None connector: ChatSearchResultConnector = pydantic_v1.Field() """ diff --git a/src/cohere/types/chat_search_result_connector.py b/src/cohere/types/chat_search_result_connector.py index 82b81fcea..5a8009e2d 100644 --- a/src/cohere/types/chat_search_result_connector.py +++ b/src/cohere/types/chat_search_result_connector.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatSearchResultConnector(pydantic_v1.BaseModel): +class ChatSearchResultConnector(UncheckedBaseModel): """ The connector used for fetching documents. """ diff --git a/src/cohere/types/chat_stream_end_event_finish_reason.py b/src/cohere/types/chat_stream_end_event_finish_reason.py index 451c76339..9b1e49324 100644 --- a/src/cohere/types/chat_stream_end_event_finish_reason.py +++ b/src/cohere/types/chat_stream_end_event_finish_reason.py @@ -3,5 +3,5 @@ import typing ChatStreamEndEventFinishReason = typing.Union[ - typing.AnyStr, typing.Literal["COMPLETE", "ERROR_LIMIT", "MAX_TOKENS", "ERROR", "ERROR_TOXIC"] + typing.Literal["COMPLETE", "ERROR_LIMIT", "MAX_TOKENS", "ERROR", "ERROR_TOXIC"], typing.Any ] diff --git a/src/cohere/types/chat_stream_event.py b/src/cohere/types/chat_stream_event.py index 5d918bf0c..1cb492cd3 100644 --- a/src/cohere/types/chat_stream_event.py +++ b/src/cohere/types/chat_stream_event.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ChatStreamEvent(pydantic_v1.BaseModel): +class ChatStreamEvent(UncheckedBaseModel): def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) diff --git a/src/cohere/types/chat_stream_request_citation_quality.py b/src/cohere/types/chat_stream_request_citation_quality.py index 40bab8b4e..3c9e885b2 100644 --- a/src/cohere/types/chat_stream_request_citation_quality.py +++ b/src/cohere/types/chat_stream_request_citation_quality.py @@ -2,4 +2,4 @@ import typing -ChatStreamRequestCitationQuality = typing.Union[typing.AnyStr, typing.Literal["fast", "accurate"]] +ChatStreamRequestCitationQuality = typing.Union[typing.Literal["fast", "accurate"], typing.Any] diff --git a/src/cohere/types/chat_stream_request_connectors_search_options.py b/src/cohere/types/chat_stream_request_connectors_search_options.py index 02372e58b..f392af348 100644 --- a/src/cohere/types/chat_stream_request_connectors_search_options.py +++ b/src/cohere/types/chat_stream_request_connectors_search_options.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import 
UncheckedBaseModel -class ChatStreamRequestConnectorsSearchOptions(pydantic_v1.BaseModel): +class ChatStreamRequestConnectorsSearchOptions(UncheckedBaseModel): """ (internal) Sets inference and model options for RAG search query and tool use generations. Defaults are used when options are not specified here, meaning that other parameters outside of connectors_search_options are ignored (such as model= or temperature=). """ diff --git a/src/cohere/types/chat_stream_request_prompt_truncation.py b/src/cohere/types/chat_stream_request_prompt_truncation.py index 7ea988de9..2ce3b4dba 100644 --- a/src/cohere/types/chat_stream_request_prompt_truncation.py +++ b/src/cohere/types/chat_stream_request_prompt_truncation.py @@ -2,4 +2,4 @@ import typing -ChatStreamRequestPromptTruncation = typing.Union[typing.AnyStr, typing.Literal["OFF", "AUTO", "AUTO_PRESERVE_ORDER"]] +ChatStreamRequestPromptTruncation = typing.Union[typing.Literal["OFF", "AUTO", "AUTO_PRESERVE_ORDER"], typing.Any] diff --git a/src/cohere/types/chat_stream_request_tool_results_item.py b/src/cohere/types/chat_stream_request_tool_results_item.py index 6ccbe2bfc..ec09fc2cf 100644 --- a/src/cohere/types/chat_stream_request_tool_results_item.py +++ b/src/cohere/types/chat_stream_request_tool_results_item.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .tool_call import ToolCall -class ChatStreamRequestToolResultsItem(pydantic_v1.BaseModel): +class ChatStreamRequestToolResultsItem(UncheckedBaseModel): call: ToolCall outputs: typing.List[typing.Dict[str, typing.Any]] diff --git a/src/cohere/types/classify_data_metrics.py b/src/cohere/types/classify_data_metrics.py index 69adabf78..dde0979cd 100644 --- a/src/cohere/types/classify_data_metrics.py +++ b/src/cohere/types/classify_data_metrics.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .label_metric import LabelMetric -class ClassifyDataMetrics(pydantic_v1.BaseModel): - label_metrics: typing.Optional[typing.List[LabelMetric]] = pydantic_v1.Field(alias="labelMetrics", default=None) +class ClassifyDataMetrics(UncheckedBaseModel): + label_metrics: typing.Optional[typing.List[LabelMetric]] = None def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} @@ -22,7 +23,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/cohere/types/classify_example.py b/src/cohere/types/classify_example.py index dfb58f4f3..47a228a17 100644 --- a/src/cohere/types/classify_example.py +++ b/src/cohere/types/classify_example.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ClassifyExample(pydantic_v1.BaseModel): +class ClassifyExample(UncheckedBaseModel): text: typing.Optional[str] = None label: typing.Optional[str] = None diff --git a/src/cohere/types/classify_request_truncate.py b/src/cohere/types/classify_request_truncate.py index 6040dc64d..61fd58d71 100644 --- 
a/src/cohere/types/classify_request_truncate.py +++ b/src/cohere/types/classify_request_truncate.py @@ -2,4 +2,4 @@ import typing -ClassifyRequestTruncate = typing.Union[typing.AnyStr, typing.Literal["NONE", "START", "END"]] +ClassifyRequestTruncate = typing.Union[typing.Literal["NONE", "START", "END"], typing.Any] diff --git a/src/cohere/types/classify_response.py b/src/cohere/types/classify_response.py index e68cf3520..8493d120d 100644 --- a/src/cohere/types/classify_response.py +++ b/src/cohere/types/classify_response.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta from .classify_response_classifications_item import ClassifyResponseClassificationsItem -class ClassifyResponse(pydantic_v1.BaseModel): +class ClassifyResponse(UncheckedBaseModel): id: str classifications: typing.List[ClassifyResponseClassificationsItem] meta: typing.Optional[ApiMeta] = None diff --git a/src/cohere/types/classify_response_classifications_item.py b/src/cohere/types/classify_response_classifications_item.py index a7db56cfa..5880f11bf 100644 --- a/src/cohere/types/classify_response_classifications_item.py +++ b/src/cohere/types/classify_response_classifications_item.py @@ -5,13 +5,14 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .classify_response_classifications_item_classification_type import ( ClassifyResponseClassificationsItemClassificationType, ) from .classify_response_classifications_item_labels_value import ClassifyResponseClassificationsItemLabelsValue -class ClassifyResponseClassificationsItem(pydantic_v1.BaseModel): +class ClassifyResponseClassificationsItem(UncheckedBaseModel): id: str input: typing.Optional[str] = pydantic_v1.Field(default=None) """ diff --git a/src/cohere/types/classify_response_classifications_item_classification_type.py b/src/cohere/types/classify_response_classifications_item_classification_type.py index e2f3367ea..b1c3f8560 100644 --- a/src/cohere/types/classify_response_classifications_item_classification_type.py +++ b/src/cohere/types/classify_response_classifications_item_classification_type.py @@ -3,5 +3,5 @@ import typing ClassifyResponseClassificationsItemClassificationType = typing.Union[ - typing.AnyStr, typing.Literal["single-label", "multi-label"] + typing.Literal["single-label", "multi-label"], typing.Any ] diff --git a/src/cohere/types/classify_response_classifications_item_labels_value.py b/src/cohere/types/classify_response_classifications_item_labels_value.py index 48fe6c734..8745c22d3 100644 --- a/src/cohere/types/classify_response_classifications_item_labels_value.py +++ b/src/cohere/types/classify_response_classifications_item_labels_value.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ClassifyResponseClassificationsItemLabelsValue(pydantic_v1.BaseModel): +class ClassifyResponseClassificationsItemLabelsValue(UncheckedBaseModel): confidence: typing.Optional[float] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/types/compatible_endpoint.py b/src/cohere/types/compatible_endpoint.py index 3ce34cd21..3eca74ee2 100644 --- a/src/cohere/types/compatible_endpoint.py +++ b/src/cohere/types/compatible_endpoint.py @@ 
-3,5 +3,5 @@ import typing CompatibleEndpoint = typing.Union[ - typing.AnyStr, typing.Literal["chat", "embed", "classify", "summarize", "rerank", "rate", "generate"] + typing.Literal["chat", "embed", "classify", "summarize", "rerank", "rate", "generate"], typing.Any ] diff --git a/src/cohere/types/connector.py b/src/cohere/types/connector.py index 1360fc8ce..2c5eaeca3 100644 --- a/src/cohere/types/connector.py +++ b/src/cohere/types/connector.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .connector_auth_status import ConnectorAuthStatus from .connector_o_auth import ConnectorOAuth -class Connector(pydantic_v1.BaseModel): +class Connector(UncheckedBaseModel): """ A connector allows you to integrate data sources with the '/chat' endpoint to create grounded generations with citations to the data source. documents to help answer users. diff --git a/src/cohere/types/connector_auth_status.py b/src/cohere/types/connector_auth_status.py index 9dc2db1d3..ad2d768f3 100644 --- a/src/cohere/types/connector_auth_status.py +++ b/src/cohere/types/connector_auth_status.py @@ -2,4 +2,4 @@ import typing -ConnectorAuthStatus = typing.Union[typing.AnyStr, typing.Literal["valid", "expired"]] +ConnectorAuthStatus = typing.Union[typing.Literal["valid", "expired"], typing.Any] diff --git a/src/cohere/types/connector_o_auth.py b/src/cohere/types/connector_o_auth.py index b19c52ccb..b938823ff 100644 --- a/src/cohere/types/connector_o_auth.py +++ b/src/cohere/types/connector_o_auth.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class ConnectorOAuth(pydantic_v1.BaseModel): +class ConnectorOAuth(UncheckedBaseModel): client_id: typing.Optional[str] = pydantic_v1.Field(default=None) """ The OAuth 2.0 client ID. This field is encrypted at rest. diff --git a/src/cohere/types/create_connector_o_auth.py b/src/cohere/types/create_connector_o_auth.py index b2ca09244..2c2c245b2 100644 --- a/src/cohere/types/create_connector_o_auth.py +++ b/src/cohere/types/create_connector_o_auth.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class CreateConnectorOAuth(pydantic_v1.BaseModel): +class CreateConnectorOAuth(UncheckedBaseModel): client_id: typing.Optional[str] = pydantic_v1.Field(default=None) """ The OAuth 2.0 client ID. This fields is encrypted at rest. 
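The `Connector` docstring in the hunk above explains that connectors plug external data sources into the '/chat' endpoint for grounded generations with citations. A hedged sketch of how a connector id might be passed to chat; the `cohere.Client` constructor, the `chat(...)` parameter names, the `ChatConnector.id` field, and the "web-search" id are assumptions taken from the public SDK surface, not something shown in this patch:

    import cohere
    from cohere.types.chat_connector import ChatConnector

    # Assumption: cohere.Client and chat(message=..., connectors=...) exist as in
    # the public SDK docs; neither signature appears in these hunks.
    co = cohere.Client("YOUR_API_KEY")

    response = co.chat(
        message="What is the tallest building in the world?",
        # "web-search" is an example connector id, used purely for illustration.
        connectors=[ChatConnector(id="web-search")],
    )
    print(response.text)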
diff --git a/src/cohere/types/create_connector_response.py b/src/cohere/types/create_connector_response.py index 16dfa53d0..839a2d6a8 100644 --- a/src/cohere/types/create_connector_response.py +++ b/src/cohere/types/create_connector_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .connector import Connector -class CreateConnectorResponse(pydantic_v1.BaseModel): +class CreateConnectorResponse(UncheckedBaseModel): connector: Connector def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/types/create_connector_service_auth.py b/src/cohere/types/create_connector_service_auth.py index a1d93f4ff..05034ece3 100644 --- a/src/cohere/types/create_connector_service_auth.py +++ b/src/cohere/types/create_connector_service_auth.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .auth_token_type import AuthTokenType -class CreateConnectorServiceAuth(pydantic_v1.BaseModel): +class CreateConnectorServiceAuth(UncheckedBaseModel): type: AuthTokenType token: str = pydantic_v1.Field() """ diff --git a/src/cohere/types/create_embed_job_response.py b/src/cohere/types/create_embed_job_response.py index 2b2fe8b3e..c61d0a5aa 100644 --- a/src/cohere/types/create_embed_job_response.py +++ b/src/cohere/types/create_embed_job_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta -class CreateEmbedJobResponse(pydantic_v1.BaseModel): +class CreateEmbedJobResponse(UncheckedBaseModel): """ Response from creating an embed job. 
""" diff --git a/src/cohere/types/dataset.py b/src/cohere/types/dataset.py index 1fd664da3..1ac4afe25 100644 --- a/src/cohere/types/dataset.py +++ b/src/cohere/types/dataset.py @@ -5,12 +5,13 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .dataset_part import DatasetPart from .dataset_type import DatasetType from .dataset_validation_status import DatasetValidationStatus -class Dataset(pydantic_v1.BaseModel): +class Dataset(UncheckedBaseModel): id: str = pydantic_v1.Field() """ The dataset ID diff --git a/src/cohere/types/dataset_part.py b/src/cohere/types/dataset_part.py index d3e95f002..ca8ad1600 100644 --- a/src/cohere/types/dataset_part.py +++ b/src/cohere/types/dataset_part.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class DatasetPart(pydantic_v1.BaseModel): +class DatasetPart(UncheckedBaseModel): id: str = pydantic_v1.Field() """ The dataset part ID diff --git a/src/cohere/types/dataset_type.py b/src/cohere/types/dataset_type.py index dd018b91f..a7657424b 100644 --- a/src/cohere/types/dataset_type.py +++ b/src/cohere/types/dataset_type.py @@ -3,7 +3,6 @@ import typing DatasetType = typing.Union[ - typing.AnyStr, typing.Literal[ "embed-input", "embed-result", @@ -15,4 +14,5 @@ "chat-finetune-input", "multi-label-classification-finetune-input", ], + typing.Any, ] diff --git a/src/cohere/types/dataset_validation_status.py b/src/cohere/types/dataset_validation_status.py index 9ceb8f5d1..057e79892 100644 --- a/src/cohere/types/dataset_validation_status.py +++ b/src/cohere/types/dataset_validation_status.py @@ -3,5 +3,5 @@ import typing DatasetValidationStatus = typing.Union[ - typing.AnyStr, typing.Literal["unknown", "queued", "processing", "failed", "validated", "skipped"] + typing.Literal["unknown", "queued", "processing", "failed", "validated", "skipped"], typing.Any ] diff --git a/src/cohere/types/detokenize_response.py b/src/cohere/types/detokenize_response.py index 3c63bf7fe..481ec3202 100644 --- a/src/cohere/types/detokenize_response.py +++ b/src/cohere/types/detokenize_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta -class DetokenizeResponse(pydantic_v1.BaseModel): +class DetokenizeResponse(UncheckedBaseModel): text: str = pydantic_v1.Field() """ A string representing the list of tokens. 
diff --git a/src/cohere/types/embed_by_type_response.py b/src/cohere/types/embed_by_type_response.py index 74afb9a03..bed241b20 100644 --- a/src/cohere/types/embed_by_type_response.py +++ b/src/cohere/types/embed_by_type_response.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta from .embed_by_type_response_embeddings import EmbedByTypeResponseEmbeddings -class EmbedByTypeResponse(pydantic_v1.BaseModel): +class EmbedByTypeResponse(UncheckedBaseModel): id: str embeddings: EmbedByTypeResponseEmbeddings = pydantic_v1.Field() """ diff --git a/src/cohere/types/embed_by_type_response_embeddings.py b/src/cohere/types/embed_by_type_response_embeddings.py index 81b8b2107..9f48f175e 100644 --- a/src/cohere/types/embed_by_type_response_embeddings.py +++ b/src/cohere/types/embed_by_type_response_embeddings.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class EmbedByTypeResponseEmbeddings(pydantic_v1.BaseModel): +class EmbedByTypeResponseEmbeddings(UncheckedBaseModel): """ An object with different embedding types. The length of each embedding type array will be the same as the length of the original `texts` array. """ diff --git a/src/cohere/types/embed_floats_response.py b/src/cohere/types/embed_floats_response.py index 743671d55..1e1192e4e 100644 --- a/src/cohere/types/embed_floats_response.py +++ b/src/cohere/types/embed_floats_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta -class EmbedFloatsResponse(pydantic_v1.BaseModel): +class EmbedFloatsResponse(UncheckedBaseModel): id: str embeddings: typing.List[typing.List[float]] = pydantic_v1.Field() """ diff --git a/src/cohere/types/embed_input_type.py b/src/cohere/types/embed_input_type.py index 58a0992b8..3bffc50f9 100644 --- a/src/cohere/types/embed_input_type.py +++ b/src/cohere/types/embed_input_type.py @@ -3,5 +3,5 @@ import typing EmbedInputType = typing.Union[ - typing.AnyStr, typing.Literal["search_document", "search_query", "classification", "clustering"] + typing.Literal["search_document", "search_query", "classification", "clustering"], typing.Any ] diff --git a/src/cohere/types/embed_job.py b/src/cohere/types/embed_job.py index e20981903..7cb3e6812 100644 --- a/src/cohere/types/embed_job.py +++ b/src/cohere/types/embed_job.py @@ -5,12 +5,13 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta from .embed_job_status import EmbedJobStatus from .embed_job_truncate import EmbedJobTruncate -class EmbedJob(pydantic_v1.BaseModel): +class EmbedJob(UncheckedBaseModel): job_id: str = pydantic_v1.Field() """ ID of the embed job diff --git a/src/cohere/types/embed_job_status.py b/src/cohere/types/embed_job_status.py index be90235d9..0fb8f727f 100644 --- a/src/cohere/types/embed_job_status.py +++ b/src/cohere/types/embed_job_status.py @@ -2,6 +2,4 @@ import typing -EmbedJobStatus = typing.Union[ - typing.AnyStr, typing.Literal["processing", "complete", "cancelling", "cancelled", "failed"] -] +EmbedJobStatus = 
typing.Union[typing.Literal["processing", "complete", "cancelling", "cancelled", "failed"], typing.Any] diff --git a/src/cohere/types/embed_job_truncate.py b/src/cohere/types/embed_job_truncate.py index f695b792f..ec7fb3769 100644 --- a/src/cohere/types/embed_job_truncate.py +++ b/src/cohere/types/embed_job_truncate.py @@ -2,4 +2,4 @@ import typing -EmbedJobTruncate = typing.Union[typing.AnyStr, typing.Literal["START", "END"]] +EmbedJobTruncate = typing.Union[typing.Literal["START", "END"], typing.Any] diff --git a/src/cohere/types/embed_request_truncate.py b/src/cohere/types/embed_request_truncate.py index 2c13242a9..988ddf45b 100644 --- a/src/cohere/types/embed_request_truncate.py +++ b/src/cohere/types/embed_request_truncate.py @@ -2,4 +2,4 @@ import typing -EmbedRequestTruncate = typing.Union[typing.AnyStr, typing.Literal["NONE", "START", "END"]] +EmbedRequestTruncate = typing.Union[typing.Literal["NONE", "START", "END"], typing.Any] diff --git a/src/cohere/types/embed_response.py b/src/cohere/types/embed_response.py index 48e0b2122..d8047addb 100644 --- a/src/cohere/types/embed_response.py +++ b/src/cohere/types/embed_response.py @@ -4,6 +4,9 @@ import typing +import typing_extensions + +from ..core.unchecked_base_model import UnionMetadata from .embed_by_type_response import EmbedByTypeResponse from .embed_floats_response import EmbedFloatsResponse @@ -28,4 +31,7 @@ class Config: populate_by_name = True -EmbedResponse = typing.Union[EmbedResponse_EmbeddingsFloats, EmbedResponse_EmbeddingsByType] +EmbedResponse = typing_extensions.Annotated[ + typing.Union[EmbedResponse_EmbeddingsFloats, EmbedResponse_EmbeddingsByType], + UnionMetadata(discriminant="response_type"), +] diff --git a/src/cohere/types/embedding_type.py b/src/cohere/types/embedding_type.py index 29ec3ede0..06663e566 100644 --- a/src/cohere/types/embedding_type.py +++ b/src/cohere/types/embedding_type.py @@ -2,4 +2,4 @@ import typing -EmbeddingType = typing.Union[typing.AnyStr, typing.Literal["float", "int8", "uint8", "binary", "ubinary"]] +EmbeddingType = typing.Union[typing.Literal["float", "int8", "uint8", "binary", "ubinary"], typing.Any] diff --git a/src/cohere/types/finetune_dataset_metrics.py b/src/cohere/types/finetune_dataset_metrics.py index 64c41d5a2..f3501c84e 100644 --- a/src/cohere/types/finetune_dataset_metrics.py +++ b/src/cohere/types/finetune_dataset_metrics.py @@ -5,35 +5,36 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class FinetuneDatasetMetrics(pydantic_v1.BaseModel): - trainable_token_count: typing.Optional[str] = pydantic_v1.Field(alias="trainableTokenCount", default=None) +class FinetuneDatasetMetrics(UncheckedBaseModel): + trainable_token_count: typing.Optional[str] = pydantic_v1.Field(default=None) """ The number of tokens of valid examples that can be used for training. """ - total_examples: typing.Optional[str] = pydantic_v1.Field(alias="totalExamples", default=None) + total_examples: typing.Optional[str] = pydantic_v1.Field(default=None) """ The overall number of examples. """ - train_examples: typing.Optional[str] = pydantic_v1.Field(alias="trainExamples", default=None) + train_examples: typing.Optional[str] = pydantic_v1.Field(default=None) """ The number of training examples. 
""" - train_size_bytes: typing.Optional[str] = pydantic_v1.Field(alias="trainSizeBytes", default=None) + train_size_bytes: typing.Optional[str] = pydantic_v1.Field(default=None) """ The size in bytes of all training examples. """ - eval_examples: typing.Optional[str] = pydantic_v1.Field(alias="evalExamples", default=None) + eval_examples: typing.Optional[str] = pydantic_v1.Field(default=None) """ Number of evaluation examples. """ - eval_size_bytes: typing.Optional[str] = pydantic_v1.Field(alias="evalSizeBytes", default=None) + eval_size_bytes: typing.Optional[str] = pydantic_v1.Field(default=None) """ The size in bytes of all eval examples. """ @@ -49,7 +50,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/cohere/types/finish_reason.py b/src/cohere/types/finish_reason.py index d7f704330..42c6de052 100644 --- a/src/cohere/types/finish_reason.py +++ b/src/cohere/types/finish_reason.py @@ -3,5 +3,5 @@ import typing FinishReason = typing.Union[ - typing.AnyStr, typing.Literal["COMPLETE", "ERROR", "ERROR_TOXIC", "ERROR_LIMIT", "USER_CANCEL", "MAX_TOKENS"] + typing.Literal["COMPLETE", "ERROR", "ERROR_TOXIC", "ERROR_LIMIT", "USER_CANCEL", "MAX_TOKENS"], typing.Any ] diff --git a/src/cohere/types/generate_request_return_likelihoods.py b/src/cohere/types/generate_request_return_likelihoods.py index 2d5c1621d..bc8971a1a 100644 --- a/src/cohere/types/generate_request_return_likelihoods.py +++ b/src/cohere/types/generate_request_return_likelihoods.py @@ -2,4 +2,4 @@ import typing -GenerateRequestReturnLikelihoods = typing.Union[typing.AnyStr, typing.Literal["GENERATION", "ALL", "NONE"]] +GenerateRequestReturnLikelihoods = typing.Union[typing.Literal["GENERATION", "ALL", "NONE"], typing.Any] diff --git a/src/cohere/types/generate_request_truncate.py b/src/cohere/types/generate_request_truncate.py index acf0f6d31..0fa2972a8 100644 --- a/src/cohere/types/generate_request_truncate.py +++ b/src/cohere/types/generate_request_truncate.py @@ -2,4 +2,4 @@ import typing -GenerateRequestTruncate = typing.Union[typing.AnyStr, typing.Literal["NONE", "START", "END"]] +GenerateRequestTruncate = typing.Union[typing.Literal["NONE", "START", "END"], typing.Any] diff --git a/src/cohere/types/generate_stream_end_response.py b/src/cohere/types/generate_stream_end_response.py index 5153c0b78..ed4d91766 100644 --- a/src/cohere/types/generate_stream_end_response.py +++ b/src/cohere/types/generate_stream_end_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .single_generation_in_stream import SingleGenerationInStream -class GenerateStreamEndResponse(pydantic_v1.BaseModel): +class GenerateStreamEndResponse(UncheckedBaseModel): id: str prompt: typing.Optional[str] = None generations: typing.Optional[typing.List[SingleGenerationInStream]] = None diff --git a/src/cohere/types/generate_stream_event.py b/src/cohere/types/generate_stream_event.py index fdbba80a9..3f1ce0947 100644 --- a/src/cohere/types/generate_stream_event.py +++ b/src/cohere/types/generate_stream_event.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel 
-class GenerateStreamEvent(pydantic_v1.BaseModel): +class GenerateStreamEvent(UncheckedBaseModel): def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} return super().json(**kwargs_with_defaults) diff --git a/src/cohere/types/generate_stream_request_return_likelihoods.py b/src/cohere/types/generate_stream_request_return_likelihoods.py index cea4a97b0..2c272d6f0 100644 --- a/src/cohere/types/generate_stream_request_return_likelihoods.py +++ b/src/cohere/types/generate_stream_request_return_likelihoods.py @@ -2,4 +2,4 @@ import typing -GenerateStreamRequestReturnLikelihoods = typing.Union[typing.AnyStr, typing.Literal["GENERATION", "ALL", "NONE"]] +GenerateStreamRequestReturnLikelihoods = typing.Union[typing.Literal["GENERATION", "ALL", "NONE"], typing.Any] diff --git a/src/cohere/types/generate_stream_request_truncate.py b/src/cohere/types/generate_stream_request_truncate.py index dc1c34cc5..2eac895b2 100644 --- a/src/cohere/types/generate_stream_request_truncate.py +++ b/src/cohere/types/generate_stream_request_truncate.py @@ -2,4 +2,4 @@ import typing -GenerateStreamRequestTruncate = typing.Union[typing.AnyStr, typing.Literal["NONE", "START", "END"]] +GenerateStreamRequestTruncate = typing.Union[typing.Literal["NONE", "START", "END"], typing.Any] diff --git a/src/cohere/types/generate_streamed_response.py b/src/cohere/types/generate_streamed_response.py index f3f57c1b7..fddaa5803 100644 --- a/src/cohere/types/generate_streamed_response.py +++ b/src/cohere/types/generate_streamed_response.py @@ -4,6 +4,9 @@ import typing +import typing_extensions + +from ..core.unchecked_base_model import UnionMetadata from .generate_stream_end import GenerateStreamEnd from .generate_stream_error import GenerateStreamError from .generate_stream_text import GenerateStreamText @@ -39,6 +42,11 @@ class Config: populate_by_name = True -GenerateStreamedResponse = typing.Union[ - GenerateStreamedResponse_TextGeneration, GenerateStreamedResponse_StreamEnd, GenerateStreamedResponse_StreamError +GenerateStreamedResponse = typing_extensions.Annotated[ + typing.Union[ + GenerateStreamedResponse_TextGeneration, + GenerateStreamedResponse_StreamEnd, + GenerateStreamedResponse_StreamError, + ], + UnionMetadata(discriminant="event_type"), ] diff --git a/src/cohere/types/generation.py b/src/cohere/types/generation.py index db5a2bfa7..fa708bdfd 100644 --- a/src/cohere/types/generation.py +++ b/src/cohere/types/generation.py @@ -5,11 +5,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .api_meta import ApiMeta from .single_generation import SingleGeneration -class Generation(pydantic_v1.BaseModel): +class Generation(UncheckedBaseModel): id: str prompt: typing.Optional[str] = pydantic_v1.Field(default=None) """ diff --git a/src/cohere/types/get_connector_response.py b/src/cohere/types/get_connector_response.py index 97ef93d73..48096c37a 100644 --- a/src/cohere/types/get_connector_response.py +++ b/src/cohere/types/get_connector_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .connector import Connector -class GetConnectorResponse(pydantic_v1.BaseModel): +class GetConnectorResponse(UncheckedBaseModel): connector: Connector def json(self, **kwargs: typing.Any) -> str: diff --git 
a/src/cohere/types/get_model_response.py b/src/cohere/types/get_model_response.py index 28378d370..411ac3286 100644 --- a/src/cohere/types/get_model_response.py +++ b/src/cohere/types/get_model_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .compatible_endpoint import CompatibleEndpoint -class GetModelResponse(pydantic_v1.BaseModel): +class GetModelResponse(UncheckedBaseModel): """ Contains information about the model and which API endpoints it can be used with. """ @@ -33,11 +34,6 @@ class GetModelResponse(pydantic_v1.BaseModel): The maximum number of tokens that the model can process in a single request. Note that not all of these tokens are always available due to special tokens and preambles that Cohere has added by default. """ - tokenizer: typing.Optional[str] = pydantic_v1.Field(default=None) - """ - The name of the tokenizer used for the model. - """ - tokenizer_url: typing.Optional[str] = pydantic_v1.Field(default=None) """ Public URL to the tokenizer's configuration file. diff --git a/src/cohere/types/label_metric.py b/src/cohere/types/label_metric.py index 43122c516..81b435d24 100644 --- a/src/cohere/types/label_metric.py +++ b/src/cohere/types/label_metric.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class LabelMetric(pydantic_v1.BaseModel): - total_examples: typing.Optional[str] = pydantic_v1.Field(alias="totalExamples", default=None) +class LabelMetric(UncheckedBaseModel): + total_examples: typing.Optional[str] = pydantic_v1.Field(default=None) """ Total number of examples for this label """ @@ -34,7 +35,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/cohere/types/list_connectors_response.py b/src/cohere/types/list_connectors_response.py index ca1a598fa..aa3c3d4ce 100644 --- a/src/cohere/types/list_connectors_response.py +++ b/src/cohere/types/list_connectors_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .connector import Connector -class ListConnectorsResponse(pydantic_v1.BaseModel): +class ListConnectorsResponse(UncheckedBaseModel): connectors: typing.List[Connector] total_count: typing.Optional[float] = pydantic_v1.Field(default=None) """ diff --git a/src/cohere/types/list_embed_job_response.py b/src/cohere/types/list_embed_job_response.py index 73a19aff9..5726c94c0 100644 --- a/src/cohere/types/list_embed_job_response.py +++ b/src/cohere/types/list_embed_job_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .embed_job import EmbedJob -class ListEmbedJobResponse(pydantic_v1.BaseModel): +class ListEmbedJobResponse(UncheckedBaseModel): embed_jobs: typing.Optional[typing.List[EmbedJob]] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/types/list_models_response.py b/src/cohere/types/list_models_response.py index 054cb4f5a..d61724770 
100644 --- a/src/cohere/types/list_models_response.py +++ b/src/cohere/types/list_models_response.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .get_model_response import GetModelResponse -class ListModelsResponse(pydantic_v1.BaseModel): +class ListModelsResponse(UncheckedBaseModel): models: typing.List[GetModelResponse] next_page_token: typing.Optional[str] = pydantic_v1.Field(default=None) """ diff --git a/src/cohere/types/metrics.py b/src/cohere/types/metrics.py index 9802cd2ae..d2bcc3805 100644 --- a/src/cohere/types/metrics.py +++ b/src/cohere/types/metrics.py @@ -5,10 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .finetune_dataset_metrics import FinetuneDatasetMetrics -class Metrics(pydantic_v1.BaseModel): +class Metrics(UncheckedBaseModel): finetune_dataset_metrics: typing.Optional[FinetuneDatasetMetrics] = None def json(self, **kwargs: typing.Any) -> str: diff --git a/src/cohere/types/non_streamed_chat_response.py b/src/cohere/types/non_streamed_chat_response.py index 043412524..19be9547c 100644 --- a/src/cohere/types/non_streamed_chat_response.py +++ b/src/cohere/types/non_streamed_chat_response.py @@ -5,6 +5,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel from .chat_citation import ChatCitation from .chat_document import ChatDocument from .chat_message import ChatMessage @@ -14,7 +15,7 @@ from .tool_call import ToolCall -class NonStreamedChatResponse(pydantic_v1.BaseModel): +class NonStreamedChatResponse(UncheckedBaseModel): text: str = pydantic_v1.Field() """ Contents of the reply generated by the model. diff --git a/src/cohere/types/o_auth_authorize_response.py b/src/cohere/types/o_auth_authorize_response.py index a7046f717..bfb85cd3a 100644 --- a/src/cohere/types/o_auth_authorize_response.py +++ b/src/cohere/types/o_auth_authorize_response.py @@ -5,9 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import pydantic_v1 +from ..core.unchecked_base_model import UncheckedBaseModel -class OAuthAuthorizeResponse(pydantic_v1.BaseModel): +class OAuthAuthorizeResponse(UncheckedBaseModel): redirect_url: typing.Optional[str] = pydantic_v1.Field(default=None) """ The OAuth 2.0 redirect url. Redirect the user to this url to authorize the connector. 
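The `redirect_url` docstring above states the whole contract: send the user to that URL to authorize the connector. A minimal sketch; the instance is built by hand with a placeholder URL, whereas in practice it would come back from the connectors OAuth authorize call:

    from cohere.types.o_auth_authorize_response import OAuthAuthorizeResponse

    # Hand-built stand-in for a real API response; the URL is a placeholder.
    resp = OAuthAuthorizeResponse(
        redirect_url="https://example.com/oauth/authorize?connector=abc123"
    )

    if resp.redirect_url is not None:
        # In a web app this would typically be an HTTP 302 to resp.redirect_url.
        print(f"Redirect the user to: {resp.redirect_url}")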
diff --git a/src/cohere/types/parse_info.py b/src/cohere/types/parse_info.py
index 7b3c5a75b..14eb014de 100644
--- a/src/cohere/types/parse_info.py
+++ b/src/cohere/types/parse_info.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class ParseInfo(pydantic_v1.BaseModel):
+class ParseInfo(UncheckedBaseModel):
     separator: typing.Optional[str] = None
     delimiter: typing.Optional[str] = None
diff --git a/src/cohere/types/rerank_request_documents_item_text.py b/src/cohere/types/rerank_request_documents_item_text.py
index f1c2a1761..faaf8ba5a 100644
--- a/src/cohere/types/rerank_request_documents_item_text.py
+++ b/src/cohere/types/rerank_request_documents_item_text.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class RerankRequestDocumentsItemText(pydantic_v1.BaseModel):
+class RerankRequestDocumentsItemText(UncheckedBaseModel):
     text: str = pydantic_v1.Field()
     """
     The text of the document to rerank.
diff --git a/src/cohere/types/rerank_response.py b/src/cohere/types/rerank_response.py
index 931cfa97a..9f6bc70e6 100644
--- a/src/cohere/types/rerank_response.py
+++ b/src/cohere/types/rerank_response.py
@@ -5,11 +5,12 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .api_meta import ApiMeta
 from .rerank_response_results_item import RerankResponseResultsItem
-class RerankResponse(pydantic_v1.BaseModel):
+class RerankResponse(UncheckedBaseModel):
     id: typing.Optional[str] = None
     results: typing.List[RerankResponseResultsItem] = pydantic_v1.Field()
     """
diff --git a/src/cohere/types/rerank_response_results_item.py b/src/cohere/types/rerank_response_results_item.py
index 605b7be96..c5e6373a0 100644
--- a/src/cohere/types/rerank_response_results_item.py
+++ b/src/cohere/types/rerank_response_results_item.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .rerank_response_results_item_document import RerankResponseResultsItemDocument
-class RerankResponseResultsItem(pydantic_v1.BaseModel):
+class RerankResponseResultsItem(UncheckedBaseModel):
     document: typing.Optional[RerankResponseResultsItemDocument] = pydantic_v1.Field(default=None)
     """
     The doc object which was ranked
diff --git a/src/cohere/types/rerank_response_results_item_document.py b/src/cohere/types/rerank_response_results_item_document.py
index 28ca7b902..1a8964cec 100644
--- a/src/cohere/types/rerank_response_results_item_document.py
+++ b/src/cohere/types/rerank_response_results_item_document.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class RerankResponseResultsItemDocument(pydantic_v1.BaseModel):
+class RerankResponseResultsItemDocument(UncheckedBaseModel):
     """
     The doc object which was ranked
     """
diff --git a/src/cohere/types/reranker_data_metrics.py b/src/cohere/types/reranker_data_metrics.py
index 1ecfe53e7..044d9215f 100644
--- a/src/cohere/types/reranker_data_metrics.py
+++ b/src/cohere/types/reranker_data_metrics.py
@@ -5,37 +5,36 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class RerankerDataMetrics(pydantic_v1.BaseModel):
-    num_train_queries: typing.Optional[str] = pydantic_v1.Field(alias="numTrainQueries", default=None)
+class RerankerDataMetrics(UncheckedBaseModel):
+    num_train_queries: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The number of training queries.
     """
-    num_train_relevant_passages: typing.Optional[str] = pydantic_v1.Field(
-        alias="numTrainRelevantPassages", default=None
-    )
+    num_train_relevant_passages: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The sum of all relevant passages of valid training examples.
     """
-    num_train_hard_negatives: typing.Optional[str] = pydantic_v1.Field(alias="numTrainHardNegatives", default=None)
+    num_train_hard_negatives: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The sum of all hard negatives of valid training examples.
     """
-    num_eval_queries: typing.Optional[str] = pydantic_v1.Field(alias="numEvalQueries", default=None)
+    num_eval_queries: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The number of evaluation queries.
     """
-    num_eval_relevant_passages: typing.Optional[str] = pydantic_v1.Field(alias="numEvalRelevantPassages", default=None)
+    num_eval_relevant_passages: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The sum of all relevant passages of valid eval examples.
     """
-    num_eval_hard_negatives: typing.Optional[str] = pydantic_v1.Field(alias="numEvalHardNegatives", default=None)
+    num_eval_hard_negatives: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The sum of all hard negatives of valid eval examples.
     """
@@ -51,7 +50,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
     class Config:
         frozen = True
         smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
         extra = pydantic_v1.Extra.allow
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/cohere/types/single_generation.py b/src/cohere/types/single_generation.py
index 93a803c45..75baae802 100644
--- a/src/cohere/types/single_generation.py
+++ b/src/cohere/types/single_generation.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .single_generation_token_likelihoods_item import SingleGenerationTokenLikelihoodsItem
-class SingleGeneration(pydantic_v1.BaseModel):
+class SingleGeneration(UncheckedBaseModel):
     id: str
     text: str
     index: typing.Optional[int] = pydantic_v1.Field(default=None)
diff --git a/src/cohere/types/single_generation_in_stream.py b/src/cohere/types/single_generation_in_stream.py
index 949da6574..0be0dff5e 100644
--- a/src/cohere/types/single_generation_in_stream.py
+++ b/src/cohere/types/single_generation_in_stream.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .finish_reason import FinishReason
-class SingleGenerationInStream(pydantic_v1.BaseModel):
+class SingleGenerationInStream(UncheckedBaseModel):
     id: str
     text: str = pydantic_v1.Field()
     """
diff --git a/src/cohere/types/single_generation_token_likelihoods_item.py b/src/cohere/types/single_generation_token_likelihoods_item.py
index 4916f2dda..bff8f0c55 100644
--- a/src/cohere/types/single_generation_token_likelihoods_item.py
+++ b/src/cohere/types/single_generation_token_likelihoods_item.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class SingleGenerationTokenLikelihoodsItem(pydantic_v1.BaseModel):
+class SingleGenerationTokenLikelihoodsItem(UncheckedBaseModel):
     token: str
     likelihood: float
diff --git a/src/cohere/types/streamed_chat_response.py b/src/cohere/types/streamed_chat_response.py
index a3c9db19a..487ffd445 100644
--- a/src/cohere/types/streamed_chat_response.py
+++ b/src/cohere/types/streamed_chat_response.py
@@ -4,6 +4,9 @@
 import typing
+import typing_extensions
+
+from ..core.unchecked_base_model import UnionMetadata
 from .chat_citation_generation_event import ChatCitationGenerationEvent
 from .chat_search_queries_generation_event import ChatSearchQueriesGenerationEvent
 from .chat_search_results_event import ChatSearchResultsEvent
@@ -83,12 +86,15 @@ class Config:
         populate_by_name = True
-StreamedChatResponse = typing.Union[
-    StreamedChatResponse_StreamStart,
-    StreamedChatResponse_SearchQueriesGeneration,
-    StreamedChatResponse_SearchResults,
-    StreamedChatResponse_TextGeneration,
-    StreamedChatResponse_CitationGeneration,
-    StreamedChatResponse_ToolCallsGeneration,
-    StreamedChatResponse_StreamEnd,
+StreamedChatResponse = typing_extensions.Annotated[
+    typing.Union[
+        StreamedChatResponse_StreamStart,
+        StreamedChatResponse_SearchQueriesGeneration,
+        StreamedChatResponse_SearchResults,
+        StreamedChatResponse_TextGeneration,
+        StreamedChatResponse_CitationGeneration,
+        StreamedChatResponse_ToolCallsGeneration,
+        StreamedChatResponse_StreamEnd,
+    ],
+    UnionMetadata(discriminant="event_type"),
 ]
diff --git a/src/cohere/types/summarize_request_extractiveness.py b/src/cohere/types/summarize_request_extractiveness.py
index af4db0b85..a7963e97a 100644
--- a/src/cohere/types/summarize_request_extractiveness.py
+++ b/src/cohere/types/summarize_request_extractiveness.py
@@ -2,4 +2,4 @@
 import typing
-SummarizeRequestExtractiveness = typing.Union[typing.AnyStr, typing.Literal["low", "medium", "high"]]
+SummarizeRequestExtractiveness = typing.Union[typing.Literal["low", "medium", "high"], typing.Any]
diff --git a/src/cohere/types/summarize_request_format.py b/src/cohere/types/summarize_request_format.py
index 4b0e1def6..feb9f75a7 100644
--- a/src/cohere/types/summarize_request_format.py
+++ b/src/cohere/types/summarize_request_format.py
@@ -2,4 +2,4 @@
 import typing
-SummarizeRequestFormat = typing.Union[typing.AnyStr, typing.Literal["paragraph", "bullets"]]
+SummarizeRequestFormat = typing.Union[typing.Literal["paragraph", "bullets"], typing.Any]
diff --git a/src/cohere/types/summarize_request_length.py b/src/cohere/types/summarize_request_length.py
index 17cb2644f..da5d3596b 100644
--- a/src/cohere/types/summarize_request_length.py
+++ b/src/cohere/types/summarize_request_length.py
@@ -2,4 +2,4 @@
 import typing
-SummarizeRequestLength = typing.Union[typing.AnyStr, typing.Literal["short", "medium", "long"]]
+SummarizeRequestLength = typing.Union[typing.Literal["short", "medium", "long"], typing.Any]
diff --git a/src/cohere/types/summarize_response.py b/src/cohere/types/summarize_response.py
index 7e7dde5e1..c2840ce10 100644
--- a/src/cohere/types/summarize_response.py
+++ b/src/cohere/types/summarize_response.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .api_meta import ApiMeta
-class SummarizeResponse(pydantic_v1.BaseModel):
+class SummarizeResponse(UncheckedBaseModel):
     id: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     Generated ID for the summary
diff --git a/src/cohere/types/tokenize_response.py b/src/cohere/types/tokenize_response.py
index 661d6f7b7..916fcdf41 100644
--- a/src/cohere/types/tokenize_response.py
+++ b/src/cohere/types/tokenize_response.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .api_meta import ApiMeta
-class TokenizeResponse(pydantic_v1.BaseModel):
+class TokenizeResponse(UncheckedBaseModel):
     tokens: typing.List[int] = pydantic_v1.Field()
     """
     An array of tokens, where each token is an integer.
diff --git a/src/cohere/types/tool.py b/src/cohere/types/tool.py
index a083411c2..2940c987b 100644
--- a/src/cohere/types/tool.py
+++ b/src/cohere/types/tool.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .tool_parameter_definitions_value import ToolParameterDefinitionsValue
-class Tool(pydantic_v1.BaseModel):
+class Tool(UncheckedBaseModel):
     name: str = pydantic_v1.Field()
     """
     The name of the tool to be called. Valid names contain only the characters `a-z`, `A-Z`, `0-9`, `_` and must not begin with a digit.
diff --git a/src/cohere/types/tool_call.py b/src/cohere/types/tool_call.py
index 3ef653fe1..49dec523b 100644
--- a/src/cohere/types/tool_call.py
+++ b/src/cohere/types/tool_call.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class ToolCall(pydantic_v1.BaseModel):
+class ToolCall(UncheckedBaseModel):
     """
     Contains the tool calls generated by the model. Use it to invoke your tools.
     """
diff --git a/src/cohere/types/tool_parameter_definitions_value.py b/src/cohere/types/tool_parameter_definitions_value.py
index 0c52e7ba7..9963f5a6c 100644
--- a/src/cohere/types/tool_parameter_definitions_value.py
+++ b/src/cohere/types/tool_parameter_definitions_value.py
@@ -5,9 +5,10 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
-class ToolParameterDefinitionsValue(pydantic_v1.BaseModel):
+class ToolParameterDefinitionsValue(UncheckedBaseModel):
     description: typing.Optional[str] = pydantic_v1.Field(default=None)
     """
     The description of the parameter.
diff --git a/src/cohere/types/update_connector_response.py b/src/cohere/types/update_connector_response.py
index 2d6d768ac..3bb598fbd 100644
--- a/src/cohere/types/update_connector_response.py
+++ b/src/cohere/types/update_connector_response.py
@@ -5,10 +5,11 @@
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from ..core.unchecked_base_model import UncheckedBaseModel
 from .connector import Connector
-class UpdateConnectorResponse(pydantic_v1.BaseModel):
+class UpdateConnectorResponse(UncheckedBaseModel):
     connector: Connector
     def json(self, **kwargs: typing.Any) -> str:
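Note on the StreamedChatResponse change above: the union is now wrapped in typing_extensions.Annotated with UnionMetadata(discriminant="event_type"), so each streamed payload is resolved to a variant by its event_type value rather than by trial validation against every member. A rough consumer-side sketch, assuming the v5 client's chat_stream helper and a placeholder API key; the event fields shown follow the variant names listed in the union:

    import cohere

    co = cohere.Client("YOUR_API_KEY")  # placeholder key

    # Each streamed event is resolved to one union member via its event_type.
    for event in co.chat_stream(message="Write one sentence about rerankers."):
        if event.event_type == "text-generation":
            print(event.text, end="")
        elif event.event_type == "stream-end":
            print()
            print(event.finish_reason)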