Skip to content

Commit

Permalink
SDK regeneration (#460)
Browse files Browse the repository at this point in the history
Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
  • Loading branch information
fern-api[bot] authored Apr 4, 2024
1 parent 69e3b6c commit 2c493eb
Show file tree
Hide file tree
Showing 103 changed files with 799 additions and 1,151 deletions.
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "cohere"
version = "5.2.1"
version = "5.2.2"
description = ""
readme = "README.md"
authors = []
Expand All @@ -13,9 +13,9 @@ python = "^3.8"
fastavro = "^1.9.4"
httpx = ">=0.21.2"
pydantic = ">= 1.9.2"
requests = "^2.31.0"
requests = "^2.0.0"
tokenizers = "^0.15.2"
types-requests = "^2.31.0.20240311"
types-requests = "^2.0.0"
typing_extensions = ">= 4.0.0"

[tool.poetry.dev-dependencies]
Expand Down
144 changes: 77 additions & 67 deletions src/cohere/base_client.py

Large diffs are not rendered by default.

130 changes: 63 additions & 67 deletions src/cohere/connectors/client.py

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions src/cohere/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from .file import File, convert_file_dict_to_httpx_tuples
from .http_client import AsyncHttpClient, HttpClient
from .jsonable_encoder import jsonable_encoder
from .pydantic_utilities import pydantic_v1
from .remove_none_from_dict import remove_none_from_dict
from .request_options import RequestOptions

Expand All @@ -20,6 +21,7 @@
"SyncClientWrapper",
"convert_file_dict_to_httpx_tuples",
"jsonable_encoder",
"pydantic_v1",
"remove_none_from_dict",
"serialize_datetime",
]
2 changes: 1 addition & 1 deletion src/cohere/core/client_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "cohere",
"X-Fern-SDK-Version": "5.2.1",
"X-Fern-SDK-Version": "5.2.2",
}
if self._client_name is not None:
headers["X-Client-Name"] = self._client_name
Expand Down
10 changes: 1 addition & 9 deletions src/cohere/core/jsonable_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,8 @@
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

import pydantic

from .datetime_utils import serialize_datetime

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

if IS_PYDANTIC_V2:
import pydantic.v1 as pydantic_v1 # type: ignore
else:
import pydantic as pydantic_v1 # type: ignore
from .pydantic_utilities import pydantic_v1

SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
Expand Down
12 changes: 12 additions & 0 deletions src/cohere/core/pydantic_utilities.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# This file was auto-generated by Fern from our API Definition.

"""Provide ``pydantic_v1``: an alias that always resolves to the pydantic v1 API.

Pydantic 2.x bundles the previous major version under ``pydantic.v1``;
on a 1.x install the top-level package already is the v1 API.  Downstream
modules import ``pydantic_v1`` from here so they never branch on version.
"""

import pydantic

# True when the installed pydantic is any 2.x release (string probe keeps
# this working without importing packaging/version helpers).
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

if not IS_PYDANTIC_V2:
    import pydantic as pydantic_v1  # type: ignore # nopycln: import
else:
    import pydantic.v1 as pydantic_v1  # type: ignore # nopycln: import

__all__ = ["pydantic_v1"]
46 changes: 21 additions & 25 deletions src/cohere/datasets/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.datetime_utils import serialize_datetime
from ..core.jsonable_encoder import jsonable_encoder
from ..core.pydantic_utilities import pydantic_v1
from ..core.remove_none_from_dict import remove_none_from_dict
from ..core.request_options import RequestOptions
from ..errors.too_many_requests_error import TooManyRequestsError
Expand All @@ -19,11 +20,6 @@
from .types.datasets_get_usage_response import DatasetsGetUsageResponse
from .types.datasets_list_response import DatasetsListResponse

try:
import pydantic.v1 as pydantic # type: ignore
except ImportError:
import pydantic # type: ignore

# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)

Expand Down Expand Up @@ -100,9 +96,9 @@ def list(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -211,9 +207,9 @@ def create(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -256,9 +252,9 @@ def get_usage(self, *, request_options: typing.Optional[RequestOptions] = None)
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -305,9 +301,9 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -356,9 +352,9 @@ def delete(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -438,9 +434,9 @@ async def list(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsListResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -549,9 +545,9 @@ async def create(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsCreateResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -594,9 +590,9 @@ async def get_usage(self, *, request_options: typing.Optional[RequestOptions] =
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsGetUsageResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -643,9 +639,9 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions]
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(DatasetsGetResponse, _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down Expand Up @@ -694,9 +690,9 @@ async def delete(
max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore
return pydantic_v1.parse_obj_as(typing.Dict[str, typing.Any], _response.json()) # type: ignore
if _response.status_code == 429:
raise TooManyRequestsError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
raise TooManyRequestsError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
Expand Down
12 changes: 4 additions & 8 deletions src/cohere/datasets/types/datasets_create_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,11 @@
import typing

from ...core.datetime_utils import serialize_datetime
from ...core.pydantic_utilities import pydantic_v1

try:
import pydantic.v1 as pydantic # type: ignore
except ImportError:
import pydantic # type: ignore


class DatasetsCreateResponse(pydantic.BaseModel):
id: typing.Optional[str] = pydantic.Field(default=None)
class DatasetsCreateResponse(pydantic_v1.BaseModel):
id: typing.Optional[str] = pydantic_v1.Field(default=None)
"""
The dataset ID
"""
Expand All @@ -28,5 +24,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow
extra = pydantic_v1.Extra.allow
json_encoders = {dt.datetime: serialize_datetime}
10 changes: 3 additions & 7 deletions src/cohere/datasets/types/datasets_get_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,11 @@
import typing

from ...core.datetime_utils import serialize_datetime
from ...core.pydantic_utilities import pydantic_v1
from ...types.dataset import Dataset

try:
import pydantic.v1 as pydantic # type: ignore
except ImportError:
import pydantic # type: ignore


class DatasetsGetResponse(pydantic.BaseModel):
class DatasetsGetResponse(pydantic_v1.BaseModel):
dataset: Dataset

def json(self, **kwargs: typing.Any) -> str:
Expand All @@ -26,5 +22,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow
extra = pydantic_v1.Extra.allow
json_encoders = {dt.datetime: serialize_datetime}
12 changes: 4 additions & 8 deletions src/cohere/datasets/types/datasets_get_usage_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,11 @@
import typing

from ...core.datetime_utils import serialize_datetime
from ...core.pydantic_utilities import pydantic_v1

try:
import pydantic.v1 as pydantic # type: ignore
except ImportError:
import pydantic # type: ignore


class DatasetsGetUsageResponse(pydantic.BaseModel):
organization_usage: typing.Optional[str] = pydantic.Field(default=None)
class DatasetsGetUsageResponse(pydantic_v1.BaseModel):
organization_usage: typing.Optional[str] = pydantic_v1.Field(default=None)
"""
The total number of bytes used by the organization.
"""
Expand All @@ -28,5 +24,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow
extra = pydantic_v1.Extra.allow
json_encoders = {dt.datetime: serialize_datetime}
10 changes: 3 additions & 7 deletions src/cohere/datasets/types/datasets_list_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,11 @@
import typing

from ...core.datetime_utils import serialize_datetime
from ...core.pydantic_utilities import pydantic_v1
from ...types.dataset import Dataset

try:
import pydantic.v1 as pydantic # type: ignore
except ImportError:
import pydantic # type: ignore


class DatasetsListResponse(pydantic.BaseModel):
class DatasetsListResponse(pydantic_v1.BaseModel):
datasets: typing.Optional[typing.List[Dataset]] = None

def json(self, **kwargs: typing.Any) -> str:
Expand All @@ -26,5 +22,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow
extra = pydantic_v1.Extra.allow
json_encoders = {dt.datetime: serialize_datetime}
Loading

0 comments on commit 2c493eb

Please sign in to comment.