From d02dc1b5154fc34d40b837ec2415870575474437 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Tue, 26 Nov 2024 19:31:51 +0100 Subject: [PATCH 01/15] feat(media): broaden content type support --- langfuse/api/README.md | 9 +- langfuse/api/client.py | 4 +- langfuse/api/core/api_error.py | 4 +- langfuse/api/core/client_wrapper.py | 6 +- langfuse/api/core/datetime_utils.py | 4 +- langfuse/api/core/file.py | 9 +- langfuse/api/core/http_client.py | 147 +++-- langfuse/api/core/jsonable_encoder.py | 14 +- langfuse/api/core/query_encoder.py | 10 +- langfuse/api/reference.md | 556 ++++++++++-------- langfuse/api/resources/comments/client.py | 12 +- .../api/resources/commons/errors/__init__.py | 8 +- .../resources/commons/types/boolean_score.py | 21 +- .../commons/types/categorical_score.py | 21 +- .../commons/types/config_category.py | 21 +- .../api/resources/commons/types/dataset.py | 21 +- .../resources/commons/types/dataset_item.py | 33 +- .../resources/commons/types/dataset_run.py | 21 +- .../commons/types/dataset_run_item.py | 25 +- .../commons/types/dataset_run_with_items.py | 25 +- .../resources/commons/types/dataset_status.py | 6 +- .../api/resources/commons/types/map_value.py | 5 +- .../resources/commons/types/numeric_score.py | 21 +- .../resources/commons/types/observation.py | 37 +- .../commons/types/observations_view.py | 57 +- .../resources/commons/types/score_config.py | 33 +- .../api/resources/commons/types/session.py | 21 +- .../commons/types/session_with_traces.py | 21 +- langfuse/api/resources/commons/types/trace.py | 25 +- .../commons/types/trace_with_details.py | 21 +- .../commons/types/trace_with_full_details.py | 21 +- langfuse/api/resources/commons/types/usage.py | 33 +- .../api/resources/dataset_items/client.py | 146 +++-- .../types/create_dataset_item_request.py | 33 +- .../types/paginated_dataset_items.py | 21 +- .../api/resources/dataset_run_items/client.py | 58 +- .../types/create_dataset_run_item_request.py | 29 +- langfuse/api/resources/datasets/client.py | 236 ++++++-- .../datasets/types/create_dataset_request.py | 21 +- .../datasets/types/paginated_dataset_runs.py | 21 +- .../datasets/types/paginated_datasets.py | 21 +- langfuse/api/resources/health/client.py | 44 +- .../resources/health/types/health_response.py | 21 +- langfuse/api/resources/ingestion/client.py | 40 +- .../resources/ingestion/types/base_event.py | 21 +- .../ingestion/types/create_event_body.py | 21 +- .../ingestion/types/create_event_event.py | 21 +- .../ingestion/types/create_generation_body.py | 33 +- .../types/create_generation_event.py | 21 +- .../types/create_observation_event.py | 21 +- .../ingestion/types/create_span_body.py | 25 +- .../ingestion/types/create_span_event.py | 21 +- .../ingestion/types/ingestion_error.py | 21 +- .../ingestion/types/ingestion_event.py | 210 +++++-- .../ingestion/types/ingestion_response.py | 21 +- .../ingestion/types/ingestion_success.py | 21 +- .../ingestion/types/observation_body.py | 41 +- .../ingestion/types/open_ai_usage.py | 33 +- .../types/optional_observation_body.py | 33 +- .../resources/ingestion/types/score_body.py | 29 +- .../resources/ingestion/types/score_event.py | 21 +- .../resources/ingestion/types/sdk_log_body.py | 21 +- .../ingestion/types/sdk_log_event.py | 21 +- .../resources/ingestion/types/trace_body.py | 25 +- .../resources/ingestion/types/trace_event.py | 21 +- .../ingestion/types/update_event_body.py | 21 +- .../ingestion/types/update_generation_body.py | 33 +- 
.../types/update_generation_event.py | 21 +- .../types/update_observation_event.py | 21 +- .../ingestion/types/update_span_body.py | 25 +- .../ingestion/types/update_span_event.py | 21 +- langfuse/api/resources/media/client.py | 12 +- .../media/types/media_content_type.py | 20 + langfuse/api/resources/metrics/client.py | 4 +- .../resources/metrics/types/daily_metrics.py | 21 +- .../metrics/types/daily_metrics_details.py | 21 +- .../resources/metrics/types/usage_by_model.py | 21 +- langfuse/api/resources/models/client.py | 16 +- .../models/types/paginated_models.py | 21 +- langfuse/api/resources/observations/client.py | 108 +++- .../observations/types/observations.py | 21 +- .../observations/types/observations_views.py | 21 +- langfuse/api/resources/projects/client.py | 44 +- .../api/resources/projects/types/project.py | 21 +- .../api/resources/projects/types/projects.py | 21 +- langfuse/api/resources/prompts/client.py | 154 +++-- .../api/resources/prompts/types/__init__.py | 6 +- .../resources/prompts/types/base_prompt.py | 21 +- .../resources/prompts/types/chat_message.py | 21 +- .../resources/prompts/types/chat_prompt.py | 21 +- .../types/create_chat_prompt_request.py | 21 +- .../prompts/types/create_prompt_request.py | 42 +- .../types/create_text_prompt_request.py | 21 +- .../api/resources/prompts/types/prompt.py | 42 +- .../resources/prompts/types/prompt_meta.py | 21 +- .../types/prompt_meta_list_response.py | 21 +- .../resources/prompts/types/text_prompt.py | 21 +- langfuse/api/resources/score/client.py | 16 +- .../score/types/create_score_request.py | 29 +- .../score/types/create_score_response.py | 21 +- .../api/resources/score_configs/client.py | 142 +++-- .../types/create_score_config_request.py | 33 +- .../score_configs/types/score_configs.py | 21 +- langfuse/api/resources/sessions/client.py | 108 +++- .../sessions/types/paginated_sessions.py | 21 +- langfuse/api/resources/trace/client.py | 100 +++- langfuse/api/resources/trace/types/sort.py | 21 +- langfuse/api/resources/trace/types/traces.py | 21 +- .../pagination/types/meta_response.py | 21 +- 109 files changed, 3084 insertions(+), 1061 deletions(-) diff --git a/langfuse/api/README.md b/langfuse/api/README.md index 4087db553..918d79330 100644 --- a/langfuse/api/README.md +++ b/langfuse/api/README.md @@ -17,7 +17,7 @@ Instantiate and use the client with the following: ```python from finto import CreateCommentRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -46,7 +46,7 @@ The SDK also exports an `async` client so that you can make non-blocking calls t import asyncio from finto import CreateCommentRequest -from finto.client import AsyncFernLangfuse +from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -116,7 +116,7 @@ The SDK defaults to a 60 second timeout. You can configure this with a timeout o ```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse(..., { timeout=20.0 }, ) @@ -131,9 +131,10 @@ client.comments.create(...,{ You can override the `httpx` client to customize it for your use-case. Some common use-cases include support for proxies and transports. 
+ ```python import httpx -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( ..., diff --git a/langfuse/api/client.py b/langfuse/api/client.py index da24da20f..0e71fe1be 100644 --- a/langfuse/api/client.py +++ b/langfuse/api/client.py @@ -51,7 +51,7 @@ class FernLangfuse: Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -140,7 +140,7 @@ class AsyncFernLangfuse: Examples -------- - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", diff --git a/langfuse/api/core/api_error.py b/langfuse/api/core/api_error.py index 2e9fc5431..da734b580 100644 --- a/langfuse/api/core/api_error.py +++ b/langfuse/api/core/api_error.py @@ -7,7 +7,9 @@ class ApiError(Exception): status_code: typing.Optional[int] body: typing.Any - def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None): + def __init__( + self, *, status_code: typing.Optional[int] = None, body: typing.Any = None + ): self.status_code = status_code self.body = body diff --git a/langfuse/api/core/client_wrapper.py b/langfuse/api/core/client_wrapper.py index 49932db6c..8a053f4a7 100644 --- a/langfuse/api/core/client_wrapper.py +++ b/langfuse/api/core/client_wrapper.py @@ -17,7 +17,7 @@ def __init__( username: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str, - timeout: typing.Optional[float] = None + timeout: typing.Optional[float] = None, ): self._x_langfuse_sdk_name = x_langfuse_sdk_name self._x_langfuse_sdk_version = x_langfuse_sdk_version @@ -71,7 +71,7 @@ def __init__( password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str, timeout: typing.Optional[float] = None, - httpx_client: httpx.Client + httpx_client: httpx.Client, ): super().__init__( x_langfuse_sdk_name=x_langfuse_sdk_name, @@ -101,7 +101,7 @@ def __init__( password: typing.Optional[typing.Union[str, typing.Callable[[], str]]] = None, base_url: str, timeout: typing.Optional[float] = None, - httpx_client: httpx.AsyncClient + httpx_client: httpx.AsyncClient, ): super().__init__( x_langfuse_sdk_name=x_langfuse_sdk_name, diff --git a/langfuse/api/core/datetime_utils.py b/langfuse/api/core/datetime_utils.py index 7c9864a94..47344e9d9 100644 --- a/langfuse/api/core/datetime_utils.py +++ b/langfuse/api/core/datetime_utils.py @@ -13,7 +13,9 @@ def serialize_datetime(v: dt.datetime) -> str: """ def _serialize_zoned_datetime(v: dt.datetime) -> str: - if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None): + if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname( + None + ): # UTC is a special case where we use "Z" at the end instead of "+00:00" return v.isoformat().replace("+00:00", "Z") else: diff --git a/langfuse/api/core/file.py b/langfuse/api/core/file.py index cb0d40bbb..6e0f92bfc 100644 --- a/langfuse/api/core/file.py +++ b/langfuse/api/core/file.py @@ -13,12 +13,17 @@ # (filename, file (or bytes), content_type) typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]], # (filename, file (or bytes), content_type, headers) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str], typing.Mapping[str, str]], + typing.Tuple[ + 
typing.Optional[str], + FileContent, + typing.Optional[str], + typing.Mapping[str, str], + ], ] def convert_file_dict_to_httpx_tuples( - d: typing.Dict[str, typing.Union[File, typing.List[File]]] + d: typing.Dict[str, typing.Union[File, typing.List[File]]], ) -> typing.List[typing.Tuple[str, File]]: """ The format we use is a list of tuples, where the first element is the diff --git a/langfuse/api/core/http_client.py b/langfuse/api/core/http_client.py index 9333d8a7f..091f71bc1 100644 --- a/langfuse/api/core/http_client.py +++ b/langfuse/api/core/http_client.py @@ -77,7 +77,9 @@ def _retry_timeout(response: httpx.Response, retries: int) -> float: return retry_after # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS. - retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS) + retry_delay = min( + INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS + ) # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries. timeout = retry_delay * (1 - 0.25 * random()) @@ -90,7 +92,8 @@ def _should_retry(response: httpx.Response) -> bool: def remove_omit_from_dict( - original: typing.Dict[str, typing.Optional[typing.Any]], omit: typing.Optional[typing.Any] + original: typing.Dict[str, typing.Optional[typing.Any]], + omit: typing.Optional[typing.Any], ) -> typing.Dict[str, typing.Any]: if omit is None: return original @@ -108,7 +111,8 @@ def maybe_filter_request_body( ) -> typing.Optional[typing.Any]: if data is None: return ( - jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + jsonable_encoder(request_options.get("additional_body_parameters", {})) + or {} if request_options is not None else None ) @@ -118,7 +122,8 @@ def maybe_filter_request_body( data_content = { **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore **( - jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + jsonable_encoder(request_options.get("additional_body_parameters", {})) + or {} if request_options is not None else {} ), @@ -162,7 +167,9 @@ def __init__( def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: base_url = self.base_url if maybe_base_url is None else maybe_base_url if base_url is None: - raise ValueError("A base_url is required to make this request, please provide one and try again.") + raise ValueError( + "A base_url is required to make this request, please provide one and try again." 
+ ) return base_url def request( @@ -174,8 +181,12 @@ def request( params: typing.Optional[typing.Dict[str, typing.Any]] = None, json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, - content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + content: typing.Optional[ + typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] + ] = None, + files: typing.Optional[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 0, @@ -184,11 +195,14 @@ def request( base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None + if request_options is not None + and request_options.get("timeout_in_seconds") is not None else self.base_timeout ) - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + json_body, data_body = get_request_body( + json=json, data=data, request_options=request_options, omit=omit + ) response = self.httpx_client.request( method=method, @@ -198,7 +212,11 @@ def request( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + **( + request_options.get("additional_headers", {}) or {} + if request_options is not None + else {} + ), } ) ), @@ -209,7 +227,10 @@ def request( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) or {} + request_options.get( + "additional_query_parameters", {} + ) + or {} if request_options is not None else {} ), @@ -222,11 +243,15 @@ def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) + if files is not None + else None, timeout=timeout, ) - max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0 + max_retries: int = ( + request_options.get("max_retries", 0) if request_options is not None else 0 + ) if _should_retry(response=response): if max_retries > retries: time.sleep(_retry_timeout(response=response, retries=retries)) @@ -256,8 +281,12 @@ def stream( params: typing.Optional[typing.Dict[str, typing.Any]] = None, json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, - content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + content: typing.Optional[ + typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] + ] = None, + files: typing.Optional[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 0, @@ -266,11 +295,14 @@ def stream( base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") - if request_options is not None and 
request_options.get("timeout_in_seconds") is not None + if request_options is not None + and request_options.get("timeout_in_seconds") is not None else self.base_timeout ) - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + json_body, data_body = get_request_body( + json=json, data=data, request_options=request_options, omit=omit + ) with self.httpx_client.stream( method=method, @@ -280,7 +312,11 @@ def stream( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), + **( + request_options.get("additional_headers", {}) + if request_options is not None + else {} + ), } ) ), @@ -291,7 +327,9 @@ def stream( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get( + "additional_query_parameters", {} + ) if request_options is not None else {} ), @@ -304,7 +342,9 @@ def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) + if files is not None + else None, timeout=timeout, ) as stream: yield stream @@ -327,7 +367,9 @@ def __init__( def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: base_url = self.base_url if maybe_base_url is None else maybe_base_url if base_url is None: - raise ValueError("A base_url is required to make this request, please provide one and try again.") + raise ValueError( + "A base_url is required to make this request, please provide one and try again." + ) return base_url async def request( @@ -339,8 +381,12 @@ async def request( params: typing.Optional[typing.Dict[str, typing.Any]] = None, json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, - content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + content: typing.Optional[ + typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] + ] = None, + files: typing.Optional[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 0, @@ -349,11 +395,14 @@ async def request( base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None + if request_options is not None + and request_options.get("timeout_in_seconds") is not None else self.base_timeout ) - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + json_body, data_body = get_request_body( + json=json, data=data, request_options=request_options, omit=omit + ) # Add the input to each of these and do None-safety checks response = await self.httpx_client.request( @@ -364,7 +413,11 @@ async def request( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + **( + request_options.get("additional_headers", {}) or {} + if request_options is not None + else {} + ), } ) ), @@ -375,7 +428,10 @@ async def request( { 
**(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) or {} + request_options.get( + "additional_query_parameters", {} + ) + or {} if request_options is not None else {} ), @@ -388,11 +444,15 @@ async def request( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) + if files is not None + else None, timeout=timeout, ) - max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0 + max_retries: int = ( + request_options.get("max_retries", 0) if request_options is not None else 0 + ) if _should_retry(response=response): if max_retries > retries: await asyncio.sleep(_retry_timeout(response=response, retries=retries)) @@ -421,8 +481,12 @@ async def stream( params: typing.Optional[typing.Dict[str, typing.Any]] = None, json: typing.Optional[typing.Any] = None, data: typing.Optional[typing.Any] = None, - content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, - files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None, + content: typing.Optional[ + typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]] + ] = None, + files: typing.Optional[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]] + ] = None, headers: typing.Optional[typing.Dict[str, typing.Any]] = None, request_options: typing.Optional[RequestOptions] = None, retries: int = 0, @@ -431,11 +495,14 @@ async def stream( base_url = self.get_base_url(base_url) timeout = ( request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None + if request_options is not None + and request_options.get("timeout_in_seconds") is not None else self.base_timeout ) - json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + json_body, data_body = get_request_body( + json=json, data=data, request_options=request_options, omit=omit + ) async with self.httpx_client.stream( method=method, @@ -445,7 +512,11 @@ async def stream( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), + **( + request_options.get("additional_headers", {}) + if request_options is not None + else {} + ), } ) ), @@ -456,7 +527,9 @@ async def stream( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get( + "additional_query_parameters", {} + ) if request_options is not None else {} ), @@ -469,7 +542,9 @@ async def stream( json=json_body, data=data_body, content=content, - files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, + files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) + if files is not None + else None, timeout=timeout, ) as stream: yield stream diff --git a/langfuse/api/core/jsonable_encoder.py b/langfuse/api/core/jsonable_encoder.py index f09aaf6b9..7a05e9190 100644 --- a/langfuse/api/core/jsonable_encoder.py +++ b/langfuse/api/core/jsonable_encoder.py @@ -25,18 +25,24 @@ def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]] + type_encoder_map: Dict[Any, Callable[[Any], Any]], ) -> 
Dict[Callable[[Any], Any], Tuple[Any, ...]]: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( + tuple + ) for type_, encoder in type_encoder_map.items(): encoders_by_class_tuples[encoder] += (type_,) return encoders_by_class_tuples -encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE) +encoders_by_class_tuples = generate_encoders_by_class_tuples( + pydantic_v1.json.ENCODERS_BY_TYPE +) -def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: +def jsonable_encoder( + obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None +) -> Any: custom_encoder = custom_encoder or {} if custom_encoder: if type(obj) in custom_encoder: diff --git a/langfuse/api/core/query_encoder.py b/langfuse/api/core/query_encoder.py index 1f5f766b4..069633086 100644 --- a/langfuse/api/core/query_encoder.py +++ b/langfuse/api/core/query_encoder.py @@ -7,7 +7,9 @@ # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict -def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> Dict[str, Any]: +def traverse_query_dict( + dict_flat: Dict[str, Any], key_prefix: Optional[str] = None +) -> Dict[str, Any]: result = {} for k, v in dict_flat.items(): key = f"{key_prefix}[{k}]" if key_prefix is not None else k @@ -30,4 +32,8 @@ def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]: def encode_query(query: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: - return dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) if query is not None else None + return ( + dict(ChainMap(*[single_query_encoder(k, v) for k, v in query.items()])) + if query is not None + else None + ) diff --git a/langfuse/api/reference.md b/langfuse/api/reference.md index 068e8bce5..54f1c84fc 100644 --- a/langfuse/api/reference.md +++ b/langfuse/api/reference.md @@ -1,5 +1,7 @@ # Reference + ## Comments +
client.comments.create(...)
@@ -13,6 +15,7 @@
Create a comment. Comments may be attached to different object types (trace, observation, session, prompt). +
@@ -28,7 +31,7 @@ Create a comment. Comments may be attached to different object types (trace, obs ```python from finto import CreateCommentRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -49,6 +52,7 @@ client.comments.create( ) ``` + @@ -62,8 +66,8 @@ client.comments.create(
-**request:** `CreateCommentRequest` - +**request:** `CreateCommentRequest` +
@@ -71,13 +75,12 @@ client.comments.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -95,6 +98,7 @@ client.comments.create(
Get all comments +
@@ -109,7 +113,7 @@ Get all comments
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -128,6 +132,7 @@ client.comments.get( ) ``` +
@@ -142,7 +147,7 @@ client.comments.get(
**page:** `typing.Optional[int]` — Page number, starts at 1. - +
@@ -150,7 +155,7 @@ client.comments.get(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - +
@@ -158,7 +163,7 @@ client.comments.get(
**object_type:** `typing.Optional[str]` — Filter comments by object type (trace, observation, session, prompt). - +
@@ -166,7 +171,7 @@ client.comments.get(
**object_id:** `typing.Optional[str]` — Filter comments by object id. If objectType is not provided, an error will be thrown. - +
@@ -174,7 +179,7 @@ client.comments.get(
**author_user_id:** `typing.Optional[str]` — Filter comments by author user id. - +
@@ -182,13 +187,12 @@ client.comments.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
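Since `object_id` is only honored together with `object_type` (the API throws an error otherwise), pass both when narrowing to a single object. A minimal sketch, reusing the `client` constructed in the snippets above:

```python
comments = client.comments.get(
    object_type="trace",  # required whenever object_id is set
    object_id="traceId",
    page=1,
    limit=50,
)
```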
- @@ -206,6 +210,7 @@ client.comments.get(
Get a comment by id +
@@ -220,7 +225,7 @@ Get a comment by id
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -235,6 +240,7 @@ client.comments.get_by_id( ) ``` +
@@ -249,7 +255,7 @@ client.comments.get_by_id(
**comment_id:** `str` — The unique langfuse identifier of a comment - +
@@ -257,18 +263,18 @@ client.comments.get_by_id(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## DatasetItems +
client.dataset_items.create(...)
@@ -282,6 +288,7 @@ client.comments.get_by_id(
Create a dataset item +
@@ -297,7 +304,7 @@ Create a dataset item ```python from finto import CreateDatasetItemRequest, DatasetStatus -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -321,6 +328,7 @@ client.dataset_items.create( ) ``` + @@ -334,8 +342,8 @@ client.dataset_items.create(
-**request:** `CreateDatasetItemRequest` - +**request:** `CreateDatasetItemRequest` +
@@ -343,13 +351,12 @@ client.dataset_items.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -367,6 +374,7 @@ client.dataset_items.create(
Get a dataset item +
@@ -381,7 +389,7 @@ Get a dataset item
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -396,6 +404,7 @@ client.dataset_items.get( ) ``` +
@@ -409,8 +418,8 @@ client.dataset_items.get(
-**id:** `str` - +**id:** `str` +
@@ -418,13 +427,12 @@ client.dataset_items.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -442,6 +450,7 @@ client.dataset_items.get(
Get dataset items +
@@ -456,7 +465,7 @@ Get dataset items
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -475,6 +484,7 @@ client.dataset_items.list( ) ``` +
@@ -488,24 +498,24 @@ client.dataset_items.list(
-**dataset_name:** `typing.Optional[str]` - +**dataset_name:** `typing.Optional[str]` +
-**source_trace_id:** `typing.Optional[str]` - +**source_trace_id:** `typing.Optional[str]` +
-**source_observation_id:** `typing.Optional[str]` - +**source_observation_id:** `typing.Optional[str]` +
@@ -513,7 +523,7 @@ client.dataset_items.list(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -521,7 +531,7 @@ client.dataset_items.list(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -529,18 +539,18 @@ client.dataset_items.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## DatasetRunItems +
client.dataset_run_items.create(...)
@@ -554,6 +564,7 @@ client.dataset_items.list(
Create a dataset run item +
@@ -569,7 +580,7 @@ Create a dataset run item ```python from finto import CreateDatasetRunItemRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -591,6 +602,7 @@ client.dataset_run_items.create( ) ``` + @@ -604,8 +616,8 @@ client.dataset_run_items.create(
-**request:** `CreateDatasetRunItemRequest` - +**request:** `CreateDatasetRunItemRequest` +
@@ -613,18 +625,18 @@ client.dataset_run_items.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
## Datasets +
client.datasets.list(...)
@@ -638,6 +650,7 @@ client.dataset_run_items.create(
Get all datasets +
@@ -652,7 +665,7 @@ Get all datasets
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -668,6 +681,7 @@ client.datasets.list( ) ``` +
@@ -682,7 +696,7 @@ client.datasets.list(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -690,7 +704,7 @@ client.datasets.list(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -698,13 +712,12 @@ client.datasets.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -722,6 +735,7 @@ client.datasets.list(
Get a dataset +
@@ -736,7 +750,7 @@ Get a dataset
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -751,6 +765,7 @@ client.datasets.get( ) ``` +
@@ -764,8 +779,8 @@ client.datasets.get(
-**dataset_name:** `str` - +**dataset_name:** `str` +
@@ -773,13 +788,12 @@ client.datasets.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -797,6 +811,7 @@ client.datasets.get(
Create a dataset +
@@ -812,7 +827,7 @@ Create a dataset ```python from finto import CreateDatasetRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -831,6 +846,7 @@ client.datasets.create( ) ``` + @@ -844,8 +860,8 @@ client.datasets.create(
-**request:** `CreateDatasetRequest` - +**request:** `CreateDatasetRequest` +
@@ -853,13 +869,12 @@ client.datasets.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -877,6 +892,7 @@ client.datasets.create(
Get a dataset run and its items +
@@ -891,7 +907,7 @@ Get a dataset run and its items
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -907,6 +923,7 @@ client.datasets.get_run( ) ``` +
@@ -920,16 +937,16 @@ client.datasets.get_run(
-**dataset_name:** `str` - +**dataset_name:** `str` +
-**run_name:** `str` - +**run_name:** `str` +
@@ -937,13 +954,12 @@ client.datasets.get_run(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -961,6 +977,7 @@ client.datasets.get_run(
Get dataset runs +
@@ -975,7 +992,7 @@ Get dataset runs
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -992,6 +1009,7 @@ client.datasets.get_runs( ) ``` +
@@ -1005,8 +1023,8 @@ client.datasets.get_runs(
-**dataset_name:** `str` - +**dataset_name:** `str` +
@@ -1014,7 +1032,7 @@ client.datasets.get_runs(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -1022,7 +1040,7 @@ client.datasets.get_runs(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -1030,18 +1048,18 @@ client.datasets.get_runs(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## Health +
client.health.health()
@@ -1055,6 +1073,7 @@ client.datasets.get_runs(
Check health of API and database +
@@ -1069,7 +1088,7 @@ Check health of API and database
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1082,6 +1101,7 @@ client = FernLangfuse( client.health.health() ``` +
@@ -1096,18 +1116,18 @@ client.health.health()
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
## Ingestion +
client.ingestion.batch(...)
@@ -1143,7 +1163,7 @@ Notes: import datetime from finto import IngestionEvent_TraceCreate, TraceBody -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1181,6 +1201,7 @@ client.ingestion.batch( ) ``` +
@@ -1195,7 +1216,7 @@ client.ingestion.batch(
**batch:** `typing.Sequence[IngestionEvent]` — Batch of tracing events to be ingested. Discriminated by attribute `type`. - +
@@ -1203,7 +1224,7 @@ client.ingestion.batch(
**metadata:** `typing.Optional[typing.Any]` — Optional. Metadata field used by the Langfuse SDKs for debugging. - +
@@ -1211,18 +1232,18 @@ client.ingestion.batch(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
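Because events in a batch are processed independently, a single response can mix per-event successes and errors rather than failing wholesale. A sketch of inspecting both lists; the `successes`/`errors` attribute names follow the generated `IngestionResponse` type and are assumptions, as is the prebuilt `events` list:

```python
# `events` is assumed to be a list of IngestionEvent objects,
# built as in the batch example above.
response = client.ingestion.batch(batch=events)

for success in response.successes:
    print(f"event {success.id} ingested with status {success.status}")

for error in response.errors:
    print(f"event {error.id} failed with status {error.status}: {error.message}")
```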
-
## Media +
client.media.get(...)
@@ -1236,6 +1257,7 @@ client.ingestion.batch(
Get a media record +
@@ -1250,7 +1272,7 @@ Get a media record
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1265,6 +1287,7 @@ client.media.get( ) ``` +
@@ -1279,7 +1302,7 @@ client.media.get(
**media_id:** `str` — The unique langfuse identifier of a media record - +
@@ -1287,13 +1310,12 @@ client.media.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -1311,6 +1333,7 @@ client.media.get(
Patch a media record +
@@ -1328,7 +1351,7 @@ Patch a media record import datetime from finto import PatchMediaBody -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1351,6 +1374,7 @@ client.media.patch( ) ``` + @@ -1365,15 +1389,15 @@ client.media.patch(
**media_id:** `str` — The unique langfuse identifier of a media record - +
-**request:** `PatchMediaBody` - +**request:** `PatchMediaBody` +
@@ -1381,13 +1405,12 @@ client.media.patch(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -1405,6 +1428,7 @@ client.media.patch(
Get a presigned upload URL for a media record +
@@ -1420,7 +1444,7 @@ Get a presigned upload URL for a media record ```python from finto import GetMediaUploadUrlRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1441,6 +1465,7 @@ client.media.get_upload_url( ) ``` + @@ -1454,8 +1479,8 @@ client.media.get_upload_url(
-**request:** `GetMediaUploadUrlRequest` - +**request:** `GetMediaUploadUrlRequest` +
@@ -1463,18 +1488,18 @@ client.media.get_upload_url(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
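`get_upload_url` is the first step of a three-step flow: request a presigned URL, upload the raw bytes over plain HTTP, then patch the media record with the outcome. A sketch of the full flow, reusing the `client` from above; the field names (`content_type`, `sha_256_hash`, `upload_url`, `media_id`, `uploaded_at`, `upload_http_status`) follow the generated types and should be treated as assumptions:

```python
import base64
import datetime
import hashlib

import httpx
from finto import GetMediaUploadUrlRequest, PatchMediaBody

content = open("screenshot.png", "rb").read()
sha_256_hash = base64.b64encode(hashlib.sha256(content).digest()).decode()

# Step 1: request a presigned upload URL for this media record.
upload = client.media.get_upload_url(
    request=GetMediaUploadUrlRequest(
        trace_id="traceId",
        content_type="image/png",
        content_length=len(content),
        sha_256_hash=sha_256_hash,
        field="input",  # which trace/observation field the media belongs to
    )
)

# Step 2: upload the bytes directly to object storage. upload_url is None
# when identical content was already uploaded, so the PUT can be skipped.
if upload.upload_url is not None:
    res = httpx.put(
        upload.upload_url,
        content=content,
        headers={
            "Content-Type": "image/png",
            "x-amz-checksum-sha256": sha_256_hash,
        },
    )

    # Step 3: record the upload outcome on the media record.
    client.media.patch(
        media_id=upload.media_id,
        request=PatchMediaBody(
            uploaded_at=datetime.datetime.now(datetime.timezone.utc),
            upload_http_status=res.status_code,
        ),
    )
```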
- ## Metrics +
client.metrics.daily(...)
@@ -1488,6 +1513,7 @@ client.media.get_upload_url(
Get daily metrics of the Langfuse project +
@@ -1504,7 +1530,7 @@ Get daily metrics of the Langfuse project ```python import datetime -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1529,6 +1555,7 @@ client.metrics.daily( ) ``` + @@ -1543,7 +1570,7 @@ client.metrics.daily(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -1551,7 +1578,7 @@ client.metrics.daily(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -1559,7 +1586,7 @@ client.metrics.daily(
**trace_name:** `typing.Optional[str]` — Optional filter by the name of the trace - +
@@ -1567,7 +1594,7 @@ client.metrics.daily(
**user_id:** `typing.Optional[str]` — Optional filter by the userId associated with the trace - +
@@ -1575,7 +1602,7 @@ client.metrics.daily(
**tags:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Optional filter for metrics where traces include all of these tags - +
@@ -1583,7 +1610,7 @@ client.metrics.daily(
**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces and observations on or after a certain datetime (ISO 8601) - +
@@ -1591,7 +1618,7 @@ client.metrics.daily(
**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces and observations before a certain datetime (ISO 8601) - +
@@ -1599,18 +1626,18 @@ client.metrics.daily(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
## Models +
client.models.create(...)
@@ -1624,6 +1651,7 @@ client.metrics.daily(
Create a model +
@@ -1641,7 +1669,7 @@ Create a model import datetime from finto import CreateModelRequest, ModelUsageUnit -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1668,6 +1696,7 @@ client.models.create( ) ``` + @@ -1681,8 +1710,8 @@ client.models.create(
-**request:** `CreateModelRequest` - +**request:** `CreateModelRequest` +
@@ -1690,13 +1719,12 @@ client.models.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -1714,6 +1742,7 @@ client.models.create(
Get all models +
@@ -1728,7 +1757,7 @@ Get all models
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1744,6 +1773,7 @@ client.models.list( ) ``` +
@@ -1758,7 +1788,7 @@ client.models.list(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -1766,7 +1796,7 @@ client.models.list(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -1774,13 +1804,12 @@ client.models.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -1798,6 +1827,7 @@ client.models.list(
Get a model +
@@ -1812,7 +1842,7 @@ Get a model
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1827,6 +1857,7 @@ client.models.get( ) ``` +
@@ -1840,8 +1871,8 @@ client.models.get(
-**id:** `str` - +**id:** `str` +
@@ -1849,13 +1880,12 @@ client.models.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -1873,6 +1903,7 @@ client.models.get(
Delete a model. Models managed by Langfuse cannot be deleted; you can, however, override such a definition by creating your own with the same modelName, as sketched below. +
@@ -1887,7 +1918,7 @@ Delete a model. Cannot delete models managed by Langfuse. You can create your ow
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1902,6 +1933,7 @@ client.models.delete( ) ``` +
@@ -1915,8 +1947,8 @@ client.models.delete(
-**id:** `str` - +**id:** `str` +
@@ -1924,18 +1956,18 @@ client.models.delete(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
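Managed models cannot be removed, so the practical equivalent is overriding: create your own definition under the same `modelName`, which then takes precedence. A sketch using `CreateModelRequest` as in the create example above; the pattern and prices are illustrative only:

```python
from finto import CreateModelRequest, ModelUsageUnit

client.models.create(
    request=CreateModelRequest(
        model_name="gpt-4o",  # same name as the managed definition
        match_pattern="(?i)^(gpt-4o)$",
        unit=ModelUsageUnit.TOKENS,
        input_price=2.5e-06,
        output_price=1e-05,
    )
)
```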
- ## Observations +
client.observations.get(...)
@@ -1949,6 +1981,7 @@ client.models.delete(
Get an observation +
@@ -1963,7 +1996,7 @@ Get a observation
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -1978,6 +2011,7 @@ client.observations.get( ) ``` +
@@ -1992,7 +2026,7 @@ client.observations.get(
**observation_id:** `str` — The unique langfuse identifier of an observation; it can be an event, span, or generation - +
@@ -2000,13 +2034,12 @@ client.observations.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -2024,6 +2057,7 @@ client.observations.get(
Get a list of observations +
@@ -2040,7 +2074,7 @@ Get a list of observations ```python import datetime -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2068,6 +2102,7 @@ client.observations.get_many( ) ``` + @@ -2082,7 +2117,7 @@ client.observations.get_many(
**page:** `typing.Optional[int]` — Page number, starts at 1. - +
@@ -2090,47 +2125,47 @@ client.observations.get_many(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - +
-**name:** `typing.Optional[str]` - +**name:** `typing.Optional[str]` +
-**user_id:** `typing.Optional[str]` - +**user_id:** `typing.Optional[str]` +
-**type:** `typing.Optional[str]` - +**type:** `typing.Optional[str]` +
-**trace_id:** `typing.Optional[str]` - +**trace_id:** `typing.Optional[str]` +
-**parent_observation_id:** `typing.Optional[str]` - +**parent_observation_id:** `typing.Optional[str]` +
@@ -2138,7 +2173,7 @@ client.observations.get_many(
**from_start_time:** `typing.Optional[dt.datetime]` — Retrieve only observations with a start_time on or after this datetime (ISO 8601). - +
@@ -2146,7 +2181,7 @@ client.observations.get_many(
**to_start_time:** `typing.Optional[dt.datetime]` — Retrieve only observations with a start_time before this datetime (ISO 8601). - +
@@ -2154,7 +2189,7 @@ client.observations.get_many(
**version:** `typing.Optional[str]` — Optional filter to only include observations with a certain version. - +
@@ -2162,18 +2197,18 @@ client.observations.get_many(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
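For example, to fetch only the generations started in the last 24 hours (note the asymmetry documented above: the lower bound is inclusive, the upper bound exclusive), reusing the `client` from above:

```python
import datetime

now = datetime.datetime.now(datetime.timezone.utc)
observations = client.observations.get_many(
    type="GENERATION",
    from_start_time=now - datetime.timedelta(days=1),  # on or after (inclusive)
    to_start_time=now,  # before (exclusive)
    limit=100,
)
```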
- ## Projects +
client.projects.get()
@@ -2187,6 +2222,7 @@ client.observations.get_many(
Get Project associated with API key +
@@ -2201,7 +2237,7 @@ Get Project associated with API key
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2214,6 +2250,7 @@ client = FernLangfuse( client.projects.get() ``` +
@@ -2228,18 +2265,18 @@ client.projects.get()
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
## Prompts +
client.prompts.get(...)
@@ -2253,6 +2290,7 @@ client.projects.get()
Get a prompt +
@@ -2267,7 +2305,7 @@ Get a prompt
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2284,6 +2322,7 @@ client.prompts.get( ) ``` +
@@ -2298,7 +2337,7 @@ client.prompts.get(
**prompt_name:** `str` — The name of the prompt - +
@@ -2306,7 +2345,7 @@ client.prompts.get(
**version:** `typing.Optional[int]` — Version of the prompt to be retrieved. - +
@@ -2314,7 +2353,7 @@ client.prompts.get(
**label:** `typing.Optional[str]` — Label of the prompt to be retrieved. Defaults to "production" if no label or version is set. - +
@@ -2322,13 +2361,12 @@ client.prompts.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
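`version` and `label` are two ways to pin a specific prompt revision; when neither is given, the `production` label is used. Reusing the `client` from above:

```python
# Pin an exact version ...
prompt_v3 = client.prompts.get(prompt_name="my-prompt", version=3)

# ... or a deployment label. Omitting both falls back to "production".
prompt_staging = client.prompts.get(prompt_name="my-prompt", label="staging")
```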
-
@@ -2346,6 +2384,7 @@ client.prompts.get(
Get a list of prompt names with versions and labels +
@@ -2362,7 +2401,7 @@ Get a list of prompt names with versions and labels ```python import datetime -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2387,6 +2426,7 @@ client.prompts.list( ) ``` + @@ -2400,24 +2440,24 @@ client.prompts.list(
-**name:** `typing.Optional[str]` - +**name:** `typing.Optional[str]` +
-**label:** `typing.Optional[str]` - +**label:** `typing.Optional[str]` +
-**tag:** `typing.Optional[str]` - +**tag:** `typing.Optional[str]` +
@@ -2425,7 +2465,7 @@ client.prompts.list(
**page:** `typing.Optional[int]` — page number, starts at 1 - +
@@ -2433,7 +2473,7 @@ client.prompts.list(
**limit:** `typing.Optional[int]` — limit of items per page - +
@@ -2441,7 +2481,7 @@ client.prompts.list(
**from_updated_at:** `typing.Optional[dt.datetime]` — Optional filter to only include prompt versions created/updated on or after a certain datetime (ISO 8601) - +
@@ -2449,7 +2489,7 @@ client.prompts.list(
**to_updated_at:** `typing.Optional[dt.datetime]` — Optional filter to only include prompt versions created/updated before a certain datetime (ISO 8601) - +
@@ -2457,13 +2497,12 @@ client.prompts.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -2481,6 +2520,7 @@ client.prompts.list(
Create a new version for the prompt with the given `name` +
@@ -2496,7 +2536,7 @@ Create a new version for the prompt with the given `name` ```python from finto import ChatMessage, CreatePromptRequest_Chat -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2522,6 +2562,7 @@ client.prompts.create( ) ``` + @@ -2535,8 +2576,8 @@ client.prompts.create(
-**request:** `CreatePromptRequest` - +**request:** `CreatePromptRequest` +
@@ -2544,18 +2585,18 @@ client.prompts.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## ScoreConfigs +
client.score_configs.create(...)
@@ -2569,6 +2610,7 @@ client.prompts.create(
Create a score configuration (config). Score configs are used to define the structure of scores +
@@ -2584,7 +2626,7 @@ Create a score configuration (config). Score configs are used to define the stru ```python from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2611,6 +2653,7 @@ client.score_configs.create( ) ``` + @@ -2624,8 +2667,8 @@ client.score_configs.create(
-**request:** `CreateScoreConfigRequest` - +**request:** `CreateScoreConfigRequest` +
@@ -2633,13 +2676,12 @@ client.score_configs.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
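Once a config exists, scores can reference it via `configId` so they are validated against the config's data type and range. A sketch, reusing the `client` from above; the `config_id` value is a placeholder:

```python
from finto import CreateScoreRequest

client.score.create(
    request=CreateScoreRequest(
        trace_id="traceId",
        name="accuracy",
        value=0.95,
        config_id="cfg_123",  # placeholder id of the score config created above
    )
)
```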
-
@@ -2657,6 +2699,7 @@ client.score_configs.create(
Get all score configs +
@@ -2671,7 +2714,7 @@ Get all score configs
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2687,6 +2730,7 @@ client.score_configs.get( ) ``` +
@@ -2701,7 +2745,7 @@ client.score_configs.get(
**page:** `typing.Optional[int]` — Page number, starts at 1. - +
@@ -2709,7 +2753,7 @@ client.score_configs.get(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - +
@@ -2717,13 +2761,12 @@ client.score_configs.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -2741,6 +2784,7 @@ client.score_configs.get(
Get a score config +
@@ -2755,7 +2799,7 @@ Get a score config
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2770,6 +2814,7 @@ client.score_configs.get_by_id( ) ``` +
@@ -2784,7 +2829,7 @@ client.score_configs.get_by_id(
**config_id:** `str` — The unique langfuse identifier of a score config - +
@@ -2792,18 +2837,18 @@ client.score_configs.get_by_id(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## Score +
client.score.create(...)
@@ -2817,6 +2862,7 @@ client.score_configs.get_by_id(
Create a score +
@@ -2832,7 +2878,7 @@ Create a score ```python from finto import CreateScoreRequest -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2851,6 +2897,7 @@ client.score.create( ) ``` + @@ -2864,8 +2911,8 @@ client.score.create(
-**request:** `CreateScoreRequest` - +**request:** `CreateScoreRequest` +
@@ -2873,13 +2920,12 @@ client.score.create(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -2897,6 +2943,7 @@ client.score.create(
Get a list of scores +
@@ -2914,7 +2961,7 @@ Get a list of scores import datetime from finto import ScoreDataType, ScoreSource -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -2946,6 +2993,7 @@ client.score.get( ) ``` + @@ -2960,7 +3008,7 @@ client.score.get(
**page:** `typing.Optional[int]` — Page number, starts at 1. - +
@@ -2968,7 +3016,7 @@ client.score.get(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - +
@@ -2976,7 +3024,7 @@ client.score.get(
**user_id:** `typing.Optional[str]` — Retrieve only scores with this userId associated to the trace. - +
@@ -2984,7 +3032,7 @@ client.score.get(
**name:** `typing.Optional[str]` — Retrieve only scores with this name. - +
@@ -2992,7 +3040,7 @@ client.score.get(
**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include scores created on or after a certain datetime (ISO 8601) - +
@@ -3000,7 +3048,7 @@ client.score.get(
**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include scores created before a certain datetime (ISO 8601) - +
@@ -3008,7 +3056,7 @@ client.score.get(
**source:** `typing.Optional[ScoreSource]` — Retrieve only scores from a specific source. - +
@@ -3016,7 +3064,7 @@ client.score.get(
**operator:** `typing.Optional[str]` — Comparison operator (e.g. `>`, `>=`, `=`) applied to `value` when filtering scores by score value; see the sketch after this parameter list. - +
@@ -3024,7 +3072,7 @@ client.score.get(
**value:** `typing.Optional[float]` — Score value to filter by; used together with `operator`. - +
@@ -3032,7 +3080,7 @@ client.score.get(
**score_ids:** `typing.Optional[str]` — Comma-separated list of score IDs to limit the results to. - +
@@ -3040,7 +3088,7 @@ client.score.get(
**config_id:** `typing.Optional[str]` — Retrieve only scores with a specific configId. - +
@@ -3048,7 +3096,7 @@ client.score.get(
**queue_id:** `typing.Optional[str]` — Retrieve only scores with a specific annotation queueId. - +
@@ -3056,7 +3104,7 @@ client.score.get(
**data_type:** `typing.Optional[ScoreDataType]` — Retrieve only scores with a specific dataType. - +
@@ -3066,7 +3114,7 @@ client.score.get( **trace_tags:** `typing.Optional[ typing.Union[typing.Sequence[str], typing.Sequence[typing.Sequence[str]]] ]` — Only scores linked to traces that include all of these tags will be returned. - + @@ -3074,13 +3122,12 @@ client.score.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
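For instance, to fetch only scores named `accuracy` whose value is at least 0.75, assuming `operator` and `value` combine into a single comparison filter as described above, and reusing the `client` from earlier snippets:

```python
high_scores = client.score.get(
    name="accuracy",
    operator=">=",
    value=0.75,
)
```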
- @@ -3098,6 +3145,7 @@ client.score.get(
Get a score +
@@ -3112,7 +3160,7 @@ Get a score
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -3127,6 +3175,7 @@ client.score.get_by_id( ) ``` +
@@ -3141,7 +3190,7 @@ client.score.get_by_id(
**score_id:** `str` — The unique langfuse identifier of a score - +
@@ -3149,13 +3198,12 @@ client.score.get_by_id(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- @@ -3173,6 +3221,7 @@ client.score.get_by_id(
Delete a score +
@@ -3187,7 +3236,7 @@ Delete a score
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -3202,6 +3251,7 @@ client.score.delete( ) ``` +
@@ -3216,7 +3266,7 @@ client.score.delete(
**score_id:** `str` — The unique langfuse identifier of a score - +
@@ -3224,18 +3274,18 @@ client.score.delete(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
- ## Sessions +
client.sessions.list(...)
@@ -3249,6 +3299,7 @@ client.score.delete(
Get sessions +
@@ -3265,7 +3316,7 @@ Get sessions ```python import datetime -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -3287,6 +3338,7 @@ client.sessions.list( ) ``` + @@ -3301,7 +3353,7 @@ client.sessions.list(
**page:** `typing.Optional[int]` — Page number, starts at 1 - +
@@ -3309,7 +3361,7 @@ client.sessions.list(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit. - +
@@ -3317,7 +3369,7 @@ client.sessions.list(
**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include sessions created on or after a certain datetime (ISO 8601) - +
@@ -3325,7 +3377,7 @@ client.sessions.list(
**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include sessions created before a certain datetime (ISO 8601) - +
@@ -3333,13 +3385,12 @@ client.sessions.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
-
@@ -3357,6 +3408,7 @@ client.sessions.list(
Get a session. Please note that `traces` on this endpoint are not paginated; if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` instead (a paginated sketch follows below) +
@@ -3371,7 +3423,7 @@ Get a session. Please note that `traces` on this endpoint are not paginated, if
```python -from finto.client import FernLangfuse +from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -3386,6 +3438,7 @@ client.sessions.get( ) ``` +
@@ -3400,7 +3453,7 @@ client.sessions.get(
**session_id:** `str` — The unique id of a session - +
@@ -3408,18 +3461,18 @@ client.sessions.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. - +
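For large sessions, the paginated traces endpoint is the safer route. A sketch that walks all pages for one session; `meta.total_pages` follows the generated pagination type and is an assumption:

```python
page = 1
while True:
    traces = client.trace.list(session_id="mySessionId", page=page, limit=100)
    for trace in traces.data:
        ...  # process each trace
    if page >= traces.meta.total_pages:
        break
    page += 1
```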
- ## Trace +
client.trace.get(...)
@@ -3433,6 +3486,7 @@ client.sessions.get(
Get a specific trace
+
@@ -3447,7 +3501,7 @@ Get a specific trace
```python
-from finto.client import FernLangfuse
+from langfuse.api.client import FernLangfuse

client = FernLangfuse(
    x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -3462,6 +3516,7 @@ client.trace.get(
)
```

+
@@ -3476,7 +3531,7 @@ client.trace.get(
**trace_id:** `str` — The unique langfuse identifier of a trace
-    
+
@@ -3484,13 +3539,12 @@ client.trace.get(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-    
+
-
@@ -3508,6 +3562,7 @@ client.trace.get(
Get list of traces
+
@@ -3524,7 +3579,7 @@ Get list of traces
```python
import datetime

-from finto.client import FernLangfuse
+from langfuse.api.client import FernLangfuse

client = FernLangfuse(
    x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -3553,6 +3608,7 @@ client.trace.list(
)
```

+
@@ -3567,7 +3623,7 @@ client.trace.list(
**page:** `typing.Optional[int]` — Page number, starts at 1
-    
+
@@ -3575,31 +3631,31 @@ client.trace.list(
**limit:** `typing.Optional[int]` — Limit of items per page. If you encounter api issues due to too large page sizes, try to reduce the limit.
-    
+
-**user_id:** `typing.Optional[str]` 
-    
+**user_id:** `typing.Optional[str]`
+
-**name:** `typing.Optional[str]` 
-    
+**name:** `typing.Optional[str]`
+
-**session_id:** `typing.Optional[str]` 
-    
+**session_id:** `typing.Optional[str]`
+
@@ -3607,7 +3663,7 @@ client.trace.list(
**from_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces with a trace.timestamp on or after a certain datetime (ISO 8601)
-    
+
@@ -3615,7 +3671,7 @@ client.trace.list(
**to_timestamp:** `typing.Optional[dt.datetime]` — Optional filter to only include traces with a trace.timestamp before a certain datetime (ISO 8601)
-    
+
@@ -3623,7 +3679,7 @@ client.trace.list(
**order_by:** `typing.Optional[str]` — Format of the string [field].[asc/desc]. Fields: id, timestamp, name, userId, release, version, public, bookmarked, sessionId. Example: timestamp.asc
-    
+
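For instance, sorting oldest traces first uses the exact example string from the parameter doc:

```python
client.trace.list(order_by="timestamp.asc")
```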
@@ -3631,7 +3687,7 @@ client.trace.list(
**tags:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]` — Only traces that include all of these tags will be returned.
-    
+
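Passing several tags narrows rather than widens the result, since a trace must carry all of them; the tag values below are hypothetical:

```python
client.trace.list(tags=["production", "experiment-a"])
```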
@@ -3639,7 +3695,7 @@ client.trace.list(
**version:** `typing.Optional[str]` — Optional filter to only include traces with a certain version.
-    
+
@@ -3647,7 +3703,7 @@ client.trace.list(
**release:** `typing.Optional[str]` — Optional filter to only include traces with a certain release.
-    
+
@@ -3655,14 +3711,12 @@ client.trace.list(
**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-    
+
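From here on, the patch applies the same mechanical reformat to the generated `json()`/`dict()` overrides across dozens of pydantic v1 models; behavior is unchanged. As a simplified sketch of what that pattern computes (`deep_union_pydantic_dicts` is the SDK's recursive dict merge; the body below is an approximation for illustration, not the shipped implementation):

```python
import typing


def deep_union_pydantic_dicts(
    source: typing.Dict[str, typing.Any],
    destination: typing.Dict[str, typing.Any],
) -> typing.Dict[str, typing.Any]:
    # Recursively overlay `source` onto `destination`; source values win.
    for key, value in source.items():
        if isinstance(value, dict):
            deep_union_pydantic_dicts(value, destination.setdefault(key, {}))
        else:
            destination[key] = value
    return destination


# The generated dict() merges two serializations of the same model:
#   exclude_unset=True -> only the fields the caller explicitly set
#   exclude_none=True  -> every field whose value is not None
# Their deep union keeps fields explicitly set to None while still
# dropping optional fields that were never set at all.
```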
- - diff --git a/langfuse/api/resources/comments/client.py b/langfuse/api/resources/comments/client.py index 5c17f1a7c..80476b00b 100644 --- a/langfuse/api/resources/comments/client.py +++ b/langfuse/api/resources/comments/client.py @@ -49,7 +49,7 @@ def create( Examples -------- from finto import CreateCommentRequest - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -141,7 +141,7 @@ def get( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -220,7 +220,7 @@ def get_by_id( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -295,7 +295,7 @@ async def create( import asyncio from finto import CreateCommentRequest - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -395,7 +395,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -482,7 +482,7 @@ async def get_by_id( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", diff --git a/langfuse/api/resources/commons/errors/__init__.py b/langfuse/api/resources/commons/errors/__init__.py index eb5a67925..0aef2f92f 100644 --- a/langfuse/api/resources/commons/errors/__init__.py +++ b/langfuse/api/resources/commons/errors/__init__.py @@ -6,4 +6,10 @@ from .not_found_error import NotFoundError from .unauthorized_error import UnauthorizedError -__all__ = ["AccessDeniedError", "Error", "MethodNotAllowedError", "NotFoundError", "UnauthorizedError"] +__all__ = [ + "AccessDeniedError", + "Error", + "MethodNotAllowedError", + "NotFoundError", + "UnauthorizedError", +] diff --git a/langfuse/api/resources/commons/types/boolean_score.py b/langfuse/api/resources/commons/types/boolean_score.py index 7a97a9337..d838b7db9 100644 --- a/langfuse/api/resources/commons/types/boolean_score.py +++ b/langfuse/api/resources/commons/types/boolean_score.py @@ -20,15 +20,28 @@ class BooleanScore(BaseScore): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + 
super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/categorical_score.py b/langfuse/api/resources/commons/types/categorical_score.py index 2bc86cb32..847caf47b 100644 --- a/langfuse/api/resources/commons/types/categorical_score.py +++ b/langfuse/api/resources/commons/types/categorical_score.py @@ -20,15 +20,28 @@ class CategoricalScore(BaseScore): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/config_category.py b/langfuse/api/resources/commons/types/config_category.py index 7455c70a0..b1cbde9f2 100644 --- a/langfuse/api/resources/commons/types/config_category.py +++ b/langfuse/api/resources/commons/types/config_category.py @@ -12,15 +12,28 @@ class ConfigCategory(pydantic_v1.BaseModel): label: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset.py b/langfuse/api/resources/commons/types/dataset.py index abc35a168..be59a951a 100644 --- a/langfuse/api/resources/commons/types/dataset.py +++ b/langfuse/api/resources/commons/types/dataset.py @@ -17,15 +17,28 @@ class Dataset(pydantic_v1.BaseModel): updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> 
typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset_item.py b/langfuse/api/resources/commons/types/dataset_item.py index 9a34107a8..dd5f85e78 100644 --- a/langfuse/api/resources/commons/types/dataset_item.py +++ b/langfuse/api/resources/commons/types/dataset_item.py @@ -12,25 +12,44 @@ class DatasetItem(pydantic_v1.BaseModel): id: str status: DatasetStatus input: typing.Optional[typing.Any] = None - expected_output: typing.Optional[typing.Any] = pydantic_v1.Field(alias="expectedOutput", default=None) + expected_output: typing.Optional[typing.Any] = pydantic_v1.Field( + alias="expectedOutput", default=None + ) metadata: typing.Optional[typing.Any] = None - source_trace_id: typing.Optional[str] = pydantic_v1.Field(alias="sourceTraceId", default=None) - source_observation_id: typing.Optional[str] = pydantic_v1.Field(alias="sourceObservationId", default=None) + source_trace_id: typing.Optional[str] = pydantic_v1.Field( + alias="sourceTraceId", default=None + ) + source_observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="sourceObservationId", default=None + ) dataset_id: str = pydantic_v1.Field(alias="datasetId") dataset_name: str = pydantic_v1.Field(alias="datasetName") created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset_run.py b/langfuse/api/resources/commons/types/dataset_run.py index 7fc733ff7..74b1a2ac8 100644 --- a/langfuse/api/resources/commons/types/dataset_run.py +++ b/langfuse/api/resources/commons/types/dataset_run.py @@ -49,15 +49,28 @@ class DatasetRun(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, 
**kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset_run_item.py b/langfuse/api/resources/commons/types/dataset_run_item.py index 683c055df..f1b3af163 100644 --- a/langfuse/api/resources/commons/types/dataset_run_item.py +++ b/langfuse/api/resources/commons/types/dataset_run_item.py @@ -13,20 +13,35 @@ class DatasetRunItem(pydantic_v1.BaseModel): dataset_run_name: str = pydantic_v1.Field(alias="datasetRunName") dataset_item_id: str = pydantic_v1.Field(alias="datasetItemId") trace_id: str = pydantic_v1.Field(alias="traceId") - observation_id: typing.Optional[str] = pydantic_v1.Field(alias="observationId", default=None) + observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="observationId", default=None + ) created_at: dt.datetime = pydantic_v1.Field(alias="createdAt") updated_at: dt.datetime = pydantic_v1.Field(alias="updatedAt") def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset_run_with_items.py b/langfuse/api/resources/commons/types/dataset_run_with_items.py index 0179bea39..647d2c553 100644 --- a/langfuse/api/resources/commons/types/dataset_run_with_items.py +++ b/langfuse/api/resources/commons/types/dataset_run_with_items.py @@ -10,18 +10,33 @@ class DatasetRunWithItems(DatasetRun): - dataset_run_items: typing.List[DatasetRunItem] = pydantic_v1.Field(alias="datasetRunItems") + dataset_run_items: typing.List[DatasetRunItem] = pydantic_v1.Field( + alias="datasetRunItems" + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + 
"by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/dataset_status.py b/langfuse/api/resources/commons/types/dataset_status.py index 50b636851..09eac62fe 100644 --- a/langfuse/api/resources/commons/types/dataset_status.py +++ b/langfuse/api/resources/commons/types/dataset_status.py @@ -10,7 +10,11 @@ class DatasetStatus(str, enum.Enum): ACTIVE = "ACTIVE" ARCHIVED = "ARCHIVED" - def visit(self, active: typing.Callable[[], T_Result], archived: typing.Callable[[], T_Result]) -> T_Result: + def visit( + self, + active: typing.Callable[[], T_Result], + archived: typing.Callable[[], T_Result], + ) -> T_Result: if self is DatasetStatus.ACTIVE: return active() if self is DatasetStatus.ARCHIVED: diff --git a/langfuse/api/resources/commons/types/map_value.py b/langfuse/api/resources/commons/types/map_value.py index aebdb7f97..e1e771a9b 100644 --- a/langfuse/api/resources/commons/types/map_value.py +++ b/langfuse/api/resources/commons/types/map_value.py @@ -3,5 +3,8 @@ import typing MapValue = typing.Union[ - typing.Optional[str], typing.Optional[int], typing.Optional[bool], typing.Optional[typing.List[str]] + typing.Optional[str], + typing.Optional[int], + typing.Optional[bool], + typing.Optional[typing.List[str]], ] diff --git a/langfuse/api/resources/commons/types/numeric_score.py b/langfuse/api/resources/commons/types/numeric_score.py index 188191dd1..d7f860cd5 100644 --- a/langfuse/api/resources/commons/types/numeric_score.py +++ b/langfuse/api/resources/commons/types/numeric_score.py @@ -15,15 +15,28 @@ class NumericScore(BaseScore): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/observation.py 
b/langfuse/api/resources/commons/types/observation.py index 2d0914059..130fe732d 100644 --- a/langfuse/api/resources/commons/types/observation.py +++ b/langfuse/api/resources/commons/types/observation.py @@ -36,12 +36,16 @@ class Observation(pydantic_v1.BaseModel): The start time of the observation """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="endTime", default=None) + end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="endTime", default=None + ) """ The end time of the observation. """ - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="completionStartTime", default=None) + completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="completionStartTime", default=None + ) """ The completion start time of the observation """ @@ -88,12 +92,16 @@ class Observation(pydantic_v1.BaseModel): The level of the observation """ - status_message: typing.Optional[str] = pydantic_v1.Field(alias="statusMessage", default=None) + status_message: typing.Optional[str] = pydantic_v1.Field( + alias="statusMessage", default=None + ) """ The status message of the observation """ - parent_observation_id: typing.Optional[str] = pydantic_v1.Field(alias="parentObservationId", default=None) + parent_observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="parentObservationId", default=None + ) """ The parent observation ID """ @@ -104,15 +112,28 @@ class Observation(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/observations_view.py b/langfuse/api/resources/commons/types/observations_view.py index 360bda59c..3e15909ea 100644 --- a/langfuse/api/resources/commons/types/observations_view.py +++ b/langfuse/api/resources/commons/types/observations_view.py @@ -9,12 +9,16 @@ class ObservationsView(Observation): - prompt_name: typing.Optional[str] = pydantic_v1.Field(alias="promptName", default=None) + prompt_name: typing.Optional[str] = pydantic_v1.Field( + alias="promptName", default=None + ) """ The name of the prompt associated with the observation """ - prompt_version: typing.Optional[int] = pydantic_v1.Field(alias="promptVersion", default=None) + prompt_version: typing.Optional[int] = pydantic_v1.Field( + alias="promptVersion", default=None + ) """ The version of the prompt associated with the observation """ @@ -24,32 +28,44 @@ class ObservationsView(Observation): The unique identifier of the model """ - input_price: typing.Optional[float] = 
pydantic_v1.Field(alias="inputPrice", default=None) + input_price: typing.Optional[float] = pydantic_v1.Field( + alias="inputPrice", default=None + ) """ The price of the input in USD """ - output_price: typing.Optional[float] = pydantic_v1.Field(alias="outputPrice", default=None) + output_price: typing.Optional[float] = pydantic_v1.Field( + alias="outputPrice", default=None + ) """ The price of the output in USD. """ - total_price: typing.Optional[float] = pydantic_v1.Field(alias="totalPrice", default=None) + total_price: typing.Optional[float] = pydantic_v1.Field( + alias="totalPrice", default=None + ) """ The total price in USD. """ - calculated_input_cost: typing.Optional[float] = pydantic_v1.Field(alias="calculatedInputCost", default=None) + calculated_input_cost: typing.Optional[float] = pydantic_v1.Field( + alias="calculatedInputCost", default=None + ) """ The calculated cost of the input in USD """ - calculated_output_cost: typing.Optional[float] = pydantic_v1.Field(alias="calculatedOutputCost", default=None) + calculated_output_cost: typing.Optional[float] = pydantic_v1.Field( + alias="calculatedOutputCost", default=None + ) """ The calculated cost of the output in USD """ - calculated_total_cost: typing.Optional[float] = pydantic_v1.Field(alias="calculatedTotalCost", default=None) + calculated_total_cost: typing.Optional[float] = pydantic_v1.Field( + alias="calculatedTotalCost", default=None + ) """ The calculated total cost in USD """ @@ -59,21 +75,36 @@ class ObservationsView(Observation): The latency in seconds. """ - time_to_first_token: typing.Optional[float] = pydantic_v1.Field(alias="timeToFirstToken", default=None) + time_to_first_token: typing.Optional[float] = pydantic_v1.Field( + alias="timeToFirstToken", default=None + ) """ The time to the first token in seconds """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/score_config.py b/langfuse/api/resources/commons/types/score_config.py index f31fefc5c..4a7b30e0e 100644 --- a/langfuse/api/resources/commons/types/score_config.py +++ b/langfuse/api/resources/commons/types/score_config.py @@ -25,17 +25,23 @@ class ScoreConfig(pydantic_v1.BaseModel): Whether the score config is archived. Defaults to false """ - min_value: typing.Optional[float] = pydantic_v1.Field(alias="minValue", default=None) + min_value: typing.Optional[float] = pydantic_v1.Field( + alias="minValue", default=None + ) """ Sets minimum value for numerical scores. 
If not set, the minimum value defaults to -∞ """ - max_value: typing.Optional[float] = pydantic_v1.Field(alias="maxValue", default=None) + max_value: typing.Optional[float] = pydantic_v1.Field( + alias="maxValue", default=None + ) """ Sets maximum value for numerical scores. If not set, the maximum value defaults to +∞ """ - categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field(default=None) + categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field( + default=None + ) """ Configures custom categories for categorical scores """ @@ -43,15 +49,28 @@ class ScoreConfig(pydantic_v1.BaseModel): description: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/session.py b/langfuse/api/resources/commons/types/session.py index 8b5bf1556..230bf004b 100644 --- a/langfuse/api/resources/commons/types/session.py +++ b/langfuse/api/resources/commons/types/session.py @@ -13,15 +13,28 @@ class Session(pydantic_v1.BaseModel): project_id: str = pydantic_v1.Field(alias="projectId") def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/session_with_traces.py b/langfuse/api/resources/commons/types/session_with_traces.py index d1cfa1808..b5465daa9 100644 --- a/langfuse/api/resources/commons/types/session_with_traces.py +++ b/langfuse/api/resources/commons/types/session_with_traces.py @@ -13,15 +13,28 @@ class SessionWithTraces(Session): traces: typing.List[Trace] def json(self, **kwargs: typing.Any) -> str: - 
kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/trace.py b/langfuse/api/resources/commons/types/trace.py index b180518d3..9c9be5ae8 100644 --- a/langfuse/api/resources/commons/types/trace.py +++ b/langfuse/api/resources/commons/types/trace.py @@ -33,7 +33,9 @@ class Trace(pydantic_v1.BaseModel): The output data of the trace. Can be any JSON. """ - session_id: typing.Optional[str] = pydantic_v1.Field(alias="sessionId", default=None) + session_id: typing.Optional[str] = pydantic_v1.Field( + alias="sessionId", default=None + ) """ The session identifier associated with the trace """ @@ -69,15 +71,28 @@ class Trace(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/trace_with_details.py b/langfuse/api/resources/commons/types/trace_with_details.py index 5aba777f3..5ffe6f218 100644 --- a/langfuse/api/resources/commons/types/trace_with_details.py +++ b/langfuse/api/resources/commons/types/trace_with_details.py @@ -35,15 +35,28 @@ class TraceWithDetails(Trace): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, 
"exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/trace_with_full_details.py b/langfuse/api/resources/commons/types/trace_with_full_details.py index 0f6b7bb0b..c96258d73 100644 --- a/langfuse/api/resources/commons/types/trace_with_full_details.py +++ b/langfuse/api/resources/commons/types/trace_with_full_details.py @@ -37,15 +37,28 @@ class TraceWithFullDetails(Trace): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/commons/types/usage.py b/langfuse/api/resources/commons/types/usage.py index 097a3f304..bc5041c5f 100644 --- a/langfuse/api/resources/commons/types/usage.py +++ b/langfuse/api/resources/commons/types/usage.py @@ -29,31 +29,50 @@ class Usage(pydantic_v1.BaseModel): """ unit: typing.Optional[ModelUsageUnit] = None - input_cost: typing.Optional[float] = pydantic_v1.Field(alias="inputCost", default=None) + input_cost: typing.Optional[float] = pydantic_v1.Field( + alias="inputCost", default=None + ) """ USD input cost """ - output_cost: typing.Optional[float] = pydantic_v1.Field(alias="outputCost", default=None) + output_cost: typing.Optional[float] = pydantic_v1.Field( + alias="outputCost", default=None + ) """ USD output cost """ - total_cost: typing.Optional[float] = pydantic_v1.Field(alias="totalCost", default=None) + total_cost: typing.Optional[float] = pydantic_v1.Field( + alias="totalCost", default=None + ) """ USD total cost, defaults to input+output """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + 
"by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/dataset_items/client.py b/langfuse/api/resources/dataset_items/client.py index 2f0b810f8..89f19266d 100644 --- a/langfuse/api/resources/dataset_items/client.py +++ b/langfuse/api/resources/dataset_items/client.py @@ -26,7 +26,10 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper def create( - self, *, request: CreateDatasetItemRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetItemRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetItem: """ Create a dataset item @@ -45,7 +48,7 @@ def create( Examples -------- from finto import CreateDatasetItemRequest, DatasetStatus - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -69,7 +72,11 @@ def create( ) """ _response = self._client_wrapper.httpx_client.request( - "api/public/dataset-items", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/dataset-items", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -77,19 +84,29 @@ def create( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetItem: + def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> DatasetItem: """ Get a dataset item @@ -106,7 +123,7 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -121,7 +138,9 @@ def get(self, id: str, *, request_options: 
typing.Optional[RequestOptions] = Non ) """ _response = self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -129,13 +148,21 @@ def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = Non if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -177,7 +204,7 @@ def list( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -213,13 +240,21 @@ def list( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -231,7 +266,10 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper async def create( - self, *, request: CreateDatasetItemRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetItemRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetItem: """ Create a dataset item @@ -252,7 +290,7 @@ async def create( import asyncio from finto import CreateDatasetItemRequest, DatasetStatus - from 
finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -282,7 +320,11 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-items", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/dataset-items", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -290,19 +332,29 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] = None) -> DatasetItem: + async def get( + self, id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> DatasetItem: """ Get a dataset item @@ -321,7 +373,7 @@ async def get(self, id: str, *, request_options: typing.Optional[RequestOptions] -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -342,7 +394,9 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/public/dataset-items/{jsonable_encoder(id)}", method="GET", request_options=request_options + f"api/public/dataset-items/{jsonable_encoder(id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -350,13 +404,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise 
MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -400,7 +462,7 @@ async def list( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -442,13 +504,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py b/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py index 52d3ff635..111f6819a 100644 --- a/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py +++ b/langfuse/api/resources/dataset_items/types/create_dataset_item_request.py @@ -11,10 +11,16 @@ class CreateDatasetItemRequest(pydantic_v1.BaseModel): dataset_name: str = pydantic_v1.Field(alias="datasetName") input: typing.Optional[typing.Any] = None - expected_output: typing.Optional[typing.Any] = pydantic_v1.Field(alias="expectedOutput", default=None) + expected_output: typing.Optional[typing.Any] = pydantic_v1.Field( + alias="expectedOutput", default=None + ) metadata: typing.Optional[typing.Any] = None - source_trace_id: typing.Optional[str] = pydantic_v1.Field(alias="sourceTraceId", default=None) - source_observation_id: typing.Optional[str] = pydantic_v1.Field(alias="sourceObservationId", default=None) + source_trace_id: typing.Optional[str] = pydantic_v1.Field( + alias="sourceTraceId", default=None + ) + source_observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="sourceObservationId", default=None + ) id: typing.Optional[str] = pydantic_v1.Field(default=None) """ Dataset items are upserted on their id. Id needs to be unique (project-level) and cannot be reused across datasets. 
@@ -26,15 +32,28 @@ class CreateDatasetItemRequest(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py b/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py index c63f147c4..8592ba80f 100644 --- a/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py +++ b/langfuse/api/resources/dataset_items/types/paginated_dataset_items.py @@ -14,15 +14,28 @@ class PaginatedDatasetItems(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/dataset_run_items/client.py b/langfuse/api/resources/dataset_run_items/client.py index ad54e5651..109ce60b8 100644 --- a/langfuse/api/resources/dataset_run_items/client.py +++ b/langfuse/api/resources/dataset_run_items/client.py @@ -24,7 +24,10 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper def create( - self, *, request: CreateDatasetRunItemRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetRunItemRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetRunItem: """ Create a dataset run item @@ -43,7 +46,7 @@ def create( Examples -------- from finto import CreateDatasetRunItemRequest - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -65,7 +68,11 @@ def create( ) """ _response = 
self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/dataset-run-items", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -73,13 +80,21 @@ def create( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -91,7 +106,10 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper async def create( - self, *, request: CreateDatasetRunItemRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetRunItemRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetRunItem: """ Create a dataset run item @@ -112,7 +130,7 @@ async def create( import asyncio from finto import CreateDatasetRunItemRequest - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -140,7 +158,11 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/public/dataset-run-items", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/dataset-run-items", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -148,13 +170,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise 
NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py b/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py index 8762d7418..0a643b835 100644 --- a/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py +++ b/langfuse/api/resources/dataset_run_items/types/create_dataset_run_item_request.py @@ -9,7 +9,9 @@ class CreateDatasetRunItemRequest(pydantic_v1.BaseModel): run_name: str = pydantic_v1.Field(alias="runName") - run_description: typing.Optional[str] = pydantic_v1.Field(alias="runDescription", default=None) + run_description: typing.Optional[str] = pydantic_v1.Field( + alias="runDescription", default=None + ) """ Description of the run. If run exists, description will be updated. """ @@ -20,22 +22,37 @@ class CreateDatasetRunItemRequest(pydantic_v1.BaseModel): """ dataset_item_id: str = pydantic_v1.Field(alias="datasetItemId") - observation_id: typing.Optional[str] = pydantic_v1.Field(alias="observationId", default=None) + observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="observationId", default=None + ) trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) """ traceId should always be provided. For compatibility with older SDK versions it can also be inferred from the provided observationId. """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/datasets/client.py b/langfuse/api/resources/datasets/client.py index 48054f617..2aeb05323 100644 --- a/langfuse/api/resources/datasets/client.py +++ b/langfuse/api/resources/datasets/client.py @@ -54,7 +54,7 @@ def list( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -81,19 +81,32 @@ def list( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 
403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get(self, dataset_name: str, *, request_options: typing.Optional[RequestOptions] = None) -> Dataset: + def get( + self, + dataset_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> Dataset: """ Get a dataset @@ -110,7 +123,7 @@ def get(self, dataset_name: str, *, request_options: typing.Optional[RequestOpti Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -125,7 +138,9 @@ def get(self, dataset_name: str, *, request_options: typing.Optional[RequestOpti ) """ _response = self._client_wrapper.httpx_client.request( - f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", method="GET", request_options=request_options + f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -133,20 +148,31 @@ def get(self, dataset_name: str, *, request_options: typing.Optional[RequestOpti if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) def create( - self, *, request: CreateDatasetRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> Dataset: """ Create a dataset @@ -165,7 +191,7 @@ def create( Examples -------- from finto import CreateDatasetRequest - from finto.client import FernLangfuse + 
from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -184,7 +210,11 @@ def create( ) """ _response = self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/v2/datasets", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -192,20 +222,32 @@ def create( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) def get_run( - self, dataset_name: str, run_name: str, *, request_options: typing.Optional[RequestOptions] = None + self, + dataset_name: str, + run_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetRunWithItems: """ Get a dataset run and its items @@ -225,7 +267,7 @@ def get_run( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -251,13 +293,21 @@ def get_run( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -293,7 +343,7 @@ def get_runs( Examples -------- - from finto.client import FernLangfuse + from 
langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -321,13 +371,21 @@ def get_runs( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -367,7 +425,7 @@ async def list( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -400,19 +458,32 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get(self, dataset_name: str, *, request_options: typing.Optional[RequestOptions] = None) -> Dataset: + async def get( + self, + dataset_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> Dataset: """ Get a dataset @@ -431,7 +502,7 @@ async def get(self, dataset_name: str, *, request_options: typing.Optional[Reque -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -452,7 +523,9 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - 
f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", method="GET", request_options=request_options + f"api/public/v2/datasets/{jsonable_encoder(dataset_name)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -460,20 +533,31 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) async def create( - self, *, request: CreateDatasetRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateDatasetRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> Dataset: """ Create a dataset @@ -494,7 +578,7 @@ async def create( import asyncio from finto import CreateDatasetRequest - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -519,7 +603,11 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/datasets", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/v2/datasets", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -527,20 +615,32 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # 
type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) async def get_run( - self, dataset_name: str, run_name: str, *, request_options: typing.Optional[RequestOptions] = None + self, + dataset_name: str, + run_name: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> DatasetRunWithItems: """ Get a dataset run and its items @@ -562,7 +662,7 @@ async def get_run( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -594,13 +694,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -638,7 +746,7 @@ async def get_runs( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -672,13 +780,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/datasets/types/create_dataset_request.py 
b/langfuse/api/resources/datasets/types/create_dataset_request.py index 9fb01dde2..023cb4c12 100644 --- a/langfuse/api/resources/datasets/types/create_dataset_request.py +++ b/langfuse/api/resources/datasets/types/create_dataset_request.py @@ -13,15 +13,28 @@ class CreateDatasetRequest(pydantic_v1.BaseModel): metadata: typing.Optional[typing.Any] = None def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/datasets/types/paginated_dataset_runs.py b/langfuse/api/resources/datasets/types/paginated_dataset_runs.py index dad465bc7..86f2f0a73 100644 --- a/langfuse/api/resources/datasets/types/paginated_dataset_runs.py +++ b/langfuse/api/resources/datasets/types/paginated_dataset_runs.py @@ -14,15 +14,28 @@ class PaginatedDatasetRuns(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/datasets/types/paginated_datasets.py b/langfuse/api/resources/datasets/types/paginated_datasets.py index e24be3483..c2d436bf4 100644 --- a/langfuse/api/resources/datasets/types/paginated_datasets.py +++ b/langfuse/api/resources/datasets/types/paginated_datasets.py @@ -14,15 +14,28 @@ class PaginatedDatasets(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> 
typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/health/client.py b/langfuse/api/resources/health/client.py index 085b065c0..3e84ced68 100644 --- a/langfuse/api/resources/health/client.py +++ b/langfuse/api/resources/health/client.py @@ -20,7 +20,9 @@ class HealthClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def health(self, *, request_options: typing.Optional[RequestOptions] = None) -> HealthResponse: + def health( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HealthResponse: """ Check health of API and database @@ -35,7 +37,7 @@ def health(self, *, request_options: typing.Optional[RequestOptions] = None) -> Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -58,13 +60,21 @@ def health(self, *, request_options: typing.Optional[RequestOptions] = None) -> if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -75,7 +85,9 @@ class AsyncHealthClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def health(self, *, request_options: typing.Optional[RequestOptions] = None) -> HealthResponse: + async def health( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> HealthResponse: """ Check health of API and database @@ -92,7 +104,7 @@ async def health(self, *, request_options: typing.Optional[RequestOptions] = Non -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", 
@@ -121,13 +133,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/health/types/health_response.py b/langfuse/api/resources/health/types/health_response.py index ef614a0c2..a864ea9ae 100644 --- a/langfuse/api/resources/health/types/health_response.py +++ b/langfuse/api/resources/health/types/health_response.py @@ -27,15 +27,28 @@ class HealthResponse(pydantic_v1.BaseModel): status: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/client.py b/langfuse/api/resources/ingestion/client.py index 58df39b6b..1e3b99be1 100644 --- a/langfuse/api/resources/ingestion/client.py +++ b/langfuse/api/resources/ingestion/client.py @@ -28,7 +28,7 @@ def batch( *, batch: typing.Sequence[IngestionEvent], metadata: typing.Optional[typing.Any] = OMIT, - request_options: typing.Optional[RequestOptions] = None + request_options: typing.Optional[RequestOptions] = None, ) -> IngestionResponse: """ Batched ingestion for Langfuse Tracing. If you want to use tracing via the API, such as to build your own Langfuse client implementation, this is the only API route you need to implement. 
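Since the docstring above notes this is the only route a custom tracing client needs to implement, a usage sketch may help while reviewing the re-wrapped hunks that follow. This is a minimal, hypothetical example and not part of the patch: the client construction mirrors the placeholder values used in the docstrings throughout this diff, and the event and body field values are illustrative only.

```python
import datetime

from finto import IngestionEvent_TraceCreate, TraceBody
from langfuse.api.client import FernLangfuse

client = FernLangfuse(
    x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
    x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION",
    x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY",
    username="YOUR_USERNAME",
    password="YOUR_PASSWORD",
    base_url="https://yourhost.com/path/to/api",
)

# A batch may mix event types; each event carries its own id and timestamp,
# which lets the backend deduplicate retried events.
response = client.ingestion.batch(
    batch=[
        IngestionEvent_TraceCreate(
            id="my-event-id",  # placeholder event id
            timestamp=datetime.datetime.now(datetime.timezone.utc).isoformat(),
            body=TraceBody(
                id="my-trace-id",  # placeholder trace id
                name="my-trace",
            ),
        )
    ],
)
# The response reports per-event outcomes rather than a single status.
print(response.successes, response.errors)
```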
@@ -58,7 +58,7 @@ def batch( import datetime from finto import IngestionEvent_TraceCreate, TraceBody - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -108,13 +108,21 @@ def batch( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -130,7 +138,7 @@ async def batch( *, batch: typing.Sequence[IngestionEvent], metadata: typing.Optional[typing.Any] = OMIT, - request_options: typing.Optional[RequestOptions] = None + request_options: typing.Optional[RequestOptions] = None, ) -> IngestionResponse: """ Batched ingestion for Langfuse Tracing. If you want to use tracing via the API, such as to build your own Langfuse client implementation, this is the only API route you need to implement. 
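Most of the hunks in this file (and in the client files above) only re-wrap the error mapping, so the caller-visible behaviour is unchanged and may be the easier thing to review against. A hypothetical caller-side sketch, reusing the `client` from the example above and assuming the error classes are re-exported from `langfuse.api.resources.commons.errors` and `langfuse.api.core.api_error`:

```python
from langfuse.api.core.api_error import ApiError
from langfuse.api.resources.commons.errors import (
    AccessDeniedError,
    NotFoundError,
    UnauthorizedError,
)

try:
    # Empty batch purely for illustration; every method in this diff maps
    # status codes to exceptions the same way.
    client.ingestion.batch(batch=[])
except UnauthorizedError:
    ...  # 401: check the public/secret key pair
except AccessDeniedError:
    ...  # 403: key has no access to the resource
except NotFoundError:
    ...  # 404: resource does not exist
except ApiError as err:
    # Fallback for unmapped status codes and non-JSON bodies
    # (the JSONDecodeError path in the generated code).
    print(err.status_code, err.body)
```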
@@ -161,7 +169,7 @@ async def batch( import datetime from finto import IngestionEvent_TraceCreate, TraceBody - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -217,13 +225,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/ingestion/types/base_event.py b/langfuse/api/resources/ingestion/types/base_event.py index 145d224ec..dec8a52e7 100644 --- a/langfuse/api/resources/ingestion/types/base_event.py +++ b/langfuse/api/resources/ingestion/types/base_event.py @@ -24,15 +24,28 @@ class BaseEvent(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_event_body.py b/langfuse/api/resources/ingestion/types/create_event_body.py index 566c39294..afe8677f3 100644 --- a/langfuse/api/resources/ingestion/types/create_event_body.py +++ b/langfuse/api/resources/ingestion/types/create_event_body.py @@ -12,15 +12,28 @@ class CreateEventBody(OptionalObservationBody): id: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return 
super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_event_event.py b/langfuse/api/resources/ingestion/types/create_event_event.py index fad6d92e0..0c3cce040 100644 --- a/langfuse/api/resources/ingestion/types/create_event_event.py +++ b/langfuse/api/resources/ingestion/types/create_event_event.py @@ -13,15 +13,28 @@ class CreateEventEvent(BaseEvent): body: CreateEventBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_generation_body.py b/langfuse/api/resources/ingestion/types/create_generation_body.py index 871505e4e..65905e78e 100644 --- a/langfuse/api/resources/ingestion/types/create_generation_body.py +++ b/langfuse/api/resources/ingestion/types/create_generation_body.py @@ -11,25 +11,44 @@ class CreateGenerationBody(CreateSpanBody): - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="completionStartTime", default=None) + completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="completionStartTime", default=None + ) model: typing.Optional[str] = None model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( alias="modelParameters", default=None ) usage: typing.Optional[IngestionUsage] = None - prompt_name: typing.Optional[str] = pydantic_v1.Field(alias="promptName", default=None) - prompt_version: typing.Optional[int] = pydantic_v1.Field(alias="promptVersion", default=None) + prompt_name: typing.Optional[str] = pydantic_v1.Field( + alias="promptName", default=None + ) + prompt_version: typing.Optional[int] = pydantic_v1.Field( + alias="promptVersion", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": 
True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_generation_event.py b/langfuse/api/resources/ingestion/types/create_generation_event.py index 4b95cfa58..cb7b484dd 100644 --- a/langfuse/api/resources/ingestion/types/create_generation_event.py +++ b/langfuse/api/resources/ingestion/types/create_generation_event.py @@ -13,15 +13,28 @@ class CreateGenerationEvent(BaseEvent): body: CreateGenerationBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_observation_event.py b/langfuse/api/resources/ingestion/types/create_observation_event.py index 7a72cc017..adfefc793 100644 --- a/langfuse/api/resources/ingestion/types/create_observation_event.py +++ b/langfuse/api/resources/ingestion/types/create_observation_event.py @@ -13,15 +13,28 @@ class CreateObservationEvent(BaseEvent): body: ObservationBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + 
**kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_span_body.py b/langfuse/api/resources/ingestion/types/create_span_body.py index 21ef5aaf3..c31fde567 100644 --- a/langfuse/api/resources/ingestion/types/create_span_body.py +++ b/langfuse/api/resources/ingestion/types/create_span_body.py @@ -9,18 +9,33 @@ class CreateSpanBody(CreateEventBody): - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="endTime", default=None) + end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="endTime", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/create_span_event.py b/langfuse/api/resources/ingestion/types/create_span_event.py index 042124a93..7a8e8154c 100644 --- a/langfuse/api/resources/ingestion/types/create_span_event.py +++ b/langfuse/api/resources/ingestion/types/create_span_event.py @@ -13,15 +13,28 @@ class CreateSpanEvent(BaseEvent): body: CreateSpanBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/ingestion_error.py b/langfuse/api/resources/ingestion/types/ingestion_error.py index 14c661961..b9028ce1d 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_error.py +++ 
b/langfuse/api/resources/ingestion/types/ingestion_error.py @@ -14,15 +14,28 @@ class IngestionError(pydantic_v1.BaseModel): error: typing.Optional[typing.Any] = None def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/ingestion_event.py b/langfuse/api/resources/ingestion/types/ingestion_event.py index b8093f89b..e083c9354 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_event.py +++ b/langfuse/api/resources/ingestion/types/ingestion_event.py @@ -26,15 +26,28 @@ class IngestionEvent_TraceCreate(pydantic_v1.BaseModel): type: typing.Literal["trace-create"] = "trace-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -52,15 +65,28 @@ class IngestionEvent_ScoreCreate(pydantic_v1.BaseModel): type: typing.Literal["score-create"] = "score-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + 
"exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -78,15 +104,28 @@ class IngestionEvent_SpanCreate(pydantic_v1.BaseModel): type: typing.Literal["span-create"] = "span-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -104,15 +143,28 @@ class IngestionEvent_SpanUpdate(pydantic_v1.BaseModel): type: typing.Literal["span-update"] = "span-update" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -130,15 +182,28 @@ class IngestionEvent_GenerationCreate(pydantic_v1.BaseModel): type: typing.Literal["generation-create"] = "generation-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + 
**kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -156,15 +221,28 @@ class IngestionEvent_GenerationUpdate(pydantic_v1.BaseModel): type: typing.Literal["generation-update"] = "generation-update" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -182,15 +260,28 @@ class IngestionEvent_EventCreate(pydantic_v1.BaseModel): type: typing.Literal["event-create"] = "event-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -208,15 +299,28 @@ class IngestionEvent_SdkLog(pydantic_v1.BaseModel): type: typing.Literal["sdk-log"] = "sdk-log" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return 
deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -234,15 +338,28 @@ class IngestionEvent_ObservationCreate(pydantic_v1.BaseModel): type: typing.Literal["observation-create"] = "observation-create" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -260,15 +377,28 @@ class IngestionEvent_ObservationUpdate(pydantic_v1.BaseModel): type: typing.Literal["observation-update"] = "observation-update" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/ingestion_response.py b/langfuse/api/resources/ingestion/types/ingestion_response.py index deadc1cb9..b4e66349c 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_response.py +++ b/langfuse/api/resources/ingestion/types/ingestion_response.py @@ -14,15 +14,28 @@ class IngestionResponse(pydantic_v1.BaseModel): errors: typing.List[IngestionError] def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = 
{"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/ingestion_success.py b/langfuse/api/resources/ingestion/types/ingestion_success.py index 043533491..481e64752 100644 --- a/langfuse/api/resources/ingestion/types/ingestion_success.py +++ b/langfuse/api/resources/ingestion/types/ingestion_success.py @@ -12,15 +12,28 @@ class IngestionSuccess(pydantic_v1.BaseModel): status: int def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/observation_body.py b/langfuse/api/resources/ingestion/types/observation_body.py index de88ad69f..254c600da 100644 --- a/langfuse/api/resources/ingestion/types/observation_body.py +++ b/langfuse/api/resources/ingestion/types/observation_body.py @@ -16,9 +16,15 @@ class ObservationBody(pydantic_v1.BaseModel): trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) type: ObservationType name: typing.Optional[str] = None - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="startTime", default=None) - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="endTime", default=None) - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="completionStartTime", default=None) + start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="startTime", default=None + ) + end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="endTime", default=None + ) + completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="completionStartTime", default=None + ) model: typing.Optional[str] = None model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( alias="modelParameters", default=None @@ -29,19 +35,36 @@ class ObservationBody(pydantic_v1.BaseModel): output: typing.Optional[typing.Any] = None usage: typing.Optional[Usage] = None level: typing.Optional[ObservationLevel] = None - status_message: typing.Optional[str] = pydantic_v1.Field(alias="statusMessage", default=None) - 
parent_observation_id: typing.Optional[str] = pydantic_v1.Field(alias="parentObservationId", default=None) + status_message: typing.Optional[str] = pydantic_v1.Field( + alias="statusMessage", default=None + ) + parent_observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="parentObservationId", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/open_ai_usage.py b/langfuse/api/resources/ingestion/types/open_ai_usage.py index c5a8ade45..86e7ebd82 100644 --- a/langfuse/api/resources/ingestion/types/open_ai_usage.py +++ b/langfuse/api/resources/ingestion/types/open_ai_usage.py @@ -12,20 +12,39 @@ class OpenAiUsage(pydantic_v1.BaseModel): Usage interface of OpenAI for improved compatibility. """ - prompt_tokens: typing.Optional[int] = pydantic_v1.Field(alias="promptTokens", default=None) - completion_tokens: typing.Optional[int] = pydantic_v1.Field(alias="completionTokens", default=None) - total_tokens: typing.Optional[int] = pydantic_v1.Field(alias="totalTokens", default=None) + prompt_tokens: typing.Optional[int] = pydantic_v1.Field( + alias="promptTokens", default=None + ) + completion_tokens: typing.Optional[int] = pydantic_v1.Field( + alias="completionTokens", default=None + ) + total_tokens: typing.Optional[int] = pydantic_v1.Field( + alias="totalTokens", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/optional_observation_body.py b/langfuse/api/resources/ingestion/types/optional_observation_body.py 
index d38e07da4..bd7cb5808 100644 --- a/langfuse/api/resources/ingestion/types/optional_observation_body.py +++ b/langfuse/api/resources/ingestion/types/optional_observation_body.py @@ -11,25 +11,44 @@ class OptionalObservationBody(pydantic_v1.BaseModel): trace_id: typing.Optional[str] = pydantic_v1.Field(alias="traceId", default=None) name: typing.Optional[str] = None - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="startTime", default=None) + start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="startTime", default=None + ) metadata: typing.Optional[typing.Any] = None input: typing.Optional[typing.Any] = None output: typing.Optional[typing.Any] = None level: typing.Optional[ObservationLevel] = None - status_message: typing.Optional[str] = pydantic_v1.Field(alias="statusMessage", default=None) - parent_observation_id: typing.Optional[str] = pydantic_v1.Field(alias="parentObservationId", default=None) + status_message: typing.Optional[str] = pydantic_v1.Field( + alias="statusMessage", default=None + ) + parent_observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="parentObservationId", default=None + ) version: typing.Optional[str] = None def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/score_body.py b/langfuse/api/resources/ingestion/types/score_body.py index f7eda85ee..1bc7e581a 100644 --- a/langfuse/api/resources/ingestion/types/score_body.py +++ b/langfuse/api/resources/ingestion/types/score_body.py @@ -30,9 +30,13 @@ class ScoreBody(pydantic_v1.BaseModel): The value of the score. Must be passed as string for categorical scores, and numeric for boolean and numeric scores. Boolean score values must equal either 1 or 0 (true or false) """ - observation_id: typing.Optional[str] = pydantic_v1.Field(alias="observationId", default=None) + observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="observationId", default=None + ) comment: typing.Optional[str] = None - data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field(alias="dataType", default=None) + data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field( + alias="dataType", default=None + ) """ When set, must match the score value's type. 
If not set, will be inferred from the score value or config """ @@ -43,15 +47,28 @@ class ScoreBody(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/score_event.py b/langfuse/api/resources/ingestion/types/score_event.py index 36e4e1424..ea05aedef 100644 --- a/langfuse/api/resources/ingestion/types/score_event.py +++ b/langfuse/api/resources/ingestion/types/score_event.py @@ -13,15 +13,28 @@ class ScoreEvent(BaseEvent): body: ScoreBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/sdk_log_body.py b/langfuse/api/resources/ingestion/types/sdk_log_body.py index 7bf69e4d4..df8972860 100644 --- a/langfuse/api/resources/ingestion/types/sdk_log_body.py +++ b/langfuse/api/resources/ingestion/types/sdk_log_body.py @@ -11,15 +11,28 @@ class SdkLogBody(pydantic_v1.BaseModel): log: typing.Any def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": 
True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/sdk_log_event.py b/langfuse/api/resources/ingestion/types/sdk_log_event.py index e4fcc2672..d7ad87de8 100644 --- a/langfuse/api/resources/ingestion/types/sdk_log_event.py +++ b/langfuse/api/resources/ingestion/types/sdk_log_event.py @@ -13,15 +13,28 @@ class SdkLogEvent(BaseEvent): body: SdkLogBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/trace_body.py b/langfuse/api/resources/ingestion/types/trace_body.py index de01c2f02..42cb4f197 100644 --- a/langfuse/api/resources/ingestion/types/trace_body.py +++ b/langfuse/api/resources/ingestion/types/trace_body.py @@ -14,7 +14,9 @@ class TraceBody(pydantic_v1.BaseModel): user_id: typing.Optional[str] = pydantic_v1.Field(alias="userId", default=None) input: typing.Optional[typing.Any] = None output: typing.Optional[typing.Any] = None - session_id: typing.Optional[str] = pydantic_v1.Field(alias="sessionId", default=None) + session_id: typing.Optional[str] = pydantic_v1.Field( + alias="sessionId", default=None + ) release: typing.Optional[str] = None version: typing.Optional[str] = None metadata: typing.Optional[typing.Any] = None @@ -25,15 +27,28 @@ class TraceBody(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), 
super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/trace_event.py b/langfuse/api/resources/ingestion/types/trace_event.py index c2e39defa..b84ddd615 100644 --- a/langfuse/api/resources/ingestion/types/trace_event.py +++ b/langfuse/api/resources/ingestion/types/trace_event.py @@ -13,15 +13,28 @@ class TraceEvent(BaseEvent): body: TraceBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_event_body.py b/langfuse/api/resources/ingestion/types/update_event_body.py index 12c0b0f45..35bbb359b 100644 --- a/langfuse/api/resources/ingestion/types/update_event_body.py +++ b/langfuse/api/resources/ingestion/types/update_event_body.py @@ -12,15 +12,28 @@ class UpdateEventBody(OptionalObservationBody): id: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_generation_body.py b/langfuse/api/resources/ingestion/types/update_generation_body.py index 783fc9d51..e216e4604 100644 --- a/langfuse/api/resources/ingestion/types/update_generation_body.py +++ b/langfuse/api/resources/ingestion/types/update_generation_body.py @@ -11,25 +11,44 @@ class UpdateGenerationBody(UpdateSpanBody): - completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="completionStartTime", default=None) + completion_start_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + 
alias="completionStartTime", default=None + ) model: typing.Optional[str] = None model_parameters: typing.Optional[typing.Dict[str, MapValue]] = pydantic_v1.Field( alias="modelParameters", default=None ) usage: typing.Optional[IngestionUsage] = None - prompt_name: typing.Optional[str] = pydantic_v1.Field(alias="promptName", default=None) - prompt_version: typing.Optional[int] = pydantic_v1.Field(alias="promptVersion", default=None) + prompt_name: typing.Optional[str] = pydantic_v1.Field( + alias="promptName", default=None + ) + prompt_version: typing.Optional[int] = pydantic_v1.Field( + alias="promptVersion", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_generation_event.py b/langfuse/api/resources/ingestion/types/update_generation_event.py index 33c8b42f0..da8f6a9fa 100644 --- a/langfuse/api/resources/ingestion/types/update_generation_event.py +++ b/langfuse/api/resources/ingestion/types/update_generation_event.py @@ -13,15 +13,28 @@ class UpdateGenerationEvent(BaseEvent): body: UpdateGenerationBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_observation_event.py b/langfuse/api/resources/ingestion/types/update_observation_event.py index ebc1353cf..9d7af357f 100644 --- a/langfuse/api/resources/ingestion/types/update_observation_event.py +++ b/langfuse/api/resources/ingestion/types/update_observation_event.py @@ -13,15 +13,28 @@ class UpdateObservationEvent(BaseEvent): body: ObservationBody def json(self, 
**kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_span_body.py b/langfuse/api/resources/ingestion/types/update_span_body.py index 1575ba553..e3484879b 100644 --- a/langfuse/api/resources/ingestion/types/update_span_body.py +++ b/langfuse/api/resources/ingestion/types/update_span_body.py @@ -9,18 +9,33 @@ class UpdateSpanBody(UpdateEventBody): - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field(alias="endTime", default=None) + end_time: typing.Optional[dt.datetime] = pydantic_v1.Field( + alias="endTime", default=None + ) def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/ingestion/types/update_span_event.py b/langfuse/api/resources/ingestion/types/update_span_event.py index 3ad1b61b4..ec7d83b15 100644 --- a/langfuse/api/resources/ingestion/types/update_span_event.py +++ b/langfuse/api/resources/ingestion/types/update_span_event.py @@ -13,15 +13,28 @@ class UpdateSpanEvent(BaseEvent): body: UpdateSpanBody def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + 
kwargs_with_defaults_exclude_unset: typing.Any = {
+            "by_alias": True,
+            "exclude_unset": True,
+            **kwargs,
+        }
+        kwargs_with_defaults_exclude_none: typing.Any = {
+            "by_alias": True,
+            "exclude_none": True,
+            **kwargs,
+        }
         return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+            super().dict(**kwargs_with_defaults_exclude_unset),
+            super().dict(**kwargs_with_defaults_exclude_none),
         )
 
     class Config:
diff --git a/langfuse/api/resources/media/client.py b/langfuse/api/resources/media/client.py
index 4541966e9..3c2176573 100644
--- a/langfuse/api/resources/media/client.py
+++ b/langfuse/api/resources/media/client.py
@@ -46,7 +46,7 @@ def get(
 
         Examples
         --------
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -120,7 +120,7 @@ def patch(
         import datetime
 
         from finto import PatchMediaBody
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -198,7 +198,7 @@ def get_upload_url(
         Examples
         --------
         from finto import GetMediaUploadUrlRequest
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -280,7 +280,7 @@ async def get(
         --------
         import asyncio
 
-        from finto.client import AsyncFernLangfuse
+        from langfuse.api.client import AsyncFernLangfuse
 
         client = AsyncFernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -361,7 +361,7 @@ async def patch(
         import datetime
 
         from finto import PatchMediaBody
-        from finto.client import AsyncFernLangfuse
+        from langfuse.api.client import AsyncFernLangfuse
 
         client = AsyncFernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -447,7 +447,7 @@ async def get_upload_url(
         import asyncio
 
         from finto import GetMediaUploadUrlRequest
-        from finto.client import AsyncFernLangfuse
+        from langfuse.api.client import AsyncFernLangfuse
 
         client = AsyncFernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
diff --git a/langfuse/api/resources/media/types/media_content_type.py b/langfuse/api/resources/media/types/media_content_type.py
index bf9368fb3..9b0cea41a 100644
--- a/langfuse/api/resources/media/types/media_content_type.py
+++ b/langfuse/api/resources/media/types/media_content_type.py
@@ -7,9 +7,29 @@
     "image/jpeg",
     "image/jpg",
     "image/webp",
+    "image/gif",
+    "image/svg+xml",
+    "image/tiff",
+    "image/bmp",
     "audio/mpeg",
     "audio/mp3",
     "audio/wav",
+    "audio/ogg",
+    "audio/oga",
+    "audio/aac",
+    "audio/mp4",
+    "audio/flac",
+    "video/mp4",
+    "video/webm",
     "text/plain",
+    "text/html",
+    "text/css",
+    "text/csv",
     "application/pdf",
+    "application/msword",
+    "application/vnd.ms-excel",
+    "application/zip",
+    "application/json",
+    "application/xml",
+    "application/octet-stream",
 ]
diff --git a/langfuse/api/resources/metrics/client.py b/langfuse/api/resources/metrics/client.py
index ec371e314..0fe6a45ed 100644
--- a/langfuse/api/resources/metrics/client.py
+++ b/langfuse/api/resources/metrics/client.py
@@ -70,7 +70,7 @@ def daily(
         --------
         import datetime
 
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -193,7 +193,7 @@ async def daily(
         import asyncio
         import datetime
 
-        from finto.client import AsyncFernLangfuse
+        from langfuse.api.client import
AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", diff --git a/langfuse/api/resources/metrics/types/daily_metrics.py b/langfuse/api/resources/metrics/types/daily_metrics.py index 52d5f151d..36a12a3d6 100644 --- a/langfuse/api/resources/metrics/types/daily_metrics.py +++ b/langfuse/api/resources/metrics/types/daily_metrics.py @@ -18,15 +18,28 @@ class DailyMetrics(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/metrics/types/daily_metrics_details.py b/langfuse/api/resources/metrics/types/daily_metrics_details.py index 3a208ab10..e97f65446 100644 --- a/langfuse/api/resources/metrics/types/daily_metrics_details.py +++ b/langfuse/api/resources/metrics/types/daily_metrics_details.py @@ -20,15 +20,28 @@ class DailyMetricsDetails(pydantic_v1.BaseModel): usage: typing.List[UsageByModel] def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/metrics/types/usage_by_model.py b/langfuse/api/resources/metrics/types/usage_by_model.py index b94cc02ac..3ff6de59c 100644 --- a/langfuse/api/resources/metrics/types/usage_by_model.py +++ b/langfuse/api/resources/metrics/types/usage_by_model.py @@ -36,15 +36,28 @@ class UsageByModel(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return 
super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/models/client.py b/langfuse/api/resources/models/client.py index 6d784394e..799337360 100644 --- a/langfuse/api/resources/models/client.py +++ b/langfuse/api/resources/models/client.py @@ -50,7 +50,7 @@ def create( import datetime from finto import CreateModelRequest, ModelUsageUnit - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -136,7 +136,7 @@ def list( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -202,7 +202,7 @@ def get( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -266,7 +266,7 @@ def delete( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -342,7 +342,7 @@ async def create( import datetime from finto import CreateModelRequest, ModelUsageUnit - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -436,7 +436,7 @@ async def list( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -510,7 +510,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -582,7 +582,7 @@ async def delete( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", diff --git a/langfuse/api/resources/models/types/paginated_models.py b/langfuse/api/resources/models/types/paginated_models.py index 308054c12..3469a1fe6 100644 --- a/langfuse/api/resources/models/types/paginated_models.py +++ b/langfuse/api/resources/models/types/paginated_models.py @@ -14,15 +14,28 @@ class PaginatedModels(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: 
typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+        kwargs_with_defaults_exclude_unset: typing.Any = {
+            "by_alias": True,
+            "exclude_unset": True,
+            **kwargs,
+        }
+        kwargs_with_defaults_exclude_none: typing.Any = {
+            "by_alias": True,
+            "exclude_none": True,
+            **kwargs,
+        }
         return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+            super().dict(**kwargs_with_defaults_exclude_unset),
+            super().dict(**kwargs_with_defaults_exclude_none),
         )
 
     class Config:
diff --git a/langfuse/api/resources/observations/client.py b/langfuse/api/resources/observations/client.py
index ed5039ce6..46190588b 100644
--- a/langfuse/api/resources/observations/client.py
+++ b/langfuse/api/resources/observations/client.py
@@ -23,7 +23,12 @@ class ObservationsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    def get(self, observation_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> ObservationsView:
+    def get(
+        self,
+        observation_id: str,
+        *,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> ObservationsView:
         """
         Get an observation
 
@@ -41,7 +46,7 @@ def get(self, observation_id: str, *, request_options: typing.Optional[RequestOp
 
         Examples
         --------
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -56,7 +61,9 @@ def get(self, observation_id: str, *, request_options: typing.Optional[RequestOp
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/public/observations/{jsonable_encoder(observation_id)}", method="GET", request_options=request_options
+            f"api/public/observations/{jsonable_encoder(observation_id)}",
+            method="GET",
+            request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
@@ -64,13 +71,21 @@ def get(self, observation_id: str, *, request_options: typing.Optional[RequestOp
             if _response.status_code == 400:
                 raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
             if _response.status_code == 401:
-                raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise UnauthorizedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 403:
-                raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise AccessDeniedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 405:
-                raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise MethodNotAllowedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 404:
-                raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise NotFoundError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -132,7 +147,7 @@ def get_many(
         --------
         import datetime
 
-        from finto.client import FernLangfuse
+        from langfuse.api.client import FernLangfuse
 
         client = FernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -170,8 +185,12 @@ def get_many(
                 "type": type,
                 "traceId": trace_id,
                 "parentObservationId": parent_observation_id,
-                "fromStartTime": serialize_datetime(from_start_time) if from_start_time is not None else None,
-                "toStartTime": serialize_datetime(to_start_time) if to_start_time is not None else None,
+                "fromStartTime": serialize_datetime(from_start_time)
+                if from_start_time is not None
+                else None,
+                "toStartTime": serialize_datetime(to_start_time)
+                if to_start_time is not None
+                else None,
                 "version": version,
             },
             request_options=request_options,
@@ -182,13 +201,21 @@
             if _response.status_code == 400:
                 raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
             if _response.status_code == 401:
-                raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise UnauthorizedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 403:
-                raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise AccessDeniedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 405:
-                raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise MethodNotAllowedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 404:
-                raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise NotFoundError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -200,7 +227,10 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
     async def get(
-        self, observation_id: str, *, request_options: typing.Optional[RequestOptions] = None
+        self,
+        observation_id: str,
+        *,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> ObservationsView:
         """
         Get an observation
@@ -221,7 +251,7 @@
         --------
         import asyncio
 
-        from finto.client import AsyncFernLangfuse
+        from langfuse.api.client import AsyncFernLangfuse
 
         client = AsyncFernLangfuse(
             x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
@@ -242,7 +272,9 @@ async def main() -> None:
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/public/observations/{jsonable_encoder(observation_id)}", method="GET", request_options=request_options
+            f"api/public/observations/{jsonable_encoder(observation_id)}",
+            method="GET",
+            request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
@@ -250,13 +282,21 @@ async def main() -> None:
             if _response.status_code == 400:
                 raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
             if _response.status_code == 401:
-                raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise UnauthorizedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 403:
-                raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json()))  # type: ignore
+                raise AccessDeniedError(
+                    pydantic_v1.parse_obj_as(typing.Any, _response.json())
+                )  # type: ignore
             if _response.status_code == 405:
-                raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any,
_response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -319,7 +359,7 @@ async def get_many( import asyncio import datetime - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -363,8 +403,12 @@ async def main() -> None: "type": type, "traceId": trace_id, "parentObservationId": parent_observation_id, - "fromStartTime": serialize_datetime(from_start_time) if from_start_time is not None else None, - "toStartTime": serialize_datetime(to_start_time) if to_start_time is not None else None, + "fromStartTime": serialize_datetime(from_start_time) + if from_start_time is not None + else None, + "toStartTime": serialize_datetime(to_start_time) + if to_start_time is not None + else None, "version": version, }, request_options=request_options, @@ -375,13 +419,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/observations/types/observations.py b/langfuse/api/resources/observations/types/observations.py index 4cf452cb2..1534dc87e 100644 --- a/langfuse/api/resources/observations/types/observations.py +++ b/langfuse/api/resources/observations/types/observations.py @@ -14,15 +14,28 @@ class Observations(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": 
True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/observations/types/observations_views.py b/langfuse/api/resources/observations/types/observations_views.py index 5ff652f67..ed86b7d1e 100644 --- a/langfuse/api/resources/observations/types/observations_views.py +++ b/langfuse/api/resources/observations/types/observations_views.py @@ -14,15 +14,28 @@ class ObservationsViews(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/projects/client.py b/langfuse/api/resources/projects/client.py index 3b7fa4ccf..7f723e629 100644 --- a/langfuse/api/resources/projects/client.py +++ b/langfuse/api/resources/projects/client.py @@ -19,7 +19,9 @@ class ProjectsClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Projects: + def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> Projects: """ Get Project associated with API key @@ -34,7 +36,7 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Pro Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -55,13 +57,21 @@ def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Pro if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + 
pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -72,7 +82,9 @@ class AsyncProjectsClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -> Projects: + async def get( + self, *, request_options: typing.Optional[RequestOptions] = None + ) -> Projects: """ Get Project associated with API key @@ -89,7 +101,7 @@ async def get(self, *, request_options: typing.Optional[RequestOptions] = None) -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -116,13 +128,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/projects/types/project.py b/langfuse/api/resources/projects/types/project.py index 8c0f5fcbd..ad2b48cd2 100644 --- a/langfuse/api/resources/projects/types/project.py +++ b/langfuse/api/resources/projects/types/project.py @@ -12,15 +12,28 @@ class Project(pydantic_v1.BaseModel): name: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), 
super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/projects/types/projects.py b/langfuse/api/resources/projects/types/projects.py index 9e0be782d..c5eaabfbd 100644 --- a/langfuse/api/resources/projects/types/projects.py +++ b/langfuse/api/resources/projects/types/projects.py @@ -12,15 +12,28 @@ class Projects(pydantic_v1.BaseModel): data: typing.List[Project] def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/client.py b/langfuse/api/resources/prompts/client.py index 5a4e4f3a4..547eb3f6c 100644 --- a/langfuse/api/resources/prompts/client.py +++ b/langfuse/api/resources/prompts/client.py @@ -58,7 +58,7 @@ def get( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -86,13 +86,21 @@ def get( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -144,7 +152,7 @@ def list( -------- import datetime - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -177,31 +185,48 @@ def list( "tag": tag, "page": page, "limit": limit, - "fromUpdatedAt": serialize_datetime(from_updated_at) if from_updated_at 
is not None else None, - "toUpdatedAt": serialize_datetime(to_updated_at) if to_updated_at is not None else None, + "fromUpdatedAt": serialize_datetime(from_updated_at) + if from_updated_at is not None + else None, + "toUpdatedAt": serialize_datetime(to_updated_at) + if to_updated_at is not None + else None, }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptMetaListResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as( + PromptMetaListResponse, _response.json() + ) # type: ignore if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) def create( - self, *, request: CreatePromptRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreatePromptRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> Prompt: """ Create a new version for the prompt with the given `name` @@ -220,7 +245,7 @@ def create( Examples -------- from finto import ChatMessage, CreatePromptRequest_Chat - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -246,7 +271,11 @@ def create( ) """ _response = self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/v2/prompts", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -254,13 +283,21 @@ def create( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, 
_response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -304,7 +341,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -338,13 +375,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -397,7 +442,7 @@ async def list( import asyncio import datetime - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -436,31 +481,48 @@ async def main() -> None: "tag": tag, "page": page, "limit": limit, - "fromUpdatedAt": serialize_datetime(from_updated_at) if from_updated_at is not None else None, - "toUpdatedAt": serialize_datetime(to_updated_at) if to_updated_at is not None else None, + "fromUpdatedAt": serialize_datetime(from_updated_at) + if from_updated_at is not None + else None, + "toUpdatedAt": serialize_datetime(to_updated_at) + if to_updated_at is not None + else None, }, request_options=request_options, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PromptMetaListResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as( + PromptMetaListResponse, _response.json() + ) # type: ignore if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + 
raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) async def create( - self, *, request: CreatePromptRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreatePromptRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> Prompt: """ Create a new version for the prompt with the given `name` @@ -481,7 +543,7 @@ async def create( import asyncio from finto import ChatMessage, CreatePromptRequest_Chat - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -513,7 +575,11 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/public/v2/prompts", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/v2/prompts", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -521,13 +587,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/prompts/types/__init__.py b/langfuse/api/resources/prompts/types/__init__.py index 5acd561d1..cb5ba920c 100644 --- a/langfuse/api/resources/prompts/types/__init__.py +++ b/langfuse/api/resources/prompts/types/__init__.py @@ -4,7 +4,11 @@ from .chat_message import ChatMessage from .chat_prompt import ChatPrompt from .create_chat_prompt_request import CreateChatPromptRequest -from .create_prompt_request import CreatePromptRequest, CreatePromptRequest_Chat, CreatePromptRequest_Text +from .create_prompt_request import ( + CreatePromptRequest, + CreatePromptRequest_Chat, + CreatePromptRequest_Text, +) from .create_text_prompt_request import CreateTextPromptRequest from .prompt import Prompt, Prompt_Chat, Prompt_Text from .prompt_meta import PromptMeta diff --git 
a/langfuse/api/resources/prompts/types/base_prompt.py b/langfuse/api/resources/prompts/types/base_prompt.py index 2192f8b28..82934fbe6 100644 --- a/langfuse/api/resources/prompts/types/base_prompt.py +++ b/langfuse/api/resources/prompts/types/base_prompt.py @@ -22,15 +22,28 @@ class BasePrompt(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/chat_message.py b/langfuse/api/resources/prompts/types/chat_message.py index d97ea60a1..d009bc8cf 100644 --- a/langfuse/api/resources/prompts/types/chat_message.py +++ b/langfuse/api/resources/prompts/types/chat_message.py @@ -12,15 +12,28 @@ class ChatMessage(pydantic_v1.BaseModel): content: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/chat_prompt.py b/langfuse/api/resources/prompts/types/chat_prompt.py index 127b7ffa5..7699d288d 100644 --- a/langfuse/api/resources/prompts/types/chat_prompt.py +++ b/langfuse/api/resources/prompts/types/chat_prompt.py @@ -13,15 +13,28 @@ class ChatPrompt(BasePrompt): prompt: typing.List[ChatMessage] def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - 
kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/create_chat_prompt_request.py b/langfuse/api/resources/prompts/types/create_chat_prompt_request.py index 955a7d45f..a7c9f113b 100644 --- a/langfuse/api/resources/prompts/types/create_chat_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_chat_prompt_request.py @@ -23,15 +23,28 @@ class CreateChatPromptRequest(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/create_prompt_request.py b/langfuse/api/resources/prompts/types/create_prompt_request.py index 98cf9583b..9b23bd12d 100644 --- a/langfuse/api/resources/prompts/types/create_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_prompt_request.py @@ -19,15 +19,28 @@ class CreatePromptRequest_Chat(pydantic_v1.BaseModel): type: typing.Literal["chat"] = "chat" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -46,15 +59,28 @@ class 
CreatePromptRequest_Text(pydantic_v1.BaseModel): type: typing.Literal["text"] = "text" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/create_text_prompt_request.py b/langfuse/api/resources/prompts/types/create_text_prompt_request.py index 751b4492c..31b1c0eee 100644 --- a/langfuse/api/resources/prompts/types/create_text_prompt_request.py +++ b/langfuse/api/resources/prompts/types/create_text_prompt_request.py @@ -22,15 +22,28 @@ class CreateTextPromptRequest(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/prompt.py b/langfuse/api/resources/prompts/types/prompt.py index 0f96fe7c4..90ba3447f 100644 --- a/langfuse/api/resources/prompts/types/prompt.py +++ b/langfuse/api/resources/prompts/types/prompt.py @@ -20,15 +20,28 @@ class Prompt_Chat(pydantic_v1.BaseModel): type: typing.Literal["chat"] = "chat" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + 
"exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: @@ -48,15 +61,28 @@ class Prompt_Text(pydantic_v1.BaseModel): type: typing.Literal["text"] = "text" def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/prompt_meta.py b/langfuse/api/resources/prompts/types/prompt_meta.py index f0621e105..bbb028fb2 100644 --- a/langfuse/api/resources/prompts/types/prompt_meta.py +++ b/langfuse/api/resources/prompts/types/prompt_meta.py @@ -19,15 +19,28 @@ class PromptMeta(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/prompt_meta_list_response.py b/langfuse/api/resources/prompts/types/prompt_meta_list_response.py index 6ebf13c4a..d3dccf650 100644 --- a/langfuse/api/resources/prompts/types/prompt_meta_list_response.py +++ b/langfuse/api/resources/prompts/types/prompt_meta_list_response.py @@ -14,15 +14,28 @@ class PromptMetaListResponse(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": 
True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/prompts/types/text_prompt.py b/langfuse/api/resources/prompts/types/text_prompt.py index 6c3d1f3d2..e149ea322 100644 --- a/langfuse/api/resources/prompts/types/text_prompt.py +++ b/langfuse/api/resources/prompts/types/text_prompt.py @@ -12,15 +12,28 @@ class TextPrompt(BasePrompt): prompt: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/score/client.py b/langfuse/api/resources/score/client.py index 4408ae195..1d275f6b0 100644 --- a/langfuse/api/resources/score/client.py +++ b/langfuse/api/resources/score/client.py @@ -53,7 +53,7 @@ def create( Examples -------- from finto import CreateScoreRequest - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -184,7 +184,7 @@ def get( import datetime from finto import ScoreDataType, ScoreSource - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -286,7 +286,7 @@ def get_by_id( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -351,7 +351,7 @@ def delete( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -426,7 +426,7 @@ async def create( import asyncio from finto import CreateScoreRequest - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( 
x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -564,7 +564,7 @@ async def get( import datetime from finto import ScoreDataType, ScoreSource - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -674,7 +674,7 @@ async def get_by_id( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -747,7 +747,7 @@ async def delete( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", diff --git a/langfuse/api/resources/score/types/create_score_request.py b/langfuse/api/resources/score/types/create_score_request.py index 6a769d1d2..8e256e300 100644 --- a/langfuse/api/resources/score/types/create_score_request.py +++ b/langfuse/api/resources/score/types/create_score_request.py @@ -30,9 +30,13 @@ class CreateScoreRequest(pydantic_v1.BaseModel): The value of the score. Must be passed as string for categorical scores, and numeric for boolean and numeric scores. Boolean score values must equal either 1 or 0 (true or false) """ - observation_id: typing.Optional[str] = pydantic_v1.Field(alias="observationId", default=None) + observation_id: typing.Optional[str] = pydantic_v1.Field( + alias="observationId", default=None + ) comment: typing.Optional[str] = None - data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field(alias="dataType", default=None) + data_type: typing.Optional[ScoreDataType] = pydantic_v1.Field( + alias="dataType", default=None + ) """ The data type of the score. When passing a configId this field is inferred. Otherwise, this field must be passed or will default to numeric. 
""" @@ -43,15 +47,28 @@ class CreateScoreRequest(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/score/types/create_score_response.py b/langfuse/api/resources/score/types/create_score_response.py index db7fba163..a8c90fce2 100644 --- a/langfuse/api/resources/score/types/create_score_response.py +++ b/langfuse/api/resources/score/types/create_score_response.py @@ -14,15 +14,28 @@ class CreateScoreResponse(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/score_configs/client.py b/langfuse/api/resources/score_configs/client.py index a79f89768..4323097bf 100644 --- a/langfuse/api/resources/score_configs/client.py +++ b/langfuse/api/resources/score_configs/client.py @@ -26,7 +26,10 @@ def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper def create( - self, *, request: CreateScoreConfigRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> ScoreConfig: """ Create a score configuration (config). 
Score configs are used to define the structure of scores @@ -45,7 +48,7 @@ def create( Examples -------- from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -72,7 +75,11 @@ def create( ) """ _response = self._client_wrapper.httpx_client.request( - "api/public/score-configs", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/score-configs", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -80,13 +87,21 @@ def create( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -119,7 +134,7 @@ def get( Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -146,19 +161,29 @@ def get( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get_by_id(self, config_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> ScoreConfig: + def get_by_id( + self, config_id: str, *, 
request_options: typing.Optional[RequestOptions] = None + ) -> ScoreConfig: """ Get a score config @@ -176,7 +201,7 @@ def get_by_id(self, config_id: str, *, request_options: typing.Optional[RequestO Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -191,7 +216,9 @@ def get_by_id(self, config_id: str, *, request_options: typing.Optional[RequestO ) """ _response = self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", method="GET", request_options=request_options + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -199,13 +226,21 @@ def get_by_id(self, config_id: str, *, request_options: typing.Optional[RequestO if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -217,7 +252,10 @@ def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper async def create( - self, *, request: CreateScoreConfigRequest, request_options: typing.Optional[RequestOptions] = None + self, + *, + request: CreateScoreConfigRequest, + request_options: typing.Optional[RequestOptions] = None, ) -> ScoreConfig: """ Create a score configuration (config). 
Score configs are used to define the structure of scores @@ -238,7 +276,7 @@ async def create( import asyncio from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -271,7 +309,11 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "api/public/score-configs", method="POST", json=request, request_options=request_options, omit=OMIT + "api/public/score-configs", + method="POST", + json=request, + request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: @@ -279,13 +321,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -320,7 +370,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -353,13 +403,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -387,7 +445,7 @@ async def get_by_id( -------- import asyncio - from finto.client import AsyncFernLangfuse + from 
langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -408,7 +466,9 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/public/score-configs/{jsonable_encoder(config_id)}", method="GET", request_options=request_options + f"api/public/score-configs/{jsonable_encoder(config_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -416,13 +476,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/score_configs/types/create_score_config_request.py b/langfuse/api/resources/score_configs/types/create_score_config_request.py index bec260d3b..e136af157 100644 --- a/langfuse/api/resources/score_configs/types/create_score_config_request.py +++ b/langfuse/api/resources/score_configs/types/create_score_config_request.py @@ -12,17 +12,23 @@ class CreateScoreConfigRequest(pydantic_v1.BaseModel): name: str data_type: ScoreDataType = pydantic_v1.Field(alias="dataType") - categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field(default=None) + categories: typing.Optional[typing.List[ConfigCategory]] = pydantic_v1.Field( + default=None + ) """ Configure custom categories for categorical scores. Pass a list of objects with `label` and `value` properties. Categories are autogenerated for boolean configs and cannot be passed """ - min_value: typing.Optional[float] = pydantic_v1.Field(alias="minValue", default=None) + min_value: typing.Optional[float] = pydantic_v1.Field( + alias="minValue", default=None + ) """ Configure a minimum value for numerical scores. If not set, the minimum value defaults to -∞ """ - max_value: typing.Optional[float] = pydantic_v1.Field(alias="maxValue", default=None) + max_value: typing.Optional[float] = pydantic_v1.Field( + alias="maxValue", default=None + ) """ Configure a maximum value for numerical scores. 
If not set, the maximum value defaults to +∞ """ @@ -33,15 +39,28 @@ class CreateScoreConfigRequest(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/score_configs/types/score_configs.py b/langfuse/api/resources/score_configs/types/score_configs.py index 6efb652c1..fc84e28a3 100644 --- a/langfuse/api/resources/score_configs/types/score_configs.py +++ b/langfuse/api/resources/score_configs/types/score_configs.py @@ -14,15 +14,28 @@ class ScoreConfigs(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/sessions/client.py b/langfuse/api/resources/sessions/client.py index be861f466..f3814a26d 100644 --- a/langfuse/api/resources/sessions/client.py +++ b/langfuse/api/resources/sessions/client.py @@ -60,7 +60,7 @@ def list( -------- import datetime - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -87,8 +87,12 @@ def list( params={ "page": page, "limit": limit, - "fromTimestamp": serialize_datetime(from_timestamp) if from_timestamp is not None else None, - "toTimestamp": serialize_datetime(to_timestamp) if to_timestamp is not None else None, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, }, request_options=request_options, ) @@ -98,19 +102,32 @@ def list( if 
_response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get(self, session_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> SessionWithTraces: + def get( + self, + session_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> SessionWithTraces: """ Get a session. Please note that `traces` on this endpoint are not paginated, if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` @@ -128,7 +145,7 @@ def get(self, session_id: str, *, request_options: typing.Optional[RequestOption Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -143,7 +160,9 @@ def get(self, session_id: str, *, request_options: typing.Optional[RequestOption ) """ _response = self._client_wrapper.httpx_client.request( - f"api/public/sessions/{jsonable_encoder(session_id)}", method="GET", request_options=request_options + f"api/public/sessions/{jsonable_encoder(session_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -151,13 +170,21 @@ def get(self, session_id: str, *, request_options: typing.Optional[RequestOption if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -206,7 +233,7 @@ async def list( import asyncio import datetime - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -239,8 +266,12 @@ async def main() -> None: params={ "page": page, "limit": limit, - "fromTimestamp": serialize_datetime(from_timestamp) if from_timestamp is not None else None, - "toTimestamp": serialize_datetime(to_timestamp) if to_timestamp is not None else None, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, }, request_options=request_options, ) @@ -250,20 +281,31 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) async def get( - self, session_id: str, *, request_options: typing.Optional[RequestOptions] = None + self, + session_id: str, + *, + request_options: typing.Optional[RequestOptions] = None, ) -> SessionWithTraces: """ Get a session. 
Please note that `traces` on this endpoint are not paginated, if you plan to fetch large sessions, consider `GET /api/public/traces?sessionId=` @@ -284,7 +326,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -305,7 +347,9 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/public/sessions/{jsonable_encoder(session_id)}", method="GET", request_options=request_options + f"api/public/sessions/{jsonable_encoder(session_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -313,13 +357,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/sessions/types/paginated_sessions.py b/langfuse/api/resources/sessions/types/paginated_sessions.py index c220490b9..5dd9fb497 100644 --- a/langfuse/api/resources/sessions/types/paginated_sessions.py +++ b/langfuse/api/resources/sessions/types/paginated_sessions.py @@ -14,15 +14,28 @@ class PaginatedSessions(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/trace/client.py b/langfuse/api/resources/trace/client.py index 82565fab7..12c984828 100644 --- 
a/langfuse/api/resources/trace/client.py +++ b/langfuse/api/resources/trace/client.py @@ -23,7 +23,9 @@ class TraceClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def get(self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> TraceWithFullDetails: + def get( + self, trace_id: str, *, request_options: typing.Optional[RequestOptions] = None + ) -> TraceWithFullDetails: """ Get a specific trace @@ -41,7 +43,7 @@ def get(self, trace_id: str, *, request_options: typing.Optional[RequestOptions] Examples -------- - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -56,7 +58,9 @@ def get(self, trace_id: str, *, request_options: typing.Optional[RequestOptions] ) """ _response = self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", method="GET", request_options=request_options + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -64,13 +68,21 @@ def get(self, trace_id: str, *, request_options: typing.Optional[RequestOptions] if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -138,7 +150,7 @@ def list( -------- import datetime - from finto.client import FernLangfuse + from langfuse.api.client import FernLangfuse client = FernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -175,8 +187,12 @@ def list( "userId": user_id, "name": name, "sessionId": session_id, - "fromTimestamp": serialize_datetime(from_timestamp) if from_timestamp is not None else None, - "toTimestamp": serialize_datetime(to_timestamp) if to_timestamp is not None else None, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, "orderBy": order_by, "tags": tags, "version": version, @@ -190,13 +206,21 @@ def list( if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: 
ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -229,7 +253,7 @@ async def get( -------- import asyncio - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -250,7 +274,9 @@ async def main() -> None: asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"api/public/traces/{jsonable_encoder(trace_id)}", method="GET", request_options=request_options + f"api/public/traces/{jsonable_encoder(trace_id)}", + method="GET", + request_options=request_options, ) try: if 200 <= _response.status_code < 300: @@ -258,13 +284,21 @@ async def main() -> None: if _response.status_code == 400: raise Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -333,7 +367,7 @@ async def list( import asyncio import datetime - from finto.client import AsyncFernLangfuse + from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME", @@ -376,8 +410,12 @@ async def main() -> None: "userId": user_id, "name": name, "sessionId": session_id, - "fromTimestamp": serialize_datetime(from_timestamp) if from_timestamp is not None else None, - "toTimestamp": serialize_datetime(to_timestamp) if to_timestamp is not None else None, + "fromTimestamp": serialize_datetime(from_timestamp) + if from_timestamp is not None + else None, + "toTimestamp": serialize_datetime(to_timestamp) + if to_timestamp is not None + else None, "orderBy": order_by, "tags": tags, "version": version, @@ -391,13 +429,21 @@ async def main() -> None: if _response.status_code == 400: raise 
Error(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore if _response.status_code == 401: - raise UnauthorizedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise UnauthorizedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 403: - raise AccessDeniedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise AccessDeniedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 405: - raise MethodNotAllowedError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise MethodNotAllowedError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore if _response.status_code == 404: - raise NotFoundError(pydantic_v1.parse_obj_as(typing.Any, _response.json())) # type: ignore + raise NotFoundError( + pydantic_v1.parse_obj_as(typing.Any, _response.json()) + ) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/langfuse/api/resources/trace/types/sort.py b/langfuse/api/resources/trace/types/sort.py index b1e6bb79e..76a5045b6 100644 --- a/langfuse/api/resources/trace/types/sort.py +++ b/langfuse/api/resources/trace/types/sort.py @@ -11,15 +11,28 @@ class Sort(pydantic_v1.BaseModel): id: str def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/trace/types/traces.py b/langfuse/api/resources/trace/types/traces.py index e31dcce71..09f58978f 100644 --- a/langfuse/api/resources/trace/types/traces.py +++ b/langfuse/api/resources/trace/types/traces.py @@ -14,15 +14,28 @@ class Traces(pydantic_v1.BaseModel): meta: MetaResponse def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + 
**kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: diff --git a/langfuse/api/resources/utils/resources/pagination/types/meta_response.py b/langfuse/api/resources/utils/resources/pagination/types/meta_response.py index 8918812de..2d082c68f 100644 --- a/langfuse/api/resources/utils/resources/pagination/types/meta_response.py +++ b/langfuse/api/resources/utils/resources/pagination/types/meta_response.py @@ -29,15 +29,28 @@ class MetaResponse(pydantic_v1.BaseModel): """ def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } return super().json(**kwargs_with_defaults) def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), ) class Config: From d5a2b80517f395109e8fa5341d819b2f298f6ab4 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Tue, 26 Nov 2024 19:34:09 +0100 Subject: [PATCH 02/15] update ruff toml --- ci.ruff.toml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ci.ruff.toml b/ci.ruff.toml index 184e12e6f..fe975a8f1 100644 --- a/ci.ruff.toml +++ b/ci.ruff.toml @@ -1,6 +1,4 @@ # This is the Ruff config used in CI. # In development, ruff.toml is used instead. 
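A note on the patch 01 hunks above: every reflowed block maps a non-2xx status code to a typed exception (`Error`, `UnauthorizedError`, `AccessDeniedError`, `MethodNotAllowedError`, `NotFoundError`) before falling back to `ApiError`. A minimal sketch of what that buys calling code follows; the import paths are assumptions inferred from this repo's layout (`langfuse/api/core/api_error.py`, `langfuse/api/resources/commons/errors/`), since the patch shows only the raise sites, not the canonical imports.

```python
# Sketch only: import paths below are assumptions, not confirmed by the patch.
from langfuse.api.client import FernLangfuse
from langfuse.api.core.api_error import ApiError
from langfuse.api.resources.commons.errors import (
    AccessDeniedError,
    NotFoundError,
    UnauthorizedError,
)

client = FernLangfuse(
    x_langfuse_sdk_name="YOUR_X_LANGFUSE_SDK_NAME",
    x_langfuse_sdk_version="YOUR_X_LANGFUSE_SDK_VERSION",
    x_langfuse_public_key="YOUR_X_LANGFUSE_PUBLIC_KEY",
    username="YOUR_USERNAME",
    password="YOUR_PASSWORD",
    base_url="https://yourhost.com/path/to/api",
)

try:
    trace = client.trace.get("traceId")
except UnauthorizedError:
    ...  # 401: credentials were rejected
except AccessDeniedError:
    ...  # 403: key lacks access to this resource
except NotFoundError:
    ...  # 404: no trace with this id
except ApiError as err:
    # Any other non-2xx response; in Fern-generated clients the typed errors
    # above are usually ApiError subclasses, so this also catches 400 and 405
    # if they are not handled explicitly.
    print(err.status_code, err.body)
```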
-target-version = 'py38' -[lint] -exclude = ["langfuse/api/**/*.py"] \ No newline at end of file +target-version = 'py38' \ No newline at end of file From 8762399daa1407cf0d6eb6431f9ee6d0fb37cac6 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 16:26:35 +0100 Subject: [PATCH 03/15] feat(media): anthropic and vertex format support --- langfuse/_task_manager/media_manager.py | 42 ++ poetry.lock | 542 ++++++++++++------------ pyproject.toml | 16 +- 3 files changed, 319 insertions(+), 281 deletions(-) diff --git a/langfuse/_task_manager/media_manager.py b/langfuse/_task_manager/media_manager.py index 498f82cd6..9400d2664 100644 --- a/langfuse/_task_manager/media_manager.py +++ b/langfuse/_task_manager/media_manager.py @@ -127,6 +127,48 @@ def _process_data_recursively(data: Any, level: int): return media + # Anthropic + if ( + isinstance(data, dict) + and "type" in data + and data["type"] == "base64" + and "media_type" in data + and "data" in data + ): + media = LangfuseMedia( + base64_data_uri=f"data:{data['media_type']};base64," + data["data"], + ) + + self._process_media( + media=media, + trace_id=trace_id, + observation_id=observation_id, + field=field, + ) + + return media + + # Vertex + if ( + isinstance(data, dict) + and "type" in data + and data["type"] == "media" + and "mime_type" in data + and "data" in data + ): + media = LangfuseMedia( + base64_data_uri=f"data:{data['mime_type']};base64," + data["data"], + ) + + self._process_media( + media=media, + trace_id=trace_id, + observation_id=observation_id, + field=field, + ) + + return media + if isinstance(data, list): return [_process_data_recursively(item, level + 1) for item in data] diff --git a/poetry.lock b/poetry.lock index 0460376a2..b3d262f5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -148,28 +148,27 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - [[package]] name = "anthropic" -version = "0.28.1" +version = "0.39.0" description = "The official Python library for the anthropic API" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anthropic-0.28.1-py3-none-any.whl", hash = "sha256:c4773ae2b42951a6b747bed328b0d03fa412938c95c3a8b9dce70d69badb710b"}, - {file = "anthropic-0.28.1.tar.gz", hash = "sha256:e3a6d595bde241141bdc685edc393903ec95c7fa378013a71186cfb8f32b1793"}, + {file = "anthropic-0.39.0-py3-none-any.whl", hash = "sha256:ea17093ae0ce0e1768b0c46501d6086b5bcd74ff39d68cd2d6396374e9de7c09"}, + {file = "anthropic-0.39.0.tar.gz", hash = "sha256:94671cc80765f9ce693f76d63a97ee9bef4c2d6063c044e983d21a2e262f63ba"}, ] [package.dependencies] anyio = ">=3.5.0,<5" +boto3 = {version = ">=1.28.57", optional = true, markers = "extra == \"bedrock\""} +botocore = {version = ">=1.31.57", optional = true, markers = "extra == \"bedrock\""} distro = ">=1.7.0,<2" +google-auth = {version = ">=2,<3", optional = true, markers = "extra == \"vertex\""} httpx = ">=0.23.0,<1" jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" -tokenizers = ">=0.13.0" typing-extensions = ">=4.7,<5" [package.extras] @@ -215,21 +214,6 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "astunparse" -version = "1.6.3" -description = "An AST 
unparser for Python" -optional = false -python-versions = "*" -files = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] - -[package.dependencies] -six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" - [[package]] name = "async-timeout" version = "4.0.3" @@ -615,7 +599,6 @@ bcrypt = ">=4.0.1" build = ">=1.0.3" chroma-hnswlib = "0.7.6" fastapi = ">=0.95.2" -graphlib-backport = {version = ">=1.0.3", markers = "python_version < \"3.9\""} grpcio = ">=1.58.0" httpx = ">=0.27.0" importlib-resources = "*" @@ -831,6 +814,20 @@ files = [ {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, ] +[[package]] +name = "eval-type-backport" +version = "0.2.0" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." +optional = false +python-versions = ">=3.8" +files = [ + {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, + {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.1" @@ -941,6 +938,17 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] +[[package]] +name = "filetype" +version = "1.2.0" +description = "Infer file type and MIME type of any file/buffer. No external dependencies." 
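Returning to patch 03's `media_manager.py` hunk above (before the lockfile diff): the two new branches match Anthropic-style and Vertex-style inline media dicts and normalize both into the same base64 data URI before upload. A minimal sketch of the shapes involved, with the dict keys taken directly from the hunk; the sample payload values are placeholders and the `LangfuseMedia` import path is an assumption.

```python
from langfuse.media import LangfuseMedia  # assumed import path

# Anthropic-style block: type == "base64", MIME type under "media_type"
anthropic_part = {
    "type": "base64",
    "media_type": "image/png",
    "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAAB",  # placeholder, not a real image
}

# Vertex-style block: type == "media", MIME type under "mime_type"
vertex_part = {
    "type": "media",
    "mime_type": "image/png",
    "data": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAAB",  # placeholder, not a real image
}

# Both branches in the hunk build the same data-URI form:
for part in (anthropic_part, vertex_part):
    mime = part.get("media_type") or part.get("mime_type")
    media = LangfuseMedia(base64_data_uri=f"data:{mime};base64," + part["data"])
```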
+optional = false +python-versions = "*" +files = [ + {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, + {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, +] + [[package]] name = "flatbuffers" version = "24.3.25" @@ -1244,13 +1252,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-cloud-storage" -version = "2.16.0" +version = "2.18.1" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, - {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, + {file = "google_cloud_storage-2.18.1-py2.py3-none-any.whl", hash = "sha256:9d8db6bde3a979cca7150511cd0e4cb363e5f69d31259d890ba1124fa109418c"}, + {file = "google_cloud_storage-2.18.1.tar.gz", hash = "sha256:6707a6f30a05aee36faca81296419ca2907ac750af1c0457f278bc9a6fb219ad"}, ] [package.dependencies] @@ -1262,7 +1270,8 @@ google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" [package.extras] -protobuf = ["protobuf (<5.0.0dev)"] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] [[package]] name = "google-crc32c" @@ -1393,17 +1402,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] -[[package]] -name = "graphlib-backport" -version = "1.1.0" -description = "Backport of the Python 3.9 graphlib module for Python 3.6+" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "graphlib_backport-1.1.0-py3-none-any.whl", hash = "sha256:eccacf9f2126cdf89ce32a6018c88e1ecd3e4898a07568add6e1907a439055ba"}, - {file = "graphlib_backport-1.1.0.tar.gz", hash = "sha256:00a7888b21e5393064a133209cb5d3b3ef0a2096cf023914c9d778dff5644125"}, -] - [[package]] name = "greenlet" version = "3.0.3" @@ -1982,215 +1980,225 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain" -version = "0.2.16" +version = "0.3.8" description = "Building applications with LLMs through composability" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain-0.2.16-py3-none-any.whl", hash = "sha256:8f59ee8b45f268df4b924ea3b9c63e49286efa756d16b3f6a9de5c6e502c36e1"}, - {file = "langchain-0.2.16.tar.gz", hash = "sha256:ffb426a76a703b73ac69abad77cd16eaf03dda76b42cff55572f592d74944166"}, + {file = "langchain-0.3.8-py3-none-any.whl", hash = "sha256:5cae404da30bf6730639a9ad85d3bf4fbb350c0038e5a0b81890e5883b4cff5c"}, + {file = "langchain-0.3.8.tar.gz", hash = "sha256:1cbbf7379b5b2f11b751fc527016f29ee5fe8a2697d166b52b7b5c63fc9702f9"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.2.38,<0.3.0" -langchain-text-splitters = ">=0.2.0,<0.3.0" +langchain-core = ">=0.3.21,<0.4.0" +langchain-text-splitters = ">=0.3.0,<0.4.0" langsmith = ">=0.1.17,<0.2.0" numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.12\""}, + {version = ">=1.26.2,<2", markers = "python_version >= 
\"3.12\""}, ] -pydantic = ">=1,<3" +pydantic = ">=2.7.4,<3.0.0" PyYAML = ">=5.3" requests = ">=2,<3" SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" [[package]] name = "langchain-anthropic" -version = "0.1.13" +version = "0.3.0" description = "An integration package connecting AnthropicMessages and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_anthropic-0.1.13-py3-none-any.whl", hash = "sha256:121f6f480da7685c239573d98322adb94fe486d40651ac341637f65da36881de"}, - {file = "langchain_anthropic-0.1.13.tar.gz", hash = "sha256:32e7ac51e1874c47e1a20493e75f5bfc88b0ffeaf5f1aed6091547e1ae44bb85"}, + {file = "langchain_anthropic-0.3.0-py3-none-any.whl", hash = "sha256:96b74a9adfcc092cc2ae137d4189ca50e8f5ad9635618024f7c98d8f9fc1076a"}, + {file = "langchain_anthropic-0.3.0.tar.gz", hash = "sha256:f9b5cbdbf2d5b3432f78f056e474efb10a2c1e37f9a471d3aceb50a0d9f945df"}, ] [package.dependencies] -anthropic = ">=0.26.0,<1" +anthropic = ">=0.39.0,<1" defusedxml = ">=0.7.1,<0.8.0" -langchain-core = ">=0.1.43,<0.3" +langchain-core = ">=0.3.17,<0.4.0" +pydantic = ">=2.7.4,<3.0.0" [[package]] name = "langchain-aws" -version = "0.1.17" +version = "0.2.7" description = "An integration package connecting AWS and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_aws-0.1.17-py3-none-any.whl", hash = "sha256:192a94eb3fa510ed7850cb7ab599fef47776ca872545dd37a0d7dc6a2887ae46"}, - {file = "langchain_aws-0.1.17.tar.gz", hash = "sha256:829cbcd8b3ffde6c2d7afd423da51c4bd03423db29b25ae7bc224979876d853e"}, + {file = "langchain_aws-0.2.7-py3-none-any.whl", hash = "sha256:f60f5e76ce9b3c175d569d40a7a8e113d3aa9feb8e88d013e6054da36501afef"}, + {file = "langchain_aws-0.2.7.tar.gz", hash = "sha256:5abf12d7cad5164363008612f906bf27cb85b8f82befe71dbdf27afeda4548eb"}, ] [package.dependencies] -boto3 = ">=1.34.131,<1.35.0" -langchain-core = ">=0.2.33,<0.3" +boto3 = ">=1.34.131" +langchain-core = ">=0.3.15,<0.4" numpy = [ {version = ">=1,<2", markers = "python_version < \"3.12\""}, {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, ] +pydantic = ">=2,<3" [[package]] name = "langchain-community" -version = "0.2.16" +version = "0.3.8" description = "Community contributed LangChain integrations." 
optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_community-0.2.16-py3-none-any.whl", hash = "sha256:115e1419c176091d4e00240cb5a38612a249e70f213516b6cacae61a8794a868"}, - {file = "langchain_community-0.2.16.tar.gz", hash = "sha256:ab416b793a7aed1fa46ebaffd29993296b02286a99819eabc43be2ea8e41ae78"}, + {file = "langchain_community-0.3.8-py3-none-any.whl", hash = "sha256:191b3fcdf6b2e92934f4daeba5f5d0ac684b03772b15ef9d3c3fbcd86bd6cd64"}, + {file = "langchain_community-0.3.8.tar.gz", hash = "sha256:f7575a717d95208d0e969c090104622783c6a38a5527657aa5aa38776fadc835"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain = ">=0.2.16,<0.3.0" -langchain-core = ">=0.2.38,<0.3.0" -langsmith = ">=0.1.0,<0.2.0" +httpx-sse = ">=0.4.0,<0.5.0" +langchain = ">=0.3.8,<0.4.0" +langchain-core = ">=0.3.21,<0.4.0" +langsmith = ">=0.1.125,<0.2.0" numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.12\""}, + {version = ">=1.26.2,<2", markers = "python_version >= \"3.12\""}, ] +pydantic-settings = ">=2.4.0,<3.0.0" PyYAML = ">=5.3" requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" +SQLAlchemy = ">=1.4,<2.0.36" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" [[package]] name = "langchain-core" -version = "0.2.38" +version = "0.3.21" description = "Building applications with LLMs through composability" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_core-0.2.38-py3-none-any.whl", hash = "sha256:8a5729bc7e68b4af089af20eff44fe4e7ca21d0e0c87ec21cef7621981fd1a4a"}, - {file = "langchain_core-0.2.38.tar.gz", hash = "sha256:eb69dbedd344f2ee1f15bcea6c71a05884b867588fadc42d04632e727c1238f3"}, + {file = "langchain_core-0.3.21-py3-none-any.whl", hash = "sha256:7e723dff80946a1198976c6876fea8326dc82566ef9bcb5f8d9188f738733665"}, + {file = "langchain_core-0.3.21.tar.gz", hash = "sha256:561b52b258ffa50a9fb11d7a1940ebfd915654d1ec95b35e81dfd5ee84143411"}, ] [package.dependencies] jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.75,<0.2.0" +langsmith = ">=0.1.125,<0.2.0" packaging = ">=23.2,<25" pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, + {version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""}, {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, ] PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" typing-extensions = ">=4.7" [[package]] name = "langchain-google-vertexai" -version = "1.0.4" +version = "2.0.3" description = "An integration package connecting Google VertexAI and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_google_vertexai-1.0.4-py3-none-any.whl", hash = "sha256:f9d217df2d5cfafb2e551ddd5f1c43611222f542ee0df0cc3b5faed82e657ee3"}, - {file = "langchain_google_vertexai-1.0.4.tar.gz", hash = "sha256:bb2d2e93cc2896b9bdc96789c2df247f6392184dffc0c3dddc06889f2b530465"}, + {file = "langchain_google_vertexai-2.0.3-py3-none-any.whl", hash = "sha256:43835bed9f03f6969b3f8b73356c44d7898d209c69bd5124b0a80c35d8cebdd0"}, + {file = "langchain_google_vertexai-2.0.3.tar.gz", hash = "sha256:6f71061b578c0cd44fd5a147b61f66a1486bfc8b1dc69b4ac31e0f3c470d90d8"}, ] 
[package.dependencies] -google-cloud-aiplatform = ">=1.47.0,<2.0.0" -google-cloud-storage = ">=2.14.0,<3.0.0" -langchain-core = ">=0.1.42,<0.3" +google-cloud-aiplatform = ">=1.56.0,<2.0.0" +google-cloud-storage = ">=2.17.0,<3.0.0" +httpx = ">=0.27.0,<0.28.0" +httpx-sse = ">=0.4.0,<0.5.0" +langchain-core = ">=0.3.0,<0.4" +pydantic = ">=2,<3" [package.extras] -anthropic = ["anthropic[vertexai] (>=0.23.0,<1)"] +anthropic = ["anthropic[vertexai] (>=0.30.0,<1)"] +mistral = ["langchain-mistralai (>=0.2.0,<1)"] [[package]] name = "langchain-groq" -version = "0.1.5" +version = "0.2.1" description = "An integration package connecting Groq and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_groq-0.1.5-py3-none-any.whl", hash = "sha256:f13fbec6143047a352ff2bbd2241e4b4b9559c6f799a26e6da5f2b0d6e02bff5"}, - {file = "langchain_groq-0.1.5.tar.gz", hash = "sha256:af166fd30c60006dba4345bc9a59edfa2745edcb5b3e0bd957abd3d09416bbc5"}, + {file = "langchain_groq-0.2.1-py3-none-any.whl", hash = "sha256:98d282fd9d7d99b0f55de0a1daea2d5d350ef697e3cb5e97de06aeba4eca8679"}, + {file = "langchain_groq-0.2.1.tar.gz", hash = "sha256:a59c81d1a15dc97abf4fdb4c2589f98109313eda147e6b378829222d4d929792"}, ] [package.dependencies] groq = ">=0.4.1,<1" -langchain-core = ">=0.1.45,<0.3" +langchain-core = ">=0.3.15,<0.4.0" [[package]] name = "langchain-mistralai" -version = "0.1.7" +version = "0.2.2" description = "An integration package connecting Mistral and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_mistralai-0.1.7-py3-none-any.whl", hash = "sha256:4ab08ebafc5398767dbc4d6d371f4f2bc0974b01b02cb0ee71d351871a370479"}, - {file = "langchain_mistralai-0.1.7.tar.gz", hash = "sha256:44d3fb15ab10b5a04a2cc544d1292af3f884288a59de08a8d7bdd74ce50ddf75"}, + {file = "langchain_mistralai-0.2.2-py3-none-any.whl", hash = "sha256:2245b3590ba2f8e2f24108d6753f85238c154ca86851499c8431af75b9f7e07d"}, + {file = "langchain_mistralai-0.2.2.tar.gz", hash = "sha256:5ff8d318f7c811a49feba7d15e4dad003957e33b12cdcc733839eae1f04bab9d"}, ] [package.dependencies] httpx = ">=0.25.2,<1" httpx-sse = ">=0.3.1,<1" -langchain-core = ">=0.1.46,<0.3" +langchain-core = ">=0.3.15,<0.4.0" +pydantic = ">=2,<3" tokenizers = ">=0.15.1,<1" [[package]] name = "langchain-openai" -version = "0.1.7" +version = "0.2.10" description = "An integration package connecting OpenAI and LangChain" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"}, - {file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"}, + {file = "langchain_openai-0.2.10-py3-none-any.whl", hash = "sha256:b06a14d99ab81343f23ced83de21fc1cfcd79c9fb96fdbd9070ad018038c5602"}, + {file = "langchain_openai-0.2.10.tar.gz", hash = "sha256:878200a84d80353fc47720631bf591157e56b6a3923e5f7b13c7f61c82999b50"}, ] [package.dependencies] -langchain-core = ">=0.1.46,<0.3" -openai = ">=1.24.0,<2.0.0" +langchain-core = ">=0.3.21,<0.4.0" +openai = ">=1.54.0,<2.0.0" tiktoken = ">=0.7,<1" [[package]] name = "langchain-text-splitters" -version = "0.2.2" +version = "0.3.2" description = "LangChain text splitting utilities" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = 
"langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"}, - {file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"}, + {file = "langchain_text_splitters-0.3.2-py3-none-any.whl", hash = "sha256:0db28c53f41d1bc024cdb3b1646741f6d46d5371e90f31e7e7c9fbe75d01c726"}, + {file = "langchain_text_splitters-0.3.2.tar.gz", hash = "sha256:81e6515d9901d6dd8e35fb31ccd4f30f76d44b771890c789dc835ef9f16204df"}, ] [package.dependencies] -langchain-core = ">=0.2.10,<0.3.0" +langchain-core = ">=0.3.15,<0.4.0" [[package]] name = "langsmith" -version = "0.1.106" +version = "0.1.146" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.106-py3-none-any.whl", hash = "sha256:a418161c98de72ee2c6eea6667c6217814b67db4b9a3a024788013384216ff35"}, - {file = "langsmith-0.1.106.tar.gz", hash = "sha256:64a890a05640d64692f5515ebb444b0457332a9cf9e7605c4651de6737a7d3a0"}, + {file = "langsmith-0.1.146-py3-none-any.whl", hash = "sha256:9d062222f1a32c9b047dab0149b24958f988989cd8d4a5f9139ff959a51e59d8"}, + {file = "langsmith-0.1.146.tar.gz", hash = "sha256:ead8b0b9d5b6cd3ac42937ec48bdf09d4afe7ca1bba22dc05eb65591a18106f8"}, ] [package.dependencies] httpx = ">=0.23.0,<1" -orjson = ">=3.9.14,<4.0.0" +orjson = {version = ">=3.9.14,<4.0.0", markers = "platform_python_implementation != \"PyPy\""} pydantic = [ {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, ] requests = ">=2,<3" +requests-toolbelt = ">=1.0.0,<2.0.0" [[package]] name = "lark" @@ -2211,13 +2219,13 @@ regex = ["regex"] [[package]] name = "llama-cloud" -version = "0.0.17" +version = "0.1.5" description = "" optional = true python-versions = "<4,>=3.8" files = [ - {file = "llama_cloud-0.0.17-py3-none-any.whl", hash = "sha256:da898dcc98de84f29886f979b1ccae1e96d9f73d1b0e07146a51d315b161e45c"}, - {file = "llama_cloud-0.0.17.tar.gz", hash = "sha256:7fd6857bbbb91937535572ccb48daa38189f55cdd7411185d8083dab29ba1299"}, + {file = "llama_cloud-0.1.5-py3-none-any.whl", hash = "sha256:15605022520d04bd6ef6a46c0cbde833f301d652286d34fca02b4c44e2a7a2aa"}, + {file = "llama_cloud-0.1.5.tar.gz", hash = "sha256:8ce1db36754a6a46c8511561dbc040a2e89ba4ca1cf4edfb6ce382a5240f6cb6"}, ] [package.dependencies] @@ -2226,70 +2234,71 @@ pydantic = ">=1.10" [[package]] name = "llama-index" -version = "0.10.68" +version = "0.12.2" description = "Interface between LLMs and your data" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index-0.10.68-py3-none-any.whl", hash = "sha256:e2a1919707260c07f9c10a239a576a399e8c50b2ddcd8d0ad8b4ffb4ad5c5c60"}, - {file = "llama_index-0.10.68.tar.gz", hash = "sha256:89f79e7ece951f40d753ee6e5a2273ca3728d800cbb2213f65b7e1d58abff0e5"}, + {file = "llama_index-0.12.2-py3-none-any.whl", hash = "sha256:971528db7889f5a0d15fd9039a403bc6f92bfafc2d4e1bab2d166657728ae94c"}, + {file = "llama_index-0.12.2.tar.gz", hash = "sha256:da9738dd666e219689839c7451c9df8bed72e6510a6f7d6f7d9907bfdd4588eb"}, ] [package.dependencies] -llama-index-agent-openai = ">=0.1.4,<0.3.0" -llama-index-cli = ">=0.1.2,<0.2.0" -llama-index-core = ">=0.10.68,<0.11.0" -llama-index-embeddings-openai = ">=0.1.5,<0.2.0" -llama-index-indices-managed-llama-cloud = ">=0.2.0" 
+llama-index-agent-openai = ">=0.4.0,<0.5.0" +llama-index-cli = ">=0.4.0,<0.5.0" +llama-index-core = ">=0.12.2,<0.13.0" +llama-index-embeddings-openai = ">=0.3.0,<0.4.0" +llama-index-indices-managed-llama-cloud = ">=0.4.0" llama-index-legacy = ">=0.9.48,<0.10.0" -llama-index-llms-openai = ">=0.1.27,<0.2.0" -llama-index-multi-modal-llms-openai = ">=0.1.3,<0.2.0" -llama-index-program-openai = ">=0.1.3,<0.2.0" -llama-index-question-gen-openai = ">=0.1.2,<0.2.0" -llama-index-readers-file = ">=0.1.4,<0.2.0" -llama-index-readers-llama-parse = ">=0.1.2" +llama-index-llms-openai = ">=0.3.0,<0.4.0" +llama-index-multi-modal-llms-openai = ">=0.3.0,<0.4.0" +llama-index-program-openai = ">=0.3.0,<0.4.0" +llama-index-question-gen-openai = ">=0.3.0,<0.4.0" +llama-index-readers-file = ">=0.4.0,<0.5.0" +llama-index-readers-llama-parse = ">=0.4.0" +nltk = ">3.8.1" [[package]] name = "llama-index-agent-openai" -version = "0.2.9" +version = "0.4.0" description = "llama-index agent openai integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_agent_openai-0.2.9-py3-none-any.whl", hash = "sha256:d7f0fd4c87124781acd783be603871f8808b1a3969e876a9c96e2ed0844d46ac"}, - {file = "llama_index_agent_openai-0.2.9.tar.gz", hash = "sha256:debe86da6d9d983db32b445ddca7c798ac140fe59573bafded73595b3995f3d5"}, + {file = "llama_index_agent_openai-0.4.0-py3-none-any.whl", hash = "sha256:71b2f46bb24813129ab6bc2d5bcebb9aebf323403ebf1e6cc9840687a34a6169"}, + {file = "llama_index_agent_openai-0.4.0.tar.gz", hash = "sha256:31d2675dbd84489756dd062a7ffed330b2abdca3b7715d511674f5b5075e4dd6"}, ] [package.dependencies] -llama-index-core = ">=0.10.41,<0.11.0" -llama-index-llms-openai = ">=0.1.5,<0.2.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-index-llms-openai = ">=0.3.0,<0.4.0" openai = ">=1.14.0" [[package]] name = "llama-index-cli" -version = "0.1.13" +version = "0.4.0" description = "llama-index cli" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_cli-0.1.13-py3-none-any.whl", hash = "sha256:5e05bc3ce55ee1bf6e5af7e87631a71d6b6cf8fc2af10cd3947b09b1bac6788d"}, - {file = "llama_index_cli-0.1.13.tar.gz", hash = "sha256:86147ded4439fbab1d6c7c0d72e8f231d2935da9fdf5c9d3f0dde4f35d44aa59"}, + {file = "llama_index_cli-0.4.0-py3-none-any.whl", hash = "sha256:60d12f89e6b85e80a0cc3a8b531f05a911b5eebaebc37314411476d1ba685904"}, + {file = "llama_index_cli-0.4.0.tar.gz", hash = "sha256:d6ab201359962a8a34368aeda3a49bbbe67e9e009c59bd925c4fb2be4ace3906"}, ] [package.dependencies] -llama-index-core = ">=0.10.11.post1,<0.11.0" -llama-index-embeddings-openai = ">=0.1.1,<0.2.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-index-embeddings-openai = ">=0.3.0,<0.4.0" +llama-index-llms-openai = ">=0.3.0,<0.4.0" [[package]] name = "llama-index-core" -version = "0.10.68.post1" +version = "0.12.2" description = "Interface between LLMs and your data" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_core-0.10.68.post1-py3-none-any.whl", hash = "sha256:1befe1324f0fa1c3a2cfc1e4d38adb0cd0c3b2948badfb2be826da048a3bdbaf"}, - {file = "llama_index_core-0.10.68.post1.tar.gz", hash = "sha256:1215106973f2fb7651c10827c27ca3f47c03ccfae3b8653c5476d454d5ba8cd0"}, + {file = "llama_index_core-0.12.2-py3-none-any.whl", hash = "sha256:27a5548523435a5c2b84f75c15894a44522b7f968e9f29a03f9a301ca09fb7fa"}, + {file = 
"llama_index_core-0.12.2.tar.gz", hash = "sha256:a48b2de9c3a09608ab5c03c5a313428f119c86946acdefde555992b7c0b8a38e"}, ] [package.dependencies] @@ -2297,15 +2306,16 @@ aiohttp = ">=3.8.6,<4.0.0" dataclasses-json = "*" deprecated = ">=1.2.9.3" dirtyjson = ">=1.0.8,<2.0.0" +eval-type-backport = {version = ">=0.2.0,<0.3.0", markers = "python_version < \"3.10\""} +filetype = ">=1.2.0,<2.0.0" fsspec = ">=2023.5.0" httpx = "*" nest-asyncio = ">=1.5.8,<2.0.0" networkx = ">=3.0" -nltk = ">=3.8.1,<3.9 || >3.9" -numpy = "<2.0.0" -pandas = "*" +nltk = ">3.8.1" +numpy = "*" pillow = ">=9.0.0" -pydantic = "<3.0" +pydantic = ">=2.7.0,<2.10.0" PyYAML = ">=6.0.1" requests = ">=2.31.0" SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} @@ -2318,32 +2328,33 @@ wrapt = "*" [[package]] name = "llama-index-embeddings-openai" -version = "0.1.11" +version = "0.3.0" description = "llama-index embeddings openai integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_embeddings_openai-0.1.11-py3-none-any.whl", hash = "sha256:e20806fc4baff6b8f5274decf2c1ca7c5c737648e01865475ffada164e32e173"}, - {file = "llama_index_embeddings_openai-0.1.11.tar.gz", hash = "sha256:6025e229e375201788a9b14d6ebe470329907576cba5f6b7b832c3d68f39db30"}, + {file = "llama_index_embeddings_openai-0.3.0-py3-none-any.whl", hash = "sha256:f6817b856ed3b1afc0d0e1974ef3590f23f3bd9601737a50ccf485485d048e2d"}, + {file = "llama_index_embeddings_openai-0.3.0.tar.gz", hash = "sha256:a37d5ba5cc947a36a3ceaa41dfc65d726a873ffb3a27b7b4959284f5b944f617"}, ] [package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" +llama-index-core = ">=0.12.0,<0.13.0" +openai = ">=1.1.0" [[package]] name = "llama-index-indices-managed-llama-cloud" -version = "0.2.7" +version = "0.6.2" description = "llama-index indices llama-cloud integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_indices_managed_llama_cloud-0.2.7-py3-none-any.whl", hash = "sha256:94335504eab2a6baf7361bbd8bda3ae20a68c7d0111587c9a0793440e9edff21"}, - {file = "llama_index_indices_managed_llama_cloud-0.2.7.tar.gz", hash = "sha256:d7e9b4cc50214b3cfcd75ea63cacce4ee36092cb672c003f15fd23ba31c49ec0"}, + {file = "llama_index_indices_managed_llama_cloud-0.6.2-py3-none-any.whl", hash = "sha256:ef292cb0e8bd25f382a8fdf01fae01aca2d48b25816a786215082ab231bd467c"}, + {file = "llama_index_indices_managed_llama_cloud-0.6.2.tar.gz", hash = "sha256:498481c6a98afce5e816bc7b7f5249fe97c1555e997e23e057dd175a543b651d"}, ] [package.dependencies] -llama-cloud = ">=0.0.11" -llama-index-core = ">=0.10.48.post1,<0.11.0" +llama-cloud = ">=0.1.5" +llama-index-core = ">=0.12.0,<0.13.0" [[package]] name = "llama-index-legacy" @@ -2386,96 +2397,97 @@ query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "l [[package]] name = "llama-index-llms-anthropic" -version = "0.1.17" +version = "0.5.0" description = "llama-index llms anthropic integration" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_llms_anthropic-0.1.17-py3-none-any.whl", hash = "sha256:e4c2b07a890d0d6f51707379c535df82b2575e187e98ac5e0eb71ab20e3fec26"}, - {file = "llama_index_llms_anthropic-0.1.17.tar.gz", hash = "sha256:436cb69505839af953ab00113e758011774b466bb819b7f3785a306c043a36f2"}, + {file = "llama_index_llms_anthropic-0.5.0-py3-none-any.whl", hash = "sha256:2b9367db45deabcbda4db1b1216c95e2663e1e6f129570fc2d275207dd3901cf"}, + 
{file = "llama_index_llms_anthropic-0.5.0.tar.gz", hash = "sha256:14e400ccc2deb8e9024ef8cdc24550b67f4240f3563b4564d4870be85de2d9d8"}, ] [package.dependencies] -anthropic = ">=0.26.2,<0.29.0" -llama-index-core = ">=0.10.57,<0.11.0" +anthropic = {version = ">=0.39.0", extras = ["bedrock", "vertex"]} +llama-index-core = ">=0.12.0,<0.13.0" [[package]] name = "llama-index-llms-openai" -version = "0.1.31" +version = "0.3.2" description = "llama-index llms openai integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_llms_openai-0.1.31-py3-none-any.whl", hash = "sha256:800815b1b964b7d8dddd0e02a09fb57ac5f2ec6f80db92cd704dae718846023f"}, - {file = "llama_index_llms_openai-0.1.31.tar.gz", hash = "sha256:c235493f453b92903722054a8dfb1452ea850eac47a68a38bab3b823988d56fe"}, + {file = "llama_index_llms_openai-0.3.2-py3-none-any.whl", hash = "sha256:439b8ac8183168156a9724d03e1b3aeeb95d8d3c605b866a6b803b84fae131f6"}, + {file = "llama_index_llms_openai-0.3.2.tar.gz", hash = "sha256:8a443a564e7d12779a9f030cb82fe3243803e217d72410764ac116dd43554fe5"}, ] [package.dependencies] -llama-index-core = ">=0.10.57,<0.11.0" +llama-index-core = ">=0.12.0,<0.13.0" openai = ">=1.40.0,<2.0.0" [[package]] name = "llama-index-multi-modal-llms-openai" -version = "0.1.9" +version = "0.3.0" description = "llama-index multi-modal-llms openai integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_multi_modal_llms_openai-0.1.9-py3-none-any.whl", hash = "sha256:614f40427a4671e72742780be8fda77297dbf2942519bffcb2c9de8696a9edff"}, - {file = "llama_index_multi_modal_llms_openai-0.1.9.tar.gz", hash = "sha256:dbacf44d5c2cca07ca424eacd1337583002d70387a3c1868cf8ae743b1dbec4a"}, + {file = "llama_index_multi_modal_llms_openai-0.3.0-py3-none-any.whl", hash = "sha256:9b7e3e39b19b2668b9c75014bcb90795bb546f0f9e1af8b7f1087f8687805763"}, + {file = "llama_index_multi_modal_llms_openai-0.3.0.tar.gz", hash = "sha256:71e983c7771c39088e4058cd78029219315a0fb631b9e12b903e53243b9a3fd6"}, ] [package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-index-llms-openai = ">=0.3.0,<0.4.0" [[package]] name = "llama-index-program-openai" -version = "0.1.7" +version = "0.3.1" description = "llama-index program openai integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_program_openai-0.1.7-py3-none-any.whl", hash = "sha256:33489b573c1050a3f583ff68fcbc4bcbd49f29e74f3e5baea08ab0d5f363403c"}, - {file = "llama_index_program_openai-0.1.7.tar.gz", hash = "sha256:bf7eb61a073381714be5a049d93b40044dfe51bd4333bee539d1532b7407621f"}, + {file = "llama_index_program_openai-0.3.1-py3-none-any.whl", hash = "sha256:93646937395dc5318fd095153d2f91bd632b25215d013d14a87c088887d205f9"}, + {file = "llama_index_program_openai-0.3.1.tar.gz", hash = "sha256:6039a6cdbff62c6388c07e82a157fe2edd3bbef0c5adf292ad8546bf4ec75b82"}, ] [package.dependencies] -llama-index-agent-openai = ">=0.1.1,<0.3.0" -llama-index-core = ">=0.10.57,<0.11.0" -llama-index-llms-openai = ">=0.1.1" +llama-index-agent-openai = ">=0.4.0,<0.5.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-index-llms-openai = ">=0.3.0,<0.4.0" [[package]] name = "llama-index-question-gen-openai" -version = "0.1.3" +version = "0.3.0" description = "llama-index question_gen openai integration" optional = true -python-versions = 
">=3.8.1,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_question_gen_openai-0.1.3-py3-none-any.whl", hash = "sha256:1f83b49e8b2e665030d1ec8c54687d6985d9fa8426147b64e46628a9e489b302"}, - {file = "llama_index_question_gen_openai-0.1.3.tar.gz", hash = "sha256:4486198117a45457d2e036ae60b93af58052893cc7d78fa9b6f47dd47b81e2e1"}, + {file = "llama_index_question_gen_openai-0.3.0-py3-none-any.whl", hash = "sha256:9b60ec114273a63b50349948666e5744a8f58acb645824e07c979041e8fec598"}, + {file = "llama_index_question_gen_openai-0.3.0.tar.gz", hash = "sha256:efd3b468232808e9d3474670aaeab00e41b90f75f52d0c9bfbf11207e0963d62"}, ] [package.dependencies] -llama-index-core = ">=0.10.1,<0.11.0" -llama-index-llms-openai = ">=0.1.1,<0.2.0" -llama-index-program-openai = ">=0.1.1,<0.2.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-index-llms-openai = ">=0.3.0,<0.4.0" +llama-index-program-openai = ">=0.3.0,<0.4.0" [[package]] name = "llama-index-readers-file" -version = "0.1.33" +version = "0.4.0" description = "llama-index readers file integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_readers_file-0.1.33-py3-none-any.whl", hash = "sha256:c968308497c1355acf61fe7e3f05ad8e308bb6487dddd3bd2a60e102225d0b38"}, - {file = "llama_index_readers_file-0.1.33.tar.gz", hash = "sha256:247a4d5bfabc7d1022027adf58064bc16c224d006db142abb0d182ac5574a887"}, + {file = "llama_index_readers_file-0.4.0-py3-none-any.whl", hash = "sha256:437a38d63d4e254168980dd17c6eccde18cb97876fb9fffae9da3dfe6737d0fe"}, + {file = "llama_index_readers_file-0.4.0.tar.gz", hash = "sha256:7828dec1feb7c53e6d3140385f8499c0e7ac746265299384714ddfd163f9d15a"}, ] [package.dependencies] beautifulsoup4 = ">=4.12.3,<5.0.0" -llama-index-core = ">=0.10.37.post1,<0.11.0" -pypdf = ">=4.0.1,<5.0.0" +llama-index-core = ">=0.12.0,<0.13.0" +pandas = "*" +pypdf = ">=5.1.0,<6.0.0" striprtf = ">=0.0.26,<0.0.27" [package.extras] @@ -2483,32 +2495,34 @@ pymupdf = ["pymupdf (>=1.23.21,<2.0.0)"] [[package]] name = "llama-index-readers-llama-parse" -version = "0.1.6" +version = "0.4.0" description = "llama-index readers llama-parse integration" optional = true -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "llama_index_readers_llama_parse-0.1.6-py3-none-any.whl", hash = "sha256:71d445a2357ce4c632e0fada7c913ac62790e77c062f12d916dd86378380ff1f"}, - {file = "llama_index_readers_llama_parse-0.1.6.tar.gz", hash = "sha256:04f2dcfbb0fb87ce70890f5a2f4f89941d79be6a818b43738f053560e4b451cf"}, + {file = "llama_index_readers_llama_parse-0.4.0-py3-none-any.whl", hash = "sha256:574e48386f28d2c86c3f961ca4a4906910312f3400dd0c53014465bfbc6b32bf"}, + {file = "llama_index_readers_llama_parse-0.4.0.tar.gz", hash = "sha256:e99ec56f4f8546d7fda1a7c1ae26162fb9acb7ebcac343b5abdb4234b4644e0f"}, ] [package.dependencies] -llama-index-core = ">=0.10.7,<0.11.0" -llama-parse = ">=0.4.0" +llama-index-core = ">=0.12.0,<0.13.0" +llama-parse = ">=0.5.0" [[package]] name = "llama-parse" -version = "0.4.9" +version = "0.5.15" description = "Parse files into RAG-Optimized formats." 
optional = true python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_parse-0.4.9-py3-none-any.whl", hash = "sha256:71974a57a73d642608cc406942bee4e7fc1a713fa410f51df67da509479ba544"}, - {file = "llama_parse-0.4.9.tar.gz", hash = "sha256:657f8fa5f7d399f14c0454fc05cae6034da0373f191df6cfca17a1b4a704ef87"}, + {file = "llama_parse-0.5.15-py3-none-any.whl", hash = "sha256:7a3506c7d3ae5a8e68c70a457a7213d2698e26abcef1d7a989eb9771cd73ae60"}, + {file = "llama_parse-0.5.15.tar.gz", hash = "sha256:ecb009f71c8b4c657085ca81808a922c80785810e38b10f3b46f03cfd29ba92a"}, ] [package.dependencies] -llama-index-core = ">=0.10.29" +click = ">=8.1.7,<9.0.0" +llama-index-core = ">=0.11.0" +pydantic = "!=2.10" [[package]] name = "markdown-it-py" @@ -2931,43 +2945,6 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = 
"numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - [[package]] name = "numpy" version = "1.26.4" @@ -3338,7 +3315,7 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, @@ -3427,7 +3404,6 @@ files = [ ] [package.dependencies] -astunparse = {version = "*", markers = "python_version < \"3.9\""} Jinja2 = ">=2.11.0" MarkupSafe = "*" pygments = ">=2.12.0" @@ -3897,6 +3873,26 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pydantic-settings" +version = "2.6.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"}, + {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pygments" version = "2.17.2" @@ -3995,23 +3991,24 @@ zstd = ["zstandard"] [[package]] name = "pypdf" -version = "4.3.1" +version = "5.1.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = true -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"}, - {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"}, + 
{file = "pypdf-5.1.0-py3-none-any.whl", hash = "sha256:3bd4f503f4ebc58bae40d81e81a9176c400cbbac2ba2d877367595fb524dfdfc"}, + {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"}, ] [package.dependencies] typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -crypto = ["PyCryptodome", "cryptography"] +crypto = ["cryptography"] +cryptodome = ["PyCryptodome"] dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +full = ["Pillow (>=8.0.0)", "cryptography"] image = ["Pillow (>=8.0.0)"] [[package]] @@ -4166,7 +4163,7 @@ cli = ["click (>=5.0)"] name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" -optional = false +optional = true python-versions = "*" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, @@ -4360,6 +4357,20 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + [[package]] name = "respx" version = "0.21.1" @@ -4388,7 +4399,6 @@ files = [ [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -4983,7 +4993,7 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" -optional = false +optional = true python-versions = ">=2" files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, @@ -5297,20 +5307,6 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "wheel" -version = "0.44.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, - {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - [[package]] name = "wrapt" version = "1.16.0" @@ -5524,5 +5520,5 @@ openai = ["openai"] [metadata] lock-version = "2.0" -python-versions = ">=3.8.1,<4.0" -content-hash = "331967efea6eead937f04876c8af3e3fc4d291871181a440683fb6d61174e5ad" +python-versions = ">=3.9,<4" +content-hash = "94038e866f19d9b006ca1bb1771a66d2eae61d153176e3ea827d748b35fa6d16" diff --git a/pyproject.toml b/pyproject.toml index a9103cbb6..dfcff90fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,20 +37,20 @@ pytest-asyncio = ">=0.21.1,<0.24.0" pytest-httpserver = "^1.0.8" boto3 = 
"^1.28.59" ruff = ">=0.1.8,<0.6.0" -langchain-mistralai = ">=0.0.1,<0.1.8" +langchain-mistralai = ">=0.0.1,<0.3" google-cloud-aiplatform = "^1.38.1" cohere = ">=4.46,<6.0" -langchain-google-vertexai = ">=0.0.5,<1.0.5" -langchain-openai = ">=0.0.5,<0.1.8" +langchain-google-vertexai = ">=2.0.0,<3.0.0" +langchain-openai = ">=0.0.5,<0.3" dashscope = "^1.14.1" pymongo = "^4.6.1" -llama-index-llms-anthropic = "^0.1.1" +llama-index-llms-anthropic = ">=0.1.1,<0.6" bson = "^0.5.10" -langchain-anthropic = "^0.1.4" -langchain-groq = "^0.1.3" -langchain-aws = "^0.1.3" +langchain-anthropic = ">=0.1.4,<0.4" +langchain-groq = ">=0.1.3,<0.3" +langchain-aws = ">=0.1.3,<0.3" -langchain-community = "^0.2.14" +langchain-community = ">=0.2.14,<0.4" [tool.poetry.group.docs.dependencies] pdoc = "^14.4.0" From e53e0372da85ac6e1940877488a3b9807969162f Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 16:38:53 +0100 Subject: [PATCH 04/15] remove finto --- langfuse/api/README.md | 4 ++-- langfuse/api/reference.md | 24 +++++++++---------- langfuse/api/resources/comments/client.py | 4 ++-- .../api/resources/dataset_items/client.py | 4 ++-- .../api/resources/dataset_run_items/client.py | 4 ++-- langfuse/api/resources/datasets/client.py | 4 ++-- .../resources/health/types/health_response.py | 2 +- langfuse/api/resources/ingestion/client.py | 4 ++-- .../resources/ingestion/types/score_body.py | 2 +- langfuse/api/resources/media/client.py | 8 +++---- langfuse/api/resources/models/client.py | 4 ++-- langfuse/api/resources/prompts/client.py | 4 ++-- langfuse/api/resources/score/client.py | 8 +++---- .../score/types/create_score_request.py | 2 +- .../api/resources/score_configs/client.py | 4 ++-- 15 files changed, 41 insertions(+), 41 deletions(-) diff --git a/langfuse/api/README.md b/langfuse/api/README.md index 918d79330..78acd43f1 100644 --- a/langfuse/api/README.md +++ b/langfuse/api/README.md @@ -16,7 +16,7 @@ pip install finto Instantiate and use the client with the following: ```python -from finto import CreateCommentRequest +from langfuse.api import CreateCommentRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -45,7 +45,7 @@ The SDK also exports an `async` client so that you can make non-blocking calls t ```python import asyncio -from finto import CreateCommentRequest +from langfuse.api import CreateCommentRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/reference.md b/langfuse/api/reference.md index 54f1c84fc..5e5642677 100644 --- a/langfuse/api/reference.md +++ b/langfuse/api/reference.md @@ -30,7 +30,7 @@ Create a comment. Comments may be attached to different object types (trace, obs
```python -from finto import CreateCommentRequest +from langfuse.api import CreateCommentRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -303,7 +303,7 @@ Create a dataset item
```python -from finto import CreateDatasetItemRequest, DatasetStatus +from langfuse.api import CreateDatasetItemRequest, DatasetStatus from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -579,7 +579,7 @@ Create a dataset run item
```python -from finto import CreateDatasetRunItemRequest +from langfuse.api import CreateDatasetRunItemRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -826,7 +826,7 @@ Create a dataset
```python -from finto import CreateDatasetRequest +from langfuse.api import CreateDatasetRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -1162,7 +1162,7 @@ Notes: ```python import datetime -from finto import IngestionEvent_TraceCreate, TraceBody +from langfuse.api import IngestionEvent_TraceCreate, TraceBody from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -1350,7 +1350,7 @@ Patch a media record ```python import datetime -from finto import PatchMediaBody +from langfuse.api import PatchMediaBody from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -1443,7 +1443,7 @@ Get a presigned upload URL for a media record
```python -from finto import GetMediaUploadUrlRequest +from langfuse.api import GetMediaUploadUrlRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -1668,7 +1668,7 @@ Create a model ```python import datetime -from finto import CreateModelRequest, ModelUsageUnit +from langfuse.api import CreateModelRequest, ModelUsageUnit from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -2535,7 +2535,7 @@ Create a new version for the prompt with the given `name`
```python -from finto import ChatMessage, CreatePromptRequest_Chat +from langfuse.api import ChatMessage, CreatePromptRequest_Chat from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -2625,7 +2625,7 @@ Create a score configuration (config). Score configs are used to define the stru
```python -from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType +from langfuse.api import ConfigCategory, CreateScoreConfigRequest, ScoreDataType from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -2877,7 +2877,7 @@ Create a score
```python -from finto import CreateScoreRequest +from langfuse.api import CreateScoreRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -2960,7 +2960,7 @@ Get a list of scores ```python import datetime -from finto import ScoreDataType, ScoreSource +from langfuse.api import ScoreDataType, ScoreSource from langfuse.api.client import FernLangfuse client = FernLangfuse( diff --git a/langfuse/api/resources/comments/client.py b/langfuse/api/resources/comments/client.py index 80476b00b..072df98aa 100644 --- a/langfuse/api/resources/comments/client.py +++ b/langfuse/api/resources/comments/client.py @@ -48,7 +48,7 @@ def create( Examples -------- - from finto import CreateCommentRequest + from langfuse.api import CreateCommentRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -294,7 +294,7 @@ async def create( -------- import asyncio - from finto import CreateCommentRequest + from langfuse.api import CreateCommentRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/dataset_items/client.py b/langfuse/api/resources/dataset_items/client.py index 89f19266d..55f801f2a 100644 --- a/langfuse/api/resources/dataset_items/client.py +++ b/langfuse/api/resources/dataset_items/client.py @@ -47,7 +47,7 @@ def create( Examples -------- - from finto import CreateDatasetItemRequest, DatasetStatus + from langfuse.api import CreateDatasetItemRequest, DatasetStatus from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -289,7 +289,7 @@ async def create( -------- import asyncio - from finto import CreateDatasetItemRequest, DatasetStatus + from langfuse.api import CreateDatasetItemRequest, DatasetStatus from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/dataset_run_items/client.py b/langfuse/api/resources/dataset_run_items/client.py index 109ce60b8..dcc0f85dc 100644 --- a/langfuse/api/resources/dataset_run_items/client.py +++ b/langfuse/api/resources/dataset_run_items/client.py @@ -45,7 +45,7 @@ def create( Examples -------- - from finto import CreateDatasetRunItemRequest + from langfuse.api import CreateDatasetRunItemRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -129,7 +129,7 @@ async def create( -------- import asyncio - from finto import CreateDatasetRunItemRequest + from langfuse.api import CreateDatasetRunItemRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/datasets/client.py b/langfuse/api/resources/datasets/client.py index 2aeb05323..4c6048b94 100644 --- a/langfuse/api/resources/datasets/client.py +++ b/langfuse/api/resources/datasets/client.py @@ -190,7 +190,7 @@ def create( Examples -------- - from finto import CreateDatasetRequest + from langfuse.api import CreateDatasetRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -577,7 +577,7 @@ async def create( -------- import asyncio - from finto import CreateDatasetRequest + from langfuse.api import CreateDatasetRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/health/types/health_response.py b/langfuse/api/resources/health/types/health_response.py index a864ea9ae..3cd5e429a 100644 --- a/langfuse/api/resources/health/types/health_response.py +++ b/langfuse/api/resources/health/types/health_response.py @@ -11,7 +11,7 @@ class HealthResponse(pydantic_v1.BaseModel): 
""" Examples -------- - from finto import HealthResponse + from langfuse.api import HealthResponse HealthResponse( version="1.25.0", diff --git a/langfuse/api/resources/ingestion/client.py b/langfuse/api/resources/ingestion/client.py index 1e3b99be1..90f88bf45 100644 --- a/langfuse/api/resources/ingestion/client.py +++ b/langfuse/api/resources/ingestion/client.py @@ -57,7 +57,7 @@ def batch( -------- import datetime - from finto import IngestionEvent_TraceCreate, TraceBody + from langfuse.api import IngestionEvent_TraceCreate, TraceBody from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -168,7 +168,7 @@ async def batch( import asyncio import datetime - from finto import IngestionEvent_TraceCreate, TraceBody + from langfuse.api import IngestionEvent_TraceCreate, TraceBody from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/ingestion/types/score_body.py b/langfuse/api/resources/ingestion/types/score_body.py index 1bc7e581a..a043ef145 100644 --- a/langfuse/api/resources/ingestion/types/score_body.py +++ b/langfuse/api/resources/ingestion/types/score_body.py @@ -13,7 +13,7 @@ class ScoreBody(pydantic_v1.BaseModel): """ Examples -------- - from finto import ScoreBody + from langfuse.api import ScoreBody ScoreBody( name="novelty", diff --git a/langfuse/api/resources/media/client.py b/langfuse/api/resources/media/client.py index 3c2176573..a32916d5e 100644 --- a/langfuse/api/resources/media/client.py +++ b/langfuse/api/resources/media/client.py @@ -119,7 +119,7 @@ def patch( -------- import datetime - from finto import PatchMediaBody + from langfuse.api import PatchMediaBody from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -197,7 +197,7 @@ def get_upload_url( Examples -------- - from finto import GetMediaUploadUrlRequest + from langfuse.api import GetMediaUploadUrlRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -360,7 +360,7 @@ async def patch( import asyncio import datetime - from finto import PatchMediaBody + from langfuse.api import PatchMediaBody from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( @@ -446,7 +446,7 @@ async def get_upload_url( -------- import asyncio - from finto import GetMediaUploadUrlRequest + from langfuse.api import GetMediaUploadUrlRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/models/client.py b/langfuse/api/resources/models/client.py index 799337360..c44a303d6 100644 --- a/langfuse/api/resources/models/client.py +++ b/langfuse/api/resources/models/client.py @@ -49,7 +49,7 @@ def create( -------- import datetime - from finto import CreateModelRequest, ModelUsageUnit + from langfuse.api import CreateModelRequest, ModelUsageUnit from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -341,7 +341,7 @@ async def create( import asyncio import datetime - from finto import CreateModelRequest, ModelUsageUnit + from langfuse.api import CreateModelRequest, ModelUsageUnit from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/prompts/client.py b/langfuse/api/resources/prompts/client.py index 547eb3f6c..b900bf494 100644 --- a/langfuse/api/resources/prompts/client.py +++ b/langfuse/api/resources/prompts/client.py @@ -244,7 +244,7 @@ def create( Examples -------- - from finto import ChatMessage, CreatePromptRequest_Chat + from langfuse.api import ChatMessage, 
CreatePromptRequest_Chat from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -542,7 +542,7 @@ async def create( -------- import asyncio - from finto import ChatMessage, CreatePromptRequest_Chat + from langfuse.api import ChatMessage, CreatePromptRequest_Chat from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/score/client.py b/langfuse/api/resources/score/client.py index 1d275f6b0..a7e28651c 100644 --- a/langfuse/api/resources/score/client.py +++ b/langfuse/api/resources/score/client.py @@ -52,7 +52,7 @@ def create( Examples -------- - from finto import CreateScoreRequest + from langfuse.api import CreateScoreRequest from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -183,7 +183,7 @@ def get( -------- import datetime - from finto import ScoreDataType, ScoreSource + from langfuse.api import ScoreDataType, ScoreSource from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -425,7 +425,7 @@ async def create( -------- import asyncio - from finto import CreateScoreRequest + from langfuse.api import CreateScoreRequest from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( @@ -563,7 +563,7 @@ async def get( import asyncio import datetime - from finto import ScoreDataType, ScoreSource + from langfuse.api import ScoreDataType, ScoreSource from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( diff --git a/langfuse/api/resources/score/types/create_score_request.py b/langfuse/api/resources/score/types/create_score_request.py index 8e256e300..13f3cebd9 100644 --- a/langfuse/api/resources/score/types/create_score_request.py +++ b/langfuse/api/resources/score/types/create_score_request.py @@ -13,7 +13,7 @@ class CreateScoreRequest(pydantic_v1.BaseModel): """ Examples -------- - from finto import CreateScoreRequest + from langfuse.api import CreateScoreRequest CreateScoreRequest( name="novelty", diff --git a/langfuse/api/resources/score_configs/client.py b/langfuse/api/resources/score_configs/client.py index 4323097bf..3a9092de0 100644 --- a/langfuse/api/resources/score_configs/client.py +++ b/langfuse/api/resources/score_configs/client.py @@ -47,7 +47,7 @@ def create( Examples -------- - from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType + from langfuse.api import ConfigCategory, CreateScoreConfigRequest, ScoreDataType from langfuse.api.client import FernLangfuse client = FernLangfuse( @@ -275,7 +275,7 @@ async def create( -------- import asyncio - from finto import ConfigCategory, CreateScoreConfigRequest, ScoreDataType + from langfuse.api import ConfigCategory, CreateScoreConfigRequest, ScoreDataType from langfuse.api.client import AsyncFernLangfuse client = AsyncFernLangfuse( From 70a9e0d553cc3e857b6d09ffc9188c41a7c70154 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 17:02:31 +0100 Subject: [PATCH 05/15] push --- ci.ruff.toml | 4 +++- langfuse/_task_manager/media_manager.py | 8 ++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/ci.ruff.toml b/ci.ruff.toml index fe975a8f1..184e12e6f 100644 --- a/ci.ruff.toml +++ b/ci.ruff.toml @@ -1,4 +1,6 @@ # This is the Ruff config used in CI. # In development, ruff.toml is used instead. 
-target-version = 'py38' \ No newline at end of file +target-version = 'py38' +[lint] +exclude = ["langfuse/api/**/*.py"] \ No newline at end of file diff --git a/langfuse/_task_manager/media_manager.py b/langfuse/_task_manager/media_manager.py index 9400d2664..09f459de3 100644 --- a/langfuse/_task_manager/media_manager.py +++ b/langfuse/_task_manager/media_manager.py @@ -146,7 +146,9 @@ def _process_data_recursively(data: Any, level: int): field=field, ) - return media + data["data"] = media + + return data # Vertex if ( @@ -167,7 +169,9 @@ def _process_data_recursively(data: Any, level: int): field=field, ) - return media + data["data"] = media + + return data if isinstance(data, list): return [_process_data_recursively(item, level + 1) for item in data] From cc83035da9ea070abdb80ee5a7432185d93a7b7f Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 17:26:40 +0100 Subject: [PATCH 06/15] drop 3.8 support --- .github/workflows/ci.yml | 2 +- poetry.lock | 4 ++-- pyproject.toml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index caa11267c..9ac604fb9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,10 +38,10 @@ jobs: fail-fast: false matrix: python-version: - - "3.8" - "3.9" - "3.10" - "3.11" + - "3.12" name: Test on Python version ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 diff --git a/poetry.lock b/poetry.lock index b3d262f5b..2f7d2fff3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5520,5 +5520,5 @@ openai = ["openai"] [metadata] lock-version = "2.0" -python-versions = ">=3.9,<4" -content-hash = "94038e866f19d9b006ca1bb1771a66d2eae61d153176e3ea827d748b35fa6d16" +python-versions = ">=3.9,<4.0" +content-hash = "040fa7015be6e2e55953668016b062a2e1354eb84bc1591b37b1cdaed5063b29" diff --git a/pyproject.toml b/pyproject.toml index dfcff90fd..e88b7986f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ license = "MIT" readme = "README.md" [tool.poetry.dependencies] -python = ">=3.8.1,<4.0" +python = ">=3.9,<4.0" httpx = ">=0.15.4,<1.0" pydantic = ">=1.10.7, <3.0" backoff = ">=1.10.0" @@ -40,7 +40,7 @@ ruff = ">=0.1.8,<0.6.0" langchain-mistralai = ">=0.0.1,<0.3" google-cloud-aiplatform = "^1.38.1" cohere = ">=4.46,<6.0" -langchain-google-vertexai = ">=2.0.0,<3.0.0" +langchain-google-vertexai = ">=1.0.0,<3.0.0" langchain-openai = ">=0.0.5,<0.3" dashscope = "^1.14.1" pymongo = "^4.6.1" From cbf467b9086976c9ef182fc418cdf6826ccff308 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 18:10:44 +0100 Subject: [PATCH 07/15] fix test --- langfuse/extract_model.py | 5 + poetry.lock | 665 +++++++++++++++++++++++++++++++++++- pyproject.toml | 3 + tests/test_extract_model.py | 71 ++-- 4 files changed, 684 insertions(+), 60 deletions(-) diff --git a/langfuse/extract_model.py b/langfuse/extract_model.py index 58d6cb22f..ba365b532 100644 --- a/langfuse/extract_model.py +++ b/langfuse/extract_model.py @@ -31,6 +31,7 @@ def _extract_model_name( ("HuggingFacePipeline", ["invocation_params", "model_id"], "kwargs"), ("BedrockChat", ["kwargs", "model_id"], "serialized"), ("Bedrock", ["kwargs", "model_id"], "serialized"), + ("BedrockLLM", ["kwargs", "model_id"], "serialized"), ("ChatBedrock", ["kwargs", "model_id"], "serialized"), ("LlamaCpp", ["invocation_params", "model_path"], "kwargs"), ("WatsonxLLM", ["invocation_params", "model_id"], "kwargs"), @@ -45,6 
+46,9 @@ def _extract_model_name( # Second, we match AzureOpenAI as we need to extract the model name, fdeployment version and deployment name if serialized.get("id")[-1] == "AzureOpenAI": + if kwargs.get("invocation_params").get("model"): + return kwargs.get("invocation_params").get("model") + if kwargs.get("invocation_params").get("model_name"): return kwargs.get("invocation_params").get("model_name") @@ -68,6 +72,7 @@ def _extract_model_name( ("ChatAnyscale", "model_name", None), ("TextGen", "model", "text-gen"), ("Ollama", "model", None), + ("OllamaLLM", "model", None), ("ChatOllama", "model", None), ("ChatFireworks", "model", None), ("ChatPerplexity", "model", None), diff --git a/poetry.lock b/poetry.lock index 2f7d2fff3..bbff7cfd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -350,8 +350,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -1096,12 +1096,12 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" @@ -2042,6 +2042,28 @@ numpy = [ ] pydantic = ">=2,<3" +[[package]] +name = "langchain-cohere" +version = "0.3.3" +description = "An integration package connecting Cohere and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_cohere-0.3.3-py3-none-any.whl", hash = "sha256:c8dee47a31cedb227ccf3ba93dad5f09ebadf9043e0ce941ae0bffdc3a226b37"}, + {file = "langchain_cohere-0.3.3.tar.gz", hash = "sha256:502f35eb5f983656b26114c7411628241fd06f14e24c85721ea57c9ee1c7c890"}, +] + +[package.dependencies] +cohere = ">=5.5.6,<6.0" +langchain-core = ">=0.3.0,<0.4" +langchain-experimental = ">=0.3.0,<0.4.0" +pandas = ">=1.4.3" +pydantic = ">=2,<3" +tabulate = ">=0.9.0,<0.10.0" + +[package.extras] +langchain-community = ["langchain-community (>=0.3.0,<0.4.0)"] + [[package]] name = "langchain-community" version = "0.3.8" @@ -2093,6 +2115,21 @@ PyYAML = ">=5.3" tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" typing-extensions = ">=4.7" +[[package]] +name = "langchain-experimental" +version = "0.3.3" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_experimental-0.3.3-py3-none-any.whl", hash = "sha256:da01aafc162631475f306ca368ecae74d5becd93b8039bddb6315e755e274580"}, + {file = 
"langchain_experimental-0.3.3.tar.gz", hash = "sha256:6bbcdcd084581432ef4b5d732294a59d75a858ede1714b50a5b79bcfe31fa306"}, +] + +[package.dependencies] +langchain-community = ">=0.3.0,<0.4.0" +langchain-core = ">=0.3.15,<0.4.0" + [[package]] name = "langchain-google-vertexai" version = "2.0.3" @@ -2131,6 +2168,24 @@ files = [ groq = ">=0.4.1,<1" langchain-core = ">=0.3.15,<0.4.0" +[[package]] +name = "langchain-huggingface" +version = "0.1.2" +description = "An integration package connecting Hugging Face and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_huggingface-0.1.2-py3-none-any.whl", hash = "sha256:7de5cfcae32bfb6a99c084fc16176f02583a4f8d94febb6bb45bed5b34699174"}, + {file = "langchain_huggingface-0.1.2.tar.gz", hash = "sha256:4a66d5c449298fd353bd84c9ed01f9bf4303bf2e4ffce14aab8c55c584eee57c"}, +] + +[package.dependencies] +huggingface-hub = ">=0.23.0" +langchain-core = ">=0.3.15,<0.4.0" +sentence-transformers = ">=2.6.0" +tokenizers = ">=0.19.1" +transformers = ">=4.39.0" + [[package]] name = "langchain-mistralai" version = "0.2.2" @@ -2149,6 +2204,21 @@ langchain-core = ">=0.3.15,<0.4.0" pydantic = ">=2,<3" tokenizers = ">=0.15.1,<1" +[[package]] +name = "langchain-ollama" +version = "0.2.0" +description = "An integration package connecting Ollama and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_ollama-0.2.0-py3-none-any.whl", hash = "sha256:aa5b794599652494a07fd27b22784854480cd4c793f0db5e81ebeccc2affd135"}, + {file = "langchain_ollama-0.2.0.tar.gz", hash = "sha256:250ad9f3edce1a0ca16e4fad19f783ac728d7d76888ba952c462cd9f680353f7"}, +] + +[package.dependencies] +langchain-core = ">=0.3.0,<0.4.0" +ollama = ">=0.3.0,<1" + [[package]] name = "langchain-openai" version = "0.2.10" @@ -2990,6 +3060,150 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] +[[package]] +name = "nvidia-cublas-cu12" +version = "12.1.3.1" +description = "CUBLAS native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, + {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.1.105" +description = "CUDA profiling tools runtime libs." 
+optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, + {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.1.105" +description = "NVRTC native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, + {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.1.105" +description = "CUDA Runtime native Libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, + {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.1.0.70" +description = "cuDNN runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, + {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, +] + +[package.dependencies] +nvidia-cublas-cu12 = "*" + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.0.2.54" +description = "CUFFT native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, + {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.2.106" +description = "CURAND native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, + {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.4.5.107" +description = "CUDA solver native runtime libraries" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, + {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, +] + +[package.dependencies] +nvidia-cublas-cu12 = "*" +nvidia-cusparse-cu12 = "*" +nvidia-nvjitlink-cu12 = "*" + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.1.0.106" +description = "CUSPARSE native runtime libraries" +optional = false 
+python-versions = ">=3" +files = [ + {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, + {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, +] + +[package.dependencies] +nvidia-nvjitlink-cu12 = "*" + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.20.5" +description = "NVIDIA Collective Communication Library (NCCL) Runtime" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, + {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.6.85" +description = "Nvidia JIT LTO Library" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:eedc36df9e88b682efe4309aa16b5b4e78c2407eac59e8c10a6a47535164369a"}, + {file = "nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf4eaa7d4b6b543ffd69d6abfb11efdeb2db48270d94dfd3a452c24150829e41"}, + {file = "nvidia_nvjitlink_cu12-12.6.85-py3-none-win_amd64.whl", hash = "sha256:e61120e52ed675747825cdd16febc6a0730537451d867ee58bee3853b1b13d1c"}, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.1.105" +description = "NVIDIA Tools Extension" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, + {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -3006,6 +3220,21 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "ollama" +version = "0.4.1" +description = "The official Python client for Ollama." 
+optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "ollama-0.4.1-py3-none-any.whl", hash = "sha256:b6fb16aa5a3652633e1716acb12cf2f44aa18beb229329e46a0302734822dfad"}, + {file = "ollama-0.4.1.tar.gz", hash = "sha256:8c6b5e7ff80dd0b8692150b03359f60bac7ca162b088c604069409142a684ad3"}, +] + +[package.dependencies] +httpx = ">=0.27.0,<0.28.0" +pydantic = ">=2.9.0,<3.0.0" + [[package]] name = "onnxruntime" version = "1.17.3" @@ -3315,7 +3544,7 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, @@ -3347,8 +3576,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -4163,7 +4392,7 @@ cli = ["click (>=5.0)"] name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" -optional = true +optional = false python-versions = "*" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, @@ -4461,6 +4690,257 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] +[[package]] +name = "safetensors" +version = "0.4.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"}, + {file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5"}, + {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b"}, + {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6"}, + {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163"}, + {file = "safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc"}, + {file = 
"safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1"}, + {file = "safetensors-0.4.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:21f848d7aebd5954f92538552d6d75f7c1b4500f51664078b5b49720d180e47c"}, + {file = "safetensors-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb07000b19d41e35eecef9a454f31a8b4718a185293f0d0b1c4b61d6e4487971"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09dedf7c2fda934ee68143202acff6e9e8eb0ddeeb4cfc24182bef999efa9f42"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59b77e4b7a708988d84f26de3ebead61ef1659c73dcbc9946c18f3b1786d2688"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d3bc83e14d67adc2e9387e511097f254bd1b43c3020440e708858c684cbac68"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39371fc551c1072976073ab258c3119395294cf49cdc1f8476794627de3130df"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c19feda32b931cae0acd42748a670bdf56bee6476a046af20181ad3fee4090"}, + {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a659467495de201e2f282063808a41170448c78bada1e62707b07a27b05e6943"}, + {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bad5e4b2476949bcd638a89f71b6916fa9a5cae5c1ae7eede337aca2100435c0"}, + {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a3a315a6d0054bc6889a17f5668a73f94f7fe55121ff59e0a199e3519c08565f"}, + {file = "safetensors-0.4.5-cp311-none-win32.whl", hash = "sha256:a01e232e6d3d5cf8b1667bc3b657a77bdab73f0743c26c1d3c5dd7ce86bd3a92"}, + {file = "safetensors-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:cbd39cae1ad3e3ef6f63a6f07296b080c951f24cec60188378e43d3713000c04"}, + {file = "safetensors-0.4.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:473300314e026bd1043cef391bb16a8689453363381561b8a3e443870937cc1e"}, + {file = "safetensors-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:801183a0f76dc647f51a2d9141ad341f9665602a7899a693207a82fb102cc53e"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1524b54246e422ad6fb6aea1ac71edeeb77666efa67230e1faf6999df9b2e27f"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3139098e3e8b2ad7afbca96d30ad29157b50c90861084e69fcb80dec7430461"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65573dc35be9059770808e276b017256fa30058802c29e1038eb1c00028502ea"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd33da8e9407559f8779c82a0448e2133737f922d71f884da27184549416bfed"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3685ce7ed036f916316b567152482b7e959dc754fcc4a8342333d222e05f407c"}, + {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dde2bf390d25f67908278d6f5d59e46211ef98e44108727084d4637ee70ab4f1"}, + {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7469d70d3de970b1698d47c11ebbf296a308702cbaae7fcb993944751cf985f4"}, + {file = 
"safetensors-0.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a6ba28118636a130ccbb968bc33d4684c48678695dba2590169d5ab03a45646"}, + {file = "safetensors-0.4.5-cp312-none-win32.whl", hash = "sha256:c859c7ed90b0047f58ee27751c8e56951452ed36a67afee1b0a87847d065eec6"}, + {file = "safetensors-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:b5a8810ad6a6f933fff6c276eae92c1da217b39b4d8b1bc1c0b8af2d270dc532"}, + {file = "safetensors-0.4.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:25e5f8e2e92a74f05b4ca55686234c32aac19927903792b30ee6d7bd5653d54e"}, + {file = "safetensors-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81efb124b58af39fcd684254c645e35692fea81c51627259cdf6d67ff4458916"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:585f1703a518b437f5103aa9cf70e9bd437cb78eea9c51024329e4fb8a3e3679"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b99fbf72e3faf0b2f5f16e5e3458b93b7d0a83984fe8d5364c60aa169f2da89"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b17b299ca9966ca983ecda1c0791a3f07f9ca6ab5ded8ef3d283fff45f6bcd5f"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76ded72f69209c9780fdb23ea89e56d35c54ae6abcdec67ccb22af8e696e449a"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2783956926303dcfeb1de91a4d1204cd4089ab441e622e7caee0642281109db3"}, + {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d94581aab8c6b204def4d7320f07534d6ee34cd4855688004a4354e63b639a35"}, + {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:67e1e7cb8678bb1b37ac48ec0df04faf689e2f4e9e81e566b5c63d9f23748523"}, + {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbd280b07e6054ea68b0cb4b16ad9703e7d63cd6890f577cb98acc5354780142"}, + {file = "safetensors-0.4.5-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:77d9b228da8374c7262046a36c1f656ba32a93df6cc51cd4453af932011e77f1"}, + {file = "safetensors-0.4.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:500cac01d50b301ab7bb192353317035011c5ceeef0fca652f9f43c000bb7f8d"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75331c0c746f03158ded32465b7d0b0e24c5a22121743662a2393439c43a45cf"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670e95fe34e0d591d0529e5e59fd9d3d72bc77b1444fcaa14dccda4f36b5a38b"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:098923e2574ff237c517d6e840acada8e5b311cb1fa226019105ed82e9c3b62f"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ca0902d2648775089fa6a0c8fc9e6390c5f8ee576517d33f9261656f851e3f"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f0032bedc869c56f8d26259fe39cd21c5199cd57f2228d817a0e23e8370af25"}, + {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4b15f51b4f8f2a512341d9ce3475cacc19c5fdfc5db1f0e19449e75f95c7dc8"}, + {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f6594d130d0ad933d885c6a7b75c5183cb0e8450f799b80a39eae2b8508955eb"}, + {file = 
"safetensors-0.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:60c828a27e852ded2c85fc0f87bf1ec20e464c5cd4d56ff0e0711855cc2e17f8"}, + {file = "safetensors-0.4.5-cp37-none-win32.whl", hash = "sha256:6d3de65718b86c3eeaa8b73a9c3d123f9307a96bbd7be9698e21e76a56443af5"}, + {file = "safetensors-0.4.5-cp37-none-win_amd64.whl", hash = "sha256:5a2d68a523a4cefd791156a4174189a4114cf0bf9c50ceb89f261600f3b2b81a"}, + {file = "safetensors-0.4.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e7a97058f96340850da0601a3309f3d29d6191b0702b2da201e54c6e3e44ccf0"}, + {file = "safetensors-0.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:63bfd425e25f5c733f572e2246e08a1c38bd6f2e027d3f7c87e2e43f228d1345"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3664ac565d0e809b0b929dae7ccd74e4d3273cd0c6d1220c6430035befb678e"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:313514b0b9b73ff4ddfb4edd71860696dbe3c1c9dc4d5cc13dbd74da283d2cbf"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31fa33ee326f750a2f2134a6174773c281d9a266ccd000bd4686d8021f1f3dac"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09566792588d77b68abe53754c9f1308fadd35c9f87be939e22c623eaacbed6b"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309aaec9b66cbf07ad3a2e5cb8a03205663324fea024ba391594423d0f00d9fe"}, + {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53946c5813b8f9e26103c5efff4a931cc45d874f45229edd68557ffb35ffb9f8"}, + {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:868f9df9e99ad1e7f38c52194063a982bc88fedc7d05096f4f8160403aaf4bd6"}, + {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9cc9449bd0b0bc538bd5e268221f0c5590bc5c14c1934a6ae359d44410dc68c4"}, + {file = "safetensors-0.4.5-cp38-none-win32.whl", hash = "sha256:83c4f13a9e687335c3928f615cd63a37e3f8ef072a3f2a0599fa09f863fb06a2"}, + {file = "safetensors-0.4.5-cp38-none-win_amd64.whl", hash = "sha256:b98d40a2ffa560653f6274e15b27b3544e8e3713a44627ce268f419f35c49478"}, + {file = "safetensors-0.4.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cf727bb1281d66699bef5683b04d98c894a2803442c490a8d45cd365abfbdeb2"}, + {file = "safetensors-0.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96f1d038c827cdc552d97e71f522e1049fef0542be575421f7684756a748e457"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:139fbee92570ecea774e6344fee908907db79646d00b12c535f66bc78bd5ea2c"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c36302c1c69eebb383775a89645a32b9d266878fab619819ce660309d6176c9b"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d641f5b8149ea98deb5ffcf604d764aad1de38a8285f86771ce1abf8e74c4891"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b4db6a61d968de73722b858038c616a1bebd4a86abe2688e46ca0cc2d17558f2"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b75a616e02f21b6f1d5785b20cecbab5e2bd3f6358a90e8925b813d557666ec1"}, + {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:788ee7d04cc0e0e7f944c52ff05f52a4415b312f5efd2ee66389fb7685ee030c"}, + {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87bc42bd04fd9ca31396d3ca0433db0be1411b6b53ac5a32b7845a85d01ffc2e"}, + {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4037676c86365a721a8c9510323a51861d703b399b78a6b4486a54a65a975fca"}, + {file = "safetensors-0.4.5-cp39-none-win32.whl", hash = "sha256:1500418454529d0ed5c1564bda376c4ddff43f30fce9517d9bee7bcce5a8ef50"}, + {file = "safetensors-0.4.5-cp39-none-win_amd64.whl", hash = "sha256:9d1a94b9d793ed8fe35ab6d5cea28d540a46559bafc6aae98f30ee0867000cab"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab"}, + {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7db3006a4915151ce1913652e907cdede299b974641a83fbc092102ac41b644"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f68bf99ea970960a237f416ea394e266e0361895753df06e3e06e6ea7907d98b"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8158938cf3324172df024da511839d373c40fbfaa83e9abf467174b2910d7b4c"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:540ce6c4bf6b58cb0fd93fa5f143bc0ee341c93bb4f9287ccd92cf898cc1b0dd"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bfeaa1a699c6b9ed514bd15e6a91e74738b71125a9292159e3d6b7f0a53d2cde"}, + {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:01c8f00da537af711979e1b42a69a8ec9e1d7112f208e0e9b8a35d2c381085ef"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0dd565f83b30f2ca79b5d35748d0d99dd4b3454f80e03dfb41f0038e3bdf180"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:023b6e5facda76989f4cba95a861b7e656b87e225f61811065d5c501f78cdb3f"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9633b663393d5796f0b60249549371e392b75a0b955c07e9c6f8708a87fc841f"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78dd8adfb48716233c45f676d6e48534d34b4bceb50162c13d1f0bdf6f78590a"}, + {file = 
"safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e8deb16c4321d61ae72533b8451ec4a9af8656d1c61ff81aa49f966406e4b68"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:52452fa5999dc50c4decaf0c53aa28371f7f1e0fe5c2dd9129059fbe1e1599c7"}, + {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d5f23198821e227cfc52d50fa989813513db381255c6d100927b012f0cfec63d"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f4beb84b6073b1247a773141a6331117e35d07134b3bb0383003f39971d414bb"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68814d599d25ed2fdd045ed54d370d1d03cf35e02dce56de44c651f828fb9b7b"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b6453c54c57c1781292c46593f8a37254b8b99004c68d6c3ce229688931a22"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adaa9c6dead67e2dd90d634f89131e43162012479d86e25618e821a03d1eb1dc"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73e7d408e9012cd17511b382b43547850969c7979efc2bc353f317abaf23c84c"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:775409ce0fcc58b10773fdb4221ed1eb007de10fe7adbdf8f5e8a56096b6f0bc"}, + {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:834001bed193e4440c4a3950a31059523ee5090605c907c66808664c932b549c"}, + {file = "safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310"}, +] + +[package.extras] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] +mlx = ["mlx (>=0.0.9)"] +numpy = ["numpy (>=1.21.6)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] +quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] + +[[package]] +name = "scikit-learn" +version = "1.5.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8"}, + {file = "scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1"}, + {file = "scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, + {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7"}, + {file = "scikit_learn-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe"}, + {file = "scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", 
"pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, + {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, + {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, + {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, + {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, + {file = 
"scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, + {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, + {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, + {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, + {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, + {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "sentence-transformers" +version = "3.3.1" +description = "State-of-the-Art Text Embeddings" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sentence_transformers-3.3.1-py3-none-any.whl", hash = "sha256:abffcc79dab37b7d18d21a26d5914223dd42239cfe18cb5e111c66c54b658ae7"}, + {file = "sentence_transformers-3.3.1.tar.gz", hash = "sha256:9635dbfb11c6b01d036b9cfcee29f7716ab64cf2407ad9f403a2e607da2ac48b"}, +] + +[package.dependencies] +huggingface-hub = ">=0.20.0" +Pillow = "*" +scikit-learn = "*" +scipy = "*" +torch = ">=1.11.0" +tqdm = "*" +transformers = ">=4.41.0,<5.0.0" + +[package.extras] +dev = ["accelerate (>=0.20.3)", "datasets", "peft", "pre-commit", "pytest", "pytest-cov"] +onnx = ["optimum[onnxruntime] (>=1.23.1)"] +onnx-gpu = ["optimum[onnxruntime-gpu] (>=1.23.1)"] +openvino = ["optimum-intel[openvino] (>=1.20.0)"] +train = ["accelerate (>=0.20.3)", "datasets"] + [[package]] name = "setuptools" version = "70.0.0" @@ -4707,6 +5187,20 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = 
"sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + [[package]] name = "tenacity" version = "8.2.3" @@ -4721,6 +5215,17 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] +[[package]] +name = "threadpoolctl" +version = "3.5.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, +] + [[package]] name = "tiktoken" version = "0.7.0" @@ -4901,6 +5406,60 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "torch" +version = "2.4.1" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"}, + {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"}, + {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"}, + {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"}, + {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"}, + {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"}, + {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"}, + {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"}, + {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"}, + {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"}, + {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"}, + {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"}, + {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"}, + {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"}, + {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"}, + {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"}, + {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"}, + {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"}, + {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"}, + {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"}, +] + +[package.dependencies] +filelock = "*" +fsspec = "*" +jinja2 = "*" +networkx = "*" +nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +setuptools = "*" +sympy = "*" +triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} +typing-extensions = ">=4.8.0" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] +optree = ["optree (>=0.11.0)"] + [[package]] name = "tqdm" version = "4.66.3" @@ -4921,6 +5480,96 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "transformers" +version = "4.44.2" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "transformers-4.44.2-py3-none-any.whl", hash = "sha256:1c02c65e7bfa5e52a634aff3da52138b583fc6f263c1f28d547dc144ba3d412d"}, + {file = "transformers-4.44.2.tar.gz", hash = "sha256:36aa17cc92ee154058e426d951684a2dab48751b35b49437896f898931270826"}, +] + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.23.2,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +safetensors = ">=0.4.1" +tokenizers = ">=0.19,<0.20" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", 
"onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +benchmark = ["optimum-benchmark (>=0.2.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", 
"onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.6,<0.15.0)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +ruff = ["ruff (==0.5.1)"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm (<=0.9.16)"] +tokenizers = ["tokenizers (>=0.19,<0.20)"] +torch = ["accelerate (>=0.21.0)", "torch"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] +video = ["av (==9.2.0)", "decord (==0.6.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] + +[[package]] +name = "triton" +version = "3.0.0" +description = "A language and compiler for custom Deep Learning operations" 
+optional = false +python-versions = "*" +files = [ + {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, + {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, + {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, + {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, + {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, +] + +[package.dependencies] +filelock = "*" + +[package.extras] +build = ["cmake (>=3.20)", "lit"] +tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"] +tutorials = ["matplotlib", "pandas", "tabulate"] + [[package]] name = "typer" version = "0.12.3" @@ -4993,7 +5642,7 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" -optional = true +optional = false python-versions = ">=2" files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, @@ -5521,4 +6170,4 @@ openai = ["openai"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "040fa7015be6e2e55953668016b062a2e1354eb84bc1591b37b1cdaed5063b29" +content-hash = "c469f3758721fffd50853ba0c5bd8c03edb4fdd37e9804260353c7872d904266" diff --git a/pyproject.toml b/pyproject.toml index e88b7986f..a86b48537 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,9 @@ packaging = ">=23.2,<25.0" idna = "^3.7" anyio = "^4.4.0" requests = "^2" +langchain-ollama = "^0.2.0" +langchain-cohere = "^0.3.3" +langchain-huggingface = "^0.1.2" [tool.poetry.group.dev.dependencies] pytest = ">=7.4,<9.0" diff --git a/tests/test_extract_model.py b/tests/test_extract_model.py index 190e703f3..c1f6961f5 100644 --- a/tests/test_extract_model.py +++ b/tests/test_extract_model.py @@ -4,7 +4,6 @@ from langchain_anthropic import ChatAnthropic from langchain_google_vertexai import ChatVertexAI from langchain_groq import ChatGroq -from langchain_aws import ChatBedrock import pytest from langfuse.callback import CallbackHandler @@ -13,26 +12,22 @@ from langchain_community.chat_models import ( - ChatOpenAI, - AzureChatOpenAI, ChatTongyi, ChatCohere, - BedrockChat, - ChatOllama, ) from langchain_community.chat_models.fake import FakeMessagesListChatModel -from langchain_community.llms.anthropic import Anthropic -from langchain_community.llms.bedrock import Bedrock -from langchain_community.llms.cohere import Cohere -from langchain_community.llms.huggingface_hub import HuggingFaceHub -from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline +from langchain_anthropic import Anthropic +from langchain_aws import BedrockLLM, ChatBedrock +from langchain_huggingface.llms import HuggingFacePipeline from langchain_community.llms.textgen import TextGen -from langchain_community.llms.openai import ( +from langchain_openai import ( AzureOpenAI, OpenAI, + ChatOpenAI, + AzureChatOpenAI, ) -from langchain_community.llms.ollama import Ollama +from langchain_ollama import 
OllamaLLM, ChatOllama from langchain_mistralai.chat_models import ChatMistralAI from langchain.schema.messages import HumanMessage @@ -49,7 +44,7 @@ temperature=0, model_name="mixtral-8x7b-32768", groq_api_key="something" ), ), - ("llama3", Ollama(model="llama3")), + ("llama3", OllamaLLM(model="llama3")), ("llama3", ChatOllama(model="llama3")), ( None, @@ -68,24 +63,15 @@ "claude-3-sonnet-20240229", ChatAnthropic(model="claude-3-sonnet-20240229"), ), - ("anthropic", Anthropic()), - ("anthropic", Anthropic()), + ("claude-2", Anthropic()), + ("claude-2", Anthropic()), ("command", ChatCohere(model="command", cohere_api_key="command")), - ("command", Cohere(model="command", cohere_api_key="command")), (None, ChatTongyi(dashscope_api_key="dash")), ( "amazon.titan-tg1-large", - BedrockChat( - model_id="amazon.titan-tg1-large", - region_name="us-east-1", - client=MagicMock(), - ), - ), - ( - "amazon.titan-tg1-large", - Bedrock( - model_id="amazon.titan-tg1-large", - region_name="us-east-1", + BedrockLLM( + model="amazon.titan-tg1-large", + region="us-east-1", client=MagicMock(), ), ), @@ -99,33 +85,12 @@ ), ( "claude-1", - BedrockChat( - model_id="claude-1", - region_name="us-east-1", + BedrockLLM( + model="claude-1", + region="us-east-1", client=MagicMock(), ), ), - ( - "claude-1", - Bedrock( - model_id="claude-1", - region_name="us-east-1", - client=MagicMock(), - ), - ), - ( - "HuggingFaceH4/zephyr-7b-beta", - HuggingFaceHub( - repo_id="HuggingFaceH4/zephyr-7b-beta", - task="text-generation", - model_kwargs={ - "max_new_tokens": 512, - "top_k": 30, - "temperature": 0.1, - "repetition_penalty": 1.03, - }, - ), - ), ], ) def test_models(expected_model: str, model: Any): @@ -138,12 +103,13 @@ def test_models(expected_model: str, model: Any): @pytest.mark.parametrize( "expected_model,model", [ - ("gpt-3.5-turbo", ChatOpenAI()), + ("gpt-3.5-turbo-0125", ChatOpenAI()), ("gpt-3.5-turbo-instruct", OpenAI()), ( "gpt-3.5-turbo", AzureChatOpenAI( openai_api_version="2023-05-15", + model="gpt-3.5-turbo", azure_deployment="your-deployment-name", azure_endpoint="https://your-endpoint-name.azurewebsites.net", ), @@ -152,6 +118,7 @@ def test_models(expected_model: str, model: Any): "gpt-3.5-turbo-instruct", AzureOpenAI( openai_api_version="2023-05-15", + model="gpt-3.5-turbo-instruct", azure_deployment="your-deployment-name", azure_endpoint="https://your-endpoint-name.azurewebsites.net", ), From e3bda0665937fa3ac318b9f2230f301f1a300b7d Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 18:20:25 +0100 Subject: [PATCH 08/15] remove azureopenai --- tests/test_extract_model.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/test_extract_model.py b/tests/test_extract_model.py index c1f6961f5..9ef8d02fd 100644 --- a/tests/test_extract_model.py +++ b/tests/test_extract_model.py @@ -22,7 +22,6 @@ from langchain_huggingface.llms import HuggingFacePipeline from langchain_community.llms.textgen import TextGen from langchain_openai import ( - AzureOpenAI, OpenAI, ChatOpenAI, AzureChatOpenAI, @@ -114,15 +113,6 @@ def test_models(expected_model: str, model: Any): azure_endpoint="https://your-endpoint-name.azurewebsites.net", ), ), - ( - "gpt-3.5-turbo-instruct", - AzureOpenAI( - openai_api_version="2023-05-15", - model="gpt-3.5-turbo-instruct", - azure_deployment="your-deployment-name", - azure_endpoint="https://your-endpoint-name.azurewebsites.net", - ), - ), ( "gpt2", HuggingFacePipeline( From 183d7e0592ad0b932272c4fe1bb884d392cbef41 Mon 
Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 18:33:16 +0100 Subject: [PATCH 09/15] fix deps --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a86b48537..e7a2d073a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,9 +19,6 @@ packaging = ">=23.2,<25.0" idna = "^3.7" anyio = "^4.4.0" requests = "^2" -langchain-ollama = "^0.2.0" -langchain-cohere = "^0.3.3" -langchain-huggingface = "^0.1.2" [tool.poetry.group.dev.dependencies] pytest = ">=7.4,<9.0" @@ -52,6 +49,9 @@ bson = "^0.5.10" langchain-anthropic = ">=0.1.4,<0.4" langchain-groq = ">=0.1.3,<0.3" langchain-aws = ">=0.1.3,<0.3" +langchain-ollama = "^0.2.0" +langchain-cohere = "^0.3.3" +langchain-huggingface = "^0.1.2" langchain-community = ">=0.2.14,<0.4" [tool.poetry.group.docs.dependencies] From b692766008645180ade8e63f843b2443f83dbf52 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Wed, 27 Nov 2024 18:59:47 +0100 Subject: [PATCH 10/15] fix lockfile --- poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index bbff7cfd1..423fff287 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6170,4 +6170,4 @@ openai = ["openai"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "c469f3758721fffd50853ba0c5bd8c03edb4fdd37e9804260353c7872d904266" +content-hash = "8b8856c03769a90bb7168f1d7c3bcabe85ce6d4209b5fe375345f8eaa973a422" From 4fc8e029796bdcab3d51215c3da7e18378c45165 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Thu, 28 Nov 2024 10:55:24 +0100 Subject: [PATCH 11/15] fix langchain tests --- .github/workflows/ci.yml | 2 +- langfuse/callback/langchain.py | 3 ++ tests/test_langchain_integration.py | 78 ++++++++++++++++------------- 3 files changed, 46 insertions(+), 37 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9ac604fb9..15242f3d8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,7 +41,7 @@ jobs: - "3.9" - "3.10" - "3.11" - - "3.12" + name: Test on Python version ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 diff --git a/langfuse/callback/langchain.py b/langfuse/callback/langchain.py index a14f24c6d..7a4f90942 100644 --- a/langfuse/callback/langchain.py +++ b/langfuse/callback/langchain.py @@ -768,6 +768,9 @@ def _parse_model_parameters(kwargs): for key, value in { "temperature": kwargs["invocation_params"].get("temperature"), "max_tokens": kwargs["invocation_params"].get("max_tokens"), + "max_completion_tokens": kwargs["invocation_params"].get( + "max_completion_tokens" + ), "top_p": kwargs["invocation_params"].get("top_p"), "frequency_penalty": kwargs["invocation_params"].get( "frequency_penalty" diff --git a/tests/test_langchain_integration.py b/tests/test_langchain_integration.py index 2abc515e7..f3d7b6980 100644 --- a/tests/test_langchain_integration.py +++ b/tests/test_langchain_integration.py @@ -20,7 +20,9 @@ def _is_streaming_response(response): def test_stream_chat_models(model_name): name = f"test_stream_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(streaming=True, max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI( + streaming=True, max_completion_tokens=300, tags=tags, model=model_name + ) callback = CallbackHandler(trace_name=name) res = model.stream( [{"role": "user", "content": "return 
the exact phrase - This is a test!"}], @@ -43,10 +45,10 @@ def test_stream_chat_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert generation.metadata["tags"] == tags assert generation.usage.output is not None @@ -89,7 +91,7 @@ def test_stream_completions_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -111,7 +113,7 @@ def test_stream_completions_models(model_name): def test_invoke_chat_models(model_name): name = f"test_invoke_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) _ = model.invoke( [{"role": "user", "content": "return the exact phrase - This is a test!"}], @@ -129,10 +131,10 @@ def test_invoke_chat_models(model_name): assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert generation.metadata["tags"] == tags assert generation.usage.output is not None @@ -171,7 +173,7 @@ def test_invoke_in_completions_models(model_name): assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -212,7 +214,7 @@ def test_batch_in_completions_models(model_name): assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -232,7 +234,7 @@ def test_batch_in_completions_models(model_name): def test_batch_in_chat_models(model_name): name = f"test_batch_in_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) input1 = "Who is the first president of America ?" input2 = "Who is the first president of Ireland ?" 
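The hunks in this patch make two systematic changes to the chat-model tests: `max_tokens` becomes `max_completion_tokens` (which `_parse_model_parameters` in the callback now also captures), and strict equality on `generation.model` becomes a substring check, since OpenAI can resolve an alias such as `gpt-3.5-turbo` to a dated snapshot like `gpt-3.5-turbo-0125`. A minimal sketch of the resulting usage; the trace name is hypothetical and OpenAI credentials are assumed to be configured:

```python
from langchain_openai import ChatOpenAI

from langfuse.callback import CallbackHandler

# Newer OpenAI chat models take `max_completion_tokens` in place of the
# legacy `max_tokens`; the callback records it under model_parameters.
model = ChatOpenAI(model="gpt-3.5-turbo", max_completion_tokens=300)
callback = CallbackHandler(trace_name="demo-trace")  # hypothetical trace name

model.invoke(
    [{"role": "user", "content": "return the exact phrase - This is a test!"}],
    config={"callbacks": [callback]},
)
callback.flush()

# The API may report a dated snapshot (e.g. "gpt-3.5-turbo-0125"), so the
# tests assert `model_name in generation.model` rather than strict equality.
```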
@@ -251,10 +253,10 @@ def test_batch_in_chat_models(model_name): assert len(trace.observations) == 1 assert trace.name == name for generation in generationList: - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert generation.metadata["tags"] == tags assert generation.usage.output is not None @@ -273,7 +275,9 @@ def test_batch_in_chat_models(model_name): async def test_astream_chat_models(model_name): name = f"test_astream_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(streaming=True, max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI( + streaming=True, max_completion_tokens=300, tags=tags, model=model_name + ) callback = CallbackHandler(trace_name=name) res = model.astream( [{"role": "user", "content": "Who was the first American president "}], @@ -295,10 +299,10 @@ async def test_astream_chat_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. assert len(trace.observations) == 1 - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert generation.metadata["tags"] == tags assert generation.usage.output is not None @@ -343,7 +347,7 @@ async def test_astream_completions_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. assert len(trace.observations) == 1 assert test_phrase in "".join(response_str) - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -366,7 +370,7 @@ async def test_astream_completions_models(model_name): async def test_ainvoke_chat_models(model_name): name = f"test_ainvoke_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) test_phrase = "This is a test!" 
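    # The awaited ainvoke call below runs through the async callback path; the
    # assertions that follow expect max_completion_tokens to be recorded there.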
_ = await model.ainvoke( @@ -385,10 +389,10 @@ async def test_ainvoke_chat_models(model_name): assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert generation.metadata["tags"] == tags assert generation.usage.output is not None @@ -427,7 +431,7 @@ async def test_ainvoke_in_completions_models(model_name): assert len(trace.observations) == 1 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -452,7 +456,7 @@ async def test_ainvoke_in_completions_models(model_name): def test_chains_batch_in_chat_models(model_name): name = f"test_chains_batch_in_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) prompt = ChatPromptTemplate.from_template("tell me a joke about {foo} in 300 words") @@ -473,10 +477,10 @@ def test_chains_batch_in_chat_models(model_name): assert len(trace.observations) == 4 for generation in generationList: assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert all(x in generation.metadata["tags"] for x in tags) assert generation.usage.output is not None @@ -514,7 +518,7 @@ def test_chains_batch_in_completions_models(model_name): assert len(trace.observations) == 4 for generation in generationList: assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -536,7 +540,7 @@ def test_chains_batch_in_completions_models(model_name): async def test_chains_abatch_in_chat_models(model_name): name = f"test_chains_abatch_in_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) prompt = ChatPromptTemplate.from_template("tell me a joke about {foo} in 300 words") @@ -557,10 +561,10 @@ async def test_chains_abatch_in_chat_models(model_name): assert len(trace.observations) == 4 for generation in generationList: assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert all(x in generation.metadata["tags"] for 
x in tags) assert generation.usage.output is not None @@ -596,7 +600,7 @@ async def test_chains_abatch_in_completions_models(model_name): assert len(trace.observations) == 4 for generation in generationList: assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -618,7 +622,7 @@ async def test_chains_abatch_in_completions_models(model_name): async def test_chains_ainvoke_chat_models(model_name): name = f"test_chains_ainvoke_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI(max_completion_tokens=300, tags=tags, model=model_name) callback = CallbackHandler(trace_name=name) prompt1 = ChatPromptTemplate.from_template( """You are a skilled writer tasked with crafting an engaging introduction for a blog post on the following topic: @@ -643,10 +647,10 @@ async def test_chains_ainvoke_chat_models(model_name): assert trace.input == {"topic": "The Impact of Climate Change"} assert trace.output == res for generation in generationList: - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert all(x in generation.metadata["tags"] for x in tags) assert generation.usage.output is not None @@ -692,7 +696,7 @@ async def test_chains_ainvoke_completions_models(model_name): assert trace.output == res assert len(trace.observations) == 4 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None @@ -714,7 +718,9 @@ async def test_chains_ainvoke_completions_models(model_name): async def test_chains_astream_chat_models(model_name): name = f"test_chains_astream_chat_models-{create_uuid()}" tags = ["Hello", "world"] - model = ChatOpenAI(streaming=True, max_tokens=300, tags=tags, model=model_name) + model = ChatOpenAI( + streaming=True, max_completion_tokens=300, tags=tags, model=model_name + ) callback = CallbackHandler(trace_name=name) prompt1 = PromptTemplate.from_template( """You are a skilled writer tasked with crafting an engaging introduction for a blog post on the following topic: @@ -745,10 +751,10 @@ async def test_chains_astream_chat_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. assert len(trace.observations) == 4 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None - assert generation.model_parameters.get("max_tokens") is not None + assert generation.model_parameters.get("max_completion_tokens") is not None assert generation.model_parameters.get("temperature") is not None assert all(x in generation.metadata["tags"] for x in tags) assert generation.usage.output is not None @@ -800,7 +806,7 @@ async def test_chains_astream_completions_models(model_name): assert len(response_str) > 1 # To check there are more than one chunk. 
assert len(trace.observations) == 4 assert trace.name == name - assert generation.model == model_name + assert model_name in generation.model assert generation.input is not None assert generation.output is not None assert generation.model_parameters.get("max_tokens") is not None From cc9185c6902ee83ffc56004756712382131c8537 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Thu, 28 Nov 2024 11:05:20 +0100 Subject: [PATCH 12/15] fix tests --- tests/test_extract_model_langchain_openai.py | 21 ++++++++++---------- tests/test_json.py | 3 +-- tests/test_langchain.py | 3 ++- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/tests/test_extract_model_langchain_openai.py b/tests/test_extract_model_langchain_openai.py index 64c5dc0ac..5e2a644ec 100644 --- a/tests/test_extract_model_langchain_openai.py +++ b/tests/test_extract_model_langchain_openai.py @@ -1,4 +1,4 @@ -from langchain_openai import AzureChatOpenAI, AzureOpenAI, ChatOpenAI, OpenAI +from langchain_openai import AzureChatOpenAI, ChatOpenAI, OpenAI import pytest from langfuse.callback import CallbackHandler @@ -18,14 +18,15 @@ azure_endpoint="https://your-endpoint-name.azurewebsites.net", ), ), - ( - "gpt-3.5-turbo-instruct", - AzureOpenAI( - openai_api_version="2023-05-15", - azure_deployment="your-deployment-name", - azure_endpoint="https://your-endpoint-name.azurewebsites.net", - ), - ), + # # default model is now set as azure-deployment since langchain > 0.3.0 + # ( + # "gpt-3.5-turbo-instruct", + # AzureOpenAI( + # openai_api_version="2023-05-15", + # azure_deployment="your-deployment-name", + # azure_endpoint="https://your-endpoint-name.azurewebsites.net", + # ), + # ), ], ) def test_entire_llm_call_using_langchain_openai(expected_model, model): @@ -46,4 +47,4 @@ def test_entire_llm_call_using_langchain_openai(expected_model, model): assert len(trace.observations) == 1 generation = list(filter(lambda o: o.type == "GENERATION", trace.observations))[0] - assert generation.model == expected_model + assert expected_model in generation.model diff --git a/tests/test_json.py b/tests/test_json.py index 91d442354..e9bd887d3 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -33,10 +33,9 @@ def test_json_encoder(): result = json.dumps(obj, cls=EventSerializer) assert ( - '{"foo": "bar", "bar": "2021-01-01T00:00:00Z", "date": "2024-01-01", "messages": [{"lc": 1, "type": "constructor", "id":' + '{"foo": "bar", "bar": "2021-01-01T00:00:00Z", "date": "2024-01-01", "messages": [{"content": "I love programming!", "additional_kwargs": {}, "response_metadata": {}, "type": "human", "name": null, "id": null, "example": false}]}' in result ) - assert "HumanMessage" in result def test_json_decoder_pydantic(): diff --git a/tests/test_langchain.py b/tests/test_langchain.py index 3ed864fe6..83cca374b 100644 --- a/tests/test_langchain.py +++ b/tests/test_langchain.py @@ -1259,7 +1259,8 @@ def record_dog(name: str, color: str, fav_food: OptionalFavFood) -> str: "function_call": { "arguments": '{\n "name": "Henry",\n "color": "brown",\n "fav_food": {\n "food": null\n }\n}', "name": "record_dog", - } + }, + "refusal": None, }, } assert generation.usage.total is not None From 05c76488a24454d42e53320562e9da0781b5beaa Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Thu, 28 Nov 2024 11:29:55 +0100 Subject: [PATCH 13/15] fix test test_entire_llm_call_using_langchain_openai --- tests/test_extract_model_langchain_openai.py | 1 + 1 file changed, 1
insertion(+) diff --git a/tests/test_extract_model_langchain_openai.py b/tests/test_extract_model_langchain_openai.py index 5e2a644ec..cf9c8ba25 100644 --- a/tests/test_extract_model_langchain_openai.py +++ b/tests/test_extract_model_langchain_openai.py @@ -14,6 +14,7 @@ "gpt-3.5-turbo", AzureChatOpenAI( openai_api_version="2023-05-15", + model="gpt-3.5-turbo", azure_deployment="your-deployment-name", azure_endpoint="https://your-endpoint-name.azurewebsites.net", ), From dc3baff5a4dbc02e5831ece29c140ffc57c82710 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Thu, 28 Nov 2024 11:42:35 +0100 Subject: [PATCH 14/15] fix env var LANGFUSE_RETURN_FROM_CLICKHOUSE_ONLY --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 15242f3d8..54644d5ce 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,7 +90,7 @@ jobs: rm -rf .env echo "::group::Run server" - TELEMETRY_ENABLED=false CLICKHOUSE_CLUSTER_ENABLED=false LANGFUSE_ASYNC_INGESTION_PROCESSING=false LANGFUSE_ASYNC_CLICKHOUSE_INGESTION_PROCESSING=false LANGFUSE_READ_FROM_POSTGRES_ONLY=true LANGFUSE_READ_FROM_CLICKHOUSE_ONLY=false docker compose -f docker-compose.v3preview.yml up -d + TELEMETRY_ENABLED=false CLICKHOUSE_CLUSTER_ENABLED=false LANGFUSE_ASYNC_INGESTION_PROCESSING=false LANGFUSE_ASYNC_CLICKHOUSE_INGESTION_PROCESSING=false LANGFUSE_READ_FROM_POSTGRES_ONLY=true LANGFUSE_RETURN_FROM_CLICKHOUSE_ONLY=false docker compose -f docker-compose.v3preview.yml up -d echo "::endgroup::" # Add this step to check the health of the container From 18bf194fe0fc3b1e875a18959a6da4ce7b441775 Mon Sep 17 00:00:00 2001 From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:40:54 +0100 Subject: [PATCH 15/15] fix ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 54644d5ce..65e0f8bd6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -90,7 +90,7 @@ jobs: rm -rf .env echo "::group::Run server" - TELEMETRY_ENABLED=false CLICKHOUSE_CLUSTER_ENABLED=false LANGFUSE_ASYNC_INGESTION_PROCESSING=false LANGFUSE_ASYNC_CLICKHOUSE_INGESTION_PROCESSING=false LANGFUSE_READ_FROM_POSTGRES_ONLY=true LANGFUSE_RETURN_FROM_CLICKHOUSE_ONLY=false docker compose -f docker-compose.v3preview.yml up -d + TELEMETRY_ENABLED=false CLICKHOUSE_CLUSTER_ENABLED=false LANGFUSE_ASYNC_INGESTION_PROCESSING=false LANGFUSE_ASYNC_CLICKHOUSE_INGESTION_PROCESSING=false LANGFUSE_READ_FROM_POSTGRES_ONLY=true LANGFUSE_RETURN_FROM_CLICKHOUSE=false docker compose -f docker-compose.v3preview.yml up -d echo "::endgroup::" # Add this step to check the health of the container