diff --git a/langfuse/_task_manager/ingestion_consumer.py b/langfuse/_task_manager/ingestion_consumer.py
index 16d426de0..987353409 100644
--- a/langfuse/_task_manager/ingestion_consumer.py
+++ b/langfuse/_task_manager/ingestion_consumer.py
@@ -211,10 +211,12 @@ def _truncate_item_in_place(
         # if item does not have body or input/output fields, drop the event
         if "body" not in event or (
-            "input" not in event["body"] and "output" not in event["body"]
+            "input" not in event["body"]
+            and "output" not in event["body"]
+            and "metadata" not in event["body"]
         ):
             self._log.warning(
-                "Item does not have body or input/output fields, dropping item."
+                "Item does not have body or input/output/metadata fields, dropping item."
             )
             self._ingestion_queue.task_done()

             return 0
diff --git a/langfuse/callback/langchain.py b/langfuse/callback/langchain.py
index 5b41fa1a0..46445cc6b 100644
--- a/langfuse/callback/langchain.py
+++ b/langfuse/callback/langchain.py
@@ -56,10 +56,12 @@
 try:
     from langgraph.errors import GraphBubbleUp
+
     CONTROL_FLOW_EXCEPTION_TYPES.add(GraphBubbleUp)
 except ImportError:
     pass
+

 class LangchainCallbackHandler(
     LangchainBaseCallbackHandler, LangfuseBaseCallbackHandler
 ):
@@ -1137,9 +1139,11 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]):
                 and "modality" in item
                 and "token_count" in item
             ):
-                usage_model[f"input_modality_{item['modality']}"] = item[
-                    "token_count"
-                ]
+                value = item["token_count"]
+                usage_model[f"input_modality_{item['modality']}"] = value
+
+                if "input" in usage_model:
+                    usage_model["input"] = max(0, usage_model["input"] - value)

     # Vertex AI
     if "candidates_tokens_details" in usage_model and isinstance(
@@ -1153,9 +1157,11 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]):
                 and "modality" in item
                 and "token_count" in item
             ):
-                usage_model[f"output_modality_{item['modality']}"] = item[
-                    "token_count"
-                ]
+                value = item["token_count"]
+                usage_model[f"output_modality_{item['modality']}"] = value
+
+                if "output" in usage_model:
+                    usage_model["output"] = max(0, usage_model["output"] - value)

     # Vertex AI
     if "cache_tokens_details" in usage_model and isinstance(
@@ -1169,9 +1175,21 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]):
                 and "modality" in item
                 and "token_count" in item
             ):
-                usage_model[f"cached_modality_{item['modality']}"] = item[
-                    "token_count"
-                ]
+                value = item["token_count"]
+                usage_model[f"cached_modality_{item['modality']}"] = value
+
+                if "input" in usage_model:
+                    usage_model["input"] = max(0, usage_model["input"] - value)
+
+    usage_model = (
+        {
+            k: v
+            for k, v in usage_model.items()
+            if v is not None and not isinstance(v, str)
+        }
+        if isinstance(usage_model, dict)
+        else usage_model
+    )

     return usage_model if usage_model else None
diff --git a/langfuse/extract_model.py b/langfuse/extract_model.py
index 192522846..cd8d58ca8 100644
--- a/langfuse/extract_model.py
+++ b/langfuse/extract_model.py
@@ -53,12 +53,25 @@ def _extract_model_name(
             return kwargs.get("invocation_params").get("model_name")

         deployment_name = None
-        if serialized.get("kwargs").get("openai_api_version"):
-            deployment_name = serialized.get("kwargs").get("deployment_version")
         deployment_version = None
+
+        if serialized.get("kwargs").get("openai_api_version"):
+            deployment_version = serialized.get("kwargs").get("deployment_version")
+
         if serialized.get("kwargs").get("deployment_name"):
             deployment_name = serialized.get("kwargs").get("deployment_name")
-        return deployment_name + "-" + deployment_version
+
+        if not isinstance(deployment_name, str):
+            return None
+
+        if not isinstance(deployment_version, str):
+            return deployment_name
+
+        return (
+            f"{deployment_name}-{deployment_version}"
+            if deployment_version and deployment_version not in deployment_name
+            else deployment_name
+        )

     # Third, for some models, we are unable to extract the model by a path in an object. Langfuse provides us with a string representation of the model objects
     # We use regex to extract the model from the repr string
@@ -106,7 +119,10 @@

 def _extract_model_from_repr_by_pattern(
-    id: str, serialized: Optional[Dict[str, Any]], pattern: str, default: Optional[str] = None
+    id: str,
+    serialized: Optional[Dict[str, Any]],
+    pattern: str,
+    default: Optional[str] = None,
 ):
     if serialized is None:
         return None
diff --git a/langfuse/openai.py b/langfuse/openai.py
index f27fedcff..d0c7a815b 100644
--- a/langfuse/openai.py
+++ b/langfuse/openai.py
@@ -62,6 +62,7 @@ class OpenAiDefinition:
     type: str
     sync: bool
     min_version: Optional[str] = None
+    max_version: Optional[str] = None


 OPENAI_METHODS_V0 = [
@@ -118,6 +119,7 @@ class OpenAiDefinition:
         type="chat",
         sync=True,
         min_version="1.50.0",
+        max_version="1.92.0",
     ),
     OpenAiDefinition(
         module="openai.resources.beta.chat.completions",
@@ -126,6 +128,23 @@ class OpenAiDefinition:
         type="chat",
         sync=False,
         min_version="1.50.0",
+        max_version="1.92.0",
+    ),
+    OpenAiDefinition(
+        module="openai.resources.chat.completions",
+        object="Completions",
+        method="parse",
+        type="chat",
+        sync=True,
+        min_version="1.92.0",
+    ),
+    OpenAiDefinition(
+        module="openai.resources.chat.completions",
+        object="AsyncCompletions",
+        method="parse",
+        type="chat",
+        sync=False,
+        min_version="1.92.0",
     ),
     OpenAiDefinition(
         module="openai.resources.responses",
@@ -876,6 +895,11 @@ def register_tracing(self):
             ) < Version(resource.min_version):
                 continue

+            if resource.max_version is not None and Version(
+                openai.__version__
+            ) >= Version(resource.max_version):
+                continue
+
             wrap_function_wrapper(
                 resource.module,
                 f"{resource.object}.{resource.method}",
diff --git a/langfuse/version.py b/langfuse/version.py
index 63a799b34..768cb44e5 100644
--- a/langfuse/version.py
+++ b/langfuse/version.py
@@ -1,3 +1,3 @@
 """@private"""

-__version__ = "2.60.5"
+__version__ = "2.60.10"
diff --git a/pyproject.toml b/pyproject.toml
index fcdac460e..a1e2902f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "langfuse"
-version = "2.60.5"
+version = "2.60.10"
 description = "A client library for accessing langfuse"
 authors = ["langfuse "]
 license = "MIT"
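Taken hunk by hunk, the behavioral changes above can be illustrated with small standalone sketches. First, the widened drop rule in `_truncate_item_in_place`: an oversized event now survives truncation when its body carries any of `input`, `output`, or `metadata`, so events whose bulk sits in metadata are truncated rather than dropped outright. A minimal sketch of that predicate (`is_truncatable` is a hypothetical name, not SDK API):

```python
def is_truncatable(event: dict) -> bool:
    """Mirror of the drop condition: an event with no body, or with none of the
    truncatable payload fields, cannot be shrunk and is dropped with a warning."""
    body = event.get("body")
    if not isinstance(body, dict):
        return False
    return any(field in body for field in ("input", "output", "metadata"))


assert is_truncatable({"body": {"metadata": {"large": "blob"}}})  # previously dropped
assert is_truncatable({"body": {"input": "...", "output": "..."}})
assert not is_truncatable({"body": {"name": "span-without-payload"}})  # still dropped
```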
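The three `_parse_usage_model` hunks share one pattern: each Vertex AI modality entry is broken out into its own usage key, and its token count is subtracted from the matching aggregate (`input` for prompt and cached tokens, `output` for candidate tokens) so the same tokens are not reported twice; a final comprehension then strips `None` and string values, which are not valid usage numbers. A sketch of the input side under those assumptions (`split_input_modalities` is a hypothetical standalone name):

```python
from typing import Any, Dict


def split_input_modalities(usage_model: Dict[str, Any]) -> Dict[str, Any]:
    """Break prompt modality token counts into their own keys and subtract
    them from the aggregate "input" count, clamping at zero."""
    details = usage_model.get("prompt_tokens_details")
    if isinstance(details, list):
        for item in details:
            if isinstance(item, dict) and "modality" in item and "token_count" in item:
                value = item["token_count"]
                usage_model[f"input_modality_{item['modality']}"] = value
                if "input" in usage_model:
                    usage_model["input"] = max(0, usage_model["input"] - value)
        # popped here only to keep the example output short
        usage_model.pop("prompt_tokens_details")
    # mirror the diff's final cleanup: drop None and string values
    return {
        k: v for k, v in usage_model.items() if v is not None and not isinstance(v, str)
    }


usage = {
    "input": 120,
    "output": 40,
    "total": 160,
    "prompt_tokens_details": [
        {"modality": "AUDIO", "token_count": 100},
        {"modality": "TEXT", "token_count": 20},
    ],
}
# input becomes 120 - 100 - 20 = 0; the modality keys carry the detail instead
print(split_input_modalities(usage))
# {'input': 0, 'output': 40, 'total': 160, 'input_modality_AUDIO': 100, 'input_modality_TEXT': 20}
```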
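In `extract_model.py`, the old Azure OpenAI fallback had two visible bugs that the rewrite addresses: it stored the API-version lookup result under `deployment_name` (while reading the `deployment_version` key), and it evaluated `deployment_name + "-" + deployment_version` even when either value was `None`, which raises `TypeError`. The new logic also avoids appending the version when the deployment name already contains it. A sketch with a hypothetical helper name:

```python
from typing import Optional


def combine_deployment(
    deployment_name: Optional[str], deployment_version: Optional[str]
) -> Optional[str]:
    if not isinstance(deployment_name, str):
        return None  # old code raised TypeError here
    if not isinstance(deployment_version, str):
        return deployment_name
    # avoid "gpt-4o-0613-0613" when the name already embeds the version
    return (
        f"{deployment_name}-{deployment_version}"
        if deployment_version and deployment_version not in deployment_name
        else deployment_name
    )


assert combine_deployment(None, "2023-05-15") is None
assert combine_deployment("gpt-4o", None) == "gpt-4o"
assert combine_deployment("gpt-4o", "0613") == "gpt-4o-0613"
assert combine_deployment("gpt-4o-0613", "0613") == "gpt-4o-0613"  # no duplication
```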
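Finally, `openai.py` gains a `max_version` counterpart to `min_version`, so each method is patched only while the installed `openai` version lies in `[min_version, max_version)`: the `beta.chat.completions` location of `parse` is wrapped up to (but excluding) 1.92.0, where the method moved to the stable `chat.completions` resource. A sketch of the selection logic, assuming the SDK's existing `packaging` dependency (`should_wrap` is a hypothetical name):

```python
from dataclasses import dataclass
from typing import Optional

from packaging.version import Version


@dataclass
class OpenAiDefinition:
    module: str
    object: str
    method: str
    type: str
    sync: bool
    min_version: Optional[str] = None
    max_version: Optional[str] = None


def should_wrap(resource: OpenAiDefinition, installed: str) -> bool:
    """Replicates the two guards in register_tracing: skip the resource when the
    installed version is below min_version or at/above max_version."""
    if resource.min_version is not None and Version(installed) < Version(resource.min_version):
        return False
    if resource.max_version is not None and Version(installed) >= Version(resource.max_version):
        return False
    return True


beta_parse = OpenAiDefinition(
    module="openai.resources.beta.chat.completions",
    object="Completions",
    method="parse",
    type="chat",
    sync=True,
    min_version="1.50.0",
    max_version="1.92.0",
)
stable_parse = OpenAiDefinition(
    module="openai.resources.chat.completions",
    object="Completions",
    method="parse",
    type="chat",
    sync=True,
    min_version="1.92.0",
)

# exactly one of the two definitions matches any given openai version
assert should_wrap(beta_parse, "1.91.0") and not should_wrap(stable_parse, "1.91.0")
assert not should_wrap(beta_parse, "1.92.0") and should_wrap(stable_parse, "1.92.0")
```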