diff --git a/langfuse/openai.py b/langfuse/openai.py
index 628d17959..5e91edd8e 100644
--- a/langfuse/openai.py
+++ b/langfuse/openai.py
@@ -84,6 +84,7 @@ def _get_call_details(self, result, api_resource_class, **kwargs):

         completion = None
         model = kwargs.get("model", None) if isinstance(result, Exception) else result.model
+        user_id = kwargs.get("user_id", None)
         usage = None if isinstance(result, Exception) or result.usage is None else LlmUsage(**result.usage)
         endTime = datetime.now()

@@ -112,7 +113,7 @@ def _get_call_details(self, result, api_resource_class, **kwargs):
     def _log_result(self, call_details):
         generation = InitialGeneration(**call_details)
         if call_details["trace_id"] is not None:
-            self.langfuse.trace(CreateTrace(id=call_details["trace_id"]))
+            self.langfuse.trace(CreateTrace(id=call_details["trace_id"], user_id=call_details["user_id"]))
         self.langfuse.generation(generation)

     def langfuse_modified(self, func, api_resource_class):
diff --git a/tests/test_openai.py b/tests/test_openai.py
index 1fe20be56..deec60534 100644
--- a/tests/test_openai.py
+++ b/tests/test_openai.py
@@ -18,6 +18,7 @@ def test_openai_chat_completion():
         messages=[{"role": "user", "content": "1 + 1 = "}],
         temperature=0,
         metadata={"someKey": "someResponse"},
+        user_id="someUser"
     )

     openai.flush_langfuse()
@@ -32,6 +33,7 @@ def test_openai_chat_completion():
     assert generation.data[0].input == [{"content": "1 + 1 = ", "role": "user"}]
     assert generation.data[0].type == "GENERATION"
     assert generation.data[0].model == "gpt-3.5-turbo-0613"
+    assert generation.data[0].user_id == "someUser"
     assert generation.data[0].start_time is not None
    assert generation.data[0].end_time is not None
     assert generation.data[0].start_time < generation.data[0].end_time