Commit c0d8d75

feat: enhance LangfuseManager to include metadata in ChatPromptTemplate for tracing integration
1 parent a93ded9 commit c0d8d75

1 file changed: +7 -5 lines changed

libs/rag-core-lib/src/rag_core_lib/impl/langfuse_manager/langfuse_manager.py

Lines changed: 7 additions & 5 deletions
@@ -8,6 +8,7 @@
 from langfuse import Langfuse
 from langfuse.api.resources.commons.errors.not_found_error import NotFoundError
 from langfuse.model import TextPromptClient
+from langchain_core.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate

 logger = logging.getLogger(__name__)

@@ -160,7 +161,6 @@ def get_base_prompt(self, name: str) -> ChatPromptTemplate:
             chat_messages = langfuse_prompt.get_langchain_prompt()

             # Convert Langfuse chat messages to LangChain message templates
-            from langchain_core.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate

             langchain_messages = []
             for message in chat_messages:
@@ -185,10 +185,12 @@ def get_base_prompt(self, name: str) -> ChatPromptTemplate:
                 # Add more role types as needed

             # Create ChatPromptTemplate from messages with metadata for tracing
-            return ChatPromptTemplate.from_messages(
-                langchain_messages,
-                metadata={"langfuse_prompt": langfuse_prompt}
-            )
+            chat_prompt_template = ChatPromptTemplate.from_messages(langchain_messages)
+
+            # Add Langfuse metadata for tracing integration
+            chat_prompt_template.metadata = {"langfuse_prompt": langfuse_prompt}
+
+            return chat_prompt_template
         else:
             # Use fallback ChatPromptTemplate
             logger.error("Could not retrieve prompt template from langfuse. Using fallback value.")
