langfuse.model
@private
"""@private"""

from abc import ABC, abstractmethod
from typing import Optional, TypedDict, Any, Dict, Union, List
import re

from langfuse.api.resources.commons.types.dataset import (
    Dataset,  # noqa: F401
)

# these imports need to stay here, otherwise imports from our clients wont work
from langfuse.api.resources.commons.types.dataset_item import DatasetItem  # noqa: F401
from langfuse.api.resources.commons.types.dataset_run import DatasetRun  # noqa: F401
from langfuse.api.resources.commons.types.dataset_status import (  # noqa: F401
    DatasetStatus,
)
from langfuse.api.resources.commons.types.map_value import MapValue  # noqa: F401
from langfuse.api.resources.commons.types.observation import Observation  # noqa: F401
from langfuse.api.resources.commons.types.trace_with_full_details import (  # noqa: F401
    TraceWithFullDetails,
)
from langfuse.api.resources.dataset_items.types.create_dataset_item_request import (  # noqa: F401
    CreateDatasetItemRequest,
)
from langfuse.api.resources.dataset_run_items.types.create_dataset_run_item_request import (  # noqa: F401
    CreateDatasetRunItemRequest,
)
from langfuse.api.resources.datasets.types.create_dataset_request import (  # noqa: F401
    CreateDatasetRequest,
)
from langfuse.api.resources.prompts import Prompt, ChatMessage, Prompt_Chat, Prompt_Text


class ModelUsage(TypedDict):
    """Token counts and costs reported for a model call; all fields optional."""

    unit: Optional[str]
    input: Optional[int]
    output: Optional[int]
    total: Optional[int]
    input_cost: Optional[float]
    output_cost: Optional[float]
    total_cost: Optional[float]


class ChatMessageDict(TypedDict):
    """Minimal chat message: a role string plus the message content."""

    role: str
    content: str


class BasePromptClient(ABC):
    """Abstract base for Langfuse prompt clients.

    Holds the metadata shared by text and chat prompts (name, version,
    config, labels, tags) and provides the mustache-style template helpers
    used by both concrete clients.
    """

    name: str
    version: int
    config: Dict[str, Any]
    labels: List[str]
    tags: List[str]

    def __init__(self, prompt: Prompt, is_fallback: bool = False):
        self.name = prompt.name
        self.version = prompt.version
        self.config = prompt.config
        self.labels = prompt.labels
        self.tags = prompt.tags
        # True when this client was built from a local fallback rather than
        # a prompt fetched from the Langfuse API.
        self.is_fallback = is_fallback

    @abstractmethod
    def compile(self, **kwargs) -> Union[str, List[ChatMessage]]:
        """Fill the prompt template(s) with the given variables."""

    @abstractmethod
    def __eq__(self, other):
        pass

    @abstractmethod
    def get_langchain_prompt(self):
        """Convert the prompt into a Langchain-compatible template."""

    @staticmethod
    def _get_langchain_prompt_string(content: str):
        # Rewrite mustache-style {{ var }} placeholders to Langchain's {var}.
        return re.sub(r"{{\s*(\w+)\s*}}", r"{\g<1>}", content)

    @staticmethod
    def _compile_template_string(
        content: str, data: Optional[Dict[str, Any]] = None
    ) -> str:
        """Substitute ``{{variable}}`` placeholders in *content* with values from *data*.

        Variables missing from *data* are left verbatim (braces included);
        values that are ``None`` render as the empty string.
        """
        # Avoid a mutable default argument; treat None as "no variables".
        if data is None:
            data = {}

        opening = "{{"
        closing = "}}"

        result_list = []
        curr_idx = 0

        while curr_idx < len(content):
            # Find the next opening tag
            var_start = content.find(opening, curr_idx)

            if var_start == -1:
                result_list.append(content[curr_idx:])
                break

            # Find the next closing tag
            var_end = content.find(closing, var_start)

            if var_end == -1:
                result_list.append(content[curr_idx:])
                break

            # Append the content before the variable
            result_list.append(content[curr_idx:var_start])

            # Extract the variable name
            variable_name = content[var_start + len(opening) : var_end].strip()

            # Append the variable value
            if variable_name in data:
                result_list.append(
                    str(data[variable_name]) if data[variable_name] is not None else ""
                )
            else:
                result_list.append(content[var_start : var_end + len(closing)])

            curr_idx = var_end + len(closing)

        return "".join(result_list)


class TextPromptClient(BasePromptClient):
    """Prompt client for single-string (text) prompts."""

    def __init__(self, prompt: Prompt_Text, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = prompt.prompt

    def compile(self, **kwargs) -> str:
        """Return the prompt string with ``{{variable}}`` placeholders substituted."""
        return self._compile_template_string(self.prompt, kwargs)

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                and self.prompt == other.prompt
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self, **kwargs) -> str:
        """Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

        This method adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        kwargs: Optional keyword arguments to precompile the template string. Variables that match
        the provided keyword arguments will be precompiled. Remaining variables must then be
        handled by Langchain's prompt template.

        Returns:
            str: The string that can be plugged into Langchain's PromptTemplate.
        """
        prompt = (
            self._compile_template_string(self.prompt, kwargs)
            if kwargs
            else self.prompt
        )

        return self._get_langchain_prompt_string(prompt)


class ChatPromptClient(BasePromptClient):
    """Prompt client for chat prompts (lists of role/content messages)."""

    def __init__(self, prompt: Prompt_Chat, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = [
            ChatMessageDict(role=p.role, content=p.content) for p in prompt.prompt
        ]

    def compile(self, **kwargs) -> List[ChatMessageDict]:
        """Return the messages with ``{{variable}}`` placeholders substituted in each content."""
        return [
            ChatMessageDict(
                content=self._compile_template_string(chat_message["content"], kwargs),
                role=chat_message["role"],
            )
            for chat_message in self.prompt
        ]

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                # zip() truncates to the shorter list, so without this length
                # check a prompt that is a strict prefix of another would
                # (incorrectly) compare equal.
                and len(self.prompt) == len(other.prompt)
                and all(
                    m1["role"] == m2["role"] and m1["content"] == m2["content"]
                    for m1, m2 in zip(self.prompt, other.prompt)
                )
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self, **kwargs):
        """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        kwargs: Optional keyword arguments to precompile the template string. Variables that match
        the provided keyword arguments will be precompiled. Remaining variables must then be
        handled by Langchain's prompt template.

        Returns:
            List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
        """
        return [
            (
                msg["role"],
                self._get_langchain_prompt_string(
                    self._compile_template_string(msg["content"], kwargs)
                    if kwargs
                    else msg["content"]
                ),
            )
            for msg in self.prompt
        ]


PromptClient = Union[TextPromptClient, ChatPromptClient]
class BasePromptClient(ABC):
    """Abstract base for Langfuse prompt clients.

    Holds the metadata shared by text and chat prompts (name, version,
    config, labels, tags) and provides the mustache-style template helpers
    used by both concrete clients.
    """

    name: str
    version: int
    config: Dict[str, Any]
    labels: List[str]
    tags: List[str]

    def __init__(self, prompt: Prompt, is_fallback: bool = False):
        self.name = prompt.name
        self.version = prompt.version
        self.config = prompt.config
        self.labels = prompt.labels
        self.tags = prompt.tags
        # True when this client was built from a local fallback rather than
        # a prompt fetched from the Langfuse API.
        self.is_fallback = is_fallback

    @abstractmethod
    def compile(self, **kwargs) -> Union[str, List[ChatMessage]]:
        """Fill the prompt template(s) with the given variables."""

    @abstractmethod
    def __eq__(self, other):
        pass

    @abstractmethod
    def get_langchain_prompt(self):
        """Convert the prompt into a Langchain-compatible template."""

    @staticmethod
    def _get_langchain_prompt_string(content: str):
        # Rewrite mustache-style {{ var }} placeholders to Langchain's {var}.
        return re.sub(r"{{\s*(\w+)\s*}}", r"{\g<1>}", content)

    @staticmethod
    def _compile_template_string(
        content: str, data: Optional[Dict[str, Any]] = None
    ) -> str:
        """Substitute ``{{variable}}`` placeholders in *content* with values from *data*.

        Variables missing from *data* are left verbatim (braces included);
        values that are ``None`` render as the empty string.
        """
        # Avoid a mutable default argument; treat None as "no variables".
        if data is None:
            data = {}

        opening = "{{"
        closing = "}}"

        result_list = []
        curr_idx = 0

        while curr_idx < len(content):
            # Find the next opening tag
            var_start = content.find(opening, curr_idx)

            if var_start == -1:
                result_list.append(content[curr_idx:])
                break

            # Find the next closing tag
            var_end = content.find(closing, var_start)

            if var_end == -1:
                result_list.append(content[curr_idx:])
                break

            # Append the content before the variable
            result_list.append(content[curr_idx:var_start])

            # Extract the variable name
            variable_name = content[var_start + len(opening) : var_end].strip()

            # Append the variable value
            if variable_name in data:
                result_list.append(
                    str(data[variable_name]) if data[variable_name] is not None else ""
                )
            else:
                result_list.append(content[var_start : var_end + len(closing)])

            curr_idx = var_end + len(closing)

        return "".join(result_list)
Abstract base class for Langfuse prompt clients: stores shared prompt metadata (name, version, config, labels, tags) and provides the mustache-style template helpers used by the concrete text and chat clients.
class TextPromptClient(BasePromptClient):
    """Prompt client for single-string (text) prompts."""

    def __init__(self, prompt: Prompt_Text, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = prompt.prompt

    def compile(self, **kwargs) -> str:
        """Return the prompt string with ``{{variable}}`` placeholders substituted."""
        return self._compile_template_string(self.prompt, kwargs)

    def __eq__(self, other):
        # Guard clause: different class means not equal.
        if not isinstance(self, other.__class__):
            return False
        compared_attrs = ("name", "version", "prompt", "config")
        return all(
            getattr(self, attr) == getattr(other, attr) for attr in compared_attrs
        )

    def get_langchain_prompt(self, **kwargs) -> str:
        """Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

        This method adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        kwargs: Optional keyword arguments to precompile the template string. Variables that match
        the provided keyword arguments will be precompiled. Remaining variables must then be
        handled by Langchain's prompt template.

        Returns:
            str: The string that can be plugged into Langchain's PromptTemplate.
        """
        template = self.prompt
        if kwargs:
            # Precompile any variables supplied by the caller; the rest stay
            # as placeholders for Langchain to fill in.
            template = self._compile_template_string(template, kwargs)
        return self._get_langchain_prompt_string(template)
Prompt client for single-string (text) prompts; compiles one template string and converts it for use with Langchain's PromptTemplate.
def get_langchain_prompt(self, **kwargs) -> str:
    """Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

    This method adapts the mustache-style double curly braces {{variable}} used in Langfuse
    to the single curly brace {variable} format expected by Langchain.

    kwargs: Optional keyword arguments to precompile the template string. Variables that match
    the provided keyword arguments will be precompiled. Remaining variables must then be
    handled by Langchain's prompt template.

    Returns:
        str: The string that can be plugged into Langchain's PromptTemplate.
    """
    # Precompile caller-supplied variables first; leave the rest for Langchain.
    template = self.prompt
    if kwargs:
        template = self._compile_template_string(template, kwargs)
    return self._get_langchain_prompt_string(template)
Convert Langfuse prompt into string compatible with Langchain PromptTemplate.
This method adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.
kwargs: Optional keyword arguments to precompile the template string. Variables that match the provided keyword arguments will be precompiled. Remaining variables must then be handled by Langchain's prompt template.
Returns:
str: The string that can be plugged into Langchain's PromptTemplate.
Inherited Members
class ChatPromptClient(BasePromptClient):
    """Prompt client for chat prompts (lists of role/content messages)."""

    def __init__(self, prompt: Prompt_Chat, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = [
            ChatMessageDict(role=p.role, content=p.content) for p in prompt.prompt
        ]

    def compile(self, **kwargs) -> List[ChatMessageDict]:
        """Return the messages with ``{{variable}}`` placeholders substituted in each content."""
        return [
            ChatMessageDict(
                content=self._compile_template_string(chat_message["content"], kwargs),
                role=chat_message["role"],
            )
            for chat_message in self.prompt
        ]

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                # zip() truncates to the shorter list, so without this length
                # check a prompt that is a strict prefix of another would
                # (incorrectly) compare equal.
                and len(self.prompt) == len(other.prompt)
                and all(
                    m1["role"] == m2["role"] and m1["content"] == m2["content"]
                    for m1, m2 in zip(self.prompt, other.prompt)
                )
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self, **kwargs):
        """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        kwargs: Optional keyword arguments to precompile the template string. Variables that match
        the provided keyword arguments will be precompiled. Remaining variables must then be
        handled by Langchain's prompt template.

        Returns:
            List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
        """
        return [
            (
                msg["role"],
                self._get_langchain_prompt_string(
                    self._compile_template_string(msg["content"], kwargs)
                    if kwargs
                    else msg["content"]
                ),
            )
            for msg in self.prompt
        ]
Prompt client for chat prompts; compiles the {{variable}} placeholders in each message's content and converts the messages for use with Langchain's ChatPromptTemplate.
def get_langchain_prompt(self, **kwargs):
    """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

    It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
    to the single curly brace {variable} format expected by Langchain.

    kwargs: Optional keyword arguments to precompile the template string. Variables that match
    the provided keyword arguments will be precompiled. Remaining variables must then be
    handled by Langchain's prompt template.

    Returns:
        List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
    """
    messages = []
    for msg in self.prompt:
        content = msg["content"]
        if kwargs:
            # Precompile caller-supplied variables; the rest stay for Langchain.
            content = self._compile_template_string(content, kwargs)
        messages.append((msg["role"], self._get_langchain_prompt_string(content)))
    return messages
Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.
It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.
kwargs: Optional keyword arguments to precompile the template string. Variables that match the provided keyword arguments will be precompiled. Remaining variables must then be handled by Langchain's prompt template.
Returns:
List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.