langfuse.model
@private
1"""@private""" 2 3from abc import ABC, abstractmethod 4from typing import Optional, TypedDict, Any, Dict, Union, List 5import re 6 7from langfuse.api.resources.commons.types.dataset import ( 8 Dataset, # noqa: F401 9) 10 11# these imports need to stay here, otherwise imports from our clients wont work 12from langfuse.api.resources.commons.types.dataset_item import DatasetItem # noqa: F401 13 14# noqa: F401 15from langfuse.api.resources.commons.types.dataset_run import DatasetRun # noqa: F401 16 17# noqa: F401 18from langfuse.api.resources.commons.types.dataset_status import ( # noqa: F401 19 DatasetStatus, 20) 21from langfuse.api.resources.commons.types.map_value import MapValue # noqa: F401 22from langfuse.api.resources.commons.types.observation import Observation # noqa: F401 23from langfuse.api.resources.commons.types.trace_with_full_details import ( # noqa: F401 24 TraceWithFullDetails, 25) 26 27# noqa: F401 28from langfuse.api.resources.dataset_items.types.create_dataset_item_request import ( # noqa: F401 29 CreateDatasetItemRequest, 30) 31from langfuse.api.resources.dataset_run_items.types.create_dataset_run_item_request import ( # noqa: F401 32 CreateDatasetRunItemRequest, 33) 34 35# noqa: F401 36from langfuse.api.resources.datasets.types.create_dataset_request import ( # noqa: F401 37 CreateDatasetRequest, 38) 39from langfuse.api.resources.prompts import Prompt, ChatMessage, Prompt_Chat, Prompt_Text 40 41 42class ModelUsage(TypedDict): 43 unit: Optional[str] 44 input: Optional[int] 45 output: Optional[int] 46 total: Optional[int] 47 input_cost: Optional[float] 48 output_cost: Optional[float] 49 total_cost: Optional[float] 50 51 52class ChatMessageDict(TypedDict): 53 role: str 54 content: str 55 56 57class BasePromptClient(ABC): 58 name: str 59 version: int 60 config: Dict[str, Any] 61 labels: List[str] 62 tags: List[str] 63 64 def __init__(self, prompt: Prompt, is_fallback: bool = False): 65 self.name = prompt.name 66 self.version = prompt.version 67 
self.config = prompt.config 68 self.labels = prompt.labels 69 self.tags = prompt.tags 70 self.is_fallback = is_fallback 71 72 @abstractmethod 73 def compile(self, **kwargs) -> Union[str, List[ChatMessage]]: 74 pass 75 76 @abstractmethod 77 def __eq__(self, other): 78 pass 79 80 @abstractmethod 81 def get_langchain_prompt(self): 82 pass 83 84 @staticmethod 85 def _get_langchain_prompt_string(content: str): 86 return re.sub(r"{{\s*(\w+)\s*}}", r"{\g<1>}", content) 87 88 @staticmethod 89 def _compile_template_string(content: str, data: Dict[str, Any] = {}) -> str: 90 opening = "{{" 91 closing = "}}" 92 93 result_list = [] 94 curr_idx = 0 95 96 while curr_idx < len(content): 97 # Find the next opening tag 98 var_start = content.find(opening, curr_idx) 99 100 if var_start == -1: 101 result_list.append(content[curr_idx:]) 102 break 103 104 # Find the next closing tag 105 var_end = content.find(closing, var_start) 106 107 if var_end == -1: 108 result_list.append(content[curr_idx:]) 109 break 110 111 # Append the content before the variable 112 result_list.append(content[curr_idx:var_start]) 113 114 # Extract the variable name 115 variable_name = content[var_start + len(opening) : var_end].strip() 116 117 # Append the variable value 118 if variable_name in data: 119 result_list.append( 120 str(data[variable_name]) if data[variable_name] is not None else "" 121 ) 122 else: 123 result_list.append(content[var_start : var_end + len(closing)]) 124 125 curr_idx = var_end + len(closing) 126 127 return "".join(result_list) 128 129 130class TextPromptClient(BasePromptClient): 131 def __init__(self, prompt: Prompt_Text, is_fallback: bool = False): 132 super().__init__(prompt, is_fallback) 133 self.prompt = prompt.prompt 134 135 def compile(self, **kwargs) -> str: 136 return self._compile_template_string(self.prompt, kwargs) 137 138 def __eq__(self, other): 139 if isinstance(self, other.__class__): 140 return ( 141 self.name == other.name 142 and self.version == other.version 143 and 
self.prompt == other.prompt 144 and self.config == other.config 145 ) 146 147 return False 148 149 def get_langchain_prompt(self): 150 """Convert Langfuse prompt into string compatible with Langchain PromptTemplate. 151 152 It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse 153 to the single curly brace {variable} format expected by Langchain. 154 155 Returns: 156 str: The string that can be plugged into Langchain's PromptTemplate. 157 """ 158 return self._get_langchain_prompt_string(self.prompt) 159 160 161class ChatPromptClient(BasePromptClient): 162 def __init__(self, prompt: Prompt_Chat, is_fallback: bool = False): 163 super().__init__(prompt, is_fallback) 164 self.prompt = [ 165 ChatMessageDict(role=p.role, content=p.content) for p in prompt.prompt 166 ] 167 168 def compile(self, **kwargs) -> List[ChatMessageDict]: 169 return [ 170 ChatMessageDict( 171 content=self._compile_template_string(chat_message["content"], kwargs), 172 role=chat_message["role"], 173 ) 174 for chat_message in self.prompt 175 ] 176 177 def __eq__(self, other): 178 if isinstance(self, other.__class__): 179 return ( 180 self.name == other.name 181 and self.version == other.version 182 and all( 183 m1["role"] == m2["role"] and m1["content"] == m2["content"] 184 for m1, m2 in zip(self.prompt, other.prompt) 185 ) 186 and self.config == other.config 187 ) 188 189 return False 190 191 def get_langchain_prompt(self): 192 """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate. 193 194 It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse 195 to the single curly brace {variable} format expected by Langchain. 196 197 Returns: 198 List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple. 
199 """ 200 return [ 201 (msg["role"], self._get_langchain_prompt_string(msg["content"])) 202 for msg in self.prompt 203 ] 204 205 206PromptClient = Union[TextPromptClient, ChatPromptClient]
class BasePromptClient(ABC):
    """Abstract base for Langfuse prompt clients.

    Holds the metadata shared by text and chat prompts (name, version,
    config, labels, tags) and provides the mustache-style template
    compilation used by both subclasses.
    """

    name: str
    version: int
    config: Dict[str, Any]
    labels: List[str]
    tags: List[str]

    def __init__(self, prompt: Prompt, is_fallback: bool = False):
        self.name = prompt.name
        self.version = prompt.version
        self.config = prompt.config
        self.labels = prompt.labels
        self.tags = prompt.tags
        # True when this client was built from a local fallback prompt rather
        # than a prompt fetched from the Langfuse API.
        self.is_fallback = is_fallback

    @abstractmethod
    def compile(self, **kwargs) -> Union[str, List[ChatMessage]]:
        """Fill the template variables and return the compiled prompt."""
        pass

    @abstractmethod
    def __eq__(self, other):
        pass

    @abstractmethod
    def get_langchain_prompt(self):
        """Return a representation usable by Langchain prompt templates."""
        pass

    @staticmethod
    def _get_langchain_prompt_string(content: str):
        # Convert mustache-style {{var}} placeholders to Langchain's {var}.
        return re.sub(r"{{\s*(\w+)\s*}}", r"{\g<1>}", content)

    @staticmethod
    def _compile_template_string(
        content: str, data: Optional[Dict[str, Any]] = None
    ) -> str:
        """Substitute ``{{variable}}`` placeholders in *content* with values from *data*.

        Unknown variables are left verbatim (braces included); ``None`` values
        render as the empty string; all other values are rendered with ``str()``.

        Args:
            content: Template string containing ``{{variable}}`` placeholders.
            data: Mapping of variable names to substitution values.

        Returns:
            The compiled string.
        """
        # Avoid a mutable default argument; an absent mapping means "no data".
        data = {} if data is None else data

        opening = "{{"
        closing = "}}"

        result_list = []
        curr_idx = 0

        while curr_idx < len(content):
            # Find the next opening tag
            var_start = content.find(opening, curr_idx)

            if var_start == -1:
                result_list.append(content[curr_idx:])
                break

            # Find the next closing tag
            var_end = content.find(closing, var_start)

            if var_end == -1:
                result_list.append(content[curr_idx:])
                break

            # Append the content before the variable
            result_list.append(content[curr_idx:var_start])

            # Extract the variable name
            variable_name = content[var_start + len(opening) : var_end].strip()

            # Append the variable value; unknown variables are kept verbatim
            if variable_name in data:
                result_list.append(
                    str(data[variable_name]) if data[variable_name] is not None else ""
                )
            else:
                result_list.append(content[var_start : var_end + len(closing)])

            curr_idx = var_end + len(closing)

        return "".join(result_list)
Abstract base class for Langfuse prompt clients: holds shared prompt metadata (name, version, config, labels, tags) and mustache-style template compilation.
class TextPromptClient(BasePromptClient):
    """Client wrapper around a text-type Langfuse prompt (one template string)."""

    def __init__(self, prompt: Prompt_Text, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = prompt.prompt

    def compile(self, **kwargs) -> str:
        """Substitute the template's {{variable}} placeholders with the keyword arguments."""
        return self._compile_template_string(self.prompt, kwargs)

    def __eq__(self, other):
        if not isinstance(self, other.__class__):
            return False
        return (
            self.name == other.name
            and self.version == other.version
            and self.prompt == other.prompt
            and self.config == other.config
        )

    def get_langchain_prompt(self):
        """Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        Returns:
            str: The string that can be plugged into Langchain's PromptTemplate.
        """
        return self._get_langchain_prompt_string(self.prompt)
Client for text-type Langfuse prompts: compiles a single template string.
def get_langchain_prompt(self):
    """Return this prompt as a string compatible with Langchain's PromptTemplate.

    Langfuse stores variables in mustache-style double curly braces
    ({{variable}}); Langchain expects single curly braces ({variable}),
    so the template is converted between the two formats.

    Returns:
        str: The string that can be plugged into Langchain's PromptTemplate.
    """
    template = self.prompt
    return self._get_langchain_prompt_string(template)
Convert Langfuse prompt into string compatible with Langchain PromptTemplate.
It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.
Returns:
str: The string that can be plugged into Langchain's PromptTemplate.
Inherited Members
class ChatPromptClient(BasePromptClient):
    """Client for chat-type Langfuse prompts (a list of role/content messages)."""

    def __init__(self, prompt: Prompt_Chat, is_fallback: bool = False):
        super().__init__(prompt, is_fallback)
        self.prompt = [
            ChatMessageDict(role=p.role, content=p.content) for p in prompt.prompt
        ]

    def compile(self, **kwargs) -> List[ChatMessageDict]:
        """Fill ``{{variable}}`` placeholders in every message's content."""
        return [
            ChatMessageDict(
                content=self._compile_template_string(chat_message["content"], kwargs),
                role=chat_message["role"],
            )
            for chat_message in self.prompt
        ]

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                # zip() stops at the shorter list, so the lengths must be
                # compared explicitly or a strict prefix would compare equal.
                and len(self.prompt) == len(other.prompt)
                and all(
                    m1["role"] == m2["role"] and m1["content"] == m2["content"]
                    for m1, m2 in zip(self.prompt, other.prompt)
                )
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self):
        """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        Returns:
            List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
        """
        return [
            (msg["role"], self._get_langchain_prompt_string(msg["content"]))
            for msg in self.prompt
        ]
Client for chat-type Langfuse prompts: compiles a list of role/content messages.
def get_langchain_prompt(self):
    """Return the messages as (role, content) tuples for Langchain's ChatPromptTemplate.

    Langfuse stores variables in mustache-style double curly braces
    ({{variable}}); Langchain expects single curly braces ({variable}),
    so each message's content is converted between the two formats.

    Returns:
        List of (role, content) tuples in the format expected by Langchain's ChatPromptTemplate.
    """
    converted = []
    for message in self.prompt:
        content = self._get_langchain_prompt_string(message["content"])
        converted.append((message["role"], content))
    return converted
Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.
It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.
Returns:
List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.