langfuse.model

@private

  1"""@private"""
  2
  3from abc import ABC, abstractmethod
  4from typing import Optional, TypedDict, Any, Dict, Union, List
  5import re
  6
  7from langfuse.api.resources.commons.types.dataset import (
  8    Dataset,  # noqa: F401
  9)
 10
 11# these imports need to stay here, otherwise imports from our clients wont work
from langfuse.api.resources.commons.types.dataset_item import DatasetItem  # noqa: F401

# noqa: F401
from langfuse.api.resources.commons.types.dataset_run import DatasetRun  # noqa: F401

# noqa: F401
from langfuse.api.resources.commons.types.dataset_status import (  # noqa: F401
    DatasetStatus,
)
from langfuse.api.resources.commons.types.map_value import MapValue  # noqa: F401
from langfuse.api.resources.commons.types.observation import Observation  # noqa: F401
from langfuse.api.resources.commons.types.trace_with_full_details import (  # noqa: F401
    TraceWithFullDetails,
)

# noqa: F401
from langfuse.api.resources.dataset_items.types.create_dataset_item_request import (  # noqa: F401
    CreateDatasetItemRequest,
)
from langfuse.api.resources.dataset_run_items.types.create_dataset_run_item_request import (  # noqa: F401
    CreateDatasetRunItemRequest,
)

# noqa: F401
from langfuse.api.resources.datasets.types.create_dataset_request import (  # noqa: F401
    CreateDatasetRequest,
)
from langfuse.api.resources.prompts import Prompt, ChatMessage, Prompt_Chat, Prompt_Text


class ModelUsage(TypedDict):
    unit: Optional[str]
    input: Optional[int]
    output: Optional[int]
    total: Optional[int]
    input_cost: Optional[float]
    output_cost: Optional[float]
    total_cost: Optional[float]


class ChatMessageDict(TypedDict):
    role: str
    content: str


class BasePromptClient(ABC):
    name: str
    version: int
    config: Dict[str, Any]
    labels: List[str]

    def __init__(self, prompt: Prompt):
        self.name = prompt.name
        self.version = prompt.version
        self.config = prompt.config
        self.labels = prompt.labels

    @abstractmethod
    def compile(self, **kwargs) -> Union[str, List[ChatMessage]]:
        pass

    @abstractmethod
    def __eq__(self, other):
        pass

    @abstractmethod
    def get_langchain_prompt(self):
        pass

    @staticmethod
    def _get_langchain_prompt_string(content: str):
        return re.sub(r"{{\s*(\w+)\s*}}", r"{\g<1>}", content)

    @staticmethod
    def _compile_template_string(content: str, data: Dict[str, Any] = {}) -> str:
        opening = "{{"
        closing = "}}"

        result_list = []
        curr_idx = 0

        while curr_idx < len(content):
            # Find the next opening tag
            var_start = content.find(opening, curr_idx)

            if var_start == -1:
                result_list.append(content[curr_idx:])
                break

            # Find the next closing tag
            var_end = content.find(closing, var_start)

            if var_end == -1:
                result_list.append(content[curr_idx:])
                break

            # Append the content before the variable
            result_list.append(content[curr_idx:var_start])

            # Extract the variable name
            variable_name = content[var_start + len(opening) : var_end].strip()

            # Append the variable value
            if variable_name in data:
                result_list.append(
                    str(data[variable_name]) if data[variable_name] is not None else ""
                )
            else:
                result_list.append(content[var_start : var_end + len(closing)])

            curr_idx = var_end + len(closing)

        return "".join(result_list)


class TextPromptClient(BasePromptClient):
    def __init__(self, prompt: Prompt_Text):
        super().__init__(prompt)
        self.prompt = prompt.prompt

    def compile(self, **kwargs) -> str:
        return self._compile_template_string(self.prompt, kwargs)

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                and self.prompt == other.prompt
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self):
        """Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        Returns:
            str: The string that can be plugged into Langchain's PromptTemplate.
        """
        return self._get_langchain_prompt_string(self.prompt)


class ChatPromptClient(BasePromptClient):
    def __init__(self, prompt: Prompt_Chat):
        super().__init__(prompt)
        self.prompt = [
            ChatMessageDict(role=p.role, content=p.content) for p in prompt.prompt
        ]

    def compile(self, **kwargs) -> List[ChatMessageDict]:
        return [
            ChatMessageDict(
                content=self._compile_template_string(chat_message["content"], kwargs),
                role=chat_message["role"],
            )
            for chat_message in self.prompt
        ]

    def __eq__(self, other):
        if isinstance(self, other.__class__):
            return (
                self.name == other.name
                and self.version == other.version
                and all(
                    m1["role"] == m2["role"] and m1["content"] == m2["content"]
                    for m1, m2 in zip(self.prompt, other.prompt)
                )
                and self.config == other.config
            )

        return False

    def get_langchain_prompt(self):
        """Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

        It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse
        to the single curly brace {variable} format expected by Langchain.

        Returns:
            List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
        """
        return [
            (msg["role"], self._get_langchain_prompt_string(msg["content"]))
            for msg in self.prompt
        ]


PromptClient = Union[TextPromptClient, ChatPromptClient]
class ModelUsage(typing.TypedDict):
unit: Optional[str]
input: Optional[int]
output: Optional[int]
total: Optional[int]
input_cost: Optional[float]
output_cost: Optional[float]
total_cost: Optional[float]
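ModelUsage is a plain TypedDict, so a usage record is just a dictionary with the fields above. A minimal sketch (the concrete values and the "TOKENS" unit label are illustrative assumptions, not values required by this module):

from langfuse.model import ModelUsage

usage: ModelUsage = {
    "unit": "TOKENS",       # assumed unit label for the example
    "input": 120,           # prompt-side count
    "output": 45,           # completion-side count
    "total": 165,
    "input_cost": 0.00012,  # illustrative cost figures
    "output_cost": 0.00018,
    "total_cost": 0.00030,
}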
class ChatMessageDict(typing.TypedDict):
role: str
content: str
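A ChatMessageDict is a plain dict with a role and a content string, for example (the content is illustrative):

from langfuse.model import ChatMessageDict

message: ChatMessageDict = {"role": "system", "content": "You are a helpful assistant."}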
class BasePromptClient(abc.ABC):

Abstract base class shared by TextPromptClient and ChatPromptClient. It stores the prompt metadata (name, version, config, labels) and provides the template helpers used by both subclasses.

name: str
version: int
config: Dict[str, Any]
labels: List[str]
@abstractmethod
def compile(self, **kwargs) -> Union[str, List[langfuse.api.resources.prompts.types.chat_message.ChatMessage]]:
@abstractmethod
def get_langchain_prompt(self):
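Both concrete clients delegate to the two static helpers defined on this class: _compile_template_string substitutes mustache-style {{variable}} placeholders from a data dict (a None value becomes an empty string, and placeholders with no matching key are left in the output verbatim), while _get_langchain_prompt_string rewrites {{variable}} into Langchain's single-brace {variable} form. A small sketch of that behavior, calling the private helpers directly purely for illustration:

from langfuse.model import BasePromptClient

template = "Hello {{ name }}, today is {{day}}."

# Known variables are substituted; missing ones are kept as-is.
BasePromptClient._compile_template_string(template, {"name": "Ada"})
# -> 'Hello Ada, today is {{day}}.'

# None renders as an empty string.
BasePromptClient._compile_template_string(template, {"name": None, "day": "Monday"})
# -> 'Hello , today is Monday.'

# Double braces are converted to single braces for Langchain.
BasePromptClient._get_langchain_prompt_string(template)
# -> 'Hello {name}, today is {day}.'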
class TextPromptClient(BasePromptClient):

Prompt client for text prompts. compile() renders the stored prompt template into a single string.

TextPromptClient(prompt: langfuse.api.resources.prompts.types.prompt.Prompt_Text)
prompt: str
def compile(self, **kwargs) -> str:
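compile() fills the {{variables}} of the stored text template from keyword arguments. A sketch, assuming a text prompt was already fetched through the SDK (the prompt name, template, and variables are illustrative):

# e.g. prompt = langfuse_client.get_prompt("movie-critic")  # text prompts are returned as a TextPromptClient
# assumed template: "As a {{criticLevel}} movie critic, rate {{movie}}."

compiled = prompt.compile(criticLevel="harsh", movie="Dune 2")
# -> 'As a harsh movie critic, rate Dune 2.'
# Placeholders without a matching keyword argument are left in the string unchanged.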
def get_langchain_prompt(self):

Convert Langfuse prompt into string compatible with Langchain PromptTemplate.

It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.

Returns:

str: The string that can be plugged into Langchain's PromptTemplate.
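A sketch of plugging the converted string into Langchain (the import path assumes the langchain-core package layout; the template and variables are illustrative):

from langchain_core.prompts import PromptTemplate

# get_langchain_prompt() turns "As a {{criticLevel}} movie critic ..." into
# "As a {criticLevel} movie critic ...", which PromptTemplate.from_template understands.
langchain_prompt = PromptTemplate.from_template(prompt.get_langchain_prompt())
formatted = langchain_prompt.format(criticLevel="harsh", movie="Dune 2")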

class ChatPromptClient(BasePromptClient):

Prompt client for chat prompts. compile() renders each message's content and returns the messages as a list of ChatMessageDict.

ChatPromptClient(prompt: langfuse.api.resources.prompts.types.prompt.Prompt_Chat)
prompt: List[ChatMessageDict]
def compile(self, **kwargs) -> List[ChatMessageDict]:
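compile() substitutes the keyword arguments into every message's content and returns the resulting ChatMessageDict list. A sketch, assuming a chat prompt fetched through the SDK (the prompt name, messages, and variables are illustrative):

# e.g. prompt = langfuse_client.get_prompt("support-chat")  # chat prompts are returned as a ChatPromptClient
# assumed messages:
#   {"role": "system", "content": "You are a {{tone}} support agent."}
#   {"role": "user", "content": "{{question}}"}

messages = prompt.compile(tone="friendly", question="How do I reset my password?")
# -> [{"content": "You are a friendly support agent.", "role": "system"},
#     {"content": "How do I reset my password?", "role": "user"}]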
def get_langchain_prompt(self):

Convert Langfuse prompt into string compatible with Langchain ChatPromptTemplate.

It specifically adapts the mustache-style double curly braces {{variable}} used in Langfuse to the single curly brace {variable} format expected by Langchain.

Returns:

List of messages in the format expected by Langchain's ChatPromptTemplate: (role, content) tuple.
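A sketch of using the converted messages with Langchain (the import path assumes the langchain-core package layout; the messages and variables are illustrative):

from langchain_core.prompts import ChatPromptTemplate

# get_langchain_prompt() yields [("system", "You are a {tone} support agent."), ("user", "{question}")],
# which ChatPromptTemplate.from_messages accepts directly.
chat_template = ChatPromptTemplate.from_messages(prompt.get_langchain_prompt())
formatted_messages = chat_template.format_messages(tone="friendly", question="How do I reset my password?")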

PromptClient = typing.Union[TextPromptClient, ChatPromptClient]
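PromptClient is the union type handed to callers that may receive either prompt flavor; an isinstance check distinguishes the two. A minimal sketch:

from langfuse.model import ChatPromptClient, PromptClient

def describe(prompt: PromptClient) -> str:
    # Chat prompts carry a list of messages; text prompts carry a single template string.
    if isinstance(prompt, ChatPromptClient):
        return f"chat prompt '{prompt.name}' v{prompt.version} with {len(prompt.prompt)} messages"
    return f"text prompt '{prompt.name}' v{prompt.version}"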