Skip to content

langfuse.langfuse

Integration with Langfuse

BaseCallResponse

Bases: BaseModel, Generic[ResponseT, BaseToolT], ABC

A base abstract interface for LLM call responses.

Attributes:

Name Type Description
response ResponseT

The original response from whichever model response this wraps.

Source code in mirascope/base/types.py
class BaseCallResponse(BaseModel, Generic[ResponseT, BaseToolT], ABC):
    """A base abstract interface for LLM call responses.

    Attributes:
        response: The original response from whichever model response this wraps.
        start_time: The start time of the completion in ms.
        end_time: The end time of the completion in ms.
        cost: The cost of the completion in dollars, if known.
    """

    response: ResponseT
    # Presumably the user's message in provider-specific form — confirm in subclasses.
    user_message_param: Optional[Any] = None
    # Tool classes available for this call; presumably consulted by the
    # `tools`/`tool` property implementations — confirm in subclasses.
    tool_types: Optional[list[Type[BaseToolT]]] = None
    start_time: float  # The start time of the completion in ms
    end_time: float  # The end time of the completion in ms
    cost: Optional[float] = None  # The cost of the completion in dollars

    # Allow provider-specific extra fields and non-pydantic raw SDK objects.
    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

    @property
    @abstractmethod
    def message_param(self) -> Any:
        """Returns the assistant's response as a message parameter."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def tools(self) -> Optional[list[BaseToolT]]:
        """Returns the tools for the 0th choice message."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def tool(self) -> Optional[BaseToolT]:
        """Returns the 0th tool for the 0th choice message."""
        ...  # pragma: no cover

    @classmethod
    @abstractmethod
    def tool_message_params(
        cls, tools_and_outputs: list[tuple[BaseToolT, Any]]
    ) -> list[Any]:
        """Returns the tool message parameters for tool call results."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def content(self) -> str:
        """Should return the string content of the response.

        If there are multiple choices in a response, this method should select the 0th
        choice and return its string content.

        If there is no string content (e.g. when using tools), this method must return
        the empty string.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def finish_reasons(self) -> Union[None, list[str]]:
        """Should return the finish reasons of the response.

        If there is no finish reason, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def model(self) -> Optional[str]:
        """Should return the name of the response model."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def id(self) -> Optional[str]:
        """Should return the id of the response."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def usage(self) -> Any:
        """Should return the usage of the response.

        If there is no usage, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def input_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of input tokens.

        If there is no input_tokens, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def output_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of output tokens.

        If there is no output_tokens, this method must return None.
        """
        ...  # pragma: no cover

content: str abstractmethod property

Should return the string content of the response.

If there are multiple choices in a response, this method should select the 0th choice and return its string content.

If there is no string content (e.g. when using tools), this method must return the empty string.

finish_reasons: Union[None, list[str]] abstractmethod property

Should return the finish reasons of the response.

If there is no finish reason, this method must return None.

id: Optional[str] abstractmethod property

Should return the id of the response.

input_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of input tokens.

If there is no input_tokens, this method must return None.

message_param: Any abstractmethod property

Returns the assistant's response as a message parameter.

model: Optional[str] abstractmethod property

Should return the name of the response model.

output_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of output tokens.

If there is no output_tokens, this method must return None.

tool: Optional[BaseToolT] abstractmethod property

Returns the 0th tool for the 0th choice message.

tools: Optional[list[BaseToolT]] abstractmethod property

Returns the tools for the 0th choice message.

usage: Any abstractmethod property

Should return the usage of the response.

If there is no usage, this method must return None.

tool_message_params(tools_and_outputs) abstractmethod classmethod

Returns the tool message parameters for tool call results.

Source code in mirascope/base/types.py
# Abstract classmethod: concrete response types must implement the conversion
# of (tool, output) pairs into tool-result message parameters.
@classmethod
@abstractmethod
def tool_message_params(
    cls, tools_and_outputs: list[tuple[BaseToolT, Any]]
) -> list[Any]:
    """Returns the tool message parameters for tool call results."""
    ...  # pragma: no cover

BaseCallResponseChunk

Bases: BaseModel, Generic[ChunkT, BaseToolT], ABC

A base abstract interface for LLM streaming response chunks.

Attributes:

Name Type Description
chunk ChunkT

The original response chunk from whichever model response this wraps.

Source code in mirascope/base/types.py
class BaseCallResponseChunk(BaseModel, Generic[ChunkT, BaseToolT], ABC):
    """A base abstract interface for LLM streaming response chunks.

    Attributes:
        chunk: The original response chunk from whichever model response this wraps.
    """

    chunk: ChunkT
    # Presumably the user's message in provider-specific form — confirm in subclasses.
    user_message_param: Optional[Any] = None
    # Tool classes available for the streamed call, if any.
    tool_types: Optional[list[Type[BaseToolT]]] = None
    cost: Optional[float] = None  # The cost of the completion in dollars
    # Allow provider-specific extra fields and non-pydantic raw SDK objects.
    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

    @property
    @abstractmethod
    def content(self) -> str:
        """Should return the string content of the response chunk.

        If there are multiple choices in a chunk, this method should select the 0th
        choice and return its string content.

        If there is no string content (e.g. when using tools), this method must return
        the empty string.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def model(self) -> Optional[str]:
        """Should return the name of the response model."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def id(self) -> Optional[str]:
        """Should return the id of the response."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def finish_reasons(self) -> Union[None, list[str]]:
        """Should return the finish reasons of the response.

        If there is no finish reason, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def usage(self) -> Any:
        """Should return the usage of the response.

        If there is no usage, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def input_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of input tokens.

        If there is no input_tokens, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def output_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of output tokens.

        If there is no output_tokens, this method must return None.
        """
        ...  # pragma: no cover

content: str abstractmethod property

Should return the string content of the response chunk.

If there are multiple choices in a chunk, this method should select the 0th choice and return its string content.

If there is no string content (e.g. when using tools), this method must return the empty string.

finish_reasons: Union[None, list[str]] abstractmethod property

Should return the finish reasons of the response.

If there is no finish reason, this method must return None.

id: Optional[str] abstractmethod property

Should return the id of the response.

input_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of input tokens.

If there is no input_tokens, this method must return None.

model: Optional[str] abstractmethod property

Should return the name of the response model.

output_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of output tokens.

If there is no output_tokens, this method must return None.

usage: Any abstractmethod property

Should return the usage of the response.

If there is no usage, this method must return None.

BaseTool

Bases: BaseModel, Generic[ToolCallT], ABC

A base class for easy use of tools with prompts.

BaseTool is an abstract class interface and should not be used directly. When implementing a class that extends BaseTool, you must include the original tool_call from which this tool was instantiated. Make sure to skip tool_call when generating the schema by annotating it with SkipJsonSchema.

Source code in mirascope/base/tools.py
class BaseTool(BaseModel, Generic[ToolCallT], ABC):
    """A base class for easy use of tools with prompts.

    `BaseTool` is an abstract class interface and should not be used directly. When
    implementing a class that extends `BaseTool`, you must include the original
    `tool_call` from which this tool was instantiated. Make sure to skip `tool_call`
    when generating the schema by annotating it with `SkipJsonSchema`.
    """

    # The raw provider tool call; excluded from the generated JSON schema.
    tool_call: SkipJsonSchema[ToolCallT]

    # Provider tool-call objects are not pydantic models.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    @classmethod
    def name(cls) -> str:
        """Returns the name of the tool."""
        return cls.__name__

    @classmethod
    def description(cls) -> str:
        """Returns the description of the tool."""
        # The tool's description is its class docstring (whitespace-normalized),
        # falling back to a default when no docstring is present.
        return inspect.cleandoc(cls.__doc__) if cls.__doc__ else DEFAULT_TOOL_DOCSTRING

    @property
    def args(self) -> dict[str, Any]:
        """The arguments of the tool as a dictionary."""
        # Every declared field except the raw `tool_call` is an argument.
        return {
            field: getattr(self, field)
            for field in self.model_fields
            if field != "tool_call"
        }

    @property
    def fn(self) -> Callable[..., str]:
        """Returns the function that the tool describes.

        Raises:
            RuntimeError: if no function has been attached to the tool.
        """
        raise RuntimeError("Tool does not have an attached function.")

    def call(self) -> str:
        """Calls the tool's `fn` with the tool's `args`."""
        return self.fn(**self.args)

    @classmethod
    def tool_schema(cls) -> Any:
        """Constructs a JSON Schema tool schema from the `BaseModel` schema defined."""
        model_schema = cls.model_json_schema()
        # Drop the model-level title/description; the function schema carries its own.
        model_schema.pop("title", None)
        model_schema.pop("description", None)

        fn = {"name": cls.name(), "description": cls.description()}
        # NOTE(review): assumes "properties" is always present in the generated
        # schema — confirm for models with no (non-skipped) fields.
        if model_schema["properties"]:
            fn["parameters"] = model_schema  # type: ignore

        return fn

    @classmethod
    @abstractmethod
    def from_tool_call(cls, tool_call: ToolCallT) -> BaseTool:
        """Extracts an instance of the tool constructed from a tool call response."""
        ...  # pragma: no cover

    @classmethod
    @abstractmethod
    def from_model(cls, model: type[BaseModel]) -> type[BaseTool]:
        """Constructs a `BaseTool` type from a `BaseModel` type."""
        ...  # pragma: no cover

    @classmethod
    @abstractmethod
    def from_fn(cls, fn: Callable) -> type[BaseTool]:
        """Constructs a `BaseTool` type from a function."""
        ...  # pragma: no cover

    @classmethod
    @abstractmethod
    def from_base_type(cls, base_type: type[BaseType]) -> type[BaseTool]:
        """Constructs a `BaseTool` type from a `BaseType` type."""
        ...  # pragma: no cover

args: dict[str, Any] property

The arguments of the tool as a dictionary.

fn: Callable[..., str] property

Returns the function that the tool describes.

call()

Calls the tool's fn with the tool's args.

Source code in mirascope/base/tools.py
def call(self) -> str:
    """Calls the tool's `fn` with the tool's `args`."""
    # Unpacks the parsed tool arguments into the attached function.
    return self.fn(**self.args)

description() classmethod

Returns the description of the tool.

Source code in mirascope/base/tools.py
@classmethod
def description(cls) -> str:
    """Returns the description of the tool."""
    # Uses the class docstring (whitespace-normalized) when present,
    # otherwise a default docstring constant.
    return inspect.cleandoc(cls.__doc__) if cls.__doc__ else DEFAULT_TOOL_DOCSTRING

from_base_type(base_type) abstractmethod classmethod

Constructs a BaseTool type from a BaseType type.

Source code in mirascope/base/tools.py
# Abstract hook: concrete tool types must build a tool class from a `BaseType`.
@classmethod
@abstractmethod
def from_base_type(cls, base_type: type[BaseType]) -> type[BaseTool]:
    """Constructs a `BaseTool` type from a `BaseType` type."""
    ...  # pragma: no cover

from_fn(fn) abstractmethod classmethod

Constructs a BaseTool type from a function.

Source code in mirascope/base/tools.py
# Abstract hook: concrete tool types must build a tool class from a callable.
@classmethod
@abstractmethod
def from_fn(cls, fn: Callable) -> type[BaseTool]:
    """Constructs a `BaseTool` type from a function."""
    ...  # pragma: no cover

from_model(model) abstractmethod classmethod

Constructs a BaseTool type from a BaseModel type.

Source code in mirascope/base/tools.py
# Abstract hook: concrete tool types must build a tool class from a pydantic model.
@classmethod
@abstractmethod
def from_model(cls, model: type[BaseModel]) -> type[BaseTool]:
    """Constructs a `BaseTool` type from a `BaseModel` type."""
    ...  # pragma: no cover

from_tool_call(tool_call) abstractmethod classmethod

Extracts an instance of the tool constructed from a tool call response.

Source code in mirascope/base/tools.py
# Abstract hook: concrete tool types must parse a provider tool call into an instance.
@classmethod
@abstractmethod
def from_tool_call(cls, tool_call: ToolCallT) -> BaseTool:
    """Extracts an instance of the tool constructed from a tool call response."""
    ...  # pragma: no cover

name() classmethod

Returns the name of the tool.

Source code in mirascope/base/tools.py
@classmethod
def name(cls) -> str:
    """Returns the name of the tool."""
    # The tool name is simply the class name.
    return cls.__name__

tool_schema() classmethod

Constructs a JSON Schema tool schema from the BaseModel schema defined.

Source code in mirascope/base/tools.py
@classmethod
def tool_schema(cls) -> Any:
    """Constructs a JSON Schema tool schema from the `BaseModel` schema defined."""
    model_schema = cls.model_json_schema()
    # Drop the model-level title/description; the function schema carries its own.
    model_schema.pop("title", None)
    model_schema.pop("description", None)

    fn = {"name": cls.name(), "description": cls.description()}
    # NOTE(review): assumes "properties" is always present in the generated
    # schema — confirm for models with no (non-skipped) fields.
    if model_schema["properties"]:
        fn["parameters"] = model_schema  # type: ignore

    return fn

get_class_vars(self)

Get the class variables of a BaseModel removing any dangerous variables.

Source code in mirascope/base/ops_utils.py
def get_class_vars(self: BaseModel) -> dict[str, Any]:
    """Collect a `BaseModel`'s class variables, excluding dangerous ones.

    Reads each name listed in `__class_vars__` off the instance's class,
    skipping `api_key` so secrets never leak into observability metadata.
    """
    return {
        name: getattr(self.__class__, name)
        for name in self.__class_vars__
        if name != "api_key"
    }

handle_after_call(cls, fn, result, before_call, **kwargs)

Adds the response to the Mirascope Langfuse observation.

Source code in mirascope/langfuse/langfuse.py
def handle_after_call(cls, fn, result, before_call: LangfuseDecorator, **kwargs):
    """Adds the response to the Mirascope Langfuse observation.

    `before_call` is the `LangfuseDecorator` (i.e. `langfuse_context`) returned
    by `handle_before_call`; `result` becomes the observation's output.
    """
    before_call.update_current_observation(output=result)

handle_before_call(self, fn, *args, **kwargs)

Adds metadata to the Mirascope Langfuse observation.

Source code in mirascope/langfuse/langfuse.py
def handle_before_call(self: BaseModel, fn, *args, **kwargs):
    """Adds metadata to the Mirascope Langfuse observation.

    Returns:
        The `langfuse_context`, so the after-call handler can update the same
        observation.
    """
    class_vars = get_class_vars(self)
    # Keyword arguments evaluate left to right: popping "prompt_template"
    # (input) and "tags" mutates `class_vars` before the call executes, and
    # since `metadata` is bound to that same dict, neither key is duplicated
    # inside the metadata payload.
    langfuse_context.update_current_observation(
        name=self.__class__.__name__,
        input=class_vars.pop("prompt_template", None),
        metadata=class_vars,
        tags=class_vars.pop("tags", []),
    )
    return langfuse_context

langfuse_generation(fn, model_name, **kwargs)

Adds metadata to the Langfuse observation.

Source code in mirascope/langfuse/langfuse.py
def langfuse_generation(fn: Callable, model_name: str, **kwargs) -> None:
    """Adds metadata to the Langfuse observation.

    Prefers an explicit `model` keyword from the call, falling back to the
    configured `model_name`. Note that `tags` is popped from `kwargs`, so it
    is excluded from the `metadata` payload (same dict object).
    """
    resolved_model = kwargs.get("model") or model_name
    langfuse_context.update_current_observation(
        name=f"{fn.__name__} with {resolved_model}",
        input=kwargs.get("messages", []),
        metadata=kwargs,
        tags=kwargs.pop("tags", []),
        model=resolved_model,
    )

langfuse_generation_end(response_type=None, result=None, tool_types=None)

Adds the response to the Langfuse observation.

Source code in mirascope/langfuse/langfuse.py
def langfuse_generation_end(
    response_type: Optional[type[BaseCallResponse]] = None,
    result: Any = None,
    tool_types: Optional[list[type[BaseTool]]] = None,
) -> None:
    """Adds the response to the Langfuse observation.

    No-op when `response_type` is None (e.g. streaming calls, which are
    recorded elsewhere).
    """
    if response_type is not None:
        # Wrap the raw provider result so provider-agnostic properties
        # (content, token counts) can be read. Zeroed start/end times are
        # placeholders — timing is presumably recorded elsewhere.
        response = response_type(
            response=result, start_time=0, end_time=0, tool_types=tool_types
        )
        usage = ModelUsage(
            input=response.input_tokens,
            output=response.output_tokens,
            unit="TOKENS",
        )
        langfuse_context.update_current_observation(
            output=response.content, usage=usage
        )

mirascope_langfuse_generation()

Wraps a function with a Langfuse generation.

Source code in mirascope/langfuse/langfuse.py
def mirascope_langfuse_generation() -> Callable:
    """Wraps a function with a Langfuse generation."""

    def mirascope_langfuse_decorator(
        fn,
        suffix,
        *,
        is_async: bool = False,
        response_type: Optional[type[BaseCallResponse]] = None,
        response_chunk_type: Optional[type[BaseCallResponseChunk]] = None,
        tool_types: Optional[list[type[BaseTool]]] = None,
        model_name: Optional[str] = None,
    ):
        """Wraps a LLM call with Langfuse."""
        # NOTE(review): `suffix` is accepted but never used in this body.

        def wrapper(*args, **kwargs):
            """Wraps a function that makes a call to an LLM with Langfuse."""
            langfuse_generation(fn, model_name, **kwargs)
            result = fn(*args, **kwargs)
            langfuse_generation_end(response_type, result, tool_types)
            return result

        def wrapper_generator(*args, **kwargs):
            """Wraps a function that yields a call to an LLM with Langfuse."""
            langfuse_generation(fn, model_name, **kwargs)
            with record_streaming() as record_chunk:
                generator = fn(*args, **kwargs)
                # Some providers return a context-managed stream; enter it first.
                if isinstance(generator, AbstractContextManager):
                    with generator as s:
                        for chunk in s:
                            record_chunk(chunk, response_chunk_type)
                            yield chunk
                else:
                    for chunk in generator:
                        record_chunk(chunk, response_chunk_type)
                        yield chunk

        async def wrapper_async(*args, **kwargs):
            """Wraps a function that makes an async call to an LLM with Langfuse."""
            langfuse_generation(fn, model_name, **kwargs)
            result = await fn(*args, **kwargs)
            langfuse_generation_end(response_type, result, tool_types)
            return result

        async def wrapper_generator_async(*args, **kwargs):
            """Wraps a function that yields an async call to an LLM with Langfuse."""
            langfuse_generation(fn, model_name, **kwargs)
            with record_streaming() as record_chunk:
                stream = fn(*args, **kwargs)
                # The call may return a coroutine that resolves to the stream.
                if inspect.iscoroutine(stream):
                    stream = await stream
                # Some providers return a context-managed async stream; enter it first.
                if isinstance(stream, AbstractAsyncContextManager):
                    async with stream as s:
                        async for chunk in s:
                            record_chunk(chunk, response_chunk_type)
                            yield chunk
                else:
                    async for chunk in stream:
                        record_chunk(chunk, response_chunk_type)
                        yield chunk

        # Dispatch on the call shape; streaming (chunk type) takes precedence:
        #   chunk type + async    -> wrapper_generator_async
        #   response type + async -> wrapper_async
        #   chunk type (sync)     -> wrapper_generator
        #   response type (sync)  -> wrapper
        # Anything else is a misconfiguration.
        wrapper_function = wrapper
        if response_chunk_type and is_async:
            wrapper_function = wrapper_generator_async
        elif response_type and is_async:
            wrapper_function = wrapper_async
        elif response_chunk_type:
            wrapper_function = wrapper_generator
        elif response_type:
            wrapper_function = wrapper
        else:
            raise ValueError("No response type or chunk type provided")

        # Register the chosen wrapper as a Langfuse observation named after fn.
        return observe(name=fn.__name__)(wrapper_function)

    return mirascope_langfuse_decorator

record_streaming()

Langfuse record_streaming with Mirascope providers

Source code in mirascope/langfuse/langfuse.py
@contextmanager
def record_streaming() -> Generator:
    """Langfuse record_streaming with Mirascope providers"""
    collected: list[str] = []

    def record_chunk(
        chunk: ChunkT, response_chunk_type: type[BaseCallResponseChunk]
    ) -> Any:
        """Handles all provider chunk_types instead of only OpenAI"""
        text = response_chunk_type(chunk=chunk).content
        if text is not None:
            collected.append(text)

    try:
        yield record_chunk
    finally:
        # TODO: Add usage for providers that support usage in streaming
        langfuse_context.update_current_observation(
            output="".join(collected), usage=None
        )

with_langfuse(cls)

Wraps base classes to automatically use langfuse.

Supported base classes: BaseCall, BaseExtractor, BaseVectorStore, BaseChunker, BaseEmbedder

Example:

from mirascope.openai import OpenAICall
from mirascope.langfuse import with_langfuse


@with_langfuse
class BookRecommender(OpenAICall):
    prompt_template = "Please recommend some {genre} books"

    genre: str


recommender = BookRecommender(genre="fantasy")
response = recommender.call()  # this will automatically get logged with Langfuse
print(response.content)
Source code in mirascope/langfuse/langfuse.py
def with_langfuse(cls):
    """Wraps base classes to automatically use langfuse.

    Supported base classes: `BaseCall`, `BaseExtractor`, `BaseVectorStore`,
    `BaseChunker`, `BaseEmbedder`

    Example:

    ```python

    from mirascope.openai import OpenAICall
    from mirascope.langfuse import with_langfuse


    @with_langfuse
    class BookRecommender(OpenAICall):
        prompt_template = "Please recommend some {genre} books"

        genre: str


    recommender = BookRecommender(genre="fantasy")
    response = recommender.call()  # this will automatically get logged with Langfuse
    print(response.content)
    ```
    """
    # Wrap every Mirascope class function so calls are observed by Langfuse.
    wrap_mirascope_class_functions(
        cls,
        handle_before_call=handle_before_call,
        handle_after_call=handle_after_call,
        decorator=observe(),
    )
    # OpenAI classes register the "langfuse" client wrapper; every other
    # provider attaches the generic Mirascope generation decorator instead.
    # getattr guards classes without a `_provider` attribute so they fall
    # through to the generic path instead of raising AttributeError.
    if getattr(cls, "_provider", None) == "openai":
        cls.configuration = cls.configuration.model_copy(
            update={
                "client_wrappers": [
                    *cls.configuration.client_wrappers,
                    "langfuse",
                ]
            }
        )
    else:
        cls.configuration = cls.configuration.model_copy(
            update={
                "llm_ops": [
                    *cls.configuration.llm_ops,
                    mirascope_langfuse_generation(),
                ]
            }
        )
    return cls

wrap_mirascope_class_functions(cls, *, handle_before_call=None, handle_before_call_async=None, handle_after_call=None, handle_after_call_async=None, decorator=None, **custom_kwargs)

Wraps Mirascope class functions with a decorator.

Parameters:

Name Type Description Default
cls type[BaseModel]

The Mirascope class to wrap.

required
handle_before_call Optional[Callable[..., Any]]

A function to call before the call to the wrapped function.

None
handle_after_call Optional[Callable[..., Any]]

A function to call after the call to the wrapped function.

None
custom_kwargs Any

Additional keyword arguments to pass to the decorator.

{}
Source code in mirascope/base/ops_utils.py
def wrap_mirascope_class_functions(
    cls: type[BaseModel],
    *,
    handle_before_call: Optional[Callable[..., Any]] = None,
    handle_before_call_async: Optional[Callable[..., Awaitable[Any]]] = None,
    handle_after_call: Optional[Callable[..., Any]] = None,
    handle_after_call_async: Optional[Callable[..., Awaitable[Any]]] = None,
    decorator: Optional[DecoratorType] = None,
    **custom_kwargs: Any,
):
    """Wraps Mirascope class functions with a decorator.

    Args:
        cls: The Mirascope class to wrap.
        handle_before_call: A function to call before the call to the wrapped function.
        handle_before_call_async: An async function to call before the call to the
            wrapped function.
        handle_after_call: A function to call after the call to the wrapped function.
        handle_after_call_async: An async function to call after the call to the
            wrapped function.
        decorator: A decorator passed through to `mirascope_span` — presumably
            applied to each wrapped function; confirm in `mirascope_span`.
        custom_kwargs: Additional keyword arguments to pass to the decorator.

    Returns:
        The same class, with each of its functions replaced in place by a
        span-wrapped version.
    """

    # Replace each class function in place with its mirascope_span wrapper.
    for name in get_class_functions(cls):
        setattr(
            cls,
            name,
            mirascope_span(
                getattr(cls, name),
                handle_before_call=handle_before_call,
                handle_before_call_async=handle_before_call_async,
                handle_after_call=handle_after_call,
                handle_after_call_async=handle_after_call_async,
                decorator=decorator,
                **custom_kwargs,
            ),
        )
    return cls