Skip to content

langfuse

Module providing integrations between Mirascope and Langfuse.

with_langfuse(cls)

Wraps base classes to automatically use langfuse.

Supported base classes: BaseCall, BaseExtractor, BaseVectorStore, BaseChunker, BaseEmbedder

Example:

from mirascope.openai import OpenAICall
from mirascope.langfuse import with_langfuse


@with_langfuse
class BookRecommender(OpenAICall):
    prompt_template = "Please recommend some {genre} books"

    genre: str


recommender = BookRecommender(genre="fantasy")
response = recommender.call()  # this will automatically get logged with Langfuse
print(response.content)
Source code in mirascope/langfuse/langfuse.py
def with_langfuse(cls):
    """Wraps base classes to automatically use langfuse.

    Supported base classes: `BaseCall`, `BaseExtractor`, `BaseVectorStore`,
    `BaseChunker`, `BaseEmbedder`

    Args:
        cls: A Mirascope base class (or subclass) to instrument with Langfuse.

    Returns:
        The same class, mutated in place with Langfuse observability wired in.

    Example:

    ```python

    from mirascope.openai import OpenAICall
    from mirascope.langfuse import with_langfuse


    @with_langfuse
    class BookRecommender(OpenAICall):
        prompt_template = "Please recommend some {genre} books"

        genre: str


    recommender = BookRecommender(genre="fantasy")
    response = recommender.call()  # this will automatically get logged with Langfuse
    print(response.content)
    ```
    """
    # Wrap the class's call methods so each invocation is traced via
    # Langfuse's `observe()` decorator with before/after hooks.
    wrap_mirascope_class_functions(
        cls,
        handle_before_call=handle_before_call,
        handle_after_call=handle_after_call,
        decorator=observe(),
    )
    # OpenAI has first-class Langfuse client wrapping; other providers fall
    # back to the generic generation-level instrumentation. Note: comparing
    # directly against "openai" already handles a None/empty `_provider`
    # (the previous extra truthiness check was redundant).
    if cls._provider == "openai":
        cls.configuration = cls.configuration.model_copy(
            update={
                "client_wrappers": [
                    *cls.configuration.client_wrappers,
                    "langfuse",
                ]
            }
        )
    else:
        cls.configuration = cls.configuration.model_copy(
            update={
                "llm_ops": [
                    *cls.configuration.llm_ops,
                    mirascope_langfuse_generation(),
                ]
            }
        )
    return cls