
wandb

Integrations with Weights & Biases tools (wandb, weave).

WandbCallMixin

Bases: _WandbBaseCall, Generic[BaseCallResponseT]

A mixin for integrating a call with Weights & Biases.

Use this class's built-in `call_with_trace` method to log traces to WandB along with your LLM calls. These traces will include additional metadata such as the prompt template, template variables, and more.

Example:

```python
import os

from mirascope.openai import OpenAICall, OpenAICallResponse
from mirascope.wandb import WandbCallMixin
import wandb

wandb.login(key="YOUR_WANDB_API_KEY")
wandb.init(project="wandb_logged_chain")

os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"


class BookRecommender(OpenAICall, WandbCallMixin[OpenAICallResponse]):
    prompt_template = """
    SYSTEM:
    You are the world's greatest librarian.

    USER:
    Please recommend a {genre} book.
    """

    genre: str


recommender = BookRecommender(span_type="llm", genre="fantasy")
response, span = recommender.call_with_trace()
#           ^ this is a `Trace` returned from the call (or trace error).
```
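Note that `call_with_trace` returns the trace but may not log it to your run for you; in the usual W&B workflow you log a trace explicitly via the `log` method on W&B's `Trace`. A minimal sketch, assuming the run from `wandb.init` above is still active (the span name is an arbitrary key):

```python
# Log the returned trace to the active W&B run so it appears in the Traces view.
# The name passed to `log` is an arbitrary key, not the span's display name.
span.log(name="book_recommendation")
```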
Source code in mirascope/wandb/wandb.py
class WandbCallMixin(_WandbBaseCall, Generic[BaseCallResponseT]):
    '''A mixin for integrating a call with Weights & Biases.

    Use this class's built-in `call_with_trace` method to log traces to WandB along
    with your LLM calls. These traces will include additional metadata such as the
    prompt template, template variables, and more.

    Example:

    ```python
    import os

    from mirascope.openai import OpenAICall, OpenAICallResponse
    from mirascope.wandb import WandbCallMixin
    import wandb

    wandb.login(key="YOUR_WANDB_API_KEY")
    wandb.init(project="wandb_logged_chain")

    os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"


    class BookRecommender(OpenAICall, WandbCallMixin[OpenAICallResponse]):
        prompt_template = """
        SYSTEM:
        You are the world's greatest librarian.

        USER:
        Please recommend a {genre} book.
        """

        genre: str


    recommender = BookRecommender(span_type="llm", genre="fantasy")
    response, span = recommender.call_with_trace()
    #           ^ this is a `Trace` returned from the call (or trace error).
    ```
    '''

    span_type: Literal["tool", "llm", "chain", "agent"]

    def call_with_trace(
        self,
        parent: Optional[Trace] = None,
        **kwargs: Any,
    ) -> tuple[Optional[BaseCallResponseT], Trace]:
        """Creates an LLM response and logs it via a W&B `Trace`.

        Args:
            parent: The parent trace to connect to.

        Returns:
            A tuple containing the completion and its trace (which has been connected
                to the parent).
        """
        try:
            start_time = datetime.datetime.now().timestamp() * 1000
            response = self.call(**kwargs)
            tool_type = None
            if response.tool_types and len(response.tool_types) > 0:
                tool_type = response.tool_types[0].__bases__[0]  # type: ignore
            span = trace(self, response, tool_type, parent, **kwargs)
            return response, span  # type: ignore
        except Exception as e:
            return None, trace_error(self, e, parent, start_time, **kwargs)

call_with_trace(parent=None, **kwargs)

Creates an LLM response and logs it via a W&B Trace.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `parent` | `Optional[Trace]` | The parent trace to connect to. | `None` |

Returns:

| Type | Description |
|------|-------------|
| `tuple[Optional[BaseCallResponseT], Trace]` | A tuple containing the completion and its trace (which has been connected to the parent). |
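To build a multi-step chain, you can construct a root `Trace` yourself and pass it as `parent`, so that each traced call is attached as a child span. A minimal sketch, assuming an active W&B run and the `BookRecommender` from the example above (the span names are arbitrary):

```python
from wandb.sdk.data_types.trace_tree import Trace

# Root span for the whole chain; spans from `call_with_trace` attach to it.
root = Trace(name="recommendation_chain", kind="chain")

response, span = recommender.call_with_trace(parent=root)

# Logging the root span logs the entire tree, including the child span above.
root.log(name="recommendation_chain")
```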

Source code in mirascope/wandb/wandb.py
def call_with_trace(
    self,
    parent: Optional[Trace] = None,
    **kwargs: Any,
) -> tuple[Optional[BaseCallResponseT], Trace]:
    """Creates an LLM response and logs it via a W&B `Trace`.

    Args:
        parent: The parent trace to connect to.

    Returns:
        A tuple containing the completion and its trace (which has been connected
            to the parent).
    """
    try:
        start_time = datetime.datetime.now().timestamp() * 1000
        response = self.call(**kwargs)
        tool_type = None
        if response.tool_types and len(response.tool_types) > 0:
            tool_type = response.tool_types[0].__bases__[0]  # type: ignore
        span = trace(self, response, tool_type, parent, **kwargs)
        return response, span  # type: ignore
    except Exception as e:
        return None, trace_error(self, e, parent, start_time, **kwargs)
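Because the exception is caught and traced rather than re-raised, the first element of the returned tuple is `None` on failure. A minimal sketch of handling that case:

```python
response, span = recommender.call_with_trace()
if response is None:
    # The call raised; `span` is the error trace and can still be inspected or logged.
    print("call failed; see the logged error trace")
else:
    print(response.content)
```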

WandbExtractorMixin

Bases: _WandbBaseExtractor, Generic[T]

An extractor mixin for integrating with Weights & Biases.

Use this class's built-in `extract_with_trace` method to log traces to WandB along with your LLM calls. These traces will include additional metadata such as the prompt template, template variables, and more.

Example:

```python
import os
from typing import Type

from mirascope.openai import OpenAIExtractor
from mirascope.wandb import WandbExtractorMixin
from pydantic import BaseModel
import wandb

wandb.login(key="YOUR_WANDB_API_KEY")
wandb.init(project="wandb_logged_chain")

os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"


class Book(BaseModel):
    title: str
    author: str


class BookRecommender(OpenAIExtractor[Book], WandbExtractorMixin[Book]):
    extract_schema: Type[Book] = Book
    prompt_template = """
    SYSTEM:
    You are the world's greatest librarian.

    USER:
    Please recommend a {genre} book.
    """

    genre: str


recommender = BookRecommender(span_type="tool", genre="fantasy")
book, span = recommender.extract_with_trace()
#       ^ this is a `Trace` returned from the call (or trace error).
```
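As with `call_with_trace`, the first element is `None` if extraction fails, and the returned span follows the same W&B `Trace` interface. A minimal sketch (the span name is an arbitrary key):

```python
if book is not None:
    print(f"{book.title} by {book.author}")
# Log the trace (success or error) to the active W&B run.
span.log(name="book_extraction")
```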
Source code in mirascope/wandb/wandb.py
class WandbExtractorMixin(_WandbBaseExtractor, Generic[T]):
    '''An extractor mixin for integrating with Weights & Biases.

    Use this class's built-in `extract_with_trace` method to log traces to WandB along
    with your LLM calls. These traces will include additional metadata such as the
    prompt template, template variables, and more.

    Example:

    ```python
    import os
    from typing import Type

    from mirascope.openai import OpenAIExtractor
    from mirascope.wandb import WandbExtractorMixin
    from pydantic import BaseModel
    import wandb

    wandb.login(key="YOUR_WANDB_API_KEY")
    wandb.init(project="wandb_logged_chain")

    os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"


    class Book(BaseModel):
        title: str
        author: str


    class BookRecommender(OpenAIExtractor[Book], WandbExtractorMixin[Book]):
        extract_schema: Type[Book] = Book
        prompt_template = """
        SYSTEM:
        You are the world's greatest librarian.

        USER:
        Please recommend a {genre} book.
        """

        genre: str


    recommender = BookRecommender(span_type="tool", genre="fantasy")
    book, span = recommender.extract_with_trace()
    #       ^ this is a `Trace` returned from the call (or trace error).
    ```
    '''

    span_type: Literal["tool", "llm", "chain", "agent"]

    def extract_with_trace(
        self,
        parent: Optional[Trace] = None,
        retries: int = 0,
        **kwargs: Any,
    ) -> tuple[Optional[T], Trace]:
        """Extracts `extract_schema` from the LLM call response and traces it.

        The `extract_schema` is converted into a tool, complete with a description of
        the tool, all of the fields, and their types. This allows us to take advantage
        of tool/function calling functionality to extract information from a response
        according to the context provided by the `BaseModel` schema.

        Args:
            parent: The parent trace to connect to.
            retries: The maximum number of times to retry the query on validation error.
            **kwargs: Additional keyword arguments to pass to the call. These
                will override any existing arguments in `call_params`.

        Returns:
            The `Schema` instance extracted from the response and its trace.
        """
        try:
            start_time = datetime.datetime.now().timestamp() * 1000
            model = self.extract(retries=retries, **kwargs)
            span = trace(self, model._response, parent, **kwargs)  # type: ignore
            return model, span  # type: ignore
        except Exception as e:
            return None, trace_error(self, e, parent, start_time, **kwargs)

extract_with_trace(parent=None, retries=0, **kwargs)

Extracts extract_schema from the LLM call response and traces it.

The extract_schema is converted into a tool, complete with a description of the tool, all of its fields, and their types. This allows us to take advantage of tool/function calling functionality to extract information from a response according to the context provided by the BaseModel schema.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `parent` | `Optional[Trace]` | The parent trace to connect to. | `None` |
| `retries` | `int` | The maximum number of times to retry the query on validation error. | `0` |
| `**kwargs` | `Any` | Additional keyword arguments to pass to the call. These will override any existing arguments in `call_params`. | `{}` |

Returns:

| Type | Description |
|------|-------------|
| `tuple[Optional[T], Trace]` | The `Schema` instance extracted from the response and its trace. |
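Since validation failures inside `extract` are retried before any exception reaches the tracing wrapper, retries and tracing compose directly: only an extraction that exhausts its retries is recorded as an error trace. A minimal sketch (the retry count is arbitrary):

```python
# Retry the extraction up to 3 times on Pydantic validation errors;
# if every attempt fails, the failure is recorded as an error trace instead.
book, span = recommender.extract_with_trace(retries=3)
```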

Source code in mirascope/wandb/wandb.py
def extract_with_trace(
    self,
    parent: Optional[Trace] = None,
    retries: int = 0,
    **kwargs: Any,
) -> tuple[Optional[T], Trace]:
    """Extracts `extract_schema` from the LLM call response and traces it.

    The `extract_schema` is converted into a tool, complete with a description of
    the tool, all of the fields, and their types. This allows us to take advantage
    of tool/function calling functionality to extract information from a response
    according to the context provided by the `BaseModel` schema.

    Args:
        parent: The parent trace to connect to.
        retries: The maximum number of times to retry the query on validation error.
        **kwargs: Additional keyword arguments to pass to the call. These
            will override any existing arguments in `call_params`.

    Returns:
        The `Schema` instance extracted from the response and its trace.
    """
    try:
        start_time = datetime.datetime.now().timestamp() * 1000
        model = self.extract(retries=retries, **kwargs)
        span = trace(self, model._response, parent, **kwargs)  # type: ignore
        return model, span  # type: ignore
    except Exception as e:
        return None, trace_error(self, e, parent, start_time, **kwargs)

with_weave(cls)

Wraps base classes to automatically use weave.

Supported base classes: BaseCall, BaseExtractor, BaseVectorStore, BaseChunker, BaseEmbedder

Example:

```python
import weave

from mirascope.openai import OpenAICall
from mirascope.wandb import with_weave

weave.init("my-project")


@with_weave
class BookRecommender(OpenAICall):
    prompt_template = "Please recommend some {genre} books"

    genre: str


recommender = BookRecommender(genre="fantasy")
response = recommender.call()  # this will automatically get logged with weave
print(response.content)
```
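Because `BaseExtractor` is among the supported base classes, the same decorator also logs extractions. A minimal sketch, assuming a hypothetical `Book` Pydantic model like the one in the `WandbExtractorMixin` example above:

```python
from typing import Type

import weave
from pydantic import BaseModel

from mirascope.openai import OpenAIExtractor
from mirascope.wandb import with_weave

weave.init("my-project")


class Book(BaseModel):  # hypothetical schema for illustration
    title: str
    author: str


@with_weave
class BookExtractor(OpenAIExtractor[Book]):
    extract_schema: Type[Book] = Book
    prompt_template = "Please recommend a {genre} book"

    genre: str


book = BookExtractor(genre="fantasy").extract()  # logged with weave
```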
Source code in mirascope/wandb/weave.py
def with_weave(cls):
    """Wraps base classes to automatically use weave.

    Supported base classes: `BaseCall`, `BaseExtractor`, `BaseVectorStore`,
    `BaseChunker`, `BaseEmbedder`

    Example:

    ```python
    import weave

    from mirascope.openai import OpenAICall
    from mirascope.wandb import with_weave

    weave.init("my-project")


    @with_weave
    class BookRecommender(OpenAICall):
        prompt_template = "Please recommend some {genre} books"

        genre: str


    recommender = BookRecommender(genre="fantasy")
    response = recommender.call()  # this will automatically get logged with weave
    print(response.content)
    ```
    """
    for name in get_class_functions(cls):
        setattr(cls, name, weave.op()(getattr(cls, name)))
    if hasattr(cls, "_provider") is False or cls._provider != "openai":
        if hasattr(cls, "configuration"):
            cls.configuration = cls.configuration.model_copy(
                update={"llm_ops": [*cls.configuration.llm_ops, "weave"]}
            )
    return cls