
anthropic.types

Type classes for interacting with Anthropic's Claude API.

AnthropicAsyncStream

Bases: BaseAsyncStream[AnthropicCallResponseChunk, MessageParam, MessageParam, AnthropicTool]

A class for streaming responses from Anthropic's Claude API.

Source code in mirascope/anthropic/types.py
class AnthropicAsyncStream(
    BaseAsyncStream[
        AnthropicCallResponseChunk,
        MessageParam,
        MessageParam,
        AnthropicTool,
    ]
):
    """A class for streaming responses from Anthropic's Claude API."""

    def __init__(
        self,
        stream: AsyncGenerator[AnthropicCallResponseChunk, None],
        allow_partial: bool = False,
    ):
        """Initializes an instance of `AnthropicAsyncStream`."""
        AnthropicToolStream._check_version_for_partial(allow_partial)
        super().__init__(stream, MessageParam)
        self._allow_partial = allow_partial

    def __aiter__(
        self,
    ) -> AsyncGenerator[
        tuple[AnthropicCallResponseChunk, Optional[AnthropicTool]], None
    ]:
        """Async iterator over the stream and constructs tools as they are streamed."""
        stream = super().__aiter__()

        async def generator():
            current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
            current_tool_type = None
            buffer, content = "", []
            async for chunk, _ in stream:
                (
                    buffer,
                    tool,
                    current_tool_call,
                    current_tool_type,
                    starting_new,
                ) = _handle_chunk(
                    buffer,
                    chunk,
                    current_tool_call,
                    current_tool_type,
                    self._allow_partial,
                )
                if tool is not None:
                    yield chunk, tool
                elif current_tool_type is None:
                    yield chunk, None
                if starting_new and self._allow_partial:
                    yield chunk, None
                if chunk.chunk.type == "content_block_stop":
                    content.append(chunk.chunk.content_block.model_dump())
            if content:
                self.message_param["content"] = content  # type: ignore

        return generator()

    @classmethod
    def tool_message_params(cls, tools_and_outputs: list[tuple[AnthropicTool, str]]):
        """Returns the tool message parameters for tool call results."""
        return AnthropicCallResponse.tool_message_params(tools_and_outputs)

tool_message_params(tools_and_outputs) classmethod

Returns the tool message parameters for tool call results.

Source code in mirascope/anthropic/types.py
@classmethod
def tool_message_params(cls, tools_and_outputs: list[tuple[AnthropicTool, str]]):
    """Returns the tool message parameters for tool call results."""
    return AnthropicCallResponse.tool_message_params(tools_and_outputs)
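
Usage sketch (illustrative, not from the library source): wrap an async generator of chunks to iterate over (chunk, tool) pairs and recover the final assistant message. The stream_async() method is an assumption here, mirroring the synchronous AnthropicCall.stream() documented below.

```python
import asyncio

from mirascope.anthropic import AnthropicCall
from mirascope.anthropic.types import AnthropicAsyncStream


class BookRecommender(AnthropicCall):
    prompt_template = "Please recommend some books."


async def main():
    # Assumption: `stream_async()` yields `AnthropicCallResponseChunk`s.
    stream = AnthropicAsyncStream(BookRecommender().stream_async())
    async for chunk, tool in stream:
        if tool is not None:
            print(tool.args)  # a tool constructed from streamed `tool_use` blocks
        else:
            print(chunk.content, end="", flush=True)
    # After iteration, the reconstructed assistant message is available.
    print(stream.message_param)


asyncio.run(main())
```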

AnthropicCallParams

Bases: BaseCallParams[AnthropicTool]

The parameters to use when calling the Claude API with a prompt.

Example:

from mirascope.anthropic import AnthropicCall, AnthropicCallParams


class BookRecommender(AnthropicCall):
    prompt_template = "Please recommend some books."

    call_params = AnthropicCallParams(
        model="anthropic-3-opus-20240229",
    )
Source code in mirascope/anthropic/types.py
class AnthropicCallParams(BaseCallParams[AnthropicTool]):
    """The parameters to use when calling d Claud API with a prompt.

    Example:

    ```python
    from mirascope.anthropic import AnthropicCall, AnthropicCallParams


    class BookRecommender(AnthropicCall):
        prompt_template = "Please recommend some books."

        call_params = AnthropicCallParams(
            model="anthropic-3-opus-20240229",
        )
    ```
    """

    max_tokens: int = 1000
    model: str = "claude-3-haiku-20240307"
    metadata: Optional[Metadata] = None
    stop_sequences: Optional[list[str]] = None
    system: Optional[str] = None
    temperature: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[float] = None
    extra_headers: Optional[Headers] = None
    extra_query: Optional[Query] = None
    extra_body: Optional[Body] = None
    timeout: Optional[Union[float, Timeout]] = 600

    response_format: Optional[Literal["json"]] = None

    model_config = ConfigDict(arbitrary_types_allowed=True)

    def kwargs(
        self,
        tool_type: Optional[Type[AnthropicTool]] = None,
        exclude: Optional[set[str]] = None,
    ) -> dict[str, Any]:
        """Returns the keyword argument call parameters."""
        extra_exclude = {"response_format"}
        exclude = extra_exclude if exclude is None else exclude.union(extra_exclude)
        return super().kwargs(tool_type, exclude)

kwargs(tool_type=None, exclude=None)

Returns the keyword argument call parameters.

Source code in mirascope/anthropic/types.py
def kwargs(
    self,
    tool_type: Optional[Type[AnthropicTool]] = None,
    exclude: Optional[set[str]] = None,
) -> dict[str, Any]:
    """Returns the keyword argument call parameters."""
    extra_exclude = {"response_format"}
    exclude = extra_exclude if exclude is None else exclude.union(extra_exclude)
    return super().kwargs(tool_type, exclude)
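
Illustrative sketch of what kwargs() returns: unset (None) values and the Mirascope-only response_format field are dropped, leaving only arguments the Anthropic client accepts.

```python
from mirascope.anthropic import AnthropicCallParams

call_params = AnthropicCallParams(
    model="claude-3-opus-20240229",
    temperature=0.7,
    response_format="json",
)
# `response_format` is excluded; defaults like `max_tokens` and `timeout` remain.
print(call_params.kwargs())
# e.g. {'max_tokens': 1000, 'model': 'claude-3-opus-20240229',
#       'temperature': 0.7, 'timeout': 600}
```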

AnthropicCallResponse

Bases: BaseCallResponse[Message, AnthropicTool]

Convenience wrapper around the Anthropic Claude API.

When using Mirascope's convenience wrappers to interact with Anthropic models via AnthropicCall, responses using AnthropicCall.call() will return an AnthropicCallResponse, whereby the implemented properties allow for simpler syntax and a convenient developer experience.

Example:

from mirascope.anthropic import AnthropicCall


class BookRecommender(AnthropicCall):
    prompt_template = "Please recommend some books."


print(BookRecommender().call())
Source code in mirascope/anthropic/types.py
class AnthropicCallResponse(BaseCallResponse[Message, AnthropicTool]):
    """Convenience wrapper around the Anthropic Claude API.

    When using Mirascope's convenience wrappers to interact with Anthropic models via
    `AnthropicCall`, responses using `AnthropicCall.call()` will return an
    `AnthropicCallResponse`, whereby the implemented properties allow for simpler syntax
    and a convenient developer experience.

    Example:

    ```python
    from mirascope.anthropic import AnthropicCall


    class BookRecommender(AnthropicCall):
        prompt_template = "Please recommend some books."


    print(BookRecommender().call())
    ```
    """

    response_format: Optional[Literal["json"]] = None
    user_message_param: Optional[MessageParam] = None

    @property
    def message_param(self) -> MessageParam:
        """Returns the assistant's response as a message parameter."""
        return self.response.model_dump(include={"content", "role"})  # type: ignore

    @property
    def tools(self) -> Optional[list[AnthropicTool]]:
        """Returns the tools for the 0th choice message."""
        if not self.tool_types:
            return None

        if self.response_format == "json":
            # Note: we only handle single tool calls in JSON mode.
            tool_type = self.tool_types[0]
            return [
                tool_type.from_tool_call(
                    ToolUseBlock(
                        id="id",
                        input=json.loads(self.content),
                        name=tool_type.name(),
                        type="tool_use",
                    )
                )
            ]

        if self.response.stop_reason != "tool_use":
            raise RuntimeError(
                "Generation stopped with stop reason that is not `tool_use`. "
                "This is likely due to a limit on output tokens that is too low. "
                "Note that this could also indicate no tool is beind called, so we "
                "recommend that you check the output of the call to confirm. "
                f"Stop Reason: {self.response.stop_reason} "
            )

        extracted_tools = []
        for tool_call in self.response.content:
            if tool_call.type != "tool_use":
                continue
            for tool_type in self.tool_types:
                if tool_call.name == tool_type.name():
                    tool = tool_type.from_tool_call(tool_call)
                    extracted_tools.append(tool)
                    break

        return extracted_tools

    @property
    def tool(self) -> Optional[AnthropicTool]:
        """Returns the 0th tool for the 0th choice text block."""
        tools = self.tools
        if tools:
            return tools[0]
        return None

    @classmethod
    def tool_message_params(
        cls, tools_and_outputs: list[tuple[AnthropicTool, str]]
    ) -> list[MessageParam]:
        """Returns the tool message parameters for tool call results."""
        return [
            {
                "role": "user",
                "content": [
                    ToolResultBlockParam(
                        tool_use_id=tool.tool_call.id,
                        type="tool_result",
                        content=[{"text": output, "type": "text"}],
                    )
                    for tool, output in tools_and_outputs
                ],
            }
        ]

    @property
    def content(self) -> str:
        """Returns the string text of the 0th text block."""
        block = self.response.content[0]
        return block.text if block.type == "text" else ""

    @property
    def model(self) -> str:
        """Returns the name of the response model."""
        return self.response.model

    @property
    def id(self) -> str:
        """Returns the id of the response."""
        return self.response.id

    @property
    def finish_reasons(self) -> Optional[list[str]]:
        """Returns the finish reason of the response."""
        return [str(self.response.stop_reason)]

    @property
    def usage(self) -> Usage:
        """Returns the usage of the message."""
        return self.response.usage

    @property
    def input_tokens(self) -> int:
        """Returns the number of input tokens."""
        return self.usage.input_tokens

    @property
    def output_tokens(self) -> int:
        """Returns the number of output tokens."""
        return self.usage.output_tokens

    def dump(self) -> dict[str, Any]:
        """Dumps the response to a dictionary."""
        return {
            "start_time": self.start_time,
            "end_time": self.end_time,
            "output": self.response.model_dump(),
        }

content: str property

Returns the string text of the 0th text block.

finish_reasons: Optional[list[str]] property

Returns the finish reason of the response.

id: str property

Returns the id of the response.

input_tokens: int property

Returns the number of input tokens.

message_param: MessageParam property

Returns the assistant's response as a message parameter.

model: str property

Returns the name of the response model.

output_tokens: int property

Returns the number of output tokens.

tool: Optional[AnthropicTool] property

Returns the 0th tool for the 0th choice text block.

tools: Optional[list[AnthropicTool]] property

Returns the tools for the 0th choice message.

usage: Usage property

Returns the usage of the message.

dump()

Dumps the response to a dictionary.

Source code in mirascope/anthropic/types.py
def dump(self) -> dict[str, Any]:
    """Dumps the response to a dictionary."""
    return {
        "start_time": self.start_time,
        "end_time": self.end_time,
        "output": self.response.model_dump(),
    }
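
Illustrative sketch of the convenience properties on a response (assumes a configured Anthropic API key):

```python
from mirascope.anthropic import AnthropicCall


class BookRecommender(AnthropicCall):
    prompt_template = "Please recommend some books."


response = BookRecommender().call()
print(response.content)  # text of the 0th text block
print(response.model, response.id)  # model name and response id
print(response.input_tokens, response.output_tokens)  # token usage
print(response.dump()["output"])  # raw `model_dump()` of the wrapped response
```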

tool_message_params(tools_and_outputs) classmethod

Returns the tool message parameters for tool call results.

Source code in mirascope/anthropic/types.py
@classmethod
def tool_message_params(
    cls, tools_and_outputs: list[tuple[AnthropicTool, str]]
) -> list[MessageParam]:
    """Returns the tool message parameters for tool call results."""
    return [
        {
            "role": "user",
            "content": [
                ToolResultBlockParam(
                    tool_use_id=tool.tool_call.id,
                    type="tool_result",
                    content=[{"text": output, "type": "text"}],
                )
                for tool, output in tools_and_outputs
            ],
        }
    ]
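
Illustrative sketch of packaging a tool's output as a tool_result user message. The format_book function and the tool call id are hypothetical; from_fn and from_tool_call are documented under AnthropicTool below.

```python
from anthropic.types import ToolUseBlock

from mirascope.anthropic.tools import AnthropicTool
from mirascope.anthropic.types import AnthropicCallResponse


def format_book(title: str, author: str) -> str:
    """Returns a nicely formatted book recommendation.

    Args:
        title: the title of the book.
        author: the author of the book.
    """
    return f"{title} by {author}"


FormatBook = AnthropicTool.from_fn(format_book)

# Reconstruct a tool instance from a (hypothetical) tool call, run it, and
# package the output as the user-role `tool_result` message shown above.
tool = FormatBook.from_tool_call(
    ToolUseBlock(
        id="toolu_123",  # hypothetical tool call id
        input={"title": "Dune", "author": "Frank Herbert"},
        name="FormatBook",
        type="tool_use",
    )
)
print(AnthropicCallResponse.tool_message_params([(tool, tool.fn(**tool.args))]))
```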

AnthropicCallResponseChunk

Bases: BaseCallResponseChunk[MessageStreamEvent, AnthropicTool]

Convenience wrapper around the Anthropic API streaming chunks.

When using Mirascope's convenience wrappers to interact with Anthropic models via AnthropicCall, responses using AnthropicCall.stream() will yield AnthropicCallResponseChunk, whereby the implemented properties allow for simpler syntax and a convenient developer experience.

Example:

from mirascope.anthropic import AnthropicCall


class Math(AnthropicCall):
    prompt_template = "What is 1 + 2?"


content = ""
for chunk in Math().stream():
    content += chunk.content
    print(content)
#> 1
#  1 +
#  1 + 2
#  1 + 2 equals
#  1 + 2 equals
#  1 + 2 equals 3
#  1 + 2 equals 3.
Source code in mirascope/anthropic/types.py
class AnthropicCallResponseChunk(
    BaseCallResponseChunk[MessageStreamEvent, AnthropicTool]
):
    """Convenience wrapper around the Anthropic API streaming chunks.

    When using Mirascope's convenience wrappers to interact with Anthropic models via
    `AnthropicCall`, responses using `AnthropicCall.stream()` will yield
    `AnthropicCallResponseChunk`, whereby the implemented properties allow for simpler
    syntax and a convenient developer experience.

    Example:

    ```python
    from mirascope.anthropic import AnthropicCall


    class Math(AnthropicCall):
        prompt_template = "What is 1 + 2?"


    content = ""
    for chunk in Math().stream():
        content += chunk.content
        print(content)
    #> 1
    #  1 +
    #  1 + 2
    #  1 + 2 equals
    #  1 + 2 equals
    #  1 + 2 equals 3
    #  1 + 2 equals 3.
    ```
    """

    response_format: Optional[Literal["json"]] = None
    user_message_param: Optional[MessageParam] = None

    @property
    def type(
        self,
    ) -> Literal[
        "text",
        "input_json",
        "message_start",
        "message_delta",
        "message_stop",
        "content_block_start",
        "content_block_delta",
        "content_block_stop",
    ]:
        """Returns the type of the chunk."""
        return self.chunk.type

    @property
    def content(self) -> str:
        """Returns the string content of the 0th message."""
        if isinstance(self.chunk, ContentBlockStartEvent):
            return (
                self.chunk.content_block.text
                if isinstance(self.chunk.content_block, TextBlock)
                else ""
            )
        if isinstance(self.chunk, ContentBlockDeltaEvent):
            return (
                self.chunk.delta.text if isinstance(self.chunk.delta, TextDelta) else ""
            )
        return ""

    @property
    def model(self) -> Optional[str]:
        """Returns the name of the response model."""
        if isinstance(self.chunk, MessageStartEvent):
            return self.chunk.message.model
        return None

    @property
    def id(self) -> Optional[str]:
        """Returns the id of the response."""
        if isinstance(self.chunk, MessageStartEvent):
            return self.chunk.message.id
        return None

    @property
    def finish_reasons(self) -> Optional[list[str]]:
        """Returns the finish reason of the response."""
        if isinstance(self.chunk, MessageStartEvent):
            return [str(self.chunk.message.stop_reason)]
        return None

    @property
    def usage(self) -> Optional[Usage]:
        """Returns the usage of the message."""
        if isinstance(self.chunk, MessageStartEvent):
            return self.chunk.message.usage
        return None

    @property
    def input_tokens(self) -> Optional[int]:
        """Returns the number of input tokens."""
        if self.usage:
            return self.usage.input_tokens
        return None

    @property
    def output_tokens(self) -> Optional[int]:
        """Returns the number of output tokens."""
        if self.usage:
            return self.usage.output_tokens
        return None

content: str property

Returns the string content of the 0th message.

finish_reasons: Optional[list[str]] property

Returns the finish reason of the response.

id: Optional[str] property

Returns the id of the response.

input_tokens: Optional[int] property

Returns the number of input tokens.

model: Optional[str] property

Returns the name of the response model.

output_tokens: Optional[int] property

Returns the number of output tokens.

type: Literal['text', 'input_json', 'message_start', 'message_delta', 'message_stop', 'content_block_start', 'content_block_delta', 'content_block_stop'] property

Returns the type of the chunk.

usage: Optional[Usage] property

Returns the usage of the message.
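
Illustrative sketch of inspecting chunk metadata while streaming: most metadata lives on the message_start chunk, so these properties return None for other chunk types.

```python
from mirascope.anthropic import AnthropicCall


class Math(AnthropicCall):
    prompt_template = "What is 1 + 2?"


for chunk in Math().stream():
    if chunk.type == "message_start":
        print(chunk.id, chunk.model, chunk.input_tokens)
    elif chunk.type == "content_block_delta":
        print(chunk.content, end="", flush=True)
```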

AnthropicStream

Bases: BaseStream[AnthropicCallResponseChunk, MessageParam, MessageParam, AnthropicTool]

A class for streaming responses from Anthropic's Claude API.

Source code in mirascope/anthropic/types.py
class AnthropicStream(
    BaseStream[
        AnthropicCallResponseChunk,
        MessageParam,
        MessageParam,
        AnthropicTool,
    ]
):
    """A class for streaming responses from Anthropic's Claude API."""

    def __init__(
        self,
        stream: Generator[AnthropicCallResponseChunk, None, None],
        allow_partial: bool = False,
    ):
        """Initializes an instance of `AnthropicStream`."""
        AnthropicToolStream._check_version_for_partial(allow_partial)
        super().__init__(stream, MessageParam)
        self._allow_partial = allow_partial

    def __iter__(
        self,
    ) -> Generator[
        tuple[AnthropicCallResponseChunk, Optional[AnthropicTool]], None, None
    ]:
        """Iterator over the stream and constructs tools as they are streamed."""
        current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
        current_tool_type = None
        buffer, content = "", []
        for chunk, _ in super().__iter__():
            (
                buffer,
                tool,
                current_tool_call,
                current_tool_type,
                starting_new,
            ) = _handle_chunk(
                buffer,
                chunk,
                current_tool_call,
                current_tool_type,
                self._allow_partial,
            )
            if tool is not None:
                yield chunk, tool
            elif current_tool_type is None:
                yield chunk, None
            if starting_new and self._allow_partial:
                yield chunk, None
            if chunk.chunk.type == "content_block_stop":
                content.append(chunk.chunk.content_block.model_dump())
        if content:
            self.message_param["content"] = content  # type: ignore

    @classmethod
    def tool_message_params(cls, tools_and_outputs: list[tuple[AnthropicTool, str]]):
        """Returns the tool message parameters for tool call results."""
        return AnthropicCallResponse.tool_message_params(tools_and_outputs)

tool_message_params(tools_and_outputs) classmethod

Returns the tool message parameters for tool call results.

Source code in mirascope/anthropic/types.py
@classmethod
def tool_message_params(cls, tools_and_outputs: list[tuple[AnthropicTool, str]]):
    """Returns the tool message parameters for tool call results."""
    return AnthropicCallResponse.tool_message_params(tools_and_outputs)
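
Usage sketch (illustrative, not from the library source), mirroring the async example above: wrap the chunk generator from AnthropicCall.stream() to iterate over (chunk, tool) pairs and recover the final assistant message.

```python
from mirascope.anthropic import AnthropicCall
from mirascope.anthropic.types import AnthropicStream


class BookRecommender(AnthropicCall):
    prompt_template = "Please recommend some books."


stream = AnthropicStream(BookRecommender().stream())
for chunk, tool in stream:
    print(chunk.content, end="", flush=True)
print()
print(stream.message_param)  # reconstructed assistant message
```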

AnthropicTool

Bases: BaseTool[ToolUseBlock]

A base class for easy use of tools with the Anthropic Claude client.

AnthropicTool internally handles the logic that allows you to use tools with simple calls such as AnthropicCallResponse.tool or AnthropicTool.fn, as seen in the example below.

Example:

from mirascope import AnthropicCall, AnthropicCallParams


def animal_matcher(fav_food: str, fav_color: str) -> str:
    """Tells you your most likely favorite animal from personality traits.

    Args:
        fav_food: your favorite food.
        fav_color: your favorite color.

    Returns:
        The animal most likely to be your favorite based on traits.
    """
    return "Your favorite animal is the best one, a frog."


class AnimalMatcher(AnthropicCall):
    prompt_template = """
    Tell me my favorite animal if my favorite food is {food} and my
    favorite color is {color}.
    """

    food: str
    color: str

    call_params = AnthropicCallParams(tools=[animal_matcher])


response = AnimalMatcher(food="pizza", color="red").call()
tool = response.tool
print(tool.fn(**tool.args))
#> Your favorite animal is the best one, a frog.
Source code in mirascope/anthropic/tools.py
class AnthropicTool(BaseTool[ToolUseBlock]):
    '''A base class for easy use of tools with the Anthropic Claude client.

    `AnthropicTool` internally handles the logic that allows you to use tools with
    simple calls such as `AnthropicCallResponse.tool` or `AnthropicTool.fn`, as seen in
    the example below.

    Example:

    ```python
    from mirascope import AnthropicCall, AnthropicCallParams


    def animal_matcher(fav_food: str, fav_color: str) -> str:
        """Tells you your most likely favorite animal from personality traits.

        Args:
            fav_food: your favorite food.
            fav_color: your favorite color.

        Returns:
            The animal most likely to be your favorite based on traits.
        """
        return "Your favorite animal is the best one, a frog."


    class AnimalMatcher(AnthropicCall):
        prompt_template = """
        Tell me my favorite animal if my favorite food is {food} and my
        favorite color is {color}.
        """

        food: str
        color: str

        call_params = AnthropicCallParams(tools=[animal_matcher])


    response = AnimalMatcher(food="pizza", color="red").call()
    tool = response.tool
    print(tool.fn(**tool.args))
    #> Your favorite animal is the best one, a frog.
    ```
    '''

    @classmethod
    def tool_schema(cls) -> ToolParam:
        """Constructs JSON tool schema for use with Anthropic's Claude API."""
        schema = super().tool_schema()
        return ToolParam(
            input_schema=schema["parameters"],
            name=schema["name"],
            description=schema["description"],
        )

    @classmethod
    def from_tool_call(cls, tool_call: ToolUseBlock) -> AnthropicTool:
        """Extracts an instance of the tool constructed from a tool call response.

        Given the tool call contents in a `Message` from an Anthropic call response,
        this method parses out the arguments of the tool call and creates an
        `AnthropicTool` instance from them.

        Args:
            tool_call: The `ToolUseBlock` tool call from the response content.

        Returns:
            An instance of the tool constructed from the tool call.

        Raises:
            ValidationError: if the tool call doesn't match the tool schema.
        """
        model_json = tool_call.input
        model_json["tool_call"] = tool_call.model_dump()  # type: ignore
        return cls.model_validate(model_json)

    @classmethod
    def from_model(cls, model: Type[BaseModel]) -> Type[AnthropicTool]:
        """Constructs a `AnthropicTool` type from a `BaseModel` type."""
        return convert_base_model_to_tool(model, AnthropicTool)

    @classmethod
    def from_fn(cls, fn: Callable) -> Type[AnthropicTool]:
        """Constructs a `AnthropicTool` type from a function."""
        return convert_function_to_tool(fn, AnthropicTool)

    @classmethod
    def from_base_type(cls, base_type: Type[BaseType]) -> Type[AnthropicTool]:
        """Constructs a `AnthropicTool` type from a `BaseType` type."""
        return convert_base_type_to_tool(base_type, AnthropicTool)

from_base_type(base_type) classmethod

Constructs a AnthropicTool type from a BaseType type.

Source code in mirascope/anthropic/tools.py
@classmethod
def from_base_type(cls, base_type: Type[BaseType]) -> Type[AnthropicTool]:
    """Constructs a `AnthropicTool` type from a `BaseType` type."""
    return convert_base_type_to_tool(base_type, AnthropicTool)

from_fn(fn) classmethod

Constructs a AnthropicTool type from a function.

Source code in mirascope/anthropic/tools.py
@classmethod
def from_fn(cls, fn: Callable) -> Type[AnthropicTool]:
    """Constructs a `AnthropicTool` type from a function."""
    return convert_function_to_tool(fn, AnthropicTool)

from_model(model) classmethod

Constructs a AnthropicTool type from a BaseModel type.

Source code in mirascope/anthropic/tools.py
@classmethod
def from_model(cls, model: Type[BaseModel]) -> Type[AnthropicTool]:
    """Constructs a `AnthropicTool` type from a `BaseModel` type."""
    return convert_base_model_to_tool(model, AnthropicTool)

from_tool_call(tool_call) classmethod

Extracts an instance of the tool constructed from a tool call response.

Given the tool call contents in a Message from an Anthropic call response, this method parses out the arguments of the tool call and creates an AnthropicTool instance from them.

Parameters:

Name Type Description Default
tool_call ToolUseBlock

The ToolUseBlock tool call from the response content.

required

Returns:

Type Description
AnthropicTool

An instance of the tool constructed from the tool call.

Raises:

Type Description
ValidationError

if the tool call doesn't match the tool schema.

Source code in mirascope/anthropic/tools.py
@classmethod
def from_tool_call(cls, tool_call: ToolUseBlock) -> AnthropicTool:
    """Extracts an instance of the tool constructed from a tool call response.

    Given the tool call contents in a `Message` from an Anthropic call response,
    this method parses out the arguments of the tool call and creates an
    `AnthropicTool` instance from them.

    Args:
        tool_call: The `ToolUseBlock` tool call from the response content.

    Returns:
        An instance of the tool constructed from the tool call.

    Raises:
        ValidationError: if the tool call doesn't match the tool schema.
    """
    model_json = tool_call.input
    model_json["tool_call"] = tool_call.model_dump()  # type: ignore
    return cls.model_validate(model_json)

tool_schema() classmethod

Constructs JSON tool schema for use with Anthropic's Claude API.

Source code in mirascope/anthropic/tools.py
@classmethod
def tool_schema(cls) -> ToolParam:
    """Constructs JSON tool schema for use with Anthropic's Claude API."""
    schema = super().tool_schema()
    return ToolParam(
        input_schema=schema["parameters"],
        name=schema["name"],
        description=schema["description"],
    )
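
Illustrative sketch of the schema produced by tool_schema() for a tool defined by subclassing AnthropicTool directly; the field schema ends up under input_schema as Anthropic's ToolParam expects.

```python
from pydantic import Field

from mirascope.anthropic.tools import AnthropicTool


class FormatBook(AnthropicTool):
    """Returns a nicely formatted book recommendation."""

    title: str = Field(..., description="The title of the book.")
    author: str = Field(..., description="The author of the book.")


print(FormatBook.tool_schema())
# e.g. {'input_schema': {...}, 'name': 'FormatBook',
#       'description': 'Returns a nicely formatted book recommendation.'}
```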

AnthropicToolStream

Bases: BaseToolStream[AnthropicCallResponseChunk, AnthropicTool]

A base class for streaming tools from response chunks.

Source code in mirascope/anthropic/types.py
class AnthropicToolStream(BaseToolStream[AnthropicCallResponseChunk, AnthropicTool]):
    """A base class for streaming tools from response chunks."""

    @classmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[AnthropicCallResponseChunk, None, None],
        allow_partial: Literal[True],
    ) -> Generator[Optional[AnthropicTool], None, None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[AnthropicCallResponseChunk, None, None],
        allow_partial: Literal[False],
    ) -> Generator[AnthropicTool, None, None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[AnthropicCallResponseChunk, None, None],
        allow_partial: bool = False,
    ) -> Generator[Optional[AnthropicTool], None, None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    def from_stream(cls, stream, allow_partial=False):
        """Yields partial tools from the given stream of chunks.

        Args:
            stream: The generator of chunks from which to stream tools.
            allow_partial: Whether to allow partial tools.

        Raises:
            RuntimeError: if a tool in the stream is of an unknown type.
        """
        cls._check_version_for_partial(allow_partial)
        current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
        current_tool_type = None
        buffer = ""
        for chunk in stream:
            (
                buffer,
                tool,
                current_tool_call,
                current_tool_type,
                starting_new,
            ) = _handle_chunk(
                buffer,
                chunk,
                current_tool_call,
                current_tool_type,
                allow_partial,
            )
            if tool is not None:
                yield tool
            if starting_new and allow_partial:
                yield None

    @classmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[AnthropicCallResponseChunk, None],
        allow_partial: Literal[True],
    ) -> AsyncGenerator[Optional[AnthropicTool], None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[AnthropicCallResponseChunk, None],
        allow_partial: Literal[False],
    ) -> AsyncGenerator[AnthropicTool, None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[AnthropicCallResponseChunk, None],
        allow_partial: bool = False,
    ) -> AsyncGenerator[Optional[AnthropicTool], None]:
        yield ...  # type: ignore  # pragma: no cover

    @classmethod
    async def from_async_stream(cls, async_stream, allow_partial=False):
        """Yields partial tools from the given stream of chunks asynchronously.

        Args:
            stream: The async generator of chunks from which to stream tools.
            allow_partial: Whether to allow partial tools.

        Raises:
            RuntimeError: if a tool in the stream is of an unknown type.
        """
        cls._check_version_for_partial(allow_partial)
        current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
        current_tool_type = None
        buffer = ""
        async for chunk in async_stream:
            (
                buffer,
                tool,
                current_tool_call,
                current_tool_type,
                starting_new,
            ) = _handle_chunk(
                buffer,
                chunk,
                current_tool_call,
                current_tool_type,
                allow_partial,
            )
            if tool is not None:
                yield tool
            if starting_new and allow_partial:
                yield None

from_async_stream(async_stream, allow_partial=False) async classmethod

Yields partial tools from the given stream of chunks asynchronously.

Parameters:

Name Type Description Default
stream

The async generator of chunks from which to stream tools.

required
allow_partial

Whether to allow partial tools.

False

Raises:

Type Description
RuntimeError

if a tool in the stream is of an unknown type.

Source code in mirascope/anthropic/types.py
@classmethod
async def from_async_stream(cls, async_stream, allow_partial=False):
    """Yields partial tools from the given stream of chunks asynchronously.

    Args:
        stream: The async generator of chunks from which to stream tools.
        allow_partial: Whether to allow partial tools.

    Raises:
        RuntimeError: if a tool in the stream is of an unknown type.
    """
    cls._check_version_for_partial(allow_partial)
    current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
    current_tool_type = None
    buffer = ""
    async for chunk in async_stream:
        (
            buffer,
            tool,
            current_tool_call,
            current_tool_type,
            starting_new,
        ) = _handle_chunk(
            buffer,
            chunk,
            current_tool_call,
            current_tool_type,
            allow_partial,
        )
        if tool is not None:
            yield tool
        if starting_new and allow_partial:
            yield None

from_stream(stream, allow_partial=False) classmethod

Yields partial tools from the given stream of chunks.

Parameters:

Name Type Description Default
stream

The generator of chunks from which to stream tools.

required
allow_partial

Whether to allow partial tools.

False

Raises:

Type Description
RuntimeError

if a tool in the stream is of an unknown type.

Source code in mirascope/anthropic/types.py
@classmethod
def from_stream(cls, stream, allow_partial=False):
    """Yields partial tools from the given stream of chunks.

    Args:
        stream: The generator of chunks from which to stream tools.
        allow_partial: Whether to allow partial tools.

    Raises:
        RuntimeError: if a tool in the stream is of an unknown type.
    """
    cls._check_version_for_partial(allow_partial)
    current_tool_call = ToolUseBlock(id="", input={}, name="", type="tool_use")
    current_tool_type = None
    buffer = ""
    for chunk in stream:
        (
            buffer,
            tool,
            current_tool_call,
            current_tool_type,
            starting_new,
        ) = _handle_chunk(
            buffer,
            chunk,
            current_tool_call,
            current_tool_type,
            allow_partial,
        )
        if tool is not None:
            yield tool
        if starting_new and allow_partial:
            yield None
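
Usage sketch reusing the animal_matcher example from the AnthropicTool section above. With allow_partial=True (which requires pydantic>=2.7, per _check_version_for_partial), partially constructed tools are yielded as their arguments stream in, with None marking the start of a new tool.

```python
from mirascope.anthropic import AnthropicCall, AnthropicCallParams
from mirascope.anthropic.types import AnthropicToolStream


def animal_matcher(fav_food: str, fav_color: str) -> str:
    """Tells you your most likely favorite animal from personality traits.

    Args:
        fav_food: your favorite food.
        fav_color: your favorite color.
    """
    return "Your favorite animal is the best one, a frog."


class AnimalMatcher(AnthropicCall):
    prompt_template = """
    Tell me my favorite animal if my favorite food is {food} and my
    favorite color is {color}.
    """

    food: str
    color: str

    call_params = AnthropicCallParams(tools=[animal_matcher])


stream = AnimalMatcher(food="pizza", color="red").stream()
for tool in AnthropicToolStream.from_stream(stream, allow_partial=True):
    if tool is not None:
        print(tool.args)  # arguments filled in so far
```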

BaseAsyncStream

Bases: Generic[BaseCallResponseChunkT, UserMessageParamT, AssistantMessageParamT, BaseToolT], ABC

A base class for async streaming responses from LLMs.

Source code in mirascope/base/types.py
class BaseAsyncStream(
    Generic[
        BaseCallResponseChunkT,
        UserMessageParamT,
        AssistantMessageParamT,
        BaseToolT,
    ],
    ABC,
):
    """A base class for async streaming responses from LLMs."""

    stream: AsyncGenerator[BaseCallResponseChunkT, None]
    message_param_type: type[AssistantMessageParamT]

    cost: Optional[float] = None
    user_message_param: Optional[UserMessageParamT] = None
    message_param: AssistantMessageParamT

    def __init__(
        self,
        stream: AsyncGenerator[BaseCallResponseChunkT, None],
        message_param_type: type[AssistantMessageParamT],
    ):
        """Initializes an instance of `BaseAsyncStream`."""
        self.stream = stream
        self.message_param_type = message_param_type

    def __aiter__(
        self,
    ) -> AsyncGenerator[tuple[BaseCallResponseChunkT, Optional[BaseToolT]], None]:
        """Iterates over the stream and stores useful information."""

        async def generator():
            content = ""
            async for chunk in self.stream:
                content += chunk.content
                if chunk.cost is not None:
                    self.cost = chunk.cost
                yield chunk, None
                self.user_message_param = chunk.user_message_param
            kwargs = {"role": "assistant"}
            if "message" in self.message_param_type.__annotations__:
                kwargs["message"] = content
            else:
                kwargs["content"] = content
            self.message_param = self.message_param_type(**kwargs)

        return generator()

BaseCallParams

Bases: BaseModel, Generic[BaseToolT]

The parameters with which to make a call.

Source code in mirascope/base/types.py
class BaseCallParams(BaseModel, Generic[BaseToolT]):
    """The parameters with which to make a call."""

    model: str
    tools: Optional[list[Union[Callable, Type[BaseToolT]]]] = None

    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

    def kwargs(
        self,
        tool_type: Optional[Type[BaseToolT]] = None,
        exclude: Optional[set[str]] = None,
    ) -> dict[str, Any]:
        """Returns all parameters for the call as a keyword arguments dictionary."""
        extra_exclude = {"tools"}
        exclude = extra_exclude if exclude is None else exclude.union(extra_exclude)
        kwargs = {
            key: value
            for key, value in self.model_dump(exclude=exclude).items()
            if value is not None
        }
        if not self.tools or tool_type is None:
            return kwargs
        kwargs["tools"] = [
            tool if isclass(tool) else convert_function_to_tool(tool, tool_type)
            for tool in self.tools
        ]
        return kwargs

kwargs(tool_type=None, exclude=None)

Returns all parameters for the call as a keyword arguments dictionary.

Source code in mirascope/base/types.py
def kwargs(
    self,
    tool_type: Optional[Type[BaseToolT]] = None,
    exclude: Optional[set[str]] = None,
) -> dict[str, Any]:
    """Returns all parameters for the call as a keyword arguments dictionary."""
    extra_exclude = {"tools"}
    exclude = extra_exclude if exclude is None else exclude.union(extra_exclude)
    kwargs = {
        key: value
        for key, value in self.model_dump(exclude=exclude).items()
        if value is not None
    }
    if not self.tools or tool_type is None:
        return kwargs
    kwargs["tools"] = [
        tool if isclass(tool) else convert_function_to_tool(tool, tool_type)
        for tool in self.tools
    ]
    return kwargs
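
Illustrative sketch of the tools handling: plain functions passed via tools are converted into the given tool type when kwargs() is called with a tool_type (shown here with the Anthropic types documented above; the get_weather function is hypothetical).

```python
from mirascope.anthropic import AnthropicCallParams
from mirascope.anthropic.tools import AnthropicTool


def get_weather(city: str) -> str:
    """Returns the weather for a city.

    Args:
        city: the city to look up.
    """
    return f"The weather in {city} is sunny."


params = AnthropicCallParams(model="claude-3-haiku-20240307", tools=[get_weather])
kwargs = params.kwargs(tool_type=AnthropicTool)
print(kwargs["tools"])  # a list of `AnthropicTool` subclasses, one per function
```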

BaseCallResponse

Bases: BaseModel, Generic[ResponseT, BaseToolT], ABC

A base abstract interface for LLM call responses.

Attributes:

Name Type Description
response ResponseT

The original response from whichever model response this wraps.

Source code in mirascope/base/types.py
class BaseCallResponse(BaseModel, Generic[ResponseT, BaseToolT], ABC):
    """A base abstract interface for LLM call responses.

    Attributes:
        response: The original response from whichever model response this wraps.
    """

    response: ResponseT
    user_message_param: Optional[Any] = None
    tool_types: Optional[list[Type[BaseToolT]]] = None
    start_time: float  # The start time of the completion in ms
    end_time: float  # The end time of the completion in ms
    cost: Optional[float] = None  # The cost of the completion in dollars

    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

    @property
    @abstractmethod
    def message_param(self) -> Any:
        """Returns the assistant's response as a message parameter."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def tools(self) -> Optional[list[BaseToolT]]:
        """Returns the tools for the 0th choice message."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def tool(self) -> Optional[BaseToolT]:
        """Returns the 0th tool for the 0th choice message."""
        ...  # pragma: no cover

    @classmethod
    @abstractmethod
    def tool_message_params(
        cls, tools_and_outputs: list[tuple[BaseToolT, Any]]
    ) -> list[Any]:
        """Returns the tool message parameters for tool call results."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def content(self) -> str:
        """Should return the string content of the response.

        If there are multiple choices in a response, this method should select the 0th
        choice and return its string content.

        If there is no string content (e.g. when using tools), this method must return
        the empty string.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def finish_reasons(self) -> Union[None, list[str]]:
        """Should return the finish reasons of the response.

        If there is no finish reason, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def model(self) -> Optional[str]:
        """Should return the name of the response model."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def id(self) -> Optional[str]:
        """Should return the id of the response."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def usage(self) -> Any:
        """Should return the usage of the response.

        If there is no usage, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def input_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of input tokens.

        If there is no input_tokens, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def output_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of output tokens.

        If there is no output_tokens, this method must return None.
        """
        ...  # pragma: no cover

content: str abstractmethod property

Should return the string content of the response.

If there are multiple choices in a response, this method should select the 0th choice and return its string content.

If there is no string content (e.g. when using tools), this method must return the empty string.

finish_reasons: Union[None, list[str]] abstractmethod property

Should return the finish reasons of the response.

If there is no finish reason, this method must return None.

id: Optional[str] abstractmethod property

Should return the id of the response.

input_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of input tokens.

If there is no input_tokens, this method must return None.

message_param: Any abstractmethod property

Returns the assistant's response as a message parameter.

model: Optional[str] abstractmethod property

Should return the name of the response model.

output_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of output tokens.

If there is no output_tokens, this method must return None.

tool: Optional[BaseToolT] abstractmethod property

Returns the 0th tool for the 0th choice message.

tools: Optional[list[BaseToolT]] abstractmethod property

Returns the tools for the 0th choice message.

usage: Any abstractmethod property

Should return the usage of the response.

If there is no usage, this method must return None.

tool_message_params(tools_and_outputs) abstractmethod classmethod

Returns the tool message parameters for tool call results.

Source code in mirascope/base/types.py
@classmethod
@abstractmethod
def tool_message_params(
    cls, tools_and_outputs: list[tuple[BaseToolT, Any]]
) -> list[Any]:
    """Returns the tool message parameters for tool call results."""
    ...  # pragma: no cover

BaseCallResponseChunk

Bases: BaseModel, Generic[ChunkT, BaseToolT], ABC

A base abstract interface for LLM streaming response chunks.

Attributes:

Name Type Description
response

The original response chunk from whichever model response this wraps.

Source code in mirascope/base/types.py
class BaseCallResponseChunk(BaseModel, Generic[ChunkT, BaseToolT], ABC):
    """A base abstract interface for LLM streaming response chunks.

    Attributes:
        response: The original response chunk from whichever model response this wraps.
    """

    chunk: ChunkT
    user_message_param: Optional[Any] = None
    tool_types: Optional[list[Type[BaseToolT]]] = None
    cost: Optional[float] = None  # The cost of the completion in dollars
    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

    @property
    @abstractmethod
    def content(self) -> str:
        """Should return the string content of the response chunk.

        If there are multiple choices in a chunk, this method should select the 0th
        choice and return its string content.

        If there is no string content (e.g. when using tools), this method must return
        the empty string.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def model(self) -> Optional[str]:
        """Should return the name of the response model."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def id(self) -> Optional[str]:
        """Should return the id of the response."""
        ...  # pragma: no cover

    @property
    @abstractmethod
    def finish_reasons(self) -> Union[None, list[str]]:
        """Should return the finish reasons of the response.

        If there is no finish reason, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def usage(self) -> Any:
        """Should return the usage of the response.

        If there is no usage, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def input_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of input tokens.

        If there is no input_tokens, this method must return None.
        """
        ...  # pragma: no cover

    @property
    @abstractmethod
    def output_tokens(self) -> Optional[Union[int, float]]:
        """Should return the number of output tokens.

        If there is no output_tokens, this method must return None.
        """
        ...  # pragma: no cover

content: str abstractmethod property

Should return the string content of the response chunk.

If there are multiple choices in a chunk, this method should select the 0th choice and return its string content.

If there is no string content (e.g. when using tools), this method must return the empty string.

finish_reasons: Union[None, list[str]] abstractmethod property

Should return the finish reasons of the response.

If there is no finish reason, this method must return None.

id: Optional[str] abstractmethod property

Should return the id of the response.

input_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of input tokens.

If there is no input_tokens, this method must return None.

model: Optional[str] abstractmethod property

Should return the name of the response model.

output_tokens: Optional[Union[int, float]] abstractmethod property

Should return the number of output tokens.

If there is no output_tokens, this method must return None.

usage: Any abstractmethod property

Should return the usage of the response.

If there is no usage, this method must return None.

BaseStream

Bases: Generic[BaseCallResponseChunkT, UserMessageParamT, AssistantMessageParamT, BaseToolT], ABC

A base class for streaming responses from LLMs.

Source code in mirascope/base/types.py
class BaseStream(
    Generic[
        BaseCallResponseChunkT,
        UserMessageParamT,
        AssistantMessageParamT,
        BaseToolT,
    ],
    ABC,
):
    """A base class for streaming responses from LLMs."""

    stream: Generator[BaseCallResponseChunkT, None, None]
    message_param_type: type[AssistantMessageParamT]

    cost: Optional[float] = None
    user_message_param: Optional[UserMessageParamT] = None
    message_param: AssistantMessageParamT

    def __init__(
        self,
        stream: Generator[BaseCallResponseChunkT, None, None],
        message_param_type: type[AssistantMessageParamT],
    ):
        """Initializes an instance of `BaseStream`."""
        self.stream = stream
        self.message_param_type = message_param_type

    def __iter__(
        self,
    ) -> Generator[tuple[BaseCallResponseChunkT, Optional[BaseToolT]], None, None]:
        """Iterator over the stream and stores useful information."""
        content = ""
        for chunk in self.stream:
            content += chunk.content
            if chunk.cost is not None:
                self.cost = chunk.cost
            yield chunk, None
            self.user_message_param = chunk.user_message_param
        kwargs = {"role": "assistant"}
        if "message" in self.message_param_type.__annotations__:
            kwargs["message"] = content
        else:
            kwargs["content"] = content
        self.message_param = self.message_param_type(**kwargs)

BaseToolStream

Bases: BaseModel, Generic[BaseCallResponseChunkT, BaseToolT], ABC

A base class for streaming tools from response chunks.

Source code in mirascope/base/types.py
class BaseToolStream(BaseModel, Generic[BaseCallResponseChunkT, BaseToolT], ABC):
    """A base class for streaming tools from response chunks."""

    @classmethod
    @abstractmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[BaseCallResponseChunkT, None, None],
        allow_partial: Literal[True],
    ) -> Generator[Optional[BaseToolT], None, None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[BaseCallResponseChunkT, None, None],
        allow_partial: Literal[False],
    ) -> Generator[BaseToolT, None, None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    @overload
    def from_stream(
        cls,
        stream: Generator[BaseCallResponseChunkT, None, None],
        allow_partial: bool,
    ) -> Generator[Optional[BaseToolT], None, None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    def from_stream(cls, stream, allow_partial=False):
        """Yields tools from the given stream of chunks."""
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[BaseCallResponseChunkT, None],
        allow_partial: Literal[True],
    ) -> AsyncGenerator[Optional[BaseToolT], None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[BaseCallResponseChunkT, None],
        allow_partial: Literal[False],
    ) -> AsyncGenerator[BaseToolT, None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    @overload
    async def from_async_stream(
        cls,
        stream: AsyncGenerator[BaseCallResponseChunkT, None],
        allow_partial: bool,
    ) -> AsyncGenerator[Optional[BaseToolT], None]:
        yield ...  # type: ignore # pragma: no cover

    @classmethod
    @abstractmethod
    async def from_async_stream(cls, async_stream, allow_partial=False):
        """Yields tools asynchronously from the given async stream of chunks."""
        yield ...  # type: ignore # pragma: no cover

    ############################## PRIVATE METHODS ###################################

    @classmethod
    def _check_version_for_partial(cls, partial: bool) -> None:
        """Checks that the correct version of Pydantic is installed to use partial."""
        if partial and int(pydantic.__version__.split(".")[1]) < 7:
            raise ImportError(
                "You must have `pydantic==^2.7.0` to stream tools. "
                f"Current version: {pydantic.__version__}"
            )  # pragma: no cover

from_async_stream(async_stream, allow_partial=False) abstractmethod async classmethod

Yields tools asynchronously from the given async stream of chunks.

Source code in mirascope/base/types.py
@classmethod
@abstractmethod
async def from_async_stream(cls, async_stream, allow_partial=False):
    """Yields tools asynchronously from the given async stream of chunks."""
    yield ...  # type: ignore # pragma: no cover

from_stream(stream, allow_partial=False) abstractmethod classmethod

Yields tools from the given stream of chunks.

Source code in mirascope/base/types.py
@classmethod
@abstractmethod
def from_stream(cls, stream, allow_partial=False):
    """Yields tools from the given stream of chunks."""
    yield ...  # type: ignore # pragma: no cover

partial(wrapped_class)

Generate a new class with all attributes optional.

Notes

This will wrap a class inheriting from BaseModel and will recursively convert all its attributes and its children's attributes to optionals.

Example:

@partial
class User(BaseModel):
    name: str

user = User(name="None")
Source code in mirascope/partial.py
def partial(wrapped_class: type[Model]) -> type[Model]:
    """Generate a new class with all attributes optionals.

    Notes:
        This will wrap a class inheriting from BaseModel and will recursively
        convert all its attributes and its children's attributes to optionals.

    Example:

    ```python
    @partial
    class User(BaseModel):
        name: str

    user = User(name="None")
    ```
    """

    def _make_field_optional(
        field: FieldInfo,
    ) -> tuple[object, FieldInfo]:
        tmp_field = deepcopy(field)

        annotation = field.annotation
        # If the field is a BaseModel, then recursively convert its
        # attributes to optionals.
        if type(annotation) is type(BaseModel):
            tmp_field.annotation = Optional[partial(annotation)]  # type: ignore
            tmp_field.default = {}
        else:
            tmp_field.annotation = Optional[field.annotation]  # type: ignore[assignment]
            tmp_field.default = None
        return tmp_field.annotation, tmp_field

    return create_model(  # type: ignore[no-any-return, call-overload]
        f"Partial{wrapped_class.__name__}",
        __base__=wrapped_class,
        __module__=wrapped_class.__module__,
        __doc__=wrapped_class.__doc__,
        **{
            field_name: _make_field_optional(field_info)
            for field_name, field_info in wrapped_class.model_fields.items()
        },
    )
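
Illustrative sketch of what partial produces: every field becomes optional with a None default, which is what allows partially streamed tool arguments to validate before they are complete.

```python
from pydantic import BaseModel

from mirascope.partial import partial


class Book(BaseModel):
    title: str
    author: str


PartialBook = partial(Book)

print(PartialBook())              # title=None author=None
print(PartialBook(title="Dune"))  # title='Dune' author=None
```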