
mirascope.core.anthropic.stream

The AnthropicStream class, a convenience wrapper around streaming LLM calls.

Usage Documentation

Streams

AnthropicStream

AnthropicStream(
    *,
    stream: (
        Generator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
            None,
        ]
        | AsyncGenerator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
        ]
    ),
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT]
)

Bases: BaseStream[AnthropicCallResponse, AnthropicCallResponseChunk, MessageParam, MessageParam, MessageParam, MessageParam, AnthropicTool, ToolParam, AnthropicDynamicConfig, AnthropicCallParams, FinishReason]

A convenience class for streaming Anthropic LLM calls.

Example:

from mirascope.core import prompt_template
from mirascope.core.anthropic import anthropic_call


@anthropic_call("claude-3-5-sonnet-20240620", stream=True)
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str):
    ...


stream = recommend_book("fantasy")  # returns `AnthropicStream` instance
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)
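
The constructor signature above accepts either a synchronous or an asynchronous generator, so the same class also backs async streaming. A minimal sketch of the async variant, assuming the decorator is applied to an async function in the same way as the synchronous example above (the function name recommend_book_async is illustrative):

import asyncio

from mirascope.core import prompt_template
from mirascope.core.anthropic import anthropic_call


@anthropic_call("claude-3-5-sonnet-20240620", stream=True)
@prompt_template("Recommend a {genre} book")
async def recommend_book_async(genre: str): ...


async def main():
    stream = await recommend_book_async("fantasy")  # also an `AnthropicStream` instance
    async for chunk, _ in stream:
        print(chunk.content, end="", flush=True)


asyncio.run(main())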
Source code in mirascope/core/base/stream.py
def __init__(
    self,
    *,
    stream: Generator[tuple[_BaseCallResponseChunkT, _BaseToolT | None], None, None]
    | AsyncGenerator[
        tuple[_BaseCallResponseChunkT, _BaseToolT | None],
        None,
    ],
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT],
) -> None:
    """Initializes an instance of `BaseStream`."""
    self.content = ""
    self.stream = stream
    self.metadata = metadata
    self.tool_types = tool_types
    self.call_response_type = call_response_type
    self.model = model
    self.prompt_template = prompt_template
    self.fn_args = fn_args
    self.dynamic_config = dynamic_config
    self.messages = messages
    self.call_params = call_params
    self.call_kwargs = call_kwargs
    self.user_message_param = get_possible_user_message_param(messages)  # pyright: ignore [reportAttributeAccessIssue]

cost property

cost: float | None

Returns the cost of the call.
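
The cost is derived from the token usage accumulated while streaming, so it is only meaningful once the stream has been consumed, and it may be None when usage or pricing information is unavailable. A short sketch reusing the recommend_book example above:

stream = recommend_book("fantasy")
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)

# Read the cost only after iteration; it may be None if usage or
# pricing information is unavailable for the model.
print(stream.cost)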

construct_call_response

construct_call_response() -> AnthropicCallResponse

Constructs the call response from a consumed AnthropicStream.

Raises:

ValueError: if the stream has not yet been consumed.
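
For example, a short sketch reusing recommend_book from the example above: the stream must be fully consumed before the call response can be constructed.

stream = recommend_book("fantasy")
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)

# Raises ValueError if called before the stream has been consumed.
call_response = stream.construct_call_response()
print(call_response.content)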

Source code in mirascope/core/anthropic/stream.py
def construct_call_response(self) -> AnthropicCallResponse:
    """Constructs the call response from a consumed AnthropicStream.

    Raises:
        ValueError: if the stream has not yet been consumed.
    """
    if not hasattr(self, "message_param"):
        raise ValueError(
            "No stream response, check if the stream has been consumed."
        )
    usage = Usage(
        input_tokens=int(self.input_tokens or 0),
        output_tokens=int(self.output_tokens or 0),
    )

    content_blocks: list[ContentBlock] = []

    if isinstance(self.message_param["content"], str):
        content_blocks.append(
            TextBlock(text=self.message_param["content"], type="text")
        )
    else:
        for content in self.message_param["content"]:
            content_type = (
                content.type if isinstance(content, BaseModel) else content["type"]
            )

            if content_type == "text":
                content_blocks.append(TextBlock.model_validate(content))
            elif content_type == "tool_use":
                content_blocks.append(ToolUseBlock.model_validate(content))
    completion = Message(
        id=self.id if self.id else "",
        content=content_blocks,
        model=self.model,
        role="assistant",
        stop_reason=self.finish_reasons[0] if self.finish_reasons else None,
        stop_sequence=None,
        type="message",
        usage=usage,
    )
    return AnthropicCallResponse(
        metadata=self.metadata,
        response=completion,
        tool_types=self.tool_types,
        prompt_template=self.prompt_template,
        fn_args=self.fn_args if self.fn_args else {},
        dynamic_config=self.dynamic_config,
        messages=self.messages,
        call_params=self.call_params,
        call_kwargs=self.call_kwargs,
        user_message_param=self.user_message_param,
        start_time=self.start_time,
        end_time=self.end_time,
    )