
gemini

A module for interacting with Google's Gemini models.

GeminiCall

Bases: BaseCall[GeminiCallResponse, GeminiCallResponseChunk, GeminiTool]

A class for prompting Google's Gemini Chat API.

This prompt supports the message types: USER, MODEL, TOOL

Example:

from google.generativeai import configure  # type: ignore
from mirascope.gemini import GeminiCall

configure(api_key="YOUR_API_KEY")


class BookRecommender(GeminiCall):
    prompt_template = """
    USER: You're the world's greatest librarian.
    MODEL: Ok, I understand I'm the world's greatest librarian. How can I help?
    USER: Please recommend some {genre} books.
    """

    genre: str


response = BookRecommender(genre="fantasy").call()
print(response.content)
#> As the world's greatest librarian, I am delighted to recommend...
Source code in mirascope/gemini/calls.py
class GeminiCall(BaseCall[GeminiCallResponse, GeminiCallResponseChunk, GeminiTool]):
    '''A class for prompting Google's Gemini Chat API.

    This prompt supports the message types: USER, MODEL, TOOL

    Example:

    ```python
    from google.generativeai import configure  # type: ignore
    from mirascope.gemini import GeminiCall

    configure(api_key="YOUR_API_KEY")


    class BookRecommender(GeminiCall):
        prompt_template = """
        USER: You're the world's greatest librarian.
        MODEL: Ok, I understand I'm the world's greatest librarian. How can I help?
        USER: Please recommend some {genre} books.
        """

        genre: str


    response = BookRecommender(genre="fantasy").call()
    print(response.content)
    #> As the world's greatest librarian, I am delighted to recommend...
    ```
    '''

    call_params: ClassVar[GeminiCallParams] = GeminiCallParams()

    def messages(self) -> ContentsType:
        """Returns the `ContentsType` messages for Gemini `generate_content`.

        Raises:
            ValueError: if the docstring contains an unknown role.
        """
        return [
            {"role": message["role"], "parts": [message["content"]]}
            for message in self._parse_messages(
                [MessageRole.MODEL, MessageRole.USER, MessageRole.TOOL]
            )
        ]

    @retry
    def call(
        self, retries: Union[int, Retrying] = 1, **kwargs: Any
    ) -> GeminiCallResponse:
        """Makes an call to the model using this `GeminiCall` instance.

        Args:
            **kwargs: Additional keyword arguments that will be used for generating the
                response. These will override any existing argument settings in call
                params.

        Returns:
            A `GeminiCallResponse` instance.
        """
        kwargs, tool_types = self._setup(kwargs, GeminiTool)
        model_name = kwargs.pop("model")
        gemini_pro_model = GenerativeModel(model_name=model_name)
        generate_content = gemini_pro_model.generate_content
        if self.call_params.weave is not None:
            generate_content = self.call_params.weave(
                generate_content
            )  # pragma: no cover
        if self.call_params.logfire:
            generate_content = self.call_params.logfire(
                generate_content,
                "gemini",
                response_type=GeminiCallResponse,
                tool_types=tool_types,
            )  # pragma: no cover
            kwargs["model"] = model_name  # pragma: no cover
        start_time = datetime.datetime.now().timestamp() * 1000
        response = generate_content(
            self.messages(),
            stream=False,
            tools=kwargs.pop("tools") if "tools" in kwargs else None,
            **kwargs,
        )
        return GeminiCallResponse(
            response=response,
            tool_types=tool_types,
            start_time=start_time,
            end_time=datetime.datetime.now().timestamp() * 1000,
            cost=None,
        )

    @retry
    async def call_async(
        self, retries: Union[int, AsyncRetrying] = 1, **kwargs: Any
    ) -> GeminiCallResponse:
        """Makes an asynchronous call to the model using this `GeminiCall` instance.

        Args:
            **kwargs: Additional keyword arguments that will be used for generating the
                response. These will override any existing argument settings in call
                params.

        Returns:
            A `GeminiCallResponse` instance.
        """
        kwargs, tool_types = self._setup(kwargs, GeminiTool)
        model_name = kwargs.pop("model")
        gemini_pro_model = GenerativeModel(model_name=model_name)
        generate_content_async = gemini_pro_model.generate_content_async
        if self.call_params.weave is not None:
            generate_content_async = self.call_params.weave(
                generate_content_async
            )  # pragma: no cover
        if self.call_params.logfire:
            generate_content_async = self.call_params.logfire(
                generate_content_async,
                "gemini",
                response_type=GeminiCallResponse,
                tool_types=tool_types,
            )  # pragma: no cover
            kwargs["model"] = model_name  # pragma: no cover
        start_time = datetime.datetime.now().timestamp() * 1000
        response = await generate_content_async(
            self.messages(),
            stream=False,
            tools=kwargs.pop("tools") if "tools" in kwargs else None,
            **kwargs,
        )
        return GeminiCallResponse(
            response=response,
            tool_types=tool_types,
            start_time=start_time,
            end_time=datetime.datetime.now().timestamp() * 1000,
            cost=None,
        )

    @retry
    def stream(
        self, retries: Union[int, Retrying] = 1, **kwargs: Any
    ) -> Generator[GeminiCallResponseChunk, None, None]:
        """Streams the response for a call using this `GeminiCall`.

        Args:
            **kwargs: Additional keyword arguments to pass to the call. These
                will override any existing arguments in `call_params`.

        Yields:
            A `GeminiCallResponseChunk` for each chunk of the response.
        """
        kwargs, tool_types = self._setup(kwargs, GeminiTool)
        model_name = kwargs.pop("model")
        gemini_pro_model = GenerativeModel(model_name=model_name)
        generate_content = gemini_pro_model.generate_content
        if self.call_params.logfire:
            generate_content = self.call_params.logfire(
                generate_content, "gemini", response_chunk_type=GeminiCallResponseChunk
            )  # pragma: no cover
            kwargs["model"] = model_name  # pragma: no cover
        stream = generate_content(
            self.messages(),
            stream=True,
            tools=kwargs.pop("tools") if "tools" in kwargs else None,
            **kwargs,
        )
        for chunk in stream:
            yield GeminiCallResponseChunk(chunk=chunk, tool_types=tool_types)

    @retry
    async def stream_async(
        self, retries: Union[int, AsyncRetrying] = 1, **kwargs: Any
    ) -> AsyncGenerator[GeminiCallResponseChunk, None]:
        """Streams the response asynchronously for a call using this `GeminiCall`.

        Args:
            **kwargs: Additional keyword arguments to pass to the call. These
                will override any existing arguments in `call_params`.

        Yields:
            A `GeminiCallResponseChunk` for each chunk of the response.
        """
        kwargs, tool_types = self._setup(kwargs, GeminiTool)
        model_name = kwargs.pop("model")
        gemini_pro_model = GenerativeModel(model_name=model_name)
        generate_content_async = gemini_pro_model.generate_content_async
        if self.call_params.logfire:
            generate_content_async = self.call_params.logfire(
                generate_content_async,
                "gemini",
                response_chunk_type=GeminiCallResponseChunk,
            )  # pragma: no cover
            kwargs["model"] = model_name  # pragma: no cover
        stream = await generate_content_async(
            self.messages(),
            stream=True,
            tools=kwargs.pop("tools") if "tools" in kwargs else None,
            **kwargs,
        )
        async for chunk in stream:
            yield GeminiCallResponseChunk(chunk=chunk, tool_types=tool_types)

call(retries=1, **kwargs)

Makes a call to the model using this GeminiCall instance.

Parameters:

    **kwargs (Any): Additional keyword arguments that will be used for generating the response. These will override any existing argument settings in call params. Default: {}

Returns:

    GeminiCallResponse: A GeminiCallResponse instance.

Source code in mirascope/gemini/calls.py
@retry
def call(
    self, retries: Union[int, Retrying] = 1, **kwargs: Any
) -> GeminiCallResponse:
    """Makes an call to the model using this `GeminiCall` instance.

    Args:
        **kwargs: Additional keyword arguments that will be used for generating the
            response. These will override any existing argument settings in call
            params.

    Returns:
        A `GeminiCallResponse` instance.
    """
    kwargs, tool_types = self._setup(kwargs, GeminiTool)
    model_name = kwargs.pop("model")
    gemini_pro_model = GenerativeModel(model_name=model_name)
    generate_content = gemini_pro_model.generate_content
    if self.call_params.weave is not None:
        generate_content = self.call_params.weave(
            generate_content
        )  # pragma: no cover
    if self.call_params.logfire:
        generate_content = self.call_params.logfire(
            generate_content,
            "gemini",
            response_type=GeminiCallResponse,
            tool_types=tool_types,
        )  # pragma: no cover
        kwargs["model"] = model_name  # pragma: no cover
    start_time = datetime.datetime.now().timestamp() * 1000
    response = generate_content(
        self.messages(),
        stream=False,
        tools=kwargs.pop("tools") if "tools" in kwargs else None,
        **kwargs,
    )
    return GeminiCallResponse(
        response=response,
        tool_types=tool_types,
        start_time=start_time,
        end_time=datetime.datetime.now().timestamp() * 1000,
        cost=None,
    )
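
As noted in the docstring, keyword arguments passed to `call()` override the matching settings in `call_params` for that invocation. A minimal sketch, reusing the `BookRecommender` class from the `GeminiCall` example above (the specific `generation_config` keys are illustrative):

```python
# Keyword arguments override the corresponding `call_params` entries for this
# call only; `generation_config` is forwarded to `generate_content`.
response = BookRecommender(genre="fantasy").call(
    generation_config={"candidate_count": 1, "temperature": 0.7}
)
print(response.content)
```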

call_async(retries=1, **kwargs) async

Makes an asynchronous call to the model using this GeminiCall instance.

Parameters:

    **kwargs (Any): Additional keyword arguments that will be used for generating the response. These will override any existing argument settings in call params. Default: {}

Returns:

    GeminiCallResponse: A GeminiCallResponse instance.

Source code in mirascope/gemini/calls.py
@retry
async def call_async(
    self, retries: Union[int, AsyncRetrying] = 1, **kwargs: Any
) -> GeminiCallResponse:
    """Makes an asynchronous call to the model using this `GeminiCall` instance.

    Args:
        **kwargs: Additional keyword arguments that will be used for generating the
            response. These will override any existing argument settings in call
            params.

    Returns:
        A `GeminiCallResponse` instance.
    """
    kwargs, tool_types = self._setup(kwargs, GeminiTool)
    model_name = kwargs.pop("model")
    gemini_pro_model = GenerativeModel(model_name=model_name)
    generate_content_async = gemini_pro_model.generate_content_async
    if self.call_params.weave is not None:
        generate_content_async = self.call_params.weave(
            generate_content_async
        )  # pragma: no cover
    if self.call_params.logfire:
        generate_content_async = self.call_params.logfire(
            generate_content_async,
            "gemini",
            response_type=GeminiCallResponse,
            tool_types=tool_types,
        )  # pragma: no cover
        kwargs["model"] = model_name  # pragma: no cover
    start_time = datetime.datetime.now().timestamp() * 1000
    response = await generate_content_async(
        self.messages(),
        stream=False,
        tools=kwargs.pop("tools") if "tools" in kwargs else None,
        **kwargs,
    )
    return GeminiCallResponse(
        response=response,
        tool_types=tool_types,
        start_time=start_time,
        end_time=datetime.datetime.now().timestamp() * 1000,
        cost=None,
    )
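
A short sketch of the asynchronous variant, again assuming the `BookRecommender` class from the example above:

```python
import asyncio


async def recommend_books() -> None:
    # `call_async` awaits `generate_content_async` under the hood.
    response = await BookRecommender(genre="fantasy").call_async()
    print(response.content)


asyncio.run(recommend_books())
```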

messages()

Returns the ContentsType messages for Gemini generate_content.

Raises:

    ValueError: if the docstring contains an unknown role.

Source code in mirascope/gemini/calls.py
def messages(self) -> ContentsType:
    """Returns the `ContentsType` messages for Gemini `generate_content`.

    Raises:
        ValueError: if the docstring contains an unknown role.
    """
    return [
        {"role": message["role"], "parts": [message["content"]]}
        for message in self._parse_messages(
            [MessageRole.MODEL, MessageRole.USER, MessageRole.TOOL]
        )
    ]
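
For illustration, the `BookRecommender` prompt above parses into a list of role/parts dictionaries roughly like the following (a sketch; the exact strings depend on the template):

```python
BookRecommender(genre="fantasy").messages()
# [
#     {"role": "user", "parts": ["You're the world's greatest librarian."]},
#     {"role": "model", "parts": ["Ok, I understand I'm the world's greatest librarian. How can I help?"]},
#     {"role": "user", "parts": ["Please recommend some fantasy books."]},
# ]
```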

stream(retries=1, **kwargs)

Streams the response for a call using this GeminiCall.

Parameters:

    **kwargs (Any): Additional keyword arguments to pass to the call. These will override any existing arguments in call_params. Default: {}

Yields:

    GeminiCallResponseChunk: A GeminiCallResponseChunk for each chunk of the response.

Source code in mirascope/gemini/calls.py
@retry
def stream(
    self, retries: Union[int, Retrying] = 1, **kwargs: Any
) -> Generator[GeminiCallResponseChunk, None, None]:
    """Streams the response for a call using this `GeminiCall`.

    Args:
        **kwargs: Additional keyword arguments to pass to the call. These
            will override any existing arguments in `call_params`.

    Yields:
        A `GeminiCallResponseChunk` for each chunk of the response.
    """
    kwargs, tool_types = self._setup(kwargs, GeminiTool)
    model_name = kwargs.pop("model")
    gemini_pro_model = GenerativeModel(model_name=model_name)
    generate_content = gemini_pro_model.generate_content
    if self.call_params.logfire:
        generate_content = self.call_params.logfire(
            generate_content, "gemini", response_chunk_type=GeminiCallResponseChunk
        )  # pragma: no cover
        kwargs["model"] = model_name  # pragma: no cover
    stream = generate_content(
        self.messages(),
        stream=True,
        tools=kwargs.pop("tools") if "tools" in kwargs else None,
        **kwargs,
    )
    for chunk in stream:
        yield GeminiCallResponseChunk(chunk=chunk, tool_types=tool_types)
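
A sketch of consuming the synchronous stream, printing each chunk's text as it arrives (assumes the `BookRecommender` class from above):

```python
for chunk in BookRecommender(genre="fantasy").stream():
    # Each `chunk` is a `GeminiCallResponseChunk`; `.content` is the chunk text.
    print(chunk.content, end="", flush=True)
print()
```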

stream_async(retries=1, **kwargs) async

Streams the response asynchronously for a call using this GeminiCall.

Parameters:

    **kwargs (Any): Additional keyword arguments to pass to the call. These will override any existing arguments in call_params. Default: {}

Yields:

    AsyncGenerator[GeminiCallResponseChunk, None]: A GeminiCallResponseChunk for each chunk of the response.

Source code in mirascope/gemini/calls.py
@retry
async def stream_async(
    self, retries: Union[int, AsyncRetrying] = 1, **kwargs: Any
) -> AsyncGenerator[GeminiCallResponseChunk, None]:
    """Streams the response asynchronously for a call using this `GeminiCall`.

    Args:
        **kwargs: Additional keyword arguments to pass to the call. These
            will override any existing arguments in `call_params`.

    Yields:
        A `GeminiCallResponseChunk` for each chunk of the response.
    """
    kwargs, tool_types = self._setup(kwargs, GeminiTool)
    model_name = kwargs.pop("model")
    gemini_pro_model = GenerativeModel(model_name=model_name)
    generate_content_async = gemini_pro_model.generate_content_async
    if self.call_params.logfire:
        generate_content_async = self.call_params.logfire(
            generate_content_async,
            "gemini",
            response_chunk_type=GeminiCallResponseChunk,
        )  # pragma: no cover
        kwargs["model"] = model_name  # pragma: no cover
    stream = await generate_content_async(
        self.messages(),
        stream=True,
        tools=kwargs.pop("tools") if "tools" in kwargs else None,
        **kwargs,
    )
    async for chunk in stream:
        yield GeminiCallResponseChunk(chunk=chunk, tool_types=tool_types)
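
And the asynchronous equivalent, again assuming `BookRecommender`:

```python
import asyncio


async def stream_books() -> None:
    async for chunk in BookRecommender(genre="fantasy").stream_async():
        print(chunk.content, end="", flush=True)
    print()


asyncio.run(stream_books())
```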

GeminiCallParams

Bases: BaseCallParams[GeminiTool]

The parameters to use when calling the Gemini API.

Example:

from mirascope.gemini import GeminiCall, GeminiCallParams


class BookRecommender(GeminiCall):
    prompt_template = "Please recommend a {genre} book"

    genre: str

    call_params = GeminiCallParams(
        model="gemini-1.0-pro-001",
        generation_config={"candidate_count": 2},
    )


response = BookRecommender(genre="fantasy").call()
print(response.content)
#> The Name of the Wind
Source code in mirascope/gemini/types.py
class GeminiCallParams(BaseCallParams[GeminiTool]):
    """The parameters to use when calling the Gemini API calls.

    Example:

    ```python
    from mirascope.gemini import GeminiCall, GeminiCallParams


    class BookRecommender(GeminiCall):
        prompt_template = "Please recommend a {genre} book"

        genre: str

        call_params = GeminiCallParams(
            model="gemini-1.0-pro-001",
            generation_config={"candidate_count": 2},
        )


    response = BookRecommender(genre="fantasy").call()
    print(response.content)
    #> The Name of the Wind
    ```
    """

    model: str = "gemini-1.0-pro"
    generation_config: Optional[dict[str, Any]] = {"candidate_count": 1}
    safety_settings: Optional[Any] = None
    request_options: Optional[dict[str, Any]] = None
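
A sketch of a more fully specified `GeminiCallParams`; the `generation_config` and `request_options` keys shown are standard `google.generativeai` options and are illustrative rather than required:

```python
from mirascope.gemini import GeminiCallParams

call_params = GeminiCallParams(
    model="gemini-1.0-pro",
    generation_config={
        "candidate_count": 1,
        "temperature": 0.7,        # sampling temperature
        "max_output_tokens": 512,  # cap on generated tokens
    },
    request_options={"timeout": 30},  # forwarded to the underlying client call
)
```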

GeminiCallResponse

Bases: BaseCallResponse[Union[GenerateContentResponse, AsyncGenerateContentResponse], GeminiTool]

Convenience wrapper around Gemini's GenerateContentResponse.

When using Mirascope's convenience wrappers to interact with Gemini models via GeminiCall, responses using GeminiCall.call() will return a GeminiCallResponse, whereby the implemented properties allow for simpler syntax and a convenient developer experience.

Example:

from mirascope.gemini import GeminiCall


class BookRecommender(GeminiCall):
    prompt_template = "Please recommend a {genre} book"

    genre: str


response = BookRecommender(genre="fantasy").call()
print(response.content)
#> The Lord of the Rings
Source code in mirascope/gemini/types.py
class GeminiCallResponse(
    BaseCallResponse[
        Union[GenerateContentResponse, AsyncGenerateContentResponse], GeminiTool
    ]
):
    """Convenience wrapper around Gemini's `GenerateContentResponse`.

    When using Mirascope's convenience wrappers to interact with Gemini models via
    `GeminiCall`, responses using `GeminiCall.call()` will return a
    `GeminiCallResponse`, whereby the implemented properties allow for simpler syntax
    and a convenient developer experience.

    Example:

    ```python
    from mirascope.gemini import GeminiCall


    class BookRecommender(GeminiCall):
        prompt_template = "Please recommend a {genre} book"

        genre: str


    response = BookRecommender(genre="fantasy").call()
    print(response.content)
    #> The Lord of the Rings
    ```
    """

    @property
    def tools(self) -> Optional[list[GeminiTool]]:
        """Returns the list of tools for the 0th candidate's 0th content part."""
        if self.tool_types is None:
            return None

        if self.response.candidates[0].finish_reason != 1:  # STOP = 1
            raise RuntimeError(
                "Generation stopped before the stop sequence. "
                "This is likely due to a limit on output tokens that is too low. "
                "Note that this could also indicate no tool is beind called, so we "
                "recommend that you check the output of the call to confirm."
                f"Finish Reason: {self.response.candidates[0].finish_reason}"
            )

        tool_calls = [
            part.function_call for part in self.response.candidates[0].content.parts
        ]

        extracted_tools = []
        for tool_call in tool_calls:
            for tool_type in self.tool_types:
                if tool_call.name == tool_type.__name__:
                    extracted_tools.append(tool_type.from_tool_call(tool_call))
                    break

        return extracted_tools

    @property
    def tool(self) -> Optional[GeminiTool]:
        """Returns the 0th tool for the 0th candidate's 0th content part.

        Raises:
            ValidationError: if the tool call doesn't match the tool's schema.
        """
        tools = self.tools
        if tools:
            return tools[0]
        return None

    @property
    def content(self) -> str:
        """Returns the contained string content for the 0th choice."""
        return self.response.candidates[0].content.parts[0].text

    def dump(self) -> dict[str, Any]:
        """Dumps the response to a dictionary."""
        return {
            "start_time": self.start_time,
            "end_time": self.end_time,
            "output": str(self.response),
            "cost": self.cost,
        }

content: str property

Returns the contained string content for the 0th choice.

tool: Optional[GeminiTool] property

Returns the 0th tool for the 0th candidate's 0th content part.

Raises:

    ValidationError: if the tool call doesn't match the tool's schema.

tools: Optional[list[GeminiTool]] property

Returns the list of tools for the 0th candidate's 0th content part.
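
When the call is configured with tools (see `GeminiTool` below), the extracted tool instances can be inspected directly. A sketch, assuming the `WeatherForecast` call from the `GeminiTool` example further down this page:

```python
response = WeatherForecast(city="Tokyo").call()
if response.tools:  # None when no tool types were configured
    for tool in response.tools:
        print(type(tool).__name__, tool)  # e.g. a CurrentWeather instance
```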

dump()

Dumps the response to a dictionary.

Source code in mirascope/gemini/types.py
def dump(self) -> dict[str, Any]:
    """Dumps the response to a dictionary."""
    return {
        "start_time": self.start_time,
        "end_time": self.end_time,
        "output": str(self.response),
        "cost": self.cost,
    }

GeminiCallResponseChunk

Bases: BaseCallResponseChunk[GenerateContentResponse, GeminiTool]

Convenience wrapper around chat completion streaming chunks.

When using Mirascope's convenience wrappers to interact with Gemini models via GeminiCall, responses using GeminiCall.stream() will return a GeminiCallResponseChunk, whereby the implemented properties allow for simpler syntax and a convenient developer experience.

Example:

from mirascope.gemini import GeminiCall


class BookRecommender(GeminiCall):
    prompt_template = "Please recommend a {genre} book"

    genre: str


for chunk in BookRecommender(genre="science fiction").stream():
    print(chunk)

#> D
#  u
#
#  ne
#
#  by F
#  r
#  an
#  k
#  .
Source code in mirascope/gemini/types.py
class GeminiCallResponseChunk(
    BaseCallResponseChunk[GenerateContentResponse, GeminiTool]
):
    """Convenience wrapper around chat completion streaming chunks.

    When using Mirascope's convenience wrappers to interact with Gemini models via
    `GeminiCall`, responses using `GeminiCall.stream()` will return a
    `GeminiCallResponseChunk`, whereby the implemented properties allow for simpler
    syntax and a convenient developer experience.

    Example:

    ```python
    from mirascope.gemini import GeminiCall


    class BookRecommender(GeminiCall):
        prompt_template = "Please recommend a {genre} book"

        genre: str


    for chunk in BookRecommender(genre="science fiction").stream():
        print(chunk)

    #> D
    #  u
    #
    #  ne
    #
    #  by F
    #  r
    #  an
    #  k
    #  .
    ```
    """

    @property
    def content(self) -> str:
        """Returns the chunk content for the 0th choice."""
        return self.chunk.candidates[0].content.parts[0].text

content: str property

Returns the chunk content for the 0th choice.

GeminiExtractor

Bases: BaseExtractor[GeminiCall, GeminiTool, Any, T], Generic[T]

A class for extracting structured information using Google's Gemini Chat models.

Example:

from typing import Literal, Type
from pydantic import BaseModel
from mirascope.gemini import GeminiExtractor

class TaskDetails(BaseModel):
    title: str
    priority: Literal["low", "normal", "high"]
    due_date: str

class TaskExtractor(GeminiExtractor[TaskDetails]):
    extract_schema: Type[TaskDetails] = TaskDetails

    prompt_template = """
    USER: I need to extract task details.
    MODEL: Sure, please provide the task description.
    USER: {task}
    """

    task: str

task_description = "Prepare the budget report by next Monday. It's a high priority task."
task = TaskExtractor(task=task_description).extract(retries=3)
assert isinstance(task, TaskDetails)
print(task)
#> title='Prepare the budget report' priority='high' due_date='next Monday'
Source code in mirascope/gemini/extractors.py
class GeminiExtractor(BaseExtractor[GeminiCall, GeminiTool, Any, T], Generic[T]):
    '''A class for extracting structured information using Google's Gemini Chat models.

    Example:

    ```python
    from typing import Literal, Type
    from pydantic import BaseModel
    from mirascope.gemini import GeminiExtractor

    class TaskDetails(BaseModel):
        title: str
        priority: Literal["low", "normal", "high"]
        due_date: str

    class TaskExtractor(GeminiExtractor[TaskDetails]):
        extract_schema: Type[TaskDetails] = TaskDetails

        prompt_template = """
        USER: I need to extract task details.
        MODEL: Sure, please provide the task description.
        USER: {task}
        """

        task: str

    task_description = "Prepare the budget report by next Monday. It's a high priority task."
    task = TaskExtractor(task=task_description).extract(retries=3)
    assert isinstance(task, TaskDetails)
    print(task)
    #> title='Prepare the budget report' priority='high' due_date='next Monday'
    ```
    '''

    call_params: ClassVar[GeminiCallParams] = GeminiCallParams()

    def extract(self, retries: Union[int, Retrying] = 0, **kwargs: Any) -> T:
        """Extracts `extract_schema` from the Gemini call response.

        The `extract_schema` is converted into a `GeminiTool`, complete with a
        description of the tool, all of the fields, and their types. This allows us to
        take advantage of Gemini's tool/function calling functionality to extract
        information from a prompt according to the context provided by the `BaseModel`
        schema.

        Args:
            retries: The maximum number of times to retry the query on validation error.
            **kwargs: Additional keyword arguments to pass to the call. These
                will override any existing arguments in `call_params`.

        Returns:
            The `Schema` instance extracted from the completion.

        Raises:
            AttributeError: if there is no tool in the call creation.
            ValidationError: if the schema cannot be instantiated from the completion.
            GeminiError: raises any Gemini errors.
        """
        return self._extract(GeminiCall, GeminiTool, retries, **kwargs)

    async def extract_async(
        self, retries: Union[int, AsyncRetrying] = 0, **kwargs: Any
    ) -> T:
        """Asynchronously extracts `extract_schema` from the Gemini call response.

        The `extract_schema` is converted into a `GeminiTool`, complete with a
        description of the tool, all of the fields, and their types. This allows us to
        take advantage of Gemini's tool/function calling functionality to extract
        information from a prompt according to the context provided by the `BaseModel`
        schema.

        Args:
            retries: The maximum number of times to retry the query on validation error.
            **kwargs: Additional keyword arguments to pass to the call. These
                will override any existing arguments in `call_params`.

        Returns:
            The `Schema` instance extracted from the completion.

        Raises:
            AttributeError: if there is no tool in the call creation.
            ValidationError: if the schema cannot be instantiated from the completion.
            GeminiError: raises any Gemini errors.
        """
        return await self._extract_async(GeminiCall, GeminiTool, retries, **kwargs)

extract(retries=0, **kwargs)

Extracts extract_schema from the Gemini call response.

The extract_schema is converted into a GeminiTool, complete with a description of the tool, all of the fields, and their types. This allows us to take advantage of Gemini's tool/function calling functionality to extract information from a prompt according to the context provided by the BaseModel schema.

Parameters:

    retries (Union[int, Retrying]): The maximum number of times to retry the query on validation error. Default: 0
    **kwargs (Any): Additional keyword arguments to pass to the call. These will override any existing arguments in call_params. Default: {}

Returns:

    T: The Schema instance extracted from the completion.

Raises:

    AttributeError: if there is no tool in the call creation.
    ValidationError: if the schema cannot be instantiated from the completion.
    GeminiError: raises any Gemini errors.

Source code in mirascope/gemini/extractors.py
def extract(self, retries: Union[int, Retrying] = 0, **kwargs: Any) -> T:
    """Extracts `extract_schema` from the Gemini call response.

    The `extract_schema` is converted into a `GeminiTool`, complete with a
    description of the tool, all of the fields, and their types. This allows us to
    take advantage of Gemini's tool/function calling functionality to extract
    information from a prompt according to the context provided by the `BaseModel`
    schema.

    Args:
        retries: The maximum number of times to retry the query on validation error.
        **kwargs: Additional keyword arguments to pass to the call. These
            will override any existing arguments in `call_params`.

    Returns:
        The `Schema` instance extracted from the completion.

    Raises:
        AttributeError: if there is no tool in the call creation.
        ValidationError: if the schema cannot be instantiated from the completion.
        GeminiError: raises any Gemini errors.
    """
    return self._extract(GeminiCall, GeminiTool, retries, **kwargs)

extract_async(retries=0, **kwargs) async

Asynchronously extracts extract_schema from the Gemini call response.

The extract_schema is converted into a GeminiTool, complete with a description of the tool, all of the fields, and their types. This allows us to take advantage of Gemini's tool/function calling functionality to extract information from a prompt according to the context provided by the BaseModel schema.

Parameters:

    retries (Union[int, AsyncRetrying]): The maximum number of times to retry the query on validation error. Default: 0
    **kwargs (Any): Additional keyword arguments to pass to the call. These will override any existing arguments in call_params. Default: {}

Returns:

    T: The Schema instance extracted from the completion.

Raises:

    AttributeError: if there is no tool in the call creation.
    ValidationError: if the schema cannot be instantiated from the completion.
    GeminiError: raises any Gemini errors.

Source code in mirascope/gemini/extractors.py
async def extract_async(
    self, retries: Union[int, AsyncRetrying] = 0, **kwargs: Any
) -> T:
    """Asynchronously extracts `extract_schema` from the Gemini call response.

    The `extract_schema` is converted into a `GeminiTool`, complete with a
    description of the tool, all of the fields, and their types. This allows us to
    take advantage of Gemini's tool/function calling functionality to extract
    information from a prompt according to the context provided by the `BaseModel`
    schema.

    Args:
        retries: The maximum number of times to retry the query on validation error.
        **kwargs: Additional keyword arguments to pass to the call. These
            will override any existing arguments in `call_params`.

    Returns:
        The `Schema` instance extracted from the completion.

    Raises:
        AttributeError: if there is no tool in the call creation.
        ValidationError: if the schema cannot be instantiated from the completion.
        GeminiError: raises any Gemini errors.
    """
    return await self._extract_async(GeminiCall, GeminiTool, retries, **kwargs)
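
A sketch of the asynchronous variant, reusing `TaskExtractor` and `task_description` from the class example above:

```python
import asyncio


async def extract_task() -> None:
    task = await TaskExtractor(task=task_description).extract_async(retries=3)
    print(task)


asyncio.run(extract_task())
```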

GeminiTool

Bases: BaseTool[FunctionCall]

A base class for easy use of tools with the Gemini API.

GeminiTool internally handles the logic that allows you to use tools with simple calls such as GeminiCallResponse.tool or GeminiTool.fn, as seen in the examples below.

Example:

from mirascope.gemini import GeminiCall, GeminiCallParams, GeminiTool


class CurrentWeather(GeminiTool):
    """A tool for getting the current weather in a location."""

    location: str


class WeatherForecast(GeminiCall):
    prompt_template = "What is the current weather in {city}?"

    city: str

    call_params = GeminiCallParams(
        model="gemini-pro",
        tools=[CurrentWeather],
    )


forecast = WeatherForecast(city="Tokyo").call().tool
print(forecast.location)
#> Tokyo
Source code in mirascope/gemini/tools.py
class GeminiTool(BaseTool[FunctionCall]):
    '''A base class for easy use of tools with the Gemini API.

    `GeminiTool` internally handles the logic that allows you to use tools with simple
    calls such as `GeminiCallResponse.tool` or `GeminiTool.fn`, as seen in the
    examples below.

    Example:

    ```python
    from mirascope.gemini import GeminiCall, GeminiCallParams, GeminiTool


    class CurrentWeather(GeminiTool):
        """A tool for getting the current weather in a location."""

        location: str


    class WeatherForecast(GeminiCall):
        prompt_template = "What is the current weather in {city}?"

        city: str

        call_params = GeminiCallParams(
            model="gemini-pro",
            tools=[CurrentWeather],
        )


    forecast = WeatherForecast(city="Tokyo").call().tool
    print(forecast.location)
    #> Tokyo
    ```
    '''

    model_config = ConfigDict(arbitrary_types_allowed=True)

    @classmethod
    def tool_schema(cls) -> Tool:
        """Constructs a tool schema for use with the Gemini API.

        A Mirascope `GeminiTool` is deconstructed into a `Tool` schema for use with the
        Gemini API.

        Returns:
            The constructed `Tool` schema.
        """
        tool_schema = super().tool_schema()
        if "parameters" in tool_schema:
            if "$defs" in tool_schema["parameters"]:
                raise ValueError(
                    "Unfortunately Google's Gemini API cannot handle nested structures "
                    "with $defs."
                )
            tool_schema["parameters"]["properties"] = {
                prop: {
                    key: value for key, value in prop_schema.items() if key != "title"
                }
                for prop, prop_schema in tool_schema["parameters"]["properties"].items()
            }
        return Tool(function_declarations=[FunctionDeclaration(**tool_schema)])

    @classmethod
    def from_tool_call(cls, tool_call: FunctionCall) -> GeminiTool:
        """Extracts an instance of the tool constructed from a tool call response.

        Given a `GenerateContentResponse` from a Gemini chat completion response, this
        method extracts the tool call and constructs an instance of the tool.

        Args:
            tool_call: The `FunctionCall` from which to extract the tool.

        Returns:
            An instance of the tool constructed from the tool call.

        Raises:
            ValueError: if the tool call doesn't have any arguments.
            ValidationError: if the tool call doesn't match the tool schema.
        """
        if not tool_call.args:
            raise ValueError("Tool call doesn't have any arguments.")
        model_json = {key: value for key, value in tool_call.args.items()}
        model_json["tool_call"] = tool_call
        return cls.model_validate(model_json)

    @classmethod
    def from_model(cls, model: Type[BaseModel]) -> Type[GeminiTool]:
        """Constructs a `GeminiTool` type from a `BaseModel` type."""
        return convert_base_model_to_tool(model, GeminiTool)

    @classmethod
    def from_fn(cls, fn: Callable) -> Type[GeminiTool]:
        """Constructs a `GeminiTool` type from a function."""
        return convert_function_to_tool(fn, GeminiTool)

    @classmethod
    def from_base_type(cls, base_type: Type[BaseType]) -> Type[GeminiTool]:
        """Constructs a `GeminiTool` type from a `BaseType` type."""
        return convert_base_type_to_tool(base_type, GeminiTool)

from_base_type(base_type) classmethod

Constructs a GeminiTool type from a BaseType type.

Source code in mirascope/gemini/tools.py
@classmethod
def from_base_type(cls, base_type: Type[BaseType]) -> Type[GeminiTool]:
    """Constructs a `GeminiTool` type from a `BaseType` type."""
    return convert_base_type_to_tool(base_type, GeminiTool)

from_fn(fn) classmethod

Constructs a GeminiTool type from a function.

Source code in mirascope/gemini/tools.py
@classmethod
def from_fn(cls, fn: Callable) -> Type[GeminiTool]:
    """Constructs a `GeminiTool` type from a function."""
    return convert_function_to_tool(fn, GeminiTool)
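
A sketch of turning a plain function into a `GeminiTool` type; the function name and its docstring format (Google-style `Args:` section) follow the usual Mirascope convention for function tools and are illustrative:

```python
def get_current_weather(location: str) -> str:
    """Gets the current weather for a location.

    Args:
        location: The city to look up, e.g. "Tokyo".
    """
    ...  # call a real weather service here


GetCurrentWeather = GeminiTool.from_fn(get_current_weather)
# The resulting type can be passed via `tools=[GetCurrentWeather]` in `GeminiCallParams`.
```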

from_model(model) classmethod

Constructs a GeminiTool type from a BaseModel type.

Source code in mirascope/gemini/tools.py
@classmethod
def from_model(cls, model: Type[BaseModel]) -> Type[GeminiTool]:
    """Constructs a `GeminiTool` type from a `BaseModel` type."""
    return convert_base_model_to_tool(model, GeminiTool)
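
A sketch of constructing a tool type from an existing pydantic model (the model itself is hypothetical):

```python
from pydantic import BaseModel


class BookInfo(BaseModel):
    """Information about a single book."""

    title: str
    author: str


BookInfoTool = GeminiTool.from_model(BookInfo)
```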

from_tool_call(tool_call) classmethod

Extracts an instance of the tool constructed from a tool call response.

Given a GenerateContentResponse from a Gemini chat completion response, this method extracts the tool call and constructs an instance of the tool.

Parameters:

    tool_call (FunctionCall): The FunctionCall from which to extract the tool. Required.

Returns:

    GeminiTool: An instance of the tool constructed from the tool call.

Raises:

    ValueError: if the tool call doesn't have any arguments.
    ValidationError: if the tool call doesn't match the tool schema.

Source code in mirascope/gemini/tools.py
@classmethod
def from_tool_call(cls, tool_call: FunctionCall) -> GeminiTool:
    """Extracts an instance of the tool constructed from a tool call response.

    Given a `GenerateContentResponse` from a Gemini chat completion response, this
    method extracts the tool call and constructs an instance of the tool.

    Args:
        tool_call: The `FunctionCall` from which to extract the tool.

    Returns:
        An instance of the tool constructed from the tool call.

    Raises:
        ValueError: if the tool call doesn't have any arguments.
        ValidationError: if the tool call doesn't match the tool schema.
    """
    if not tool_call.args:
        raise ValueError("Tool call doesn't have any arguments.")
    model_json = {key: value for key, value in tool_call.args.items()}
    model_json["tool_call"] = tool_call
    return cls.model_validate(model_json)

tool_schema() classmethod

Constructs a tool schema for use with the Gemini API.

A Mirascope GeminiTool is deconstructed into a Tool schema for use with the Gemini API.

Returns:

    Tool: The constructed Tool schema.

Source code in mirascope/gemini/tools.py
@classmethod
def tool_schema(cls) -> Tool:
    """Constructs a tool schema for use with the Gemini API.

    A Mirascope `GeminiTool` is deconstructed into a `Tool` schema for use with the
    Gemini API.

    Returns:
        The constructed `Tool` schema.
    """
    tool_schema = super().tool_schema()
    if "parameters" in tool_schema:
        if "$defs" in tool_schema["parameters"]:
            raise ValueError(
                "Unfortunately Google's Gemini API cannot handle nested structures "
                "with $defs."
            )
        tool_schema["parameters"]["properties"] = {
            prop: {
                key: value for key, value in prop_schema.items() if key != "title"
            }
            for prop, prop_schema in tool_schema["parameters"]["properties"].items()
        }
    return Tool(function_declarations=[FunctionDeclaration(**tool_schema)])
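
A sketch of inspecting the generated schema for the `CurrentWeather` tool from the example above:

```python
print(CurrentWeather.tool_schema())
# A google.generativeai `Tool` containing one `FunctionDeclaration` that
# describes `CurrentWeather` and its `location` parameter (exact repr may vary).
```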