Skip to content

mirascope.core.base.prompt

The BasePrompt class for better prompt engineering.

BasePrompt

Bases: BaseModel

The base class for engineering prompts.

Usage Documentation

Prompts

This class is implemented as the base for all prompting needs. It is intended to work across various providers by providing a common prompt interface.

Example:

from mirascope.core import BasePrompt, metadata, prompt_template

@prompt_template("Recommend a {genre} book")
@metadata({"tags": {"version:0001", "books"}})
class BookRecommendationPrompt(BasePrompt):
    genre: str

prompt = BookRecommendationPrompt(genre="fantasy")

print(prompt)
# > Recommend a fantasy book

print(prompt.message_params())
# > [BaseMessageParam(role="user", content="Recommend a fantasy book")]

print(prompt.dump()["metadata"])
# > {"tags": {"version:0001", "books"}}

message_params

message_params() -> list[BaseMessageParam]

Returns the list of parsed message parameters.

Source code in mirascope/core/base/prompt.py
def message_params(self) -> list[BaseMessageParam]:
    """Parses this prompt's template into a list of message parameters.

    The prompt's field values are substituted into the template, which may
    define "system", "user", and "assistant" role sections.
    """
    field_values = {}
    for name in self.model_fields:
        field_values[name] = getattr(self, name)
    return parse_prompt_messages(
        roles=["system", "user", "assistant"],
        template=get_prompt_template(self),
        attrs=field_values,
    )

dynamic_config

dynamic_config() -> BaseDynamicConfig

Returns the dynamic config of the prompt.

Source code in mirascope/core/base/prompt.py
def dynamic_config(self) -> BaseDynamicConfig:
    """Returns the prompt's dynamic config; the base implementation has none.

    Subclasses may override this to supply a dynamic configuration.
    """
    return None

dump

dump() -> dict[str, Any]

Dumps the contents of the prompt into a dictionary.

Source code in mirascope/core/base/prompt.py
def dump(self) -> dict[str, Any]:
    """Dumps the contents of the prompt into a dictionary.

    The returned dictionary contains the prompt's metadata, its rendered
    string form, its raw template, and the model-dumped field inputs.
    """
    dumped: dict[str, Any] = {}
    dumped["metadata"] = get_metadata(self, None)
    dumped["prompt"] = str(self)
    dumped["template"] = get_prompt_template(self)
    dumped["inputs"] = self.model_dump()
    return dumped

run

run(
    call_decorator: (
        Callable[
            [Callable[..., BaseDynamicConfig]],
            Callable[..., _BaseCallResponseT],
        ]
        | Callable[
            [Callable[..., BaseDynamicConfig]],
            Callable[..., _BaseStreamT],
        ]
        | Callable[
            [Callable[..., BaseDynamicConfig]],
            Callable[..., _ResponseModelT],
        ]
        | Callable[
            [Callable[..., BaseDynamicConfig]],
            Callable[..., Iterable[_ResponseModelT]],
        ]
    ),
    *additional_decorators: Callable[[_T], _T]
) -> (
    _BaseCallResponseT
    | _BaseStreamT
    | _ResponseModelT
    | Iterable[_ResponseModelT]
)

Returns the response of calling the API of the provided decorator.

Usage Documentation

Prompts

Example:

from mirascope.core import BasePrompt, openai, prompt_template


@prompt_template("Recommend a {genre} book")
class BookRecommendationPrompt(BasePrompt):
    genre: str


prompt = BookRecommendationPrompt(genre="fantasy")
response = prompt.run(openai.call("gpt-4o-mini"))
print(response.content)
Source code in mirascope/core/base/prompt.py
def run(
    self,
    call_decorator: Callable[
        [Callable[..., BaseDynamicConfig]],
        Callable[..., _BaseCallResponseT],
    ]
    | Callable[
        [Callable[..., BaseDynamicConfig]],
        Callable[..., _BaseStreamT],
    ]
    | Callable[
        [Callable[..., BaseDynamicConfig]],
        Callable[..., _ResponseModelT],
    ]
    | Callable[
        [Callable[..., BaseDynamicConfig]],
        Callable[..., Iterable[_ResponseModelT]],
    ],
    *additional_decorators: Callable[[_T], _T],
) -> (
    _BaseCallResponseT | _BaseStreamT | _ResponseModelT | Iterable[_ResponseModelT]
):
    """Returns the response of calling the API of the provided decorator.

    usage docs: learn/prompts.md#running-prompts

    Args:
        call_decorator: A provider call decorator (e.g. `openai.call("gpt-4o-mini")`)
            that turns a prompt-templated function into an API call.
        *additional_decorators: Optional extra decorators applied on top of
            `call_decorator`, in the order given (each wraps the previous result).

    Returns:
        The result of running the decorated prompt — a call response, stream,
        response model, or iterable of response models, depending on the
        provided decorator.

    Example:

    ```python
    from mirascope.core import BasePrompt, openai, prompt_template


    @prompt_template("Recommend a {genre} book")
    class BookRecommendationPrompt(BasePrompt):
        genre: str


    prompt = BookRecommendationPrompt(genre="fantasy")
    response = prompt.run(openai.call("gpt-4o-mini"))
    print(response.content)
    ```
    """
    # Snapshot the prompt's field values; these become the keyword arguments
    # passed to the synthesized function below.
    kwargs = {field: getattr(self, field) for field in self.model_fields}
    args_str = ", ".join(kwargs.keys())
    # Synthesize a no-op stub function named after the prompt class whose
    # parameters mirror the prompt's fields. The name and parameter names come
    # from the class definition itself (not external input), so this `exec`
    # does not evaluate untrusted data.
    namespace, fn_name = {}, self.__class__.__name__
    exec(f"def {fn_name}({args_str}): ...", namespace)
    # Decorate the stub in order — metadata, then prompt template, then the
    # provider call decorator, then any additional decorators (applied last,
    # i.e. outermost) — and immediately invoke the fully decorated function.
    return reduce(
        lambda res, f: f(res),  # pyright: ignore [reportArgumentType]
        [
            metadata(get_metadata(self, self.dynamic_config())),
            prompt_template(get_prompt_template(self)),
            call_decorator,
            *additional_decorators,
        ],
        namespace[fn_name],
    )(**kwargs)

run_async

run_async(
    call_decorator: (
        Callable[
            [Callable[..., Awaitable[BaseDynamicConfig]]],
            Callable[..., Awaitable[_BaseCallResponseT]],
        ]
        | Callable[
            [Callable[..., Awaitable[BaseDynamicConfig]]],
            Callable[..., Awaitable[_BaseStreamT]],
        ]
        | Callable[
            [Callable[..., Awaitable[BaseDynamicConfig]]],
            Callable[..., Awaitable[_ResponseModelT]],
        ]
        | Callable[
            [Callable[..., Awaitable[BaseDynamicConfig]]],
            Callable[
                ...,
                Awaitable[AsyncIterable[_ResponseModelT]],
            ],
        ]
    ),
    *additional_decorators: Callable[[_T], _T]
) -> (
    Awaitable[_BaseCallResponseT]
    | Awaitable[_BaseStreamT]
    | Awaitable[_ResponseModelT]
    | Awaitable[AsyncIterable[_ResponseModelT]]
)

Returns the response of calling the API of the provided decorator.

Usage Documentation

Prompts

Example:

import asyncio

from mirascope.core import BasePrompt, openai, prompt_template


@prompt_template("Recommend a {genre} book")
class BookRecommendationPrompt(BasePrompt):
    genre: str


async def run():
    prompt = BookRecommendationPrompt(genre="fantasy")
    response = await prompt.run_async(openai.call("gpt-4o-mini"))
    print(response.content)


asyncio.run(run())
Source code in mirascope/core/base/prompt.py
def run_async(
    self,
    call_decorator: Callable[
        [Callable[..., Awaitable[BaseDynamicConfig]]],
        Callable[..., Awaitable[_BaseCallResponseT]],
    ]
    | Callable[
        [Callable[..., Awaitable[BaseDynamicConfig]]],
        Callable[..., Awaitable[_BaseStreamT]],
    ]
    | Callable[
        [Callable[..., Awaitable[BaseDynamicConfig]]],
        Callable[..., Awaitable[_ResponseModelT]],
    ]
    | Callable[
        [Callable[..., Awaitable[BaseDynamicConfig]]],
        Callable[..., Awaitable[AsyncIterable[_ResponseModelT]]],
    ],
    *additional_decorators: Callable[[_T], _T],
) -> (
    Awaitable[_BaseCallResponseT]
    | Awaitable[_BaseStreamT]
    | Awaitable[_ResponseModelT]
    | Awaitable[AsyncIterable[_ResponseModelT]]
):
    """Returns the response of calling the API of the provided decorator.

    usage docs: learn/prompts.md#running-prompts

    Args:
        call_decorator: A provider call decorator (e.g. `openai.call("gpt-4o-mini")`)
            that turns an async prompt-templated function into an API call.
        *additional_decorators: Optional extra decorators applied on top of
            `call_decorator`, in the order given (each wraps the previous result).

    Returns:
        An awaitable resolving to the result of running the decorated prompt —
        a call response, stream, response model, or async iterable of response
        models, depending on the provided decorator.

    Example:

    ```python
    import asyncio

    from mirascope.core import BasePrompt, openai, prompt_template


    @prompt_template("Recommend a {genre} book")
    class BookRecommendationPrompt(BasePrompt):
        genre: str


    async def run():
        prompt = BookRecommendationPrompt(genre="fantasy")
        response = await prompt.run_async(openai.call("gpt-4o-mini"))
        print(response.content)


    asyncio.run(run())
    ```
    """
    # Snapshot the prompt's field values; these become the keyword arguments
    # passed to the synthesized coroutine function below.
    kwargs = {field: getattr(self, field) for field in self.model_fields}
    args_str = ", ".join(kwargs.keys())
    # Synthesize a no-op async stub named after the prompt class whose
    # parameters mirror the prompt's fields. The name and parameter names come
    # from the class definition itself (not external input), so this `exec`
    # does not evaluate untrusted data.
    namespace, fn_name = {}, self.__class__.__name__
    exec(f"async def {fn_name}({args_str}): ...", namespace)
    # Decorate the stub in order — metadata, then prompt template, then the
    # provider call decorator, then any additional decorators (applied last,
    # i.e. outermost) — and invoke it, returning the resulting awaitable.
    return reduce(
        lambda res, f: f(res),  # pyright: ignore [reportArgumentType]
        [
            metadata(get_metadata(self, self.dynamic_config())),
            prompt_template(get_prompt_template(self)),
            call_decorator,
            *additional_decorators,
        ],
        namespace[fn_name],
    )(**kwargs)

prompt_template

prompt_template(template: str) -> PromptDecorator

A decorator for setting the prompt_template of a BasePrompt or call.

Usage Documentation

Prompts

Example:

from mirascope.core import openai, prompt_template


@openai.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str):
    ...


response = recommend_book("fantasy")
print(response.prompt_template)
print(response.fn_args)

Returns:

decorator (Callable): The decorator function that updates the `_prompt_template` attribute of the decorated input prompt or call.

Source code in mirascope/core/base/prompt.py
def prompt_template(template: str) -> PromptDecorator:
    """A decorator for setting the `prompt_template` of a `BasePrompt` or `call`.

    usage docs: learn/prompts.md#prompt-templates

    Example:

    ```python
    from mirascope.core import openai, prompt_template


    @openai.call("gpt-4o-mini")
    @prompt_template("Recommend a {genre} book")
    def recommend_book(genre: str):
        ...


    response = recommend_book("fantasy")
    print(response.prompt_template)
    print(response.fn_args)
    ```

    Returns:
        decorator (Callable): The decorator function that updates the `_prompt_template`
            attribute of the decorated input prompt or call.
    """

    @overload
    def decorator(prompt: type[_BasePromptT]) -> type[_BasePromptT]: ...

    @overload
    def decorator(prompt: Callable[_P, _R]) -> Callable[_P, _R]: ...

    def decorator(
        prompt: type[_BasePromptT] | Callable[_P, _R],
    ) -> type[_BasePromptT] | Callable[_P, _R]:
        """Stores `template` on the decorated prompt class or call function."""
        prompt._prompt_template = template  # pyright: ignore [reportAttributeAccessIssue,reportFunctionMemberAccess]
        return prompt

    return decorator

metadata

metadata(metadata: Metadata) -> MetadataDecorator

A decorator for adding metadata to a BasePrompt or call.

Usage Documentation

Prompts

Adding this decorator to a BasePrompt or call updates the metadata annotation to the given value. This is useful for adding metadata to a BasePrompt or call that can be used for logging or filtering.

Example:

from mirascope.core import metadata, openai, prompt_template


@openai.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
@metadata({"tags": {"version:0001", "books"}})
def recommend_book(genre: str):
    ...


response = recommend_book("fantasy")
print(response.metadata)

Returns:

decorator (Callable): The decorator function that updates the `_metadata` attribute of the decorated input prompt or call.

Source code in mirascope/core/base/prompt.py
def metadata(metadata: Metadata) -> MetadataDecorator:
    """A decorator for adding metadata to a `BasePrompt` or `call`.

    usage docs: learn/prompts.md#metadata

    Adding this decorator to a `BasePrompt` or `call` updates its `metadata`
    to the given value, which is useful for logging or filtering.

    Example:

    ```python
    from mirascope.core import metadata, openai, prompt_template


    @openai.call("gpt-4o-mini")
    @prompt_template("Recommend a {genre} book")
    @metadata({"tags": {"version:0001", "books"}})
    def recommend_book(genre: str):
        ...


    response = recommend_book("fantasy")
    print(response.metadata)
    ```

    Returns:
        decorator (Callable): The decorator function that updates the `_metadata`
            attribute of the decorated input prompt or call.
    """

    @overload
    def decorator(prompt: type[_BasePromptT]) -> type[_BasePromptT]: ...

    @overload
    def decorator(prompt: Callable[_P, _R]) -> Callable[_P, _R]: ...

    def decorator(
        prompt: type[_BasePromptT] | Callable[_P, _R],
    ) -> type[_BasePromptT] | Callable[_P, _R]:
        """Stores `metadata` on the decorated prompt class or call function."""
        prompt._metadata = metadata  # pyright: ignore [reportAttributeAccessIssue,reportFunctionMemberAccess]
        return prompt

    return decorator