"""String output parser."""

from typing_extensions import override

from langchain_core.output_parsers.transform import BaseTransformOutputParser


class StrOutputParser(BaseTransformOutputParser[str]):
    """Extract text content from model outputs as a string.

    Converts model outputs (such as `AIMessage` or `AIMessageChunk` objects) into plain
    text strings. It's the simplest output parser and is useful when you need string
    responses for downstream processing, display, or storage.

    Supports streaming, yielding text chunks as they're generated by the model.

    Example:
        ```python
        from langchain_core.output_parsers import StrOutputParser
        from langchain_openai import ChatOpenAI

        model = ChatOpenAI(model="gpt-4o")
        parser = StrOutputParser()

        # Get string output from a model
        message = model.invoke("Tell me a joke")
        result = parser.invoke(message)
        print(result)  # plain string

        # With streaming, use transform() to parse a stream of message chunks
        stream = model.stream("Tell me a story")
        for chunk in parser.transform(stream):
            print(chunk, end="", flush=True)
        ```
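
        The parser is more commonly composed with a model using the `|` operator,
        so the resulting chain returns (or streams) strings directly. A minimal
        sketch, reusing the `model` and `parser` defined above:

        ```python
        chain = model | parser

        # invoke() now returns a plain string instead of an AIMessage
        print(chain.invoke("Tell me a joke"))

        # stream() yields string chunks as the model generates them
        for chunk in chain.stream("Tell me a story"):
            print(chunk, end="", flush=True)
        ```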
    """

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """`StrOutputParser` is serializable.

        Returns:
            `True`
        """
        return True

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the LangChain object.

        Returns:
            `["langchain", "schema", "output_parser"]`
        """
        return ["langchain", "schema", "output_parser"]

    @property
    def _type(self) -> str:
        """Return the output parser type for serialization."""
        return "default"

    @override
    def parse(self, text: str) -> str:
        """Returns the input text with no changes."""
        return text
