Skip to content

Commit

Permalink
docs: BaseChatModel key methods table (langchain-ai#23238)
Browse files Browse the repository at this point in the history
If we're moving toward documenting inherited params, I think these kinds of tables
become more important.

![Screenshot 2024-06-20 at 3 59 12
PM](https://github.com/langchain-ai/langchain/assets/22008038/722266eb-2353-4e85-8fae-76b19bd333e0)
  • Loading branch information
baskaryan authored and pprados committed Jun 21, 2024
1 parent 5da7eb9 commit ea15733
Show file tree
Hide file tree
Showing 3 changed files with 197 additions and 27 deletions.
120 changes: 120 additions & 0 deletions docs/scripts/create_chat_model_docstring_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
# Rows for the "Key imperative methods" table rendered into the
# BaseChatModel docstring. Each row is [method, input, output, description].
# The string "'''" is a ditto mark meaning "same as the cell above"
# (i.e. the invoke input signature); List['''] means a list of that input.
imperative = [
    [
        "invoke",
        "str | List[dict | tuple | BaseMessage] | PromptValue",
        "BaseMessage",
        "A single chat model call.",
    ],
    [
        "ainvoke",
        "'''",
        "BaseMessage",
        "Defaults to running invoke in an async executor.",
    ],
    [
        "stream",
        "'''",
        "Iterator[BaseMessageChunk]",
        "Defaults to yielding output of invoke.",
    ],
    [
        "astream",
        "'''",
        "AsyncIterator[BaseMessageChunk]",
        "Defaults to yielding output of ainvoke.",
    ],
    [
        "astream_events",
        "'''",
        "AsyncIterator[StreamEvent]",
        "Event types: 'on_chat_model_start', 'on_chat_model_stream', 'on_chat_model_end'.",
    ],
    [
        "batch",
        "List[''']",
        "List[BaseMessage]",
        "Defaults to running invoke in concurrent threads.",
    ],
    [
        "abatch",
        "List[''']",
        "List[BaseMessage]",
        "Defaults to running ainvoke in concurrent threads.",
    ],
    [
        "batch_as_completed",
        "List[''']",
        "Iterator[Tuple[int, Union[BaseMessage, Exception]]]",
        "Defaults to running invoke in concurrent threads.",
    ],
    [
        "abatch_as_completed",
        "List[''']",
        "AsyncIterator[Tuple[int, Union[BaseMessage, Exception]]]",
        "Defaults to running ainvoke in concurrent threads.",
    ],
]
# Rows for the "Key declarative methods" table rendered into the
# BaseChatModel docstring. Each row is [method, description]; the
# commented-out entries are Input/Output columns that were dropped from
# this two-column table but kept here for reference.
declarative = [
    [
        "bind_tools",
        # "Tools, ...",
        # "Runnable with same inputs/outputs as ChatModel",
        "Create ChatModel that can call tools.",
    ],
    [
        "with_structured_output",
        # "An output schema, ...",
        # "Runnable that takes ChatModel inputs and returns a dict or Pydantic object",
        "Create wrapper that structures model output using schema.",
    ],
    [
        "with_retry",
        # "Max retries, exceptions to handle, ...",
        # "Runnable with same inputs/outputs as ChatModel",
        "Create wrapper that retries model calls on failure.",
    ],
    [
        "with_fallbacks",
        # "List of models to fall back on",
        # "Runnable with same inputs/outputs as ChatModel",
        "Create wrapper that falls back to other models on failure.",
    ],
    [
        "configurable_fields",
        # "*ConfigurableField",
        # "Runnable with same inputs/outputs as ChatModel",
        "Specify init args of the model that can be configured at runtime via the RunnableConfig.",
    ],
    [
        "configurable_alternatives",
        # "ConfigurableField, ...",
        # "Runnable with same inputs/outputs as ChatModel",
        "Specify alternative models which can be swapped in at runtime via the RunnableConfig.",
    ],
]


def create_table(to_build: list) -> str:
    """Render rows as a reStructuredText grid table.

    Args:
        to_build: List of rows, each a list of cell strings whose first
            element is a method name (wrapped in backticks in the output).
            Four-column rows get a Method/Input/Output/Description header;
            otherwise the header is Method/Description.

    Returns:
        The RST grid-table text, or ``""`` if ``to_build`` is empty.
    """
    if not to_build:
        # Original code raised IndexError here; an empty table is empty text.
        return ""
    # Build a backticked copy instead of mutating callers' rows in place.
    # (The original assigned x[0] back into to_build, corrupting the
    # module-level lists and double-wrapping names on a second call.)
    cells = [["`" + row[0] + "`"] + list(row[1:]) for row in to_build]
    longest = [max(len(row[i]) for row in cells) for i in range(len(cells[0]))]
    # Pad columns ~20% wider than the longest cell for readability.
    # NOTE: a header longer than its padded column gets no padding at all
    # (negative repeat count yields ""), matching the original behavior.
    widths = [int(1.2 * col) for col in longest]
    headers = (
        ["Method", "Input", "Output", "Description"]
        if len(widths) == 4
        else ["Method", "Description"]
    )
    rows = [[h + " " * (w - len(h)) for w, h in zip(widths, headers)]]
    for row in cells:
        rows.append([c + " " * (w - len(c)) for w, c in zip(widths, row)])

    table = [" | ".join([""] + row + [""]).strip() for row in rows]
    lines = [
        "+".join([""] + ["-" * (len(c) + 2) for c in row] + [""]).strip()
        for row in rows
    ]
    lines[1] = lines[1].replace("-", "=")  # '=' rule marks the header row in RST
    lines.append(lines[-1])  # closing border under the final data row
    rst = lines[0]
    for body_line, sep_line in zip(table, lines[1:]):
        rst += "\n" + body_line + "\n" + sep_line
    return rst
100 changes: 74 additions & 26 deletions libs/core/langchain_core/language_models/chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,32 +115,80 @@ async def agenerate_from_stream(


class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
"""Base class for Chat models.
Custom chat model implementations should inherit from this class.
Follow the guide for more information on how to implement a
custom Chat Model:
[Guide](https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/).
Please reference the table below for information about which
methods and properties are required or optional for implementations.
+----------------------------------+--------------------------------------------------------------------+-------------------+
| Method/Property | Description | Required/Optional |
+==================================+====================================================================+===================+
| `_generate` | Use to generate a chat result from a prompt | Required |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_llm_type` (property) | Used to uniquely identify the type of the model. Used for logging. | Required |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_identifying_params` (property) | Represent model parameterization for tracing purposes. | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_stream` | Use to implement streaming | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_agenerate` | Use to implement a native async method | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_astream` | Use to implement async version of `_stream` | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
"""Base class for chat models.
Key imperative methods:
Methods that actually call the underlying model.
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| Method | Input | Output | Description |
+===========================+================================================================+=====================================================================+==================================================================================================+
| `invoke` | str | List[dict | tuple | BaseMessage] | PromptValue | BaseMessage | A single chat model call. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `ainvoke` | ''' | BaseMessage | Defaults to running invoke in an async executor. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `stream` | ''' | Iterator[BaseMessageChunk] | Defaults to yielding output of invoke. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `astream` | ''' | AsyncIterator[BaseMessageChunk] | Defaults to yielding output of ainvoke. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `astream_events` | ''' | AsyncIterator[StreamEvent] | Event types: 'on_chat_model_start', 'on_chat_model_stream', 'on_chat_model_end'. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `batch` | List['''] | List[BaseMessage] | Defaults to running invoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `abatch` | List['''] | List[BaseMessage] | Defaults to running ainvoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `batch_as_completed` | List['''] | Iterator[Tuple[int, Union[BaseMessage, Exception]]] | Defaults to running invoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
| `abatch_as_completed` | List['''] | AsyncIterator[Tuple[int, Union[BaseMessage, Exception]]] | Defaults to running ainvoke in concurrent threads. |
+---------------------------+----------------------------------------------------------------+---------------------------------------------------------------------+--------------------------------------------------------------------------------------------------+
This table provides a brief overview of the main imperative methods. Please see the base Runnable reference for full documentation.
Key declarative methods:
Methods for creating another Runnable using the ChatModel.
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| Method | Description |
+==================================+===========================================================================================================+
| `bind_tools` | Create ChatModel that can call tools. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| `with_structured_output` | Create wrapper that structures model output using schema. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| `with_retry` | Create wrapper that retries model calls on failure. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| `with_fallbacks` | Create wrapper that falls back to other models on failure. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| `configurable_fields` | Specify init args of the model that can be configured at runtime via the RunnableConfig. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
| `configurable_alternatives` | Specify alternative models which can be swapped in at runtime via the RunnableConfig. |
+----------------------------------+-----------------------------------------------------------------------------------------------------------+
This table provides a brief overview of the main declarative methods. Please see the reference for each method for full documentation.
Creating custom chat model:
Custom chat model implementations should inherit from this class.
Please reference the table below for information about which
methods and properties are required or optional for implementations.
+----------------------------------+--------------------------------------------------------------------+-------------------+
| Method/Property | Description | Required/Optional |
+==================================+====================================================================+===================+
| `_generate` | Use to generate a chat result from a prompt | Required |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_llm_type` (property) | Used to uniquely identify the type of the model. Used for logging. | Required |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_identifying_params` (property) | Represent model parameterization for tracing purposes. | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_stream` | Use to implement streaming | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_agenerate` | Use to implement a native async method | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
| `_astream` | Use to implement async version of `_stream` | Optional |
+----------------------------------+--------------------------------------------------------------------+-------------------+
Follow the guide for more information on how to implement a custom Chat Model:
[Guide](https://python.langchain.com/v0.2/docs/how_to/custom_chat_model/).
""" # noqa: E501

callback_manager: Optional[BaseCallbackManager] = Field(default=None, exclude=True)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,6 @@
Here is the output schema:
```
{schema}
```"""
```
Produce only the json result, without the schema.
"""

0 comments on commit ea15733

Please sign in to comment.