From f7616890c00bdc25cf7d2d1d8297e59d80041053 Mon Sep 17 00:00:00 2001 From: Willy Douhard Date: Mon, 8 Jan 2024 12:35:55 +0100 Subject: [PATCH] literalai (#642) --- CHANGELOG.md | 17 ++++++++++++ backend/chainlit/__init__.py | 2 +- backend/chainlit/data/__init__.py | 26 +++++++++---------- backend/chainlit/langchain/callbacks.py | 2 +- backend/chainlit/llama_index/callbacks.py | 7 +++-- backend/chainlit/message.py | 2 +- backend/chainlit/playground/provider.py | 2 +- .../playground/providers/anthropic.py | 2 +- .../playground/providers/langchain.py | 5 ++-- backend/chainlit/step.py | 4 +-- backend/chainlit/types.py | 2 +- backend/pyproject.toml | 4 +-- 12 files changed, 46 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b27df7733..21be96b2f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). Nothing is unreleased! +## [1.0.0] - 2024-01-08 + +### Added + +- Scroll down button +- If `hide_cot` is set to `true`, a `running` loader is displayed by default under the last message when a task is running. 
+ +### Changed + +- Avatars are now always displayed +- Chat history sidebar has been revamped +- Stop task button has been moved to the input bar + +### Fixed + +- If `hide_cot` is set to `true`, the UI will never get the intermediary steps (but they will still be persisted) + ## [1.0.0rc3] - 2023-12-21 ### Fixed diff --git a/backend/chainlit/__init__.py b/backend/chainlit/__init__.py index c6796a755b..7784fb0d6e 100644 --- a/backend/chainlit/__init__.py +++ b/backend/chainlit/__init__.py @@ -54,7 +54,7 @@ from chainlit.user_session import user_session from chainlit.utils import make_module_getattr, wrap_user_function from chainlit.version import __version__ -from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage +from literalai import ChatGeneration, CompletionGeneration, GenerationMessage if env_found: logger.info("Loaded .env file") diff --git a/backend/chainlit/data/__init__.py b/backend/chainlit/data/__init__.py index 43efe46fcd..42bc200240 100644 --- a/backend/chainlit/data/__init__.py +++ b/backend/chainlit/data/__init__.py @@ -10,12 +10,12 @@ from chainlit.session import WebsocketSession from chainlit.types import Feedback, Pagination, ThreadDict, ThreadFilter from chainlit.user import PersistedUser, User, UserDict -from chainlit_client import Attachment -from chainlit_client import Feedback as ClientFeedback -from chainlit_client import PageInfo, PaginatedResponse -from chainlit_client import Step as ClientStep -from chainlit_client.thread import NumberListFilter, StringFilter, StringListFilter -from chainlit_client.thread import ThreadFilter as ClientThreadFilter +from literalai import Attachment +from literalai import Feedback as ClientFeedback +from literalai import PageInfo, PaginatedResponse +from literalai import Step as ClientStep +from literalai.thread import NumberListFilter, StringFilter, StringListFilter +from literalai.thread import ThreadFilter as ClientThreadFilter if TYPE_CHECKING: from chainlit.element import 
Element, ElementDict @@ -132,12 +132,10 @@ async def delete_user_session(self, id: str) -> bool: class ChainlitDataLayer: - def __init__( - self, api_key: str, chainlit_server: Optional[str] = "https://cloud.chainlit.io" - ): - from chainlit_client import ChainlitClient + def __init__(self, api_key: str, server: Optional[str]): + from literalai import LiteralClient - self.client = ChainlitClient(api_key=api_key, url=chainlit_server) + self.client = LiteralClient(api_key=api_key, url=server) logger.info("Chainlit data layer initialized") def attachment_to_element_dict(self, attachment: Attachment) -> "ElementDict": @@ -451,9 +449,9 @@ async def delete_user_session(self, id: str) -> bool: return True -if api_key := os.environ.get("CHAINLIT_API_KEY"): - chainlit_server = os.environ.get("CHAINLIT_SERVER") - _data_layer = ChainlitDataLayer(api_key=api_key, chainlit_server=chainlit_server) +if api_key := os.environ.get("LITERAL_API_KEY"): + server = os.environ.get("LITERAL_SERVER") + _data_layer = ChainlitDataLayer(api_key=api_key, server=server) def get_data_layer(): diff --git a/backend/chainlit/langchain/callbacks.py b/backend/chainlit/langchain/callbacks.py index 57eedd9fd0..b0aec29cec 100644 --- a/backend/chainlit/langchain/callbacks.py +++ b/backend/chainlit/langchain/callbacks.py @@ -6,11 +6,11 @@ from chainlit.message import Message from chainlit.playground.providers.openai import stringify_function_call from chainlit.step import Step, TrueStepType -from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage from langchain.callbacks.tracers.base import BaseTracer from langchain.callbacks.tracers.schemas import Run from langchain.schema import BaseMessage from langchain.schema.output import ChatGenerationChunk, GenerationChunk +from literalai import ChatGeneration, CompletionGeneration, GenerationMessage DEFAULT_ANSWER_PREFIX_TOKENS = ["Final", "Answer", ":"] diff --git a/backend/chainlit/llama_index/callbacks.py 
b/backend/chainlit/llama_index/callbacks.py index 6f43202e49..c7a130e8ff 100644 --- a/backend/chainlit/llama_index/callbacks.py +++ b/backend/chainlit/llama_index/callbacks.py @@ -4,7 +4,7 @@ from chainlit.context import context_var from chainlit.element import Text from chainlit.step import Step, StepType -from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage +from literalai import ChatGeneration, CompletionGeneration, GenerationMessage from llama_index.callbacks import TokenCountingHandler from llama_index.callbacks.schema import CBEventType, EventPayload from llama_index.llms.base import ChatMessage, ChatResponse, CompletionResponse @@ -118,7 +118,10 @@ def on_event_end( [f"Source {idx}" for idx, _ in enumerate(sources)] ) step.elements = [ - Text(name=f"Source {idx}", content=source.node.get_text() or "Empty node") + Text( + name=f"Source {idx}", + content=source.node.get_text() or "Empty node", + ) for idx, source in enumerate(sources) ] step.output = f"Retrieved the following sources: {source_refs}" diff --git a/backend/chainlit/message.py b/backend/chainlit/message.py index ad7937de54..3e9da52afa 100644 --- a/backend/chainlit/message.py +++ b/backend/chainlit/message.py @@ -22,7 +22,7 @@ AskSpec, FileDict, ) -from chainlit_client.step import MessageStepType +from literalai.step import MessageStepType class MessageBase(ABC): diff --git a/backend/chainlit/playground/provider.py b/backend/chainlit/playground/provider.py index 82b71b5486..d70e868d08 100644 --- a/backend/chainlit/playground/provider.py +++ b/backend/chainlit/playground/provider.py @@ -4,8 +4,8 @@ from chainlit.config import config from chainlit.telemetry import trace_event from chainlit.types import GenerationRequest -from chainlit_client import BaseGeneration, ChatGeneration, GenerationMessage from fastapi import HTTPException +from literalai import BaseGeneration, ChatGeneration, GenerationMessage from pydantic.dataclasses import dataclass from chainlit import 
input_widget diff --git a/backend/chainlit/playground/providers/anthropic.py b/backend/chainlit/playground/providers/anthropic.py index c9b8a61c99..e895536380 100644 --- a/backend/chainlit/playground/providers/anthropic.py +++ b/backend/chainlit/playground/providers/anthropic.py @@ -1,8 +1,8 @@ from chainlit.input_widget import Select, Slider, Tags from chainlit.playground.provider import BaseProvider -from chainlit_client import GenerationMessage from fastapi import HTTPException from fastapi.responses import StreamingResponse +from literalai import GenerationMessage class AnthropicProvider(BaseProvider): diff --git a/backend/chainlit/playground/providers/langchain.py b/backend/chainlit/playground/providers/langchain.py index eabe9a6074..bddfb6584e 100644 --- a/backend/chainlit/playground/providers/langchain.py +++ b/backend/chainlit/playground/providers/langchain.py @@ -3,8 +3,8 @@ from chainlit.input_widget import InputWidget from chainlit.playground.provider import BaseProvider from chainlit.sync import make_async -from chainlit_client import GenerationMessage from fastapi.responses import StreamingResponse +from literalai import GenerationMessage class LangchainGenericProvider(BaseProvider): @@ -69,8 +69,7 @@ async def create_completion(self, request): # https://github.com/langchain-ai/langchain/issues/14980 result = await make_async(self.llm.stream)( - input=messages, - **request.generation.settings + input=messages, **request.generation.settings ) def create_event_stream(): diff --git a/backend/chainlit/step.py b/backend/chainlit/step.py index 6deba648d7..dd6bb330ae 100644 --- a/backend/chainlit/step.py +++ b/backend/chainlit/step.py @@ -14,8 +14,8 @@ from chainlit.logger import logger from chainlit.telemetry import trace_event from chainlit.types import FeedbackDict -from chainlit_client import BaseGeneration -from chainlit_client.step import StepType, TrueStepType +from literalai import BaseGeneration +from literalai.step import StepType, 
TrueStepType class StepDict(TypedDict, total=False): diff --git a/backend/chainlit/types.py b/backend/chainlit/types.py index f601f6ee85..240f25ad9c 100644 --- a/backend/chainlit/types.py +++ b/backend/chainlit/types.py @@ -6,8 +6,8 @@ from chainlit.user import UserDict from chainlit.step import StepDict -from chainlit_client import ChatGeneration, CompletionGeneration from dataclasses_json import DataClassJsonMixin +from literalai import ChatGeneration, CompletionGeneration from pydantic import BaseModel from pydantic.dataclasses import dataclass diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 888bce094b..aa87ccdd57 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "chainlit" -version = "1.0.0rc3" +version = "1.0.0" keywords = ['LLM', 'Agents', 'gen ai', 'chat ui', 'chatbot ui', 'langchain'] description = "A faster way to build chatbot UIs." authors = ["Chainlit"] @@ -21,7 +21,7 @@ chainlit = 'chainlit.cli:cli' [tool.poetry.dependencies] python = ">=3.8.1,<4.0.0" httpx = ">=0.23.0,<0.25.0" -chainlit_client = "0.1.0rc10" +literalai = "0.0.1" dataclasses_json = "^0.5.7" uvicorn = "^0.23.2" fastapi = "^0.100"