Skip to content

Commit

Permalink
literalai (#642)
Browse files Browse the repository at this point in the history
  • Loading branch information
willydouhard authored Jan 8, 2024
1 parent 5ee2f7e commit f761689
Show file tree
Hide file tree
Showing 12 changed files with 46 additions and 29 deletions.
17 changes: 17 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

Nothing is unreleased!

## [1.0.0] - 2024-01-08

### Added

- Scroll down button
- If `hide_cot` is set to `true`, a `running` loader is displayed by default under the last message when a task is running.

### Changed

- Avatars are now always displayed
- Chat history sidebar has been revamped
- Stop task button has been moved to the input bar

### Fixed

- If `hide_cot` is set to `true`, the UI will never receive the intermediate steps (but they will still be persisted)

## [1.0.0rc3] - 2023-12-21

### Fixed
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
from chainlit.user_session import user_session
from chainlit.utils import make_module_getattr, wrap_user_function
from chainlit.version import __version__
from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage
from literalai import ChatGeneration, CompletionGeneration, GenerationMessage

if env_found:
logger.info("Loaded .env file")
Expand Down
26 changes: 12 additions & 14 deletions backend/chainlit/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,12 @@
from chainlit.session import WebsocketSession
from chainlit.types import Feedback, Pagination, ThreadDict, ThreadFilter
from chainlit.user import PersistedUser, User, UserDict
from chainlit_client import Attachment
from chainlit_client import Feedback as ClientFeedback
from chainlit_client import PageInfo, PaginatedResponse
from chainlit_client import Step as ClientStep
from chainlit_client.thread import NumberListFilter, StringFilter, StringListFilter
from chainlit_client.thread import ThreadFilter as ClientThreadFilter
from literalai import Attachment
from literalai import Feedback as ClientFeedback
from literalai import PageInfo, PaginatedResponse
from literalai import Step as ClientStep
from literalai.thread import NumberListFilter, StringFilter, StringListFilter
from literalai.thread import ThreadFilter as ClientThreadFilter

if TYPE_CHECKING:
from chainlit.element import Element, ElementDict
Expand Down Expand Up @@ -132,12 +132,10 @@ async def delete_user_session(self, id: str) -> bool:


class ChainlitDataLayer:
def __init__(
self, api_key: str, chainlit_server: Optional[str] = "https://cloud.chainlit.io"
):
from chainlit_client import ChainlitClient
def __init__(self, api_key: str, server: Optional[str]):
from literalai import LiteralClient

self.client = ChainlitClient(api_key=api_key, url=chainlit_server)
self.client = LiteralClient(api_key=api_key, url=server)
logger.info("Chainlit data layer initialized")

def attachment_to_element_dict(self, attachment: Attachment) -> "ElementDict":
Expand Down Expand Up @@ -451,9 +449,9 @@ async def delete_user_session(self, id: str) -> bool:
return True


if api_key := os.environ.get("CHAINLIT_API_KEY"):
chainlit_server = os.environ.get("CHAINLIT_SERVER")
_data_layer = ChainlitDataLayer(api_key=api_key, chainlit_server=chainlit_server)
if api_key := os.environ.get("LITERAL_API_KEY"):
server = os.environ.get("LITERAL_SERVER")
_data_layer = ChainlitDataLayer(api_key=api_key, server=server)


def get_data_layer():
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/langchain/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@
from chainlit.message import Message
from chainlit.playground.providers.openai import stringify_function_call
from chainlit.step import Step, TrueStepType
from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage
from langchain.callbacks.tracers.base import BaseTracer
from langchain.callbacks.tracers.schemas import Run
from langchain.schema import BaseMessage
from langchain.schema.output import ChatGenerationChunk, GenerationChunk
from literalai import ChatGeneration, CompletionGeneration, GenerationMessage

DEFAULT_ANSWER_PREFIX_TOKENS = ["Final", "Answer", ":"]

Expand Down
7 changes: 5 additions & 2 deletions backend/chainlit/llama_index/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from chainlit.context import context_var
from chainlit.element import Text
from chainlit.step import Step, StepType
from chainlit_client import ChatGeneration, CompletionGeneration, GenerationMessage
from literalai import ChatGeneration, CompletionGeneration, GenerationMessage
from llama_index.callbacks import TokenCountingHandler
from llama_index.callbacks.schema import CBEventType, EventPayload
from llama_index.llms.base import ChatMessage, ChatResponse, CompletionResponse
Expand Down Expand Up @@ -118,7 +118,10 @@ def on_event_end(
[f"Source {idx}" for idx, _ in enumerate(sources)]
)
step.elements = [
Text(name=f"Source {idx}", content=source.node.get_text() or "Empty node")
Text(
name=f"Source {idx}",
content=source.node.get_text() or "Empty node",
)
for idx, source in enumerate(sources)
]
step.output = f"Retrieved the following sources: {source_refs}"
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
AskSpec,
FileDict,
)
from chainlit_client.step import MessageStepType
from literalai.step import MessageStepType


class MessageBase(ABC):
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/playground/provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
from chainlit.config import config
from chainlit.telemetry import trace_event
from chainlit.types import GenerationRequest
from chainlit_client import BaseGeneration, ChatGeneration, GenerationMessage
from fastapi import HTTPException
from literalai import BaseGeneration, ChatGeneration, GenerationMessage
from pydantic.dataclasses import dataclass

from chainlit import input_widget
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/playground/providers/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from chainlit.input_widget import Select, Slider, Tags
from chainlit.playground.provider import BaseProvider
from chainlit_client import GenerationMessage
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from literalai import GenerationMessage


class AnthropicProvider(BaseProvider):
Expand Down
5 changes: 2 additions & 3 deletions backend/chainlit/playground/providers/langchain.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
from chainlit.input_widget import InputWidget
from chainlit.playground.provider import BaseProvider
from chainlit.sync import make_async
from chainlit_client import GenerationMessage
from fastapi.responses import StreamingResponse
from literalai import GenerationMessage


class LangchainGenericProvider(BaseProvider):
Expand Down Expand Up @@ -69,8 +69,7 @@ async def create_completion(self, request):

# https://github.com/langchain-ai/langchain/issues/14980
result = await make_async(self.llm.stream)(
input=messages,
**request.generation.settings
input=messages, **request.generation.settings
)

def create_event_stream():
Expand Down
4 changes: 2 additions & 2 deletions backend/chainlit/step.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
from chainlit.logger import logger
from chainlit.telemetry import trace_event
from chainlit.types import FeedbackDict
from chainlit_client import BaseGeneration
from chainlit_client.step import StepType, TrueStepType
from literalai import BaseGeneration
from literalai.step import StepType, TrueStepType


class StepDict(TypedDict, total=False):
Expand Down
2 changes: 1 addition & 1 deletion backend/chainlit/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
from chainlit.user import UserDict
from chainlit.step import StepDict

from chainlit_client import ChatGeneration, CompletionGeneration
from dataclasses_json import DataClassJsonMixin
from literalai import ChatGeneration, CompletionGeneration
from pydantic import BaseModel
from pydantic.dataclasses import dataclass

Expand Down
4 changes: 2 additions & 2 deletions backend/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "chainlit"
version = "1.0.0rc3"
version = "1.0.0"
keywords = ['LLM', 'Agents', 'gen ai', 'chat ui', 'chatbot ui', 'langchain']
description = "A faster way to build chatbot UIs."
authors = ["Chainlit"]
Expand All @@ -21,7 +21,7 @@ chainlit = 'chainlit.cli:cli'
[tool.poetry.dependencies]
python = ">=3.8.1,<4.0.0"
httpx = ">=0.23.0,<0.25.0"
chainlit_client = "0.1.0rc10"
literalai = "0.0.1"
dataclasses_json = "^0.5.7"
uvicorn = "^0.23.2"
fastapi = "^0.100"
Expand Down

0 comments on commit f761689

Please sign in to comment.