Skip to content

Commit

Permalink
Fix bug (#29)
Browse files Browse the repository at this point in the history
Co-authored-by: wangyuxin <[email protected]>
  • Loading branch information
wangyuxinwhy and wangyuxin authored Feb 5, 2024
1 parent 60ba6af commit 690fbf1
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 7 deletions.
2 changes: 1 addition & 1 deletion generate/chat_completion/models/dashscope.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ class DashScopeChatParameters(ModelParameters):
enable_search: Optional[bool] = None


class DashScopeChatParametersDict(ModelParametersDict):
class DashScopeChatParametersDict(ModelParametersDict, total=False):
seed: int
max_tokens: int
top_p: float
Expand Down
2 changes: 2 additions & 0 deletions generate/chat_completion/models/zhipu.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ class ZhipuChatParameters(ModelParameters):
def can_not_equal_zero(cls, v: Optional[Temperature]) -> Optional[Temperature]:
    """Clamp boundary temperatures into the open interval (0, 1).

    The upstream API rejects the exact values 0 and 1, so they are
    nudged to 0.01 and 0.99 respectively; every other value (including
    None) passes through unchanged.
    """
    # NOTE(review): assumes Temperature is a numeric type — confirm at the alias.
    if v == 0:
        clamped = 0.01
    elif v == 1:
        clamped = 0.99
    else:
        clamped = v
    return clamped


Expand Down
4 changes: 2 additions & 2 deletions generate/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,15 @@ def get_pytest_params(
include: Sequence[str] | None = None,
) -> list[Any]:
exclude = exclude or []
include = include or []
include = include
if isinstance(types, str):
types = [types]

pytest_params: list[Any] = []
for model_name, (model_cls, paramter_cls) in model_registry.items():
if model_name in exclude:
continue
if model_name not in include:
if include and model_name not in include:
continue
values: list[Any] = []
for t in types:
Expand Down
9 changes: 5 additions & 4 deletions generate/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,10 @@


class UserState(BaseModel):
chat_model_id: str = 'openai/gpt-3.5-turbo'
chat_model_id: str = 'openai'
temperature: float = 1.0
system_message: str = ''
max_tokens: int = 4000
max_tokens: Optional[int] = None
_chat_history: Messages = []

@property
Expand Down Expand Up @@ -89,7 +89,7 @@ def get_generate_settings() -> List[Any]:
initial='',
)
temperature_slider = Slider(id='Temperature', label='Temperature', min=0, max=1.0, step=0.1, initial=1)
max_tokens = Slider(id='MaxTokens', label='Max Tokens', min=1, max=5000, step=100, initial=4000)
max_tokens = Slider(id='MaxTokens', label='Max Tokens', min=1, max=5000, step=100, initial=0)
return [model_select, model_id, system_message_input, temperature_slider, max_tokens]


Expand All @@ -111,7 +111,8 @@ async def settings_update(settings: dict) -> None:
state.chat_model_id = settings['Model']
state.temperature = settings['Temperature']
state.system_message = settings['SystemMessage']
state.max_tokens = settings['MaxTokens']
if settings['MaxTokens']:
state.max_tokens = settings['MaxTokens']


@cl.on_message
Expand Down

0 comments on commit 690fbf1

Please sign in to comment.