Commit 194ac47: Fix errors in test documentation (#373)

wangjian052163 authored Dec 2, 2024
1 parent 170a6e6 commit 194ac47
Showing 6 changed files with 38 additions and 62 deletions.
8 changes: 4 additions & 4 deletions docs/en/Cookbook/streaming.md
@@ -18,7 +18,7 @@ Let's first simply implement a streaming conversational robot with a front-end i
import lazyllm

llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True) # or llm = lazyllm.OnlineChatModule(stream=True)
lazyllm.WebModule(llm, port=23333).start().wait()
lazyllm.WebModule(llm, port=23333, stream=True).start().wait()
```

Isn't the implementation simple? You just need to define the model with streaming enabled and leave the rest of the work to [WebModule][lazyllm.tools.webpages.WebModule]. The messages shown to the user are then streamed on the front-end interface.
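If you want to consume the streamed chunks yourself instead of handing them to [WebModule][lazyllm.tools.webpages.WebModule], the pattern this commit adds to the `OnlineChatModule` example (in `lazyllm/docs/module.py` below) can be reused: run the call in a worker thread and poll `lazyllm.FileSystemQueue` for the streamed fragments. A minimal sketch, with the query string as a placeholder and the queue behaviour taken from that example rather than verified here:

```python
import lazyllm

llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True).start()  # or lazyllm.OnlineChatModule(stream=True)

# Run the call in a worker thread and drain the streamed chunks from the queue.
with lazyllm.ThreadPoolExecutor(1) as executor:
    future = executor.submit(llm, "Hello!")
    while True:
        if value := lazyllm.FileSystemQueue().dequeue():
            print(f"chunk: {''.join(value)}")   # streamed fragments arrive here
        elif future.done():
            break
    print(f"final: {future.result()}")          # the complete reply
```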
@@ -129,7 +129,7 @@ Now there is only one last step left. We use [WebModule][lazyllm.tools.webpages.

```python
import lazyllm
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

Now we have completed a conversational robot that supports streaming output and [FunctionCall][lazyllm.tools.agent.FunctionCall]. When there is information to show to the user, the interface streams the message content, while [FunctionCall][lazyllm.tools.agent.FunctionCall] executes normally.
@@ -186,7 +186,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels
llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True).start() # or llm = lazyllm.OnlineChatModule()
tools = ["get_current_weather", "get_n_day_weather_forecast"]
agent = FunctionCallAgent(llm, tools)
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

The effect is as follows:
@@ -255,7 +255,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels
llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True, return_trace=True).start() # or llm = lazyllm.OnlineChatModule(return_trace=True)
tools = ["get_current_weather", "get_n_day_weather_forecast"]
agent = FunctionCallAgent(llm, tools, return_trace=True)
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

The effect is as follows:
8 changes: 4 additions & 4 deletions docs/zh/Cookbook/streaming.md
@@ -18,7 +18,7 @@
import lazyllm

llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True) # or llm = lazyllm.OnlineChatModule(stream=True)
lazyllm.WebModule(llm, port=23333).start().wait()
lazyllm.WebModule(llm, port=23333, stream=True).start().wait()
```

Isn't the implementation simple? You only need to define the model with streaming enabled and hand the rest of the work to [WebModule][lazyllm.tools.webpages.WebModule]; the front-end interface will then stream the messages shown to the user.
@@ -129,7 +129,7 @@ agent = FunctionCallAgent(llm, tools)

```python
import lazyllm
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

Now we have completed a conversational robot that supports streaming output and [FunctionCall][lazyllm.tools.agent.FunctionCall]. When there is information to show to the user, the interface streams the message content, while [FunctionCall][lazyllm.tools.agent.FunctionCall] executes normally.
@@ -186,7 +186,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels
llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True).start() # or llm = lazyllm.OnlineChatModule()
tools = ["get_current_weather", "get_n_day_weather_forecast"]
agent = FunctionCallAgent(llm, tools)
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

The effect is as follows:
@@ -256,7 +256,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels
llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True, return_trace=True).start() # or llm = lazyllm.OnlineChatModule(return_trace=True)
tools = ["get_current_weather", "get_n_day_weather_forecast"]
agent = FunctionCallAgent(llm, tools, return_trace=True)
lazyllm.WebModule(agent, port=23333).start().wait()
lazyllm.WebModule(agent, port=23333, stream=True).start().wait()
```

The effect is as follows:
10 changes: 5 additions & 5 deletions lazyllm/docs/components.py
@@ -963,23 +963,23 @@
>>> from lazyllm import ChatPrompter
>>> p = ChatPrompter('hello world')
>>> p.generate_prompt('this is my input')
'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is my input\\\\n<|Assistant|>:\\\\n'
'You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nthis is my input\\\\n\\\\n'
>>> p.generate_prompt('this is my input', return_dict=True)
{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world\\\\n\\\\n'}, {'role': 'user', 'content': 'this is my input'}]}
>>>
>>> p = ChatPrompter('hello world {instruction}', extro_keys=['knowledge'])
>>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge'))
'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is my inp\\\\n<|Assistant|>:\\\\n'
'You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nthis is my inp\\\\n\\\\n'
>>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge'), return_dict=True)
{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n'}, {'role': 'user', 'content': 'this is my inp'}]}
>>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge'), history=[['s1', 'e1'], ['s2', 'e2']])
'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n<|end_system|>\\\\n\\\\n<|Human|>:s1<|Assistant|>:e1<|Human|>:s2<|Assistant|>:e2\\\\n<|Human|>:\\\\nthis is my inp\\\\n<|Assistant|>:\\\\n'
'You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n\\\\n\\\\ns1e1s2e2\\\\n\\\\nthis is my inp\\\\n\\\\n'
>>>
>>> p = ChatPrompter(dict(system="hello world", user="this is user instruction {input} "))
>>> p.generate_prompt(dict(input="my input", query="this is user query"))
'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is user instruction my input this is user query\\\\n<|Assistant|>:\\\\n'
'You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n\\\\n\\\\nthis is user instruction my input this is user query\\\\n\\\\n'
>>> p.generate_prompt(dict(input="my input", query="this is user query"), return_dict=True)
{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world\\\\n\\\\n'}, {'role': 'user', 'content': 'this is user instruction my input this is user query'}]}
{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world'}, {'role': 'user', 'content': 'this is user instruction my input this is user query'}]}
''')
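As a complement to the doctest above, here is the same ChatPrompter usage as a self-contained script showing both output forms; the printed values should match the updated outputs in this diff, though that expectation is not verified here:

```python
from lazyllm import ChatPrompter

# Prompter with an extra slot for retrieved knowledge, as in the example above.
p = ChatPrompter('hello world {instruction}', extro_keys=['knowledge'])

fields = dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge')

# Flat string form (what the updated doctest output above shows).
print(p.generate_prompt(fields))

# OpenAI-style messages form, produced with return_dict=True.
print(p.generate_prompt(fields, return_dict=True))
```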

# ============= MultiModal
6 changes: 3 additions & 3 deletions lazyllm/docs/flow.py
@@ -115,9 +115,9 @@
...
>>> flow = lazyllm.pipeline(test1, lazyllm.pipeline(test2, test3))
>>> flow.for_each(lambda x: callable(x), lambda x: print(x))
<function test1 at 0x7f389c3d3ac0>
<function test2 at 0x7f389c3d3b50>
<function test3 at 0x7f389c3d3be0>
<Function type=test1>
<Function type=test2>
<Function type=test3>
""")

add_chinese_doc('Parallel', """\
56 changes: 16 additions & 40 deletions lazyllm/docs/module.py
@@ -42,10 +42,10 @@
...
>>> m = Module2()
>>> m.submodules
[<__main__.Module object at 0x7f3dc3bb16f0>]
[<Module type=Module>]
>>> m.m3 = Module()
>>> m.submodules
[<__main__.Module object at 0x7f3dc3bb16f0>, <__main__.Module object at 0x7f3dc3bb0be0>]
[<Module type=Module>, <Module type=Module>]
''')

add_chinese_doc('ModuleBase.forward', '''\
@@ -116,7 +116,7 @@

add_example('ModuleBase.update', '''\
>>> import lazyllm
>>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).deploy_method(lazyllm.deploy.dummy).mode('finetune').prompt(None)
>>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).trainset("").deploy_method(lazyllm.deploy.dummy).mode('finetune').prompt(None)
>>> m.evalset([1, 2, 3])
>>> m.update()
INFO: (lazyllm.launcher) PID: dummy finetune!, and init-args is {}
@@ -134,7 +134,7 @@

add_example('ModuleBase.evalset', '''\
>>> import lazyllm
>>> m = lazyllm.module.TrainableModule().deploy_method(layzllm.deploy.dummy).finetune_method(lazyllm.finetune.dummy).mode("finetune").prompt(None)
>>> m = lazyllm.module.TrainableModule().deploy_method(lazyllm.deploy.dummy).finetune_method(lazyllm.finetune.dummy).trainset("").mode("finetune").prompt(None)
>>> m.evalset([1, 2, 3])
>>> m.update()
INFO: (lazyllm.launcher) PID: dummy finetune!, and init-args is {}
@@ -743,45 +743,21 @@

add_example('OnlineChatModule', '''\
>>> import lazyllm
>>> from functools import partial
>>> m = lazyllm.OnlineChatModule(source="sensenova", stream=True)
>>> query = "Hello!"
>>> resp = m(query)
>>> for r in resp:
... print(r)
>>> with lazyllm.ThreadPoolExecutor(1) as executor:
... future = executor.submit(partial(m, llm_chat_history=[]), query)
... while True:
... if value := lazyllm.FileSystemQueue().dequeue():
... print(f"output: {''.join(value)}")
... elif future.done():
... break
... print(f"ret: {future.result()}")
...
{'content': '你好'}
{'content': '!'}
{'content': '有什么'}
{'content': '我可以'}
{'content': '帮助'}
{'content': '你的'}
{'content': '吗'}
{'content': '?'}
{'content': ''}
>>> m = lazyllm.OnlineChatModule(source="sensenova", model="nova-ptc-s-v2", stream=False)
>>> train_file = "toy_chat_fine_tuning.jsonl"
>>> m.set_train_tasks(train_file=train_file, upload_url="https://file.sensenova.cn/v1/files")
>>> m._get_train_tasks()
Num examples:
First example:
{'role': 'system', 'content': 'Marv is a factual chatbot that is also sarcastic.'}
{'role': 'user', 'content': "What's the capital of France?"}
{'role': 'assistant', 'content': "Paris, as if everyone doesn't know that already."}
No errors found
train file id: 7193d9a3-8b6e-4332-99cc-724dec75d9dd
toy_chat_fine_tuning.jsonl upload success! file id is d632e591-f668-43a1-b5bf-49418e9c0fec
fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c created, status: SUBMITTED
fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c status: RUNNING
...
fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c status: SUCCEEDED
fine tuned model: nova-ptc-s-v2:ft-fee492082cbe4a6d880d396f34f1bc50 finished
>>> m._get_deploy_tasks()
deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 created, status: SUBMITTED
...
deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 status: PENDING
...
deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 status: RUNNING
deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 finished
output: Hello
output: ! How can I assist you today?
ret: Hello! How can I assist you today?
''')

add_chinese_doc('OnlineEmbeddingModule', '''\
12 changes: 6 additions & 6 deletions lazyllm/docs/tools.py
@@ -201,16 +201,16 @@
>>> doc1 = Document(dataset_path="your_files_path", create_ui=False)
>>> doc2 = Document(dataset_path="your_files_path", create_ui=False)
>>> doc1.add_reader("**/*.yml", YmlReader)
>>> print(doc1._local_file_reader)
# {'**/*.yml': <class '__main__.YmlReader'>}
>>> print(doc2._local_file_reader)
# {}
>>> print(doc1._impl._local_file_reader)
{'**/*.yml': <class '__main__.YmlReader'>}
>>> print(doc2._impl._local_file_reader)
{}
>>> files = ["your_yml_files"]
>>> Document.register_global_reader("**/*.yml", processYml)
>>> doc1._impl._reader.load_data(input_files=files)
# Call the class YmlReader.
Call the class YmlReader.
>>> doc2._impl._reader.load_data(input_files=files)
# Call the function processYml.
Call the function processYml.
''')
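The example above registers `YmlReader` and `processYml` without showing them; below is a sketch of what the function-based reader might look like. Only the `register_global_reader` call is taken from the example — the reader's signature, its return type, and the top-level `Document` import are assumptions for illustration:

```python
import yaml
from lazyllm import Document  # import path is an assumption

def processYml(file, extra_info=None):  # signature is an assumption
    # Hypothetical global reader: parse one .yml file and return its content
    # so the RAG pipeline can index it.
    with open(file, 'r') as f:
        data = yaml.safe_load(f)
    print("Call the function processYml.")
    return [str(data)]

# This registration call is the one shown in the example above.
Document.register_global_reader("**/*.yml", processYml)
```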

add_english_doc('rag.readers.ReaderBase', '''
