From 3be037bf8b2b1682cd9457c83318c42f53fc13f0 Mon Sep 17 00:00:00 2001 From: wangjian Date: Thu, 8 Aug 2024 20:45:41 +0800 Subject: [PATCH 1/8] add lazyllm before group --- README.CN.md | 4 ++-- README.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.CN.md b/README.CN.md index 8942563c..6aca225d 100644 --- a/README.CN.md +++ b/README.CN.md @@ -269,9 +269,9 @@ def test(input): def test_cmd(input): return f'echo input is {input}' -# >>> demo.test()(1) +# >>> lazyllm.demo.test()(1) # 'input is 1' -# >>> demo.test_cmd(launcher=launchers.slurm)(2) +# >>> lazyllm.demo.test_cmd(launcher=launchers.slurm)(2) # Command: srun -p pat_rd -N 1 --job-name=xf488db3 -n1 bash -c 'echo input is 2' ``` diff --git a/README.md b/README.md index 57dbd723..34533877 100644 --- a/README.md +++ b/README.md @@ -276,9 +276,9 @@ def test(input): def test_cmd(input): return f'echo input is {input}' -# >>> demo.test()(1) +# >>> lazyllm.demo.test()(1) # 'input is 1' -# >>> demo.test_cmd(launcher=launchers.slurm)(2) +# >>> lazyllm.demo.test_cmd(launcher=launchers.slurm)(2) # Command: srun -p pat_rd -N 1 --job-name=xf488db3 -n1 bash -c 'echo input is 2' ``` From 14981452953d5b47aa8284b4444d46344559c122 Mon Sep 17 00:00:00 2001 From: wangjian Date: Fri, 29 Nov 2024 17:22:53 +0800 Subject: [PATCH 2/8] fix document bugs --- docs/en/Cookbook/streaming.md | 8 ++-- docs/zh/Cookbook/streaming.md | 8 ++-- lazyllm/docs/components.py | 8 ++-- lazyllm/docs/flow.py | 6 +-- lazyllm/docs/module.py | 75 ++++++++++++++++++----------------- lazyllm/docs/tools.py | 6 +-- 6 files changed, 56 insertions(+), 55 deletions(-) diff --git a/docs/en/Cookbook/streaming.md b/docs/en/Cookbook/streaming.md index 0fad7c3d..cb609e8d 100644 --- a/docs/en/Cookbook/streaming.md +++ b/docs/en/Cookbook/streaming.md @@ -18,7 +18,7 @@ Let's first simply implement a streaming conversational robot with a front-end i import lazyllm llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True) # or llm = lazyllm.OnlineChatModule(stream=True) -lazyllm.WebModule(llm, port=23333).start().wait() +lazyllm.WebModule(llm, port=23333, stream=True).start().wait() ``` Isn't the implementation very simple? You just need to define the model using streaming, and leave the rest of the work to [WebModule][lazyllm.tools.webpages.WebModule] to handle it. Then the messages displayed to the user will be displayed in a streaming manner on the front-end interface. @@ -129,7 +129,7 @@ Now there is only one last step left. We use [WebModule][lazyllm.tools.webpages. ```python import lazyllm -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` Now we have completed a conversational robot that supports streaming output and [FunctionCall][lazyllm.tools.agent.FunctionCall]. When there is information to show to the user, the interface will stream the message content. And [FunctionCall][lazyllm.tools.agent.FunctionCall] will execute normally. 
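
For reference, the complete minimal program that the hunks above converge on — with `stream=True` set on both the model and the web front end — is sketched below; the model name and port are the ones used throughout this cookbook page, so substitute your own:

```python
# A minimal sketch assembled from the snippets above: a streaming chatbot
# served through the WebModule front end.
import lazyllm

llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True)  # or lazyllm.OnlineChatModule(stream=True)
# stream=True on the WebModule itself is the fix this patch applies throughout the docs.
lazyllm.WebModule(llm, port=23333, stream=True).start().wait()
```
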
@@ -186,7 +186,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True).start() # or llm = lazyllm.OnlineChatModule() tools = ["get_current_weather", "get_n_day_weather_forecast"] agent = FunctionCallAgent(llm, tools) -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` The effect is as follows: @@ -255,7 +255,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True, return_trace=True).start() # or llm = lazyllm.OnlineChatModule(return_trace=True) tools = ["get_current_weather", "get_n_day_weather_forecast"] agent = FunctionCallAgent(llm, tools, return_trace=True) -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` The effect is as follows: diff --git a/docs/zh/Cookbook/streaming.md b/docs/zh/Cookbook/streaming.md index d46f79d8..3714e6be 100644 --- a/docs/zh/Cookbook/streaming.md +++ b/docs/zh/Cookbook/streaming.md @@ -18,7 +18,7 @@ import lazyllm llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True) # or llm = lazyllm.OnlineChatModule(stream=True) -lazyllm.WebModule(llm, port=23333).start().wait() +lazyllm.WebModule(llm, port=23333, stream=True).start().wait() ``` 实现是不是很简单,只需要定义好模型使用流式,其余工作交给 [WebModule][lazyllm.tools.webpages.WebModule] 来处理即可,则在前端界面上会流式的显示展示给用户的消息。 @@ -129,7 +129,7 @@ agent = FunctionCallAgent(llm, tools) ```python import lazyllm -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` 现在便完成了支持流式输出和 [FunctionCall][lazyllm.tools.agent.FunctionCall] 的对话机器人。当有给用户展示的信息时,界面便会流式的输出消息内容。而 [FunctionCall][lazyllm.tools.agent.FunctionCall] 会正常执行。 @@ -186,7 +186,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True).start() # or llm = lazyllm.OnlineChatModule() tools = ["get_current_weather", "get_n_day_weather_forecast"] agent = FunctionCallAgent(llm, tools) -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` 效果如下: @@ -256,7 +256,7 @@ def get_n_day_weather_forecast(location: str, num_days: int, unit: Literal["cels llm = lazyllm.TrainableModule("internlm2-chat-20b", stream=True, return_trace=True).start() # or llm = lazyllm.OnlineChatModule(return_trace=True) tools = ["get_current_weather", "get_n_day_weather_forecast"] agent = FunctionCallAgent(llm, tools, return_trace=True) -lazyllm.WebModule(agent, port=23333).start().wait() +lazyllm.WebModule(agent, port=23333, stream=True).start().wait() ``` 效果如下: diff --git a/lazyllm/docs/components.py b/lazyllm/docs/components.py index 39e03b37..7f143e99 100644 --- a/lazyllm/docs/components.py +++ b/lazyllm/docs/components.py @@ -963,21 +963,21 @@ >>> from lazyllm import ChatPrompter >>> p = ChatPrompter('hello world') >>> p.generate_prompt('this is my input') -'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is my input\\\\n<|Assistant|>:\\\\n' +'You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nthis is my input\\\\n\\\\n' >>> p.generate_prompt('this is my input', return_dict=True) {'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by 
LazyLLM.\\\\nhello world\\\\n\\\\n'}, {'role': 'user', 'content': 'this is my input'}]} >>> >>> p = ChatPrompter('hello world {instruction}', extro_keys=['knowledge']) >>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge')) -'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is my inp\\\\n<|Assistant|>:\\\\n' +'You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nthis is my inp\\\\n\\\\n' >>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge'), return_dict=True) {'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n'}, {'role': 'user', 'content': 'this is my inp'}]} >>> p.generate_prompt(dict(instruction='this is my ins', input='this is my inp', knowledge='LazyLLM-Knowledge'), history=[['s1', 'e1'], ['s2', 'e2']]) -'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n<|end_system|>\\\\n\\\\n<|Human|>:s1<|Assistant|>:e1<|Human|>:s2<|Assistant|>:e2\\\\n<|Human|>:\\\\nthis is my inp\\\\n<|Assistant|>:\\\\n' +'You are an AI-Agent developed by LazyLLM.hello world this is my ins\\\\nHere are some extra messages you can referred to:\\\\n\\\\n### knowledge:\\\\nLazyLLM-Knowledge\\\\n\\\\n\\\\n\\\\n\\\\ns1e1s2e2\\\\n\\\\nthis is my inp\\\\n\\\\n' >>> >>> p = ChatPrompter(dict(system="hello world", user="this is user instruction {input} ")) >>> p.generate_prompt(dict(input="my input", query="this is user query")) -'<|start_system|>You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n<|end_system|>\\\\n\\\\n\\\\n<|Human|>:\\\\nthis is user instruction my input this is user query\\\\n<|Assistant|>:\\\\n' +'You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n\\\\n\\\\nthis is user instruction my input this is user query\\\\n\\\\n' >>> p.generate_prompt(dict(input="my input", query="this is user query"), return_dict=True) {'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world\\\\n\\\\n'}, {'role': 'user', 'content': 'this is user instruction my input this is user query'}]} ''') diff --git a/lazyllm/docs/flow.py b/lazyllm/docs/flow.py index 2a97cd6b..1c424d3f 100644 --- a/lazyllm/docs/flow.py +++ b/lazyllm/docs/flow.py @@ -115,9 +115,9 @@ ... >>> flow = lazyllm.pipeline(test1, lazyllm.pipeline(test2, test3)) >>> flow.for_each(lambda x: callable(x), lambda x: print(x)) - - - + + + """) add_chinese_doc('Parallel', """\ diff --git a/lazyllm/docs/module.py b/lazyllm/docs/module.py index 3bd3a11e..3aa5b847 100644 --- a/lazyllm/docs/module.py +++ b/lazyllm/docs/module.py @@ -42,10 +42,10 @@ ... 
>>> m = Module2() >>> m.submodules -[<__main__.Module object at 0x7f3dc3bb16f0>] +[] >>> m.m3 = Module() >>> m.submodules -[<__main__.Module object at 0x7f3dc3bb16f0>, <__main__.Module object at 0x7f3dc3bb0be0>] +[, ] ''') add_chinese_doc('ModuleBase.forward', '''\ @@ -116,7 +116,7 @@ add_example('ModuleBase.update', '''\ >>> import lazyllm ->>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).deploy_method(lazyllm.deploy.dummy).mode('finetune').prompt(None) +>>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).deploy_method(lazyllm.deploy.dummy).trainset("").mode('finetune').prompt(None) >>> m.evalset([1, 2, 3]) >>> m.update() INFO: (lazyllm.launcher) PID: dummy finetune!, and init-args is {} @@ -134,7 +134,7 @@ add_example('ModuleBase.evalset', '''\ >>> import lazyllm ->>> m = lazyllm.module.TrainableModule().deploy_method(layzllm.deploy.dummy).finetune_method(lazyllm.finetune.dummy).mode("finetune").prompt(None) +>>> m = lazyllm.module.TrainableModule().deploy_method(lazyllm.deploy.dummy).finetune_method(lazyllm.finetune.dummy).trainset("").mode("finetune").prompt(None) >>> m.evalset([1, 2, 3]) >>> m.update() INFO: (lazyllm.launcher) PID: dummy finetune!, and init-args is {} @@ -749,39 +749,40 @@ >>> for r in resp: ... print(r) ... -{'content': '你好'} -{'content': '!'} -{'content': '有什么'} -{'content': '我可以'} -{'content': '帮助'} -{'content': '你的'} -{'content': '吗'} -{'content': '?'} -{'content': ''} ->>> m = lazyllm.OnlineChatModule(source="sensenova", model="nova-ptc-s-v2", stream=False) ->>> train_file = "toy_chat_fine_tuning.jsonl" ->>> m.set_train_tasks(train_file=train_file, upload_url="https://file.sensenova.cn/v1/files") ->>> m._get_train_tasks() -Num examples: -First example: -{'role': 'system', 'content': 'Marv is a factual chatbot that is also sarcastic.'} -{'role': 'user', 'content': "What's the capital of France?"} -{'role': 'assistant', 'content': "Paris, as if everyone doesn't know that already."} -No errors found -train file id: 7193d9a3-8b6e-4332-99cc-724dec75d9dd -toy_chat_fine_tuning.jsonl upload success! file id is d632e591-f668-43a1-b5bf-49418e9c0fec -fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c created, status: SUBMITTED -fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c status: RUNNING -... -fine tuning job ft-85f7bc96034441f2b64f9a5fff5d5b9c status: SUCCEEDED -fine tuned model: nova-ptc-s-v2:ft-fee492082cbe4a6d880d396f34f1bc50 finished ->>> m._get_deploy_tasks() -deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 created, status: SUBMITTED -... -deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 status: PENDING -... -deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 status: RUNNING -deployment c5aaf3bf-ef9b-4797-8c15-12ff04ed5372 finished +H +e +l +l +o +! + +H +o +w + +c +a +n + +I + +a +s +s +i +s +t + +y +o +u + +t +o +d +a +y +? 
''')

add_chinese_doc('OnlineEmbeddingModule', '''\
diff --git a/lazyllm/docs/tools.py b/lazyllm/docs/tools.py
index 65ea5b2f..4ca28faf 100644
--- a/lazyllm/docs/tools.py
+++ b/lazyllm/docs/tools.py
@@ -201,9 +201,9 @@
>>> doc1 = Document(dataset_path="your_files_path", create_ui=False)
>>> doc2 = Document(dataset_path="your_files_path", create_ui=False)
>>> doc1.add_reader("**/*.yml", YmlReader)
->>> print(doc1._local_file_reader)
+>>> print(doc1._impl._local_file_reader)
# {'**/*.yml': <class '__main__.YmlReader'>}
->>> print(doc2._local_file_reader)
+>>> print(doc2._impl._local_file_reader)
# {}
>>> files = ["your_yml_files"]
>>> Document.register_global_reader("**/*.yml", processYml)
@@ -1337,7 +1337,7 @@
>>> agent = ReWOOAgent(llm, tools)
>>> query = "What is the name of the cognac house that makes the main ingredient in The Hennchata?"
>>> res = agent(query)
->>> print(res)
+>>> print(f"{res!r}")
'\nHennessy '
""",
)

From a632881a8f3866d1ab18c763c33a8adb3d8ee51f Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 19:04:13 +0800
Subject: [PATCH 3/8] remove '\n'

---
 lazyllm/docs/components.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lazyllm/docs/components.py b/lazyllm/docs/components.py
index 7f143e99..ac6f2029 100644
--- a/lazyllm/docs/components.py
+++ b/lazyllm/docs/components.py
@@ -979,7 +979,7 @@
>>> p.generate_prompt(dict(input="my input", query="this is user query"))
'You are an AI-Agent developed by LazyLLM.hello world\\\\n\\\\n\\\\n\\\\nthis is user instruction my input this is user query\\\\n\\\\n'
>>> p.generate_prompt(dict(input="my input", query="this is user query"), return_dict=True)
-{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world\\\\n\\\\n'}, {'role': 'user', 'content': 'this is user instruction my input this is user query'}]}
+{'messages': [{'role': 'system', 'content': 'You are an AI-Agent developed by LazyLLM.\\\\nhello world'}, {'role': 'user', 'content': 'this is user instruction my input this is user query'}]}
''')

# ============= MultiModal
From a9d7102118d3e2fc7078f25f8c065f669b425b0e Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 19:27:45 +0800
Subject: [PATCH 4/8] modify trainset location

---
 lazyllm/docs/module.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lazyllm/docs/module.py b/lazyllm/docs/module.py
index 3aa5b847..0015b240 100644
--- a/lazyllm/docs/module.py
+++ b/lazyllm/docs/module.py
@@ -116,7 +116,7 @@
add_example('ModuleBase.update', '''\
>>> import lazyllm
->>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).deploy_method(lazyllm.deploy.dummy).trainset("").mode('finetune').prompt(None)
+>>> m = lazyllm.module.TrainableModule().finetune_method(lazyllm.finetune.dummy).trainset("").deploy_method(lazyllm.deploy.dummy).mode('finetune').prompt(None)
>>> m.evalset([1, 2, 3])
>>> m.update()
INFO: (lazyllm.launcher) PID: dummy finetune!, and init-args is {}
From 83fa969a59005de1a8d45765f44c650407bdc6c0 Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 19:39:21 +0800
Subject: [PATCH 5/8] modify onlineChatModule output

---
 lazyllm/docs/module.py | 39 ++-------------------------------------
 1 file changed, 2 insertions(+), 37 deletions(-)

diff --git a/lazyllm/docs/module.py b/lazyllm/docs/module.py
index 0015b240..658dba80 100644
--- a/lazyllm/docs/module.py
+++ b/lazyllm/docs/module.py
@@ -746,43 +746,8 @@
>>> m = lazyllm.OnlineChatModule(source="sensenova", stream=True)
>>> query = "Hello!"
>>> resp = m(query)
->>> for r in resp:
-...     print(r)
-...
-H
-e
-l
-l
-o
-!
-
-H
-o
-w
-
-c
-a
-n
-
-I
-
-a
-s
-s
-i
-s
-t
-
-y
-o
-u
-
-t
-o
-d
-a
-y
-?
+>>> print(resp)
+"Hello! How can I assist you today?"
''')

From 2cc19708853632e54bec3896066ac33a1c12c728 Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 19:51:01 +0800
Subject: [PATCH 6/8] modify onlineChatModule stream output

---
 lazyllm/docs/module.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/lazyllm/docs/module.py b/lazyllm/docs/module.py
index 658dba80..92625350 100644
--- a/lazyllm/docs/module.py
+++ b/lazyllm/docs/module.py
@@ -743,11 +743,21 @@
add_example('OnlineChatModule', '''\
>>> import lazyllm
+>>> from functools import partial
>>> m = lazyllm.OnlineChatModule(source="sensenova", stream=True)
>>> query = "Hello!"
->>> resp = m(query)
->>> print(resp)
-"Hello! How can I assist you today?"
+>>> with lazyllm.ThreadPoolExecutor(1) as executor:
+...     future = executor.submit(partial(m, llm_chat_history=[]), query)
+...     while True:
+...         if value := lazyllm.FileSystemQueue().dequeue():
+...             print(f"output: {''.join(value)}")
+...         elif future.done():
+...             break
+...     print(f"ret: {future.result()}")
+...
+output: Hello
+output: ! How can I assist you today?
+ret: Hello! How can I assist you today?
''')

add_chinese_doc('OnlineEmbeddingModule', '''\
From 1b252ae323fcb750ee57abfb3560480f7756be84 Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 20:09:17 +0800
Subject: [PATCH 7/8] delete comments

---
 lazyllm/docs/tools.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/lazyllm/docs/tools.py b/lazyllm/docs/tools.py
index 4ca28faf..e07c56d8 100644
--- a/lazyllm/docs/tools.py
+++ b/lazyllm/docs/tools.py
@@ -202,15 +202,15 @@
>>> doc2 = Document(dataset_path="your_files_path", create_ui=False)
>>> doc1.add_reader("**/*.yml", YmlReader)
>>> print(doc1._impl._local_file_reader)
-# {'**/*.yml': <class '__main__.YmlReader'>}
+{'**/*.yml': <class '__main__.YmlReader'>}
>>> print(doc2._impl._local_file_reader)
-# {}
+{}
>>> files = ["your_yml_files"]
>>> Document.register_global_reader("**/*.yml", processYml)
>>> doc1._impl._reader.load_data(input_files=files)
-# Call the class YmlReader.
+Call the class YmlReader.
>>> doc2._impl._reader.load_data(input_files=files)
-# Call the function processYml.
+Call the function processYml.
''')

add_english_doc('rag.readers.ReaderBase', '''
From dc16227be1d04759e20ea033d35c63aa253b38e0 Mon Sep 17 00:00:00 2001
From: wangjian
Date: Fri, 29 Nov 2024 20:24:08 +0800
Subject: [PATCH 8/8] modify print

---
 lazyllm/docs/tools.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lazyllm/docs/tools.py b/lazyllm/docs/tools.py
index e07c56d8..717e57fc 100644
--- a/lazyllm/docs/tools.py
+++ b/lazyllm/docs/tools.py
@@ -1337,7 +1337,7 @@
>>> agent = ReWOOAgent(llm, tools)
>>> query = "What is the name of the cognac house that makes the main ingredient in The Hennchata?"
>>> res = agent(query)
->>> print(f"{res!r}")
+>>> print(res)
'\nHennessy '
""",
)
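
The doctest added in [PATCH 6/8] encodes a reusable pattern for consuming LazyLLM's streaming output: run the module in a worker thread and drain `lazyllm.FileSystemQueue()` until the future resolves. Below is a sketch of that pattern extracted into a helper; the helper name and the incremental printing are our own additions, while the API calls (`lazyllm.ThreadPoolExecutor`, `FileSystemQueue().dequeue()`, `llm_chat_history`) are exactly the ones exercised in the patch:

```python
import lazyllm
from functools import partial

def stream_call(module, query, history=None):
    # Run `module` in a single worker thread; streamed chunks arrive on the
    # FileSystemQueue and are printed as soon as they are dequeued.
    with lazyllm.ThreadPoolExecutor(1) as executor:
        future = executor.submit(partial(module, llm_chat_history=history or []), query)
        while True:
            if value := lazyllm.FileSystemQueue().dequeue():
                print(''.join(value), end='', flush=True)
            elif future.done():
                break
    print()
    return future.result()  # the complete, concatenated reply

# e.g.: stream_call(lazyllm.OnlineChatModule(source="sensenova", stream=True), "Hello!")
```
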