Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Training-Service for local sft and unify sft for local and online #348

Merged
merged 18 commits into from
Nov 22, 2024
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion lazyllm/cli/run.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
import sys
import argparse
import json

import lazyllm
from lazyllm.engine.lightengine import LightEngine
from lazyllm.tools.train_service.serve import TrainServer

# lazyllm run xx.json / xx.dsl / xx.lazyml
# lazyllm run chatbot --model=xx --framework=xx --source=xx
Expand Down Expand Up @@ -47,9 +50,17 @@ def graph(json_file):
res = engine.run(eid, query)
print(f'answer: {res}')

def training_service():
    """Launch the local SFT training service and block until it stops.

    Wraps a ``TrainServer`` in a ``ServerModule`` using an asynchronous
    ``EmptyLauncher`` (no remote scheduler), starts it, triggers it once,
    then waits on the server so the CLI process stays alive.
    """
    server = lazyllm.ServerModule(
        TrainServer(),
        launcher=lazyllm.launcher.EmptyLauncher(sync=False))
    server.start()
    server()
    server.wait()

def run(commands):
if not commands:
print('Usage:\n lazyllm run graph.json\n lazyllm run chatbot\n lazyllm run rag\n')
print('Usage:\n lazyllm run graph.json\n lazyllm run chatbot\n '
'lazyllm run rag\n lazyllm run training_service\n')

parser = argparse.ArgumentParser(description='lazyllm deploy command')
parser.add_argument('command', type=str, help='command')
Expand All @@ -75,6 +86,8 @@ def run(commands):
rag(llm, args.documents)
elif args.command.endswith('.json'):
graph(args.command)
elif args.command == 'training_service':
training_service()
else:
print('lazyllm run is not ready yet.')
sys.exit(0)
2 changes: 1 addition & 1 deletion lazyllm/components/finetune/alpacalora.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def __init__(self,
**kw
):
if not merge_path:
save_path = os.path.join(os.getcwd(), target_path)
save_path = os.path.join(lazyllm.config['train_target_root'], target_path)
target_path, merge_path = os.path.join(save_path, "lazyllm_lora"), os.path.join(save_path, "lazyllm_merge")
os.system(f'mkdir -p {target_path} {merge_path}')
super().__init__(
Expand Down
2 changes: 1 addition & 1 deletion lazyllm/components/finetune/collie.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def __init__(self,
**kw
):
if not merge_path:
save_path = os.path.join(os.getcwd(), target_path)
save_path = os.path.join(lazyllm.config['train_target_root'], target_path)
target_path, merge_path = os.path.join(save_path, "lazyllm_lora"), os.path.join(save_path, "lazyllm_merge")
os.system(f'mkdir -p {target_path} {merge_path}')
super().__init__(
Expand Down
13 changes: 10 additions & 3 deletions lazyllm/components/finetune/llamafactory.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import yaml
import json
import tempfile
import random
from datetime import datetime

import lazyllm
from lazyllm import launchers, ArgsDict, thirdparty, CaseInsensitiveDict
Expand Down Expand Up @@ -30,7 +32,7 @@ def __init__(self,
if os.path.exists(defatult_path):
base_model = defatult_path
if not merge_path:
save_path = os.path.join(os.getcwd(), target_path)
save_path = os.path.join(lazyllm.config['train_target_root'], target_path)
target_path, merge_path = os.path.join(save_path, "lazyllm_lora"), os.path.join(save_path, "lazyllm_merge")
os.system(f'mkdir -p {target_path} {merge_path}')
super().__init__(
Expand Down Expand Up @@ -73,9 +75,10 @@ def __init__(self,
self.export_dict['export_dir'] = merge_path
self.export_dict['template'] = self.template_dict['template']

self.temp_folder = os.path.join(os.getcwd(), '.temp')
self.temp_folder = os.path.join(lazyllm.config['temp_dir'], 'llamafactory_config')
if not os.path.exists(self.temp_folder):
os.makedirs(self.temp_folder)
self.log_file_path = None

def get_template_name(self, base_model):
try:
Expand Down Expand Up @@ -144,8 +147,12 @@ def cmd(self, trainset, valset=None) -> str:
updated_template_str = yaml.dump(dict(self.template_dict), default_flow_style=False)
self.temp_yaml_file = self.build_temp_yaml(updated_template_str)

formatted_date = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
random_value = random.randint(1000, 9999)
self.log_file_path = f'{self.target_path}/train_log_{formatted_date}_{random_value}.log'

cmds = f'llamafactory-cli train {self.temp_yaml_file}'
cmds += f' 2>&1 | tee {self.target_path}/llm_$(date +"%Y-%m-%d_%H-%M-%S").log'
cmds += f' 2>&1 | tee {self.log_file_path}'
if self.temp_export_yaml_file:
cmds += f' && llamafactory-cli export {self.temp_export_yaml_file}'
return cmds
2 changes: 1 addition & 1 deletion lazyllm/components/stable_diffusion/stable_diffusion3.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def __init__(self, base_sd, source=None, embed_batch_size=30, trust_remote_code=
self.trust_remote_code = trust_remote_code
self.sd = None
self.init_flag = lazyllm.once_flag()
self.save_path = save_path if save_path else os.path.join(os.getcwd(), '.temp/sd3')
self.save_path = save_path or os.path.join(lazyllm.config['temp_dir'], 'sd3')
if init:
lazyllm.call_once(self.init_flag, self.load_sd)

Expand Down
2 changes: 1 addition & 1 deletion lazyllm/components/text_to_speech/bark.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def __init__(self, base_path, source=None, trust_remote_code=True, save_path=Non
self.processor, self.bark = None, None
self.init_flag = lazyllm.once_flag()
self.device = 'cpu'
self.save_path = save_path if save_path else os.path.join(os.getcwd(), '.temp/bark')
self.save_path = save_path or os.path.join(lazyllm.config['temp_dir'], 'bark')
if init:
lazyllm.call_once(self.init_flag, self.load_bark)

Expand Down
2 changes: 1 addition & 1 deletion lazyllm/components/text_to_speech/chattts.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def __init__(self, base_path, source=None, save_path=None, init=False):
self.init_flag = lazyllm.once_flag()
self.device = 'cpu'
self.seed = 1024
self.save_path = save_path if save_path else os.path.join(os.getcwd(), '.temp/chattts')
self.save_path = save_path or os.path.join(lazyllm.config['temp_dir'], 'chattts')
if init:
lazyllm.call_once(self.init_flag, self.load_tts)

Expand Down
2 changes: 1 addition & 1 deletion lazyllm/components/text_to_speech/musicgen.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def __init__(self, base_path, source=None, save_path=None, init=False):
self.base_path = ModelManager(source).download(base_path)
self.model = None
self.init_flag = lazyllm.once_flag()
self.save_path = save_path if save_path else os.path.join(os.getcwd(), '.temp/musicgen')
self.save_path = save_path or os.path.join(lazyllm.config['temp_dir'], 'musicgen')
if init:
lazyllm.call_once(self.init_flag, self.load_tts)

Expand Down
2 changes: 2 additions & 0 deletions lazyllm/configs.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,4 +91,6 @@ def refresh(self, targets: Union[str, List[str]] = None) -> None:
).add('repr_ml', bool, False, 'REPR_USE_ML'
).add('rag_store', str, 'none', 'RAG_STORE'
).add('gpu_type', str, 'A100', 'GPU_TYPE'
).add('train_target_root', str, os.path.join(os.getcwd(), 'save_ckpt'), 'TRAIN_TARGET_ROOT'
).add('temp_dir', str, os.path.join(os.getcwd(), '.temp'), 'TEMP_DIR'
)
Loading
Loading