From 93f9760de9dbbc389889942b6ba1f148c03aa9ff Mon Sep 17 00:00:00 2001
From: charlieyl 
Date: Fri, 20 Dec 2024 16:39:45 +0800
Subject: [PATCH] =?UTF-8?q?[update]Upgrade=20official=20website=20address:?=
 =?UTF-8?q?=20https://tensoropera.ai=20,=20and=20the=20brand:=20TensorOpera?=
 =?UTF-8?q?=20=C2=AE=20=20AI?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 python/README.md | 4 +-
 python/examples/README.md | 14 +++----
 .../examples/deploy/complex_example/README.md | 2 +-
 .../deploy/complex_example/config.yaml | 2 +-
 python/examples/deploy/mnist/README.md | 4 +-
 python/examples/deploy/mnist/mnist.yaml | 2 +-
 .../examples/deploy/multi_service/README.md | 6 +--
 python/examples/deploy/quick_start/README.md | 2 +-
 .../deploy/scalellm-multi-engine/README.md | 2 +-
 python/examples/deploy/scalellm/README.md | 2 +-
 .../deploy/streaming_response/README.md | 2 +-
 .../deploy/streaming_response/config.yaml | 2 +-
 python/examples/deploy/triton/README.md | 6 +--
 python/examples/deploy/your_own_llm/README.md | 4 +-
 python/examples/deploy/your_own_llm/llm.yaml | 2 +-
 .../grpc_docker_fedmlai/README.md | 4 +-
 python/examples/launch/README.md | 4 +-
 .../launch/federate_build_package/README.md | 2 +-
 .../launch/train_build_package/README.md | 2 +-
 python/examples/train/README.md | 2 +-
 python/examples/train/llm_train/job.yaml | 2 +-
 python/fedml/api/constants.py | 2 +-
 python/fedml/api/modules/build.py | 4 +-
 python/fedml/api/modules/device.py | 6 +--
 python/fedml/api/modules/model.py | 4 +-
 python/fedml/api/modules/utils.py | 2 +-
 python/fedml/cli/README.md | 20 +++++-----
 python/fedml/cli/modules/build.py | 4 +-
 python/fedml/cli/modules/cluster.py | 8 ++--
 python/fedml/cli/modules/device.py | 18 ++++-----
 python/fedml/cli/modules/federate.py | 4 +-
 python/fedml/cli/modules/launch.py | 4 +-
 python/fedml/cli/modules/login.py | 12 +++---
 python/fedml/cli/modules/logout.py | 4 +-
 python/fedml/cli/modules/run.py | 40 +++++++++----------
 python/fedml/cli/modules/storage.py | 16 ++++----
 python/fedml/cli/modules/train.py | 4 +-
 .../computing/scheduler/env/collect_env.py | 2 +-
 .../scheduler_core/account_manager.py | 2 +-
 .../scheduler/scheduler_entry/README.md | 4 +-
 .../scheduler/scheduler_entry/app_manager.py | 4 +-
 python/fedml/core/mlops/__init__.py | 8 ++--
 .../serving/templates/hf_template/config.yaml | 2 +-
 .../customized_job_example/README.md | 4 +-
 python/spotlight_prj/unitedllm/README.md | 2 +-
 45 files changed, 126 insertions(+), 126 deletions(-)

diff --git a/python/README.md b/python/README.md
index ad85023e6d..1e30f2b167 100644
--- a/python/README.md
+++ b/python/README.md
@@ -43,5 +43,5 @@ Other low-level APIs related to security and privacy are also supported. All alg
 **utils**: Common utilities shared by other modules.
-## About FedML, Inc.
-https://FedML.ai
+## About TensorOpera, Inc.
+https://tensoropera.ai diff --git a/python/examples/README.md b/python/examples/README.md index 45086c27cf..32831a63f3 100644 --- a/python/examples/README.md +++ b/python/examples/README.md @@ -2,14 +2,14 @@ # FEDML Examples (Including Prebuilt Jobs in Jobs Store) - `FedML/python/examples` -- examples for training, deployment, and federated learning - - `FedML/python/examples/launch` -- examples for FEDML®Launch - - `FedML/python/examples/serving` -- examples for FEDML®Deploy - - `FedML/python/examples/train` -- examples for FEDML®Train - - `FedML/python/examples/cross_cloud` -- examples for FEDML®Train cross-cloud distributed training + - `FedML/python/examples/launch` -- examples for TensorOpera®Launch + - `FedML/python/examples/serving` -- examples for TensorOpera®Deploy + - `FedML/python/examples/train` -- examples for TensorOpera®Train + - `FedML/python/examples/cross_cloud` -- examples for TensorOpera®Train cross-cloud distributed training - `FedML/python/examples/federate/prebuilt_jobs` -- examples for federated learning prebuilt jobs (FedCV, FedNLP, FedGraphNN, Healthcare, etc.) - `FedML/python/examples/federate/cross_silo` -- examples for cross-silo federated learning - `FedML/python/examples/federate/cross_device` -- examples for cross-device federated learning - `FedML/python/examples/federate/simulation` -- examples for federated learning simulation - - `FedML/python/examples/federate/security` -- examples for FEDML®Federate security related features - - `FedML/python/examples/federate/privacy` -- examples for FEDML®Federate privacy related features - - `FedML/python/examples/federate/federated_analytics` -- examples for FEDML®Federate federated analytics (FA) + - `FedML/python/examples/federate/security` -- examples for TensorOpera®Federate security related features + - `FedML/python/examples/federate/privacy` -- examples for TensorOpera®Federate privacy related features + - `FedML/python/examples/federate/federated_analytics` -- examples for TensorOpera®Federate federated analytics (FA) diff --git a/python/examples/deploy/complex_example/README.md b/python/examples/deploy/complex_example/README.md index 1f67f587fd..b7a03aeea6 100644 --- a/python/examples/deploy/complex_example/README.md +++ b/python/examples/deploy/complex_example/README.md @@ -16,7 +16,7 @@ Use -cf to indicate the configuration file. curl -XPOST localhost:2345/predict -d '{"text": "Hello"}' ``` -## Option 2: Deploy to the Cloud (Using fedml®launch platform) +## Option 2: Deploy to the Cloud (Using TensorOpera®launch platform) - Uncomment the following line in config.yaml For information about the configuration, please refer to fedml ® launch. diff --git a/python/examples/deploy/complex_example/config.yaml b/python/examples/deploy/complex_example/config.yaml index 037183a066..cd658aae33 100644 --- a/python/examples/deploy/complex_example/config.yaml +++ b/python/examples/deploy/complex_example/config.yaml @@ -15,7 +15,7 @@ environment_variables: LOCAL_RANK: "0" # If you do not have any GPU resource but want to serve the model -# Try FedML® Nexus AI Platform, and Uncomment the following lines. +# Try TensorOpera® Nexus AI Platform, and Uncomment the following lines. 
# ------------------------------------------------------------
 computing:
   minimum_num_gpus: 1 # minimum # of GPUs to provision
diff --git a/python/examples/deploy/mnist/README.md b/python/examples/deploy/mnist/README.md
index 11dd696234..b64b4bd70e 100644
--- a/python/examples/deploy/mnist/README.md
+++ b/python/examples/deploy/mnist/README.md
@@ -11,9 +11,9 @@ curl -XPOST localhost:2345/predict -d '{"arr":[$DATA]}'
 #For $DATA, please check the request_input_example, it is a 28*28=784 float array
 #Output:{"generated_text":"tensor([0.2333, 0.5296, 0.4350, 0.4537, 0.5424, 0.4583, 0.4803, 0.2862, 0.5507,\n 0.8683], grad_fn=)"}
 ```
-## Option 2: Deploy to the Cloud (Using fedml® launch platform)
+## Option 2: Deploy to the Cloud (Using TensorOpera® launch platform)
 Uncomment the following line in mnist.yaml,
-for infomation about the configuration, please refer to fedml® launch.
+for information about the configuration, please refer to TensorOpera® launch.
 ```yaml
 # computing:
 #   minimum_num_gpus: 1
diff --git a/python/examples/deploy/mnist/mnist.yaml b/python/examples/deploy/mnist/mnist.yaml
index fe419abb1c..cae8050674 100644
--- a/python/examples/deploy/mnist/mnist.yaml
+++ b/python/examples/deploy/mnist/mnist.yaml
@@ -5,7 +5,7 @@ data_cache_dir: ""
 bootstrap: ""
 # If you do not have any GPU resource but want to serve the model
-# Try FedML® Nexus AI Platform, and Uncomment the following lines.
+# Try TensorOpera® Nexus AI Platform, and Uncomment the following lines.
 # ------------------------------------------------------------
 computing:
   minimum_num_gpus: 1 # minimum # of GPUs to provision
diff --git a/python/examples/deploy/multi_service/README.md b/python/examples/deploy/multi_service/README.md
index 2b897d087a..59bd7429f3 100644
--- a/python/examples/deploy/multi_service/README.md
+++ b/python/examples/deploy/multi_service/README.md
@@ -15,7 +15,7 @@ fedml model create --name $model_name --config_file config.yaml
 ```
 ## On-premsie Deploy
-Register an account on FedML website: https://fedml.ai
+Register an account on TensorOpera website: https://tensoropera.ai
 You will have a user id and api key, which can be found in the profile page.
@@ -44,8 +44,8 @@ You will have a user id and api key, which can be found in the profile page.
   ```
 - Result
-  See the deployment result in https://fedml.ai
+  See the deployment result in https://tensoropera.ai
 - OPT2: Deploy - UI
-  Follow the instructions on https://fedml.ai
+  Follow the instructions on https://tensoropera.ai
diff --git a/python/examples/deploy/quick_start/README.md b/python/examples/deploy/quick_start/README.md
index 1f67f587fd..b7a03aeea6 100644
--- a/python/examples/deploy/quick_start/README.md
+++ b/python/examples/deploy/quick_start/README.md
@@ -16,7 +16,7 @@ Use -cf to indicate the configuration file.
 curl -XPOST localhost:2345/predict -d '{"text": "Hello"}'
 ```
-## Option 2: Deploy to the Cloud (Using fedml®launch platform)
+## Option 2: Deploy to the Cloud (Using TensorOpera®launch platform)
 - Uncomment the following line in config.yaml
 For information about the configuration, please refer to fedml ® launch.
diff --git a/python/examples/deploy/scalellm-multi-engine/README.md b/python/examples/deploy/scalellm-multi-engine/README.md index 4de6058c95..b65ad7dd5c 100644 --- a/python/examples/deploy/scalellm-multi-engine/README.md +++ b/python/examples/deploy/scalellm-multi-engine/README.md @@ -40,7 +40,7 @@ computing: #device_type: CPU # options: GPU, CPU, hybrid resource_type: A100-80G # e.g., A100-80G, # please check the resource type list by "fedml show-resource-type" - # or visiting URL: https://fedml.ai/accelerator_resource_type + # or visiting URL: https://tensoropera.ai/accelerator_resource_type ``` ```bash diff --git a/python/examples/deploy/scalellm/README.md b/python/examples/deploy/scalellm/README.md index 4de6058c95..b65ad7dd5c 100644 --- a/python/examples/deploy/scalellm/README.md +++ b/python/examples/deploy/scalellm/README.md @@ -40,7 +40,7 @@ computing: #device_type: CPU # options: GPU, CPU, hybrid resource_type: A100-80G # e.g., A100-80G, # please check the resource type list by "fedml show-resource-type" - # or visiting URL: https://fedml.ai/accelerator_resource_type + # or visiting URL: https://tensoropera.ai/accelerator_resource_type ``` ```bash diff --git a/python/examples/deploy/streaming_response/README.md b/python/examples/deploy/streaming_response/README.md index f91cda5278..b190b50dc7 100644 --- a/python/examples/deploy/streaming_response/README.md +++ b/python/examples/deploy/streaming_response/README.md @@ -16,7 +16,7 @@ Use -cf to indicate the configuration file. curl -XPOST localhost:2345/predict -d '{"text": "Hello"}' ``` -## Option 2: Deploy to the Cloud (Using fedml®launch platform) +## Option 2: Deploy to the Cloud (Using TensorOpera®launch platform) - Uncomment the following line in config.yaml For information about the configuration, please refer to fedml ® launch. diff --git a/python/examples/deploy/streaming_response/config.yaml b/python/examples/deploy/streaming_response/config.yaml index 83479068e6..1a18b9d85b 100644 --- a/python/examples/deploy/streaming_response/config.yaml +++ b/python/examples/deploy/streaming_response/config.yaml @@ -8,7 +8,7 @@ bootstrap: | echo "Bootstrap finished" # If you do not have any GPU resource but want to serve the model -# Try FedML® Nexus AI Platform, and Uncomment the following lines. +# Try TensorOpera® Nexus AI Platform, and Uncomment the following lines. # ------------------------------------------------------------ computing: minimum_num_gpus: 1 # minimum # of GPUs to provision diff --git a/python/examples/deploy/triton/README.md b/python/examples/deploy/triton/README.md index 4d861fb7ff..5430939d28 100644 --- a/python/examples/deploy/triton/README.md +++ b/python/examples/deploy/triton/README.md @@ -39,7 +39,7 @@ fedml model create --name $model_name --config_file config.yaml ``` ## On-premsie Deploy -Register an account on FedML website: https://fedml.ai +Register an account on TensorOpera website: https://tensoropera.ai You will have a user id and api key, which can be found in the profile page. @@ -68,8 +68,8 @@ You will have a user id and api key, which can be found in the profile page. 
 ```
 - Result
-  See the deployment result in https://fedml.ai
+  See the deployment result in https://tensoropera.ai
 - OPT2: Deploy - UI
-  Follow the instructions on https://fedml.ai
+  Follow the instructions on https://tensoropera.ai
diff --git a/python/examples/deploy/your_own_llm/README.md b/python/examples/deploy/your_own_llm/README.md
index fc7234293b..415db7fe92 100644
--- a/python/examples/deploy/your_own_llm/README.md
+++ b/python/examples/deploy/your_own_llm/README.md
@@ -9,9 +9,9 @@ fedml model deploy --name llm --local
 #INFO: Uvicorn running on http://0.0.0.0:2345 (Press CTRL+C to quit)
 curl -XPOST localhost:2345/predict -d '{"text": "Hello"}'
 ```
-## Option 2: Deploy to the Cloud (Using fedml®launch platform)
+## Option 2: Deploy to the Cloud (Using TensorOpera®launch platform)
 Uncomment the following line in llm.yaml,
-for infomation about the configuration, please refer to fedml®launch.
+for information about the configuration, please refer to TensorOpera®launch.
 ```yaml
 # computing:
 #   minimum_num_gpus: 1
diff --git a/python/examples/deploy/your_own_llm/llm.yaml b/python/examples/deploy/your_own_llm/llm.yaml
index 5e5e09730b..b3b3d5da15 100644
--- a/python/examples/deploy/your_own_llm/llm.yaml
+++ b/python/examples/deploy/your_own_llm/llm.yaml
@@ -11,7 +11,7 @@ bootstrap: |
   echo "Bootstrap finished"
 # If you do not have any GPU resource but want to serve the model
-# Try FedML® Nexus AI Platform, and Uncomment the following lines.
+# Try TensorOpera® Nexus AI Platform, and Uncomment the following lines.
 # ------------------------------------------------------------
 # computing:
 #   minimum_num_gpus: 1 # minimum # of GPUs to provision
diff --git a/python/examples/federate/cross_silo/grpc_fedavg_mnist_lr_example/grpc_docker_fedmlai/README.md b/python/examples/federate/cross_silo/grpc_fedavg_mnist_lr_example/grpc_docker_fedmlai/README.md
index 174309aa55..8c56622d06 100644
--- a/python/examples/federate/cross_silo/grpc_fedavg_mnist_lr_example/grpc_docker_fedmlai/README.md
+++ b/python/examples/federate/cross_silo/grpc_fedavg_mnist_lr_example/grpc_docker_fedmlai/README.md
@@ -1,6 +1,6 @@
 # Introduction
-In this working example, we will run 1 aggregation server and 2 clients on the same machine using Docker + gRPC and we will use the FEDML.ai platform to run the FL job.
+In this working example, we will run 1 aggregation server and 2 clients on the same machine using Docker + gRPC and we will use the TensorOpera.ai platform to run the FL job.
 # gRPC Configuration File
 The content of the gRPC configuration file is as follows:
@@ -47,5 +47,5 @@ source /fedml/bin/activate
 fedml login -c
 ```
-Then we only need to compile our job and submit to our dockerb-based cluster as it is also discussed in detail in the official FEDML documentation: https://fedml.ai/octopus/userGuides
+Then we only need to compile our job and submit it to our docker-based cluster, as is also discussed in detail in the official TensorOpera documentation: https://tensoropera.ai/octopus/userGuides
diff --git a/python/examples/launch/README.md b/python/examples/launch/README.md
index 1ded267276..fc79cbfe26 100644
--- a/python/examples/launch/README.md
+++ b/python/examples/launch/README.md
@@ -132,7 +132,7 @@ You just need to customize the following config items.
 3. `bootstrap`, It is the bootstrap shell command which will be executed before running entry commands.
-Then you can use the following example CLI to launch the job at FedML® Nexus AI Platform +Then you can use the following example CLI to launch the job at TensorOpera® Nexus AI Platform (Replace $YourApiKey with your own account API key from open.fedml.ai) Example: @@ -142,7 +142,7 @@ fedml launch hello_job.yaml After the launch CLI is executed, the output is as follows. Here you may open the job url to confirm and actually start the job. ``` -Submitting your job to FedML® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] +Submitting your job to TensorOpera® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] Searched and matched the following GPU resource for your job: +-----------+-------------------+---------+------------+-------------------------+---------+-------+----------+ diff --git a/python/examples/launch/federate_build_package/README.md b/python/examples/launch/federate_build_package/README.md index c0d3356150..325258407e 100644 --- a/python/examples/launch/federate_build_package/README.md +++ b/python/examples/launch/federate_build_package/README.md @@ -3,7 +3,7 @@ ``` Usage: fedml federate build [OPTIONS] [YAML_FILE] - Build federate packages for the FedML® Nexus AI Platform. + Build federate packages for the TensorOpera® Nexus AI Platform. Options: -h, --help Show this message and exit. diff --git a/python/examples/launch/train_build_package/README.md b/python/examples/launch/train_build_package/README.md index 03c8dbe71b..f0f1dff857 100644 --- a/python/examples/launch/train_build_package/README.md +++ b/python/examples/launch/train_build_package/README.md @@ -3,7 +3,7 @@ ``` Usage: fedml train build [OPTIONS] [YAML_FILE] - Build training packages for the FedML® Nexus AI Platform. + Build training packages for the TensorOpera® Nexus AI Platform. Options: -h, --help Show this message and exit. diff --git a/python/examples/train/README.md b/python/examples/train/README.md index 9a6853d740..0e301c86b2 100644 --- a/python/examples/train/README.md +++ b/python/examples/train/README.md @@ -1 +1 @@ -# Examples (Prebuilt Jobs) for FEDML®Train \ No newline at end of file +# Examples (Prebuilt Jobs) for TensorOpera®Train \ No newline at end of file diff --git a/python/examples/train/llm_train/job.yaml b/python/examples/train/llm_train/job.yaml index d1ba08ed4c..a9e81c91f7 100644 --- a/python/examples/train/llm_train/job.yaml +++ b/python/examples/train/llm_train/job.yaml @@ -44,4 +44,4 @@ computing: allow_cross_cloud_resources: false # true, false device_type: GPU # options: GPU, CPU, hybrid - resource_type: A100-80G # e.g., A100-80G, please check the resource type list by "fedml show-resource-type" or visiting URL: https://fedml.ai/accelerator_resource_type + resource_type: A100-80G # e.g., A100-80G, please check the resource type list by "fedml show-resource-type" or visiting URL: https://tensoropera.ai/accelerator_resource_type diff --git a/python/fedml/api/constants.py b/python/fedml/api/constants.py index b284d7a056..313da61798 100755 --- a/python/fedml/api/constants.py +++ b/python/fedml/api/constants.py @@ -18,7 +18,7 @@ class ApiConstants: RESOURCE_MATCHED_STATUS_BIND_CREDIT_CARD_FIRST = \ """ - Before we can start a job, please add a credit card to your FEDML account at https://fedml.ai/billing/home. 
+    Before we can start a job, please add a credit card to your FEDML account at https://tensoropera.ai/billing.
     Once it's added, please try to run the launch command again
     """
diff --git a/python/fedml/api/modules/build.py b/python/fedml/api/modules/build.py
index 7d23bc02ed..9299944bb0 100644
--- a/python/fedml/api/modules/build.py
+++ b/python/fedml/api/modules/build.py
@@ -22,7 +22,7 @@ def build(platform, type, source_folder, entry_point, config_folder, dest_folder
     if type == "client" or type == "server":
         click.echo(
-            "Now, you are building the fedml packages which will be used in the FedML® Nexus AI Platform "
+            "Now, you are building the fedml packages which will be used in the TensorOpera® Nexus AI Platform "
             "platform."
         )
         click.echo(
@@ -34,7 +34,7 @@ def build(platform, type, source_folder, entry_point, config_folder, dest_folder
             + "."
         )
         click.echo(
-            "Then you may upload the packages on the configuration page in the FedML® Nexus AI Platform to "
+            "Then you may upload the packages on the configuration page in the TensorOpera® Nexus AI Platform to "
             "start your training flow."
         )
         click.echo("Building...")
diff --git a/python/fedml/api/modules/device.py b/python/fedml/api/modules/device.py
index 27b2d0d198..1b578aa903 100644
--- a/python/fedml/api/modules/device.py
+++ b/python/fedml/api/modules/device.py
@@ -78,7 +78,7 @@ def _bind(
     else:
         docker_install_url = "https://docs.docker.com/engine/install/"
         docker_config_text = " Moreover, you need to config the docker engine to run as a non-root user. Here is the docs. https://docs.docker.com/engine/install/linux-postinstall/"
-    print("\n Welcome to FedML.ai! \n Start to login the current device to the FedML® Nexus AI Platform\n")
+    print("\n Welcome to TensorOpera.ai! \n Start to login the current device to the TensorOpera® Nexus AI Platform\n")
     print(" If you want to deploy models into this computer, you need to install the docker engine to serve your models.")
     print(f" Here is the docs for installation docker engine. {docker_install_url}")
     if docker_config_text is not None:
@@ -137,7 +137,7 @@ def _bind(
     client_daemon_cmd = "client_daemon.py"
     client_daemon_pids = RunProcessUtils.get_pid_from_cmd_line(client_daemon_cmd)
     if client_daemon_pids is not None and len(client_daemon_pids) > 0:
-        print("Your computer has been logged into the FedML® Nexus AI Platform. "
+        print("Your computer has been logged into the TensorOpera® Nexus AI Platform. "
              "Before logging in again, please log out of the previous login using the command "
              "'fedml logout -c'. If it still doesn't work, run the command 'fedml logout -c' "
              "using your computer's administrator account.")
@@ -193,7 +193,7 @@ def _bind(
     server_daemon_cmd = "server_daemon.py"
     server_daemon_pids = RunProcessUtils.get_pid_from_cmd_line(server_daemon_cmd)
    if server_daemon_pids is not None and len(server_daemon_pids) > 0:
-        print("Your computer has been logged into the FedML® Nexus AI Platform. "
+        print("Your computer has been logged into the TensorOpera® Nexus AI Platform. "
              "Before logging in again, please log out of the previous login using the command "
              "'fedml logout -s'. If it still doesn't work, run the command 'fedml logout -s' "
              "using your computer's administrator account.")
diff --git a/python/fedml/api/modules/model.py b/python/fedml/api/modules/model.py
index a02e674f47..3b4a7afd0b 100644
--- a/python/fedml/api/modules/model.py
+++ b/python/fedml/api/modules/model.py
@@ -252,9 +252,9 @@ def deploy(name: str, endpoint_name: str = "", endpoint_id: str = None, local: b
         return FedMLModelCards.get_instance().serve_model_on_premise(
             name, endpoint_name, master_ids, worker_ids, use_remote, endpoint_id)
     else:
-        # FedML® Launch deploy mode
+        # TensorOpera® Launch deploy mode
         click.echo("Warning: You did not indicate the master device id and worker device id\n\
-                   Do you want to use FedML® Nexus AI Platform to find GPU Resources deploy your model?")
+                   Do you want to use TensorOpera® Nexus AI Platform to find GPU resources to deploy your model?")
         answer = click.prompt("Please input your answer: (y/n)")
         if answer == "y" or answer == "Y":
             api_key = get_api_key()
diff --git a/python/fedml/api/modules/utils.py b/python/fedml/api/modules/utils.py
index 76801ffe81..abbea71f9f 100644
--- a/python/fedml/api/modules/utils.py
+++ b/python/fedml/api/modules/utils.py
@@ -21,7 +21,7 @@ def _check_api_key(api_key=None):
     if api_key is None or api_key == "":
         saved_api_key = get_api_key()
         if saved_api_key is None or saved_api_key == "":
-            api_key = click.prompt("FedML® Launch API Key is not set yet, please input your API key")
+            api_key = click.prompt("TensorOpera® Launch API Key is not set yet, please input your API key")
         else:
             api_key = saved_api_key
diff --git a/python/fedml/cli/README.md b/python/fedml/cli/README.md
index f94200f258..425bf0c5de 100644
--- a/python/fedml/cli/README.md
+++ b/python/fedml/cli/README.md
@@ -27,7 +27,7 @@ fedml build \
   --ignore __pycache__,*.git
 ```
-## 2. Login into the FedML® Nexus AI Platform (fedml.ai)
+## 2. Log in to the TensorOpera® Nexus AI Platform (fedml.ai)
 login as general computing device with local pip mode:
 ```
 fedml login $YourApiKey
@@ -38,7 +38,7 @@ login as federated-learning server with local pip mode:
 fedml login $YourApiKey -s
 ```
-### 2.1. Examples for Logining into the FedML® Nexus AI Platform (fedml.ai)
+### 2.1. Examples for Logging in to the TensorOpera® Nexus AI Platform (fedml.ai)
 ```
 fedml login 113343dad999933
 ```
@@ -48,7 +48,7 @@ fedml login 113343dad999933
 fedml login 113343dad999933 -s
 ```
-## 3. Logout from the FedML FedML® Nexus AI Platform (fedml.ai)
+## 3. Logout from the TensorOpera® Nexus AI Platform (fedml.ai)
 logout from computing device with local pip mode:
 ```
 fedml logout
@@ -81,17 +81,17 @@ fedml diagnosis --open --s3 --mqtt
 ```
 ## 7. Jobs
-Start a job at FedML® Nexus AI Platform
+Start a job at TensorOpera® Nexus AI Platform
 ```
 Usage: fedml jobs start [OPTIONS]
-Start a job at FedML® Nexus AI Platform
+Start a job at TensorOpera® Nexus AI Platform
 Options:
 -pf, --platform TEXT The platform name at the MLOps platform(options: octopus, parrot, spider, beehive).
--prj, --project_name TEXT The project name at FedML® Nexus AI Platform
--app, --application_name TEXT Application name in the My Application list at FedML® Nexus AI Platform
--jn, --job_name TEXT The job name at FedML® Nexus AI Platform If you don't specify here, the job name from the job yaml file will be used.
+-prj, --project_name TEXT The project name at TensorOpera® Nexus AI Platform +-app, --application_name TEXT Application name in the My Application list at TensorOpera® Nexus AI Platform +-jn, --job_name TEXT The job name at TensorOpera® Nexus AI Platform If you don't specify here, the job name from the job yaml file will be used. -ds, --devices_server TEXT The server to run the launching job, for the launch platform, we do not need to set this option. -de, --devices_edges TEXT The edge devices to run the launching job. Separated with ',', e.g. 705,704. For the launch platform, we do not need to set this option. -u, --user TEXT user id or api key. @@ -238,7 +238,7 @@ You just need to customize the following config items. 3. `bootstrap`, It is the bootstrap shell command which will be executed before running entry commands. -Then you can use the following example CLI to launch the job at FedML® Nexus AI Platform +Then you can use the following example CLI to launch the job at TensorOpera® Nexus AI Platform (Replace $YourApiKey with your own account API key from open.fedml.ai) Example: @@ -248,7 +248,7 @@ fedml launch hello_job.yaml After the launch CLI is executed, the output is as follows. Here you may open the job url to confirm and actually start the job. ``` -Submitting your job to FedML® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] +Submitting your job to TensorOpera® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] Searched and matched the following GPU resource for your job: +-----------+-------------------+---------+------------+-------------------------+---------+-------+----------+ diff --git a/python/fedml/cli/modules/build.py b/python/fedml/cli/modules/build.py index 4674a88e9e..2fd68492fd 100644 --- a/python/fedml/cli/modules/build.py +++ b/python/fedml/cli/modules/build.py @@ -3,14 +3,14 @@ import fedml.api -@click.command("build", help="Build packages for the FedML® Nexus AI Platform") +@click.command("build", help="Build packages for the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.option( "--platform", "-pf", type=str, default="octopus", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch).", + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch).", ) @click.option( "--type", diff --git a/python/fedml/cli/modules/cluster.py b/python/fedml/cli/modules/cluster.py index 95822e1c18..47617b1f12 100644 --- a/python/fedml/cli/modules/cluster.py +++ b/python/fedml/cli/modules/cluster.py @@ -7,10 +7,10 @@ # Message strings constants confirmation_message: str = "Are you sure you want to {} these clusters?" failure_message: str = ("Failed to {} the clusters, please check the arguments are valid and your network " - "connection and make sure be able to access the FedML® Nexus AI Platform.") -version_help: str = "specify version of FedML® Nexus AI Platform. It should be dev, test or release" + "connection and make sure be able to access the TensorOpera® AI Platform.") +version_help: str = "specify version of TensorOpera® AI Platform. It should be dev, test or release" api_key_help: str = "user api key." 
-cluster_action_help: str = "{} clusters from FedML® Nexus AI Platform" +cluster_action_help: str = "{} clusters from TensorOpera® AI Platform" @click.group("cluster") @@ -27,7 +27,7 @@ ) def fedml_clusters(api_key, version): """ - Manage clusters on FedML® Nexus AI Platform + Manage clusters on TensorOpera® AI Platform """ pass diff --git a/python/fedml/cli/modules/device.py b/python/fedml/cli/modules/device.py index 5c4804fa69..b21b3d09d2 100644 --- a/python/fedml/cli/modules/device.py +++ b/python/fedml/cli/modules/device.py @@ -7,12 +7,12 @@ @click.help_option("--help", "-h") def fedml_device(): """ - Bind/unbind devices to the FedML® Nexus AI Platform + Bind/unbind devices to the TensorOpera® AI Platform """ pass -@fedml_device.command("bind", help="Bind to the FedML® Nexus AI Platform") +@fedml_device.command("bind", help="Bind to the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.argument("api_key", nargs=-1) @click.option( @@ -20,13 +20,13 @@ def fedml_device(): "-v", type=str, default="release", - help="Bind to which version of FedML® Nexus AI Platform. It should be dev, test or release.", + help="Bind to which version of TensorOpera® AI Platform. It should be dev, test or release.", ) @click.option( "--compute_node", "-c", default=None, is_flag=True, help="Bind as the general compute node in FEDML Nexus AI compute network. This is enabled by default. " - "After binding, you can view and manage the device in the FEDML® Nexus AI Platform: https://fedml.ai/compute. " - "It can be grouped as a cluster and then you can use FEDML®Launch to schedule any job (training, deployment, federated learning) to it. " + "After binding, you can view and manage the device in the TensorOpera® AI Platform: https://tensoropera.ai/gpu/local?label=Private. " + "It can be grouped as a cluster and then you can use TensorOpera®Launch to schedule any job (training, deployment, federated learning) to it. " "You can not specify the option -c and -s simultaneously.", ) @click.option( @@ -36,7 +36,7 @@ def fedml_device(): ) @click.option( "--provider", "-p", default=None, is_flag=True, - help="Bind as the FedML compute node (GPU) provider (supplier). This is used by Nexus AI Platform - Share and Earn: https://fedml.ai/gpu-supplier. You can share your GPUs in this way and earn money. " + help="Bind as the FedML compute node (GPU) provider (supplier). This is used by Nexus AI Platform - Share and Earn: https://tensoropera.ai/share-and-earn. You can share your GPUs in this way and earn money. " "You can specify the option -p and -c simultaneously (can be used as provider for others as well compute node for your own jobs), but you can not specify -p and -s simultaneously.", ) def fedml_device_bind(api_key, version, compute_node, server, provider): @@ -47,14 +47,14 @@ def fedml_device_bind(api_key, version, compute_node, server, provider): fedml.api.device_bind(api_key, compute_node, server, provider) -@fedml_device.command("unbind", help="Unbind from the FedML® Nexus AI Platform") +@fedml_device.command("unbind", help="Unbind from the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.option( "--version", "-v", type=str, default="release", - help="Unbind which backend environment version of FedML® Nexus AI Platform. It should be dev, test, or release.", + help="Unbind which backend environment version of TensorOpera® AI Platform. 
It should be dev, test, or release.", ) @click.option( "--compute_node", "-c", default=None, is_flag=True, help="Unbind from the FedML general compute node.", @@ -75,7 +75,7 @@ def fedml_device_unbind(version, computing, server): "-v", type=str, default="release", - help="show resource type at which version of FedML® Nexus AI Platform. It should be dev, test or release", + help="show resource type at which version of TensorOpera® AI Platform. It should be dev, test or release", ) def resource_type(version): fedml.set_env_version(version) diff --git a/python/fedml/cli/modules/federate.py b/python/fedml/cli/modules/federate.py index 6f26b2bea8..ff4fd6c791 100644 --- a/python/fedml/cli/modules/federate.py +++ b/python/fedml/cli/modules/federate.py @@ -7,12 +7,12 @@ @click.help_option("--help", "-h") def fedml_federate(): """ - Manage federated learning resources on FedML® Nexus AI Platform + Manage federated learning resources on TensorOpera® AI Platform """ pass -@fedml_federate.command("build", help="Build federate packages for the FedML® Nexus AI Platform.") +@fedml_federate.command("build", help="Build federate packages for the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--dest_folder", diff --git a/python/fedml/cli/modules/launch.py b/python/fedml/cli/modules/launch.py index 16450e08a9..c14bbac353 100644 --- a/python/fedml/cli/modules/launch.py +++ b/python/fedml/cli/modules/launch.py @@ -13,7 +13,7 @@ from fedml.computing.scheduler.scheduler_entry.run_manager import FedMLRunStartedModel, FeatureEntryPoint -@click.command("launch", help="Launch job at the FedML® Nexus AI Platform") +@click.command("launch", help="Launch job at the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.option( "--api_key", "-k", type=str, help="user api key.", @@ -56,7 +56,7 @@ @click.argument("yaml_file", nargs=-1) def fedml_launch(yaml_file, cluster, version, api_key, group, local_on_premise_platform, local_on_premise_platform_port): """ - Manage resources on the FedML® Nexus AI Platform. + Manage resources on the TensorOpera® AI Platform. """ set_env_version(version) fedml.set_local_on_premise_platform_host(local_on_premise_platform) diff --git a/python/fedml/cli/modules/login.py b/python/fedml/cli/modules/login.py index b76346ec1b..5e77910cbb 100644 --- a/python/fedml/cli/modules/login.py +++ b/python/fedml/cli/modules/login.py @@ -10,7 +10,7 @@ from fedml.computing.scheduler.scheduler_core.general_constants import MarketplaceType -@click.command("login", help="Login the FedML® Nexus AI Platform") +@click.command("login", help="Login the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.argument("api_key", nargs=-1) @click.option( @@ -18,13 +18,13 @@ "-v", type=str, default="release", - help="Login which backend environment version of FedML® Nexus AI Platform. It should be dev, test, or release.", + help="Login which backend environment version of TensorOpera® AI Platform. It should be dev, test, or release.", ) @click.option( "--compute_node", "-c", default=None, is_flag=True, help="Login as the general compute node in FEDML Nexus AI compute network. This is enabled by default. " - "After login, you can view and manage the device in the FEDML® Nexus AI Platform: https://fedml.ai/compute. " - "It can be grouped as a cluster and then you can use FEDML®Launch to schedule any job (training, deployment, federated learning) to it. 
" + "After login, you can view and manage the device in the TensorOpera® AI Platform: https://tensoropera.ai/gpu/local?label=Private. " + "It can be grouped as a cluster and then you can use TensorOpera®Launch to schedule any job (training, deployment, federated learning) to it. " "You can not specify the option -c and -s simultaneously.", ) @click.option( @@ -34,7 +34,7 @@ ) @click.option( "--provider", "-p", default=None, is_flag=True, - help="Login as the FedML compute node (GPU) provider (supplier). This is used by Nexus AI Platform - Share and Earn: https://fedml.ai/gpu-supplier. You can share your GPUs in this way and earn money. " + help="Login as the FedML compute node (GPU) provider (supplier). This is used by Nexus AI Platform - Share and Earn: https://tensoropera.ai/share-and-earn. You can share your GPUs in this way and earn money. " "You can specify the option -p and -c simultaneously (can be used as provider for others as well compute node for your own jobs), but you can not specify -p and -s simultaneously.", ) @click.option( @@ -94,7 +94,7 @@ "for one hour is $1.5 per GPU, then you would input 1.5. Do not multiply this number by the total number of " "GPUs in the node, as the system will automatically detect the number of GPUs and include it in the cost " "calculation. Default is 0.0." - "Optionally, you can also set this price later through supplier page on the FEDML® Nexus AI Platform." + "Optionally, you can also set this price later through supplier page on the TensorOpera® AI Platform." ) @click.option( "--name", diff --git a/python/fedml/cli/modules/logout.py b/python/fedml/cli/modules/logout.py index 94a51b395a..ab2abfde95 100644 --- a/python/fedml/cli/modules/logout.py +++ b/python/fedml/cli/modules/logout.py @@ -3,7 +3,7 @@ import fedml.api -@click.command("logout", help="Logout from the FedML® Nexus AI Platform") +@click.command("logout", help="Logout from the TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.option( "--computing", "-c", default=None, is_flag=True, help="Logout from the FedML general compute node.", @@ -16,7 +16,7 @@ "-v", type=str, default="release", - help="Logout which backend environment version of FedML® Nexus AI Platform. It should be dev, test, or release.", + help="Logout which backend environment version of TensorOpera® AI Platform. It should be dev, test, or release.", ) def fedml_logout(computing, server, version): fedml.set_env_version(version) diff --git a/python/fedml/cli/modules/run.py b/python/fedml/cli/modules/run.py index f2c24b445a..a2c479897b 100644 --- a/python/fedml/cli/modules/run.py +++ b/python/fedml/cli/modules/run.py @@ -15,24 +15,24 @@ "-v", type=str, default="release", - help="version of FedML® Nexus AI Platform. It should be dev, test or release", + help="version of TensorOpera® AI Platform. It should be dev, test or release", ) @click.option( "--platform", "-pf", type=str, default="falcon", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch," + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch," "default is falcon).", ) def fedml_run(api_key, version, platform): """ - Manage runs on the FedML® Nexus AI Platform. + Manage runs on the TensorOpera® AI Platform. 
""" pass -@fedml_run.command("stop", help="Stop a run from the FedML® Nexus AI Platform.") +@fedml_run.command("stop", help="Stop a run from the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--run_id", @@ -49,14 +49,14 @@ def fedml_run(api_key, version, platform): "-v", type=str, default="release", - help="stop a run at which version of FedML® Nexus AI Platform. It should be dev, test or release", + help="stop a run at which version of TensorOpera® AI Platform. It should be dev, test or release", ) @click.option( "--platform", "-pf", type=str, default="falcon", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " "default is falcon).", ) def stop_run(platform, run_id, api_key, version): @@ -68,14 +68,14 @@ def stop_run(platform, run_id, api_key, version): click.echo(f"Failed to stop Run {run_id}. Please check if the run id is valid.") -@fedml_run.command("list", help="List runs from the FedML® Nexus AI Platform.") +@fedml_run.command("list", help="List runs from the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--platform", "-pf", type=str, default="falcon", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " "default is falcon).", ) @click.option( @@ -83,14 +83,14 @@ def stop_run(platform, run_id, api_key, version): "-r", type=str, default="", - help="Run name at the FedML® Nexus AI Platform.", + help="Run name at the TensorOpera® AI Platform.", ) @click.option( "--run_id", "-rid", type=str, default="", - help="Run id at the FedML® Nexus AI Platform.", + help="Run id at the TensorOpera® AI Platform.", ) @click.option( "--api_key", "-k", type=str, help="user api key.", @@ -100,7 +100,7 @@ def stop_run(platform, run_id, api_key, version): "-v", type=str, default="release", - help="list runs at which version of FedML® Nexus AI Platform. It should be dev, test or release", + help="list runs at which version of TensorOpera® AI Platform. 
It should be dev, test or release", ) def list_runs(platform, run_name, run_id, api_key, version): fedml.set_env_version(version) @@ -109,14 +109,14 @@ def list_runs(platform, run_name, run_id, api_key, version): _print_run_table(run_list_obj) -@fedml_run.command("status", help="Get status of run from the FedML® Nexus AI Platform.") +@fedml_run.command("status", help="Get status of run from the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--platform", "-pf", type=str, default="falcon", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " "default is falcon).", ) @click.option( @@ -124,14 +124,14 @@ def list_runs(platform, run_name, run_id, api_key, version): "-r", type=str, default=None, - help="Run name at the FedML® Nexus AI Platform.", + help="Run name at the TensorOpera® AI Platform.", ) @click.option( "--run_id", "-rid", type=str, default=None, - help="Run id at the FedML® Nexus AI Platform.", + help="Run id at the TensorOpera® AI Platform.", ) @click.option( "--api_key", "-k", type=str, help="user api key.", @@ -141,7 +141,7 @@ def list_runs(platform, run_name, run_id, api_key, version): "-v", type=str, default="release", - help="get status of run at which version of FedML® Nexus AI Platform. It should be dev, test or release", + help="get status of run at which version of TensorOpera® AI Platform. It should be dev, test or release", ) def status(platform, run_name, run_id, api_key, version): fedml.set_env_version(version) @@ -153,14 +153,14 @@ def status(platform, run_name, run_id, api_key, version): _print_run_table(run_list_obj) -@fedml_run.command("logs", help="Get logs of run from the FedML® Nexus AI Platform.") +@fedml_run.command("logs", help="Get logs of run from the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--platform", "-pf", type=str, default="falcon", - help="The platform name at the FedML® Nexus AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " + help="The platform name at the TensorOpera® AI Platform (options: octopus, parrot, spider, beehive, falcon, launch, " "default is falcon).", ) @click.option( @@ -168,7 +168,7 @@ def status(platform, run_name, run_id, api_key, version): "-rid", type=str, default=None, - help="Run id at the FedML® Nexus AI Platform.", + help="Run id at the TensorOpera® AI Platform.", ) @click.option( "--api_key", "-k", type=str, help="user api key.", @@ -178,7 +178,7 @@ def status(platform, run_name, run_id, api_key, version): "-v", type=str, default="release", - help="get logs of run at which version of FedML® Nexus AI Platform. It should be dev, test or release", + help="get logs of run at which version of TensorOpera® AI Platform. It should be dev, test or release", ) @click.option( "--page_num", diff --git a/python/fedml/cli/modules/storage.py b/python/fedml/cli/modules/storage.py index 7e060fc12e..8b75075289 100644 --- a/python/fedml/cli/modules/storage.py +++ b/python/fedml/cli/modules/storage.py @@ -12,7 +12,7 @@ from fedml.api.fedml_response import ResponseCode # Message strings constants -version_help: str = "specify version of FedML® Nexus AI Platform. It should be dev, test or release" +version_help: str = "specify version of TensorOpera® AI Platform. It should be dev, test or release" api_key_help: str = "user api key." 
@@ -31,7 +31,7 @@ ) def fedml_storage(api_key, version): """ - Manage storage on FedML® Nexus AI Platform + Manage storage on TensorOpera® AI Platform """ pass @@ -43,7 +43,7 @@ def validate_argument(ctx, param, value): return value -@fedml_storage.command("upload", help="Upload data on FedML® Nexus AI Platform") +@fedml_storage.command("upload", help="Upload data on TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.argument("data_path", nargs=1, callback=validate_argument) @click.option("--name", "-n", type=str, help="Name your data to store. If not provided, the name will be the same as " @@ -78,7 +78,7 @@ def upload(data_path: str, name: str, user_metadata: str, description: str, vers click.echo(f"Failed to upload data. Error message: {response.message}") -@fedml_storage.command("list", help="List data stored on FedML® Nexus AI Platform") +@fedml_storage.command("list", help="List data stored on TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.option( "--api_key", "-k", type=str, help=api_key_help, @@ -108,7 +108,7 @@ def list_data(version, api_key): f"Error message: {response.message}") -@fedml_storage.command("get-user-metadata", help="Get user-defined metadata of data object stored on FedML® Nexus AI " +@fedml_storage.command("get-user-metadata", help="Get user-defined metadata of data object stored on TensorOpera® AI " "Platform") @click.help_option("--help", "-h") @click.argument("data_name", nargs=1, callback=validate_argument) @@ -136,7 +136,7 @@ def get_user_metadata(data_name, version, api_key): click.echo(f"Failed to fetch user-metadata for {data_name}. Error message: {response.message}") -@fedml_storage.command("get-metadata", help="Get metadata of data object stored on FedML® Nexus AI Platform") +@fedml_storage.command("get-metadata", help="Get metadata of data object stored on TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.argument("data_name", nargs=1, callback=validate_argument) @click.option( @@ -167,7 +167,7 @@ def get_metadata(data_name, version, api_key): click.echo(f"Fetching metadata failed. Error message: {response.message}") -@fedml_storage.command("download", help="Download data stored on FedML® Nexus AI Platform") +@fedml_storage.command("download", help="Download data stored on TensorOpera® AI Platform") @click.help_option("--help", "-h") @click.argument("data_name", nargs=1, callback=validate_argument) @click.option("--dest_path", "-d", default=None, type=str, help="Destination path to download data. By default, " @@ -194,7 +194,7 @@ def download(data_name, dest_path, version, api_key, service): click.echo(f"Failed to download data {data_name}. 
Error message: {response.message}") -@fedml_storage.command("delete", help="Delete data stored on FedML® Nexus AI Platform") +@fedml_storage.command("delete", help="Delete data stored on TensorOpera® AI Platform") @click.argument("data_name", nargs=1, callback=validate_argument) @click.help_option("--help", "-h") @click.option( diff --git a/python/fedml/cli/modules/train.py b/python/fedml/cli/modules/train.py index b4c36d1663..ae9c5fcbb1 100644 --- a/python/fedml/cli/modules/train.py +++ b/python/fedml/cli/modules/train.py @@ -7,12 +7,12 @@ @click.help_option("--help", "-h") def fedml_train(): """ - Manage training resources on FedML® Nexus AI Platform + Manage training resources on TensorOpera® AI Platform """ pass -@fedml_train.command("build", help="Build training packages for the FedML® Nexus AI Platform.") +@fedml_train.command("build", help="Build training packages for the TensorOpera® AI Platform.") @click.help_option("--help", "-h") @click.option( "--dest_folder", diff --git a/python/fedml/computing/scheduler/env/collect_env.py b/python/fedml/computing/scheduler/env/collect_env.py index da4d54e7a0..39654eac6c 100644 --- a/python/fedml/computing/scheduler/env/collect_env.py +++ b/python/fedml/computing/scheduler/env/collect_env.py @@ -9,7 +9,7 @@ def collect_env(): - print("\n======== FedML (https://fedml.ai) ========") + print("\n======== FedML (https://tensoropera.ai) ========") print("FedML version: " + str(fedml.__version__)) env_version = fedml.get_env_version() print("FedML ENV version: " + str(env_version)) diff --git a/python/fedml/computing/scheduler/scheduler_core/account_manager.py b/python/fedml/computing/scheduler/scheduler_core/account_manager.py index 85d76d5973..4b6a628b43 100755 --- a/python/fedml/computing/scheduler/scheduler_core/account_manager.py +++ b/python/fedml/computing/scheduler/scheduler_core/account_manager.py @@ -84,7 +84,7 @@ def login(self, user_id, api_key="", device_id=None, os_name=None, role=None, ru print("Please check whether your network is normal!") return None - # Bind account id to FedML® Nexus AI Platform + # Bind account id to TensorOpera® Nexus AI Platform register_try_count = 0 edge_id = -1 user_name = None diff --git a/python/fedml/computing/scheduler/scheduler_entry/README.md b/python/fedml/computing/scheduler/scheduler_entry/README.md index 0d1da81950..41d32ff399 100644 --- a/python/fedml/computing/scheduler/scheduler_entry/README.md +++ b/python/fedml/computing/scheduler/scheduler_entry/README.md @@ -132,7 +132,7 @@ You just need to customize the following config items. 3. `bootstrap`, It is the bootstrap shell command which will be executed before running entry commands. -Then you can use the following example CLI to launch the job at FedML® Nexus AI Platform +Then you can use the following example CLI to launch the job at TensorOpera® Nexus AI Platform (Replace $YourApiKey with your own account API key from open.fedml.ai) Example: @@ -142,7 +142,7 @@ fedml launch hello_job.yaml After the launch CLI is executed, the output is as follows. Here you may open the job url to confirm and actually start the job. 
``` -Submitting your job to FedML® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] +Submitting your job to TensorOpera® Nexus AI Platform: 100%|████████████████████████████████████████████████████████████████████████████████████████| 6.07k/6.07k [00:01<00:00, 4.94kB/s] Searched and matched the following GPU resource for your job: +-----------+-------------------+---------+------------+-------------------------+---------+-------+----------+ diff --git a/python/fedml/computing/scheduler/scheduler_entry/app_manager.py b/python/fedml/computing/scheduler/scheduler_entry/app_manager.py index 91b5ff64cc..267db08901 100755 --- a/python/fedml/computing/scheduler/scheduler_entry/app_manager.py +++ b/python/fedml/computing/scheduler/scheduler_entry/app_manager.py @@ -278,7 +278,7 @@ def push_app_package_to_s3(self, app_name, app_package_path): app_storage_url = s3_storage.upload_file_with_progress(app_package_path, app_dst_key, out_progress_to_err=True, progress_desc="Submitting your job to " - "FedML® Nexus AI Platform") + "TensorOpera® Nexus AI Platform") return app_storage_url def pull_app_package_from_s3(self, model_storage_url, model_name): @@ -315,7 +315,7 @@ def push_model_to_s3(self, model_name, model_zip_path): return FedMLModelCards.get_instance().push_model_to_s3( model_name, model_zip_path, "FedMLLaunchServe", show_progress=False, - progress_desc="Submitting your job to FedML® Nexus AI Platform") + progress_desc="Submitting your job to TensorOpera® Nexus AI Platform") def check_model_package(self, workspace): model_config_file = os.path.join( diff --git a/python/fedml/core/mlops/__init__.py b/python/fedml/core/mlops/__init__.py index 148427fe1f..4d60534547 100644 --- a/python/fedml/core/mlops/__init__.py +++ b/python/fedml/core/mlops/__init__.py @@ -107,7 +107,7 @@ def init(args, should_init_logs=True): return else: if hasattr(args, "simulator_daemon"): - # Bind local device as simulation device on FedML® Nexus AI Platform + # Bind local device as simulation device on TensorOpera® Nexus AI Platform setattr(args, "using_mlops", True) setattr(args, "rank", 1) MLOpsStore.mlops_bind_result = bind_simulation_device(args, args.user) @@ -125,7 +125,7 @@ def init(args, should_init_logs=True): if project_name is None or api_key is None: raise Exception("Please check mlops_project_name and mlops_api_key params.") - # Bind local device as simulation device on FedML® Nexus AI Platform + # Bind local device as simulation device on TensorOpera® Nexus AI Platform setattr(args, "using_mlops", True) setattr(args, "rank", 1) MLOpsStore.mlops_bind_result = bind_simulation_device(args, api_key, args.config_version) @@ -753,7 +753,7 @@ def push_artifact_to_s3(artifact: fedml.mlops.Artifact, version="release", show_ show_progress=show_progress, out_progress_to_err=True, progress_desc="Submitting your artifact to " - "FedML® Nexus AI Platform") + "TensorOpera® Nexus AI Platform") artifact_storage_url = str(artifact_storage_url).split("?")[0] except Exception as e: pass @@ -1289,7 +1289,7 @@ def bind_simulation_device(args, userid): device_role = "Edge.Simulator" unique_device_id = "{}@{}.{}".format(args.device_id, args.os_name, device_role) - # Bind account id to FedML® Nexus AI Platform + # Bind account id to TensorOpera® Nexus AI Platform register_try_count = 0 edge_id = -1 while register_try_count < 5: diff --git a/python/fedml/serving/templates/hf_template/config.yaml 
b/python/fedml/serving/templates/hf_template/config.yaml index da512f4a46..72551635d4 100644 --- a/python/fedml/serving/templates/hf_template/config.yaml +++ b/python/fedml/serving/templates/hf_template/config.yaml @@ -62,7 +62,7 @@ environment_variables: VERBOSE: "True" # If you do not have any GPU resource but want to serve the model -# Try fedml® launch platform, and uncomment the following lines. +# Try TensorOpera® launch platform, and uncomment the following lines. # ------------------------------------------------------------ computing: minimum_num_gpus: 1 # minimum # of GPUs to provision diff --git a/python/fedml/workflow/driver_example/customized_job_example/README.md b/python/fedml/workflow/driver_example/customized_job_example/README.md index 647cddc290..cd95ef5c75 100644 --- a/python/fedml/workflow/driver_example/customized_job_example/README.md +++ b/python/fedml/workflow/driver_example/customized_job_example/README.md @@ -1,6 +1,6 @@ # Make your own workflow with multiple jobs -## Define the job yaml based on the FEDML® Launch docs (https://doc.fedml.ai/launch) +## Define the job yaml based on the TensorOpera® Launch docs (https://doc.fedml.ai/launch) ``` working_directory = os.path.dirname(os.path.abspath(__file__)) deploy_image_job_yaml = os.path.join(working_directory, "deploy_image_job.yaml") @@ -119,7 +119,7 @@ ``` The output of the above deploy workflow is as follows. ``` -Submitting your job to FedML® Nexus AI Platform: 100%|██████████| 3.00k/3.00k [00:00<00:00, 3.10kB/s] +Submitting your job to TensorOpera® Nexus AI Platform: 100%|██████████| 3.00k/3.00k [00:00<00:00, 3.10kB/s] Final status of the workflow is as follows. JobStatus.FINISHED Output of the workflow is as follows. {'endpoint_id': 2131, 'endpoint_name': 'endpoint_test1', 'inference_url': 'https://open-test.fedml.ai/inference', 'request_body': {'arr': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0100005, -0.0100005, -0.0100005, -0.013973799, -0.0189315247, -0.023184301, -0.0360728861, -0.0392619154, -0.0380269994, -0.0390143887, -0.0346046778, -0.0257765396, -0.0209733754, -0.0217809993, -0.0144984527, -0.0118807892, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0178081425, -0.0232058779, -0.0298662898, -0.0414395151, -0.0586512813, -0.0812643979, -0.105997038, -0.121704878, -0.134457288, -0.139756261, -0.141562422, -0.135229133, -0.120246727, -0.104490087, -0.0870044931, -0.0716699334, -0.0485892545, -0.0324260775, -0.0216926329, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0132956624, -0.0225936238, -0.0383702224, -0.0598206019, -0.0842014426, -0.118390816, -0.154266827, -0.188282524, -0.219803054, -0.242936317, -0.255020324, -0.259481423, -0.249404582, -0.226727106, -0.200418885, -0.16716117, -0.134317009, -0.0958717755, -0.0736565245, -0.0503983075, -0.0269783475, -0.0168919, -0.0100005, 0.0, 0.0, 0.0, 0.0, -0.0147795885, -0.025122101, -0.0381226487, -0.0786317321, -0.119593671, -0.165704529, -0.228814281, -0.288620224, -0.354491034, -0.421140618, -0.480243669, -0.527064646, -0.540807419, -0.521388017, -0.474446021, -0.403948632, -0.336571539, -0.271580657, -0.20666741, -0.154539645, -0.108856709, -0.0677589146, -0.0340327281, -0.0215091205, 0.0, 0.0, -0.0100005, -0.0107381289, -0.0260253876, -0.0570600482, -0.0914378767, -0.143000013, -0.199005834, -0.266034404, -0.353401549, -0.450251488, -0.551598332, -0.647939202, -0.743171364, -0.818162561, -0.851073275, 
-0.83112168, -0.763764496, -0.659992784, -0.547527626, -0.439376979, -0.33557659, -0.254856553, -0.183933732, -0.126755715, -0.0706477667, -0.0388818206, 0.0, 0.0, 0.0, -0.0134176155, -0.0390612132, -0.0873974922, -0.133107017, -0.194532142, -0.27478633, -0.369886454, -0.482920333, -0.605294063, -0.735621386, -0.869509827, -0.989564738, -1.09132506, -1.13182948, -1.09408349, -0.996373436, -0.868781173, -0.717778845, -0.570649327, -0.439021868, -0.326889344, -0.235934504, -0.167697996, -0.0995100269, -0.0479392976, -0.0187851186, 0.0, -0.0117322667, -0.0288274493, -0.0646532861, -0.118956716, -0.17783758, 1.53795878, 2.57176245, 1.53212043, 1.00392168, -0.179355647, -0.591732991, -1.05273662, -1.15378689, -1.22142979, -1.2388156, -1.21321586, -1.14302847, -1.02018313, -0.857098743, -0.676706697, -0.516203262, -0.379287244, -0.271402545, -0.189934521, -0.119940614, -0.0556340911, -0.0145752163, 0.0, -0.0206611389, -0.0437166621, -0.0808756237, -0.140488164, -0.207699245, 3.7747726, 3.14033146, 2.28939169, 1.76127332, 1.4318542, 1.1313135, 0.679164893, 0.665484747, 0.666043389, 0.680680095, 0.677305174, 0.665508286, 0.721340316, 0.883661589, 0.91751869, 0.0282541074, -0.401002939, -0.283099723, -0.194831338, -0.123075256, -0.066612686, -0.0161462821, -0.0112546885, -0.0293918605, -0.0484646663, -0.093178326, -0.146682925, -0.218121209, 0.830460131, 1.04725853, 0.147086928, 0.259684517, 0.495679969, 0.998953721, 1.29535061, 1.12204782, 1.41528197, 1.4259952, 1.36416372, 1.22805443, 1.03395727, 1.40874227, 1.73166837, 1.00260058, -0.401823716, -0.275049233, -0.181713744, -0.107567122, -0.0566041118, -0.0189159236, -0.0121427928, -0.0243168731, -0.050270377, -0.0887358114, -0.138806025, -0.212706019, -0.321729999, -0.462313723, -0.652442841, -0.845524923, -0.961258323, -0.793125052, -0.226359955, -0.640468216, -0.12372009, -0.167157468, -0.255843161, -0.441448335, -0.792766628, 1.30597044, 1.81460411, 0.691054579, -0.383665051, -0.26310513, -0.166473946, -0.0799663431, -0.0455007946, -0.0195541446, -0.0100005, -0.0186206584, -0.0414986832, -0.0722615997, -0.123238725, -0.212256343, -0.331309824, -0.491126078, -0.687704902, -0.86260267, -0.939124713, -0.869991467, -0.758168797, -0.722198511, -0.739826964, -0.809980626, -0.911188613, -1.00032001, -0.221550751, 1.53134484, 1.47605194, -0.273150738, -0.363157263, -0.252975575, -0.157152039, -0.0652009258, -0.0335283586, -0.0124209728, 0.0, -0.014849279, -0.0329699917, -0.0601451792, -0.118353377, -0.219271688, -0.354392407, -0.523006773, -0.71568287, -0.862626101, -0.90524289, -0.831592288, -0.751312636, -0.762948163, -0.825877849, -0.930232292, -1.04727288, -0.879016953, 1.11455708, 1.61660969, 0.264000765, -0.464282235, -0.354907482, -0.256014147, -0.158427696, -0.0620647188, -0.0242921899, 0.0, 0.0, -0.0117874599, -0.0252632841, -0.0502423656, -0.115068847, -0.235195531, -0.377531303, -0.547311188, -0.723069536, -0.848981953, -0.878897369, -0.826469482, -0.795496372, -0.883536617, -0.994814123, -1.13364619, -1.20871511, 5.60198157e-05, 1.28700658, 1.50082995, -0.122561277, -0.462110102, -0.360151562, -0.263898374, -0.166295096, -0.0568635009, -0.0105441394, 0.0, 0.0, 0.0, -0.016636779, -0.0423254862, -0.119931644, -0.252550583, -0.39191634, -0.556171069, -0.717849905, -0.829516019, -0.854549188, -0.84598967, -0.889246054, -1.03761315, -1.16457617, -1.30025654, -0.740699086, 1.05188993, 1.3036988, -0.163440609, -0.59058464, -0.474233049, -0.368789557, -0.274082099, -0.174264813, -0.0696188843, -0.018003151, 0.0, 0.0, 0.0, -0.0168610568, 
-0.0451688568, -0.131668459, -0.267838929, -0.398906806, -0.548202377, -0.690077015, -0.789823563, -0.831599129, -0.861314493, -0.95681566, -1.11036634, -1.22743073, -1.31006468, -0.02573686, 1.14239899, 0.761423491, -0.706825874, -0.608999426, -0.492457882, -0.380502867, -0.279282191, -0.173984018, -0.0767235054, -0.0195871373, -0.0100005, 0.0, -0.0100005, -0.024817808, -0.0552275065, -0.148243512, -0.283202341, -0.4022125, -0.534598048, -0.656007943, -0.738083794, -0.781657503, -0.824620535, -0.918824463, -1.04078449, -1.13391454, -1.09212795, 0.70592031, 1.17679031, -0.37378182, -0.758547572, -0.62868064, -0.501492113, -0.381043892, -0.270505206, -0.168251255, -0.0784168728, -0.022799968, -0.0157856413, 0.0, 0.0, -0.0269850288, -0.0676999793, -0.167498207, -0.298089736, -0.411096027, -0.522810883, -0.625838621, -0.693423683, -0.731704263, -0.767086709, -0.82998003, -0.921590434, -1.00562716, 0.0779492952, 1.22959017, 0.636500653, -0.901400043, -0.769630793, -0.635363773, -0.494618472, -0.369117095, -0.255794246, -0.156732083, -0.0783809414, -0.0267109338, -0.0148726634, 0.0, -0.0100005, -0.0348385687, -0.0869311199, -0.185622432, -0.311777198, -0.427690033, -0.530457702, -0.612837575, -0.669073252, -0.706628103, -0.737178903, -0.779583917, -0.866698428, -0.288157768, 1.2193059, 1.10500698, -0.50413989, -0.909137779, -0.774520432, -0.619405771, -0.472096102, -0.344822207, -0.235626373, -0.144455008, -0.0769092863, -0.0286146987, -0.0100005, 0.0, -0.0100005, -0.0342628198, -0.101174053, -0.195711272, -0.324606261, -0.442716711, -0.545960978, -0.637281741, -0.703742928, -0.753441795, -0.788772419, -0.829773267, -0.745526297, 0.949893727, 1.18293215, 0.385795002, -1.023299, -0.89872884, -0.736858006, -0.575258663, -0.430322485, -0.30912025, -0.209889823, -0.13189517, -0.0731506415, -0.0276674735, -0.0100005, 0.0, -0.0100005, -0.0400234981, -0.10709374, -0.194645695, -0.316981297, -0.440895564, -0.560086039, -0.667605659, -0.763806998, -0.843535003, -0.903604039, -0.938010529, 0.763887624, 1.12176928, 0.784111, -0.818046093, -0.991046672, -0.828340182, -0.652780006, -0.495325185, -0.364891317, -0.261772085, -0.17529887, -0.112966586, -0.0617374486, -0.0270715466, 0.0, 0.0, 0.0, -0.0406825662, -0.0978606438, -0.177848987, -0.287783481, -0.412614752, -0.543271605, -0.671018812, -0.798159188, -0.916686263, -1.02499517, -0.773682132, 1.09355574, 1.05041156, -0.498209852, -1.05256459, -0.870980804, -0.688431167, -0.523166414, -0.391308572, -0.282035183, -0.199071147, -0.13652517, -0.0893688913, -0.041317086, -0.016850831, 0.0, 0.0, 0.0, -0.0283386899, -0.0765120563, -0.141969555, -0.232658498, -0.341261378, -0.469723228, -0.606194512, -0.747366354, -0.880786554, -0.729389144, 0.895224865, 1.11943124, -0.105438374, -1.00783177, -0.859696548, -0.683890026, -0.531181637, -0.395889778, -0.289956123, -0.203267966, -0.14295145, -0.0963532989, -0.0643914026, -0.0337070214, -0.0111853003, 0.0, 0.0, -0.0100005, -0.0151722732, -0.0480051146, -0.0951161616, -0.160643556, -0.245453283, -0.353245922, -0.474265429, -0.598667391, -0.729305101, 0.389322873, 1.38694264, 1.37486731, -0.403963644, -0.77444593, -0.638730244, -0.502999283, -0.387339921, -0.279971294, -0.198381814, -0.135822721, -0.0965383286, -0.0633365644, -0.0427549534, -0.0257581657, -0.0100005, 0.0, 0.0, 0.0, 0.0, -0.0237543896, -0.0522032466, -0.0858749627, -0.140703979, -0.208515621, -0.290149335, -0.368567087, 0.334201602, 2.33307288, 2.27286258, 2.23777229, 0.0412218057, -0.494890333, -0.422342015, -0.339048837, -0.257069088, -0.185534152, 
-0.136577185, -0.0860242391, -0.0578259874, -0.033636416, -0.0181122384, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0136274661, -0.0285803164, -0.0474793553, -0.0779785591, -0.118532172, -0.167201555, -0.214787719, 2.22171299, 4.30500754, 4.03125111, 3.36505818, 0.379953648, -0.284269948, -0.247694588, -0.205869945, -0.155925102, -0.116435448, -0.0857647974, -0.0546508166, -0.0401800073, -0.023758997, -0.0165780693, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0115748833, -0.0284271584, -0.0506655656, -0.0740332846, -0.100455604, -0.124744578, 4.17363552, 7.81243004, 5.7896979, 0.322149281, -0.181506609, -0.160333393, -0.139182079, -0.118875455, -0.0873316648, -0.0700227708, -0.0540690537, -0.0384297037, -0.0265616274, -0.0161844507, -0.0119683967, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0132918601, -0.0159980455, -0.0207236291, -0.0266997366, -0.0284703819, -0.0343035092, -0.0410336906, -0.0488886427, -0.0548357917, -0.0551988782, -0.0469971082, -0.0388769026, -0.0316010302, -0.0285226846, -0.021736589, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'model': 'fedml-qa-customer-0219/endpoint_test1'}, 'key_token': '5d427244128c45f58a74f3ecdb09b1e0'} Output of all jobs is as follows. {'deploy_image_job': {'endpoint_id': 2131, 'endpoint_name': 'endpoint_test1', 'inference_url': 'https://open-test.fedml.ai/inference', 'request_body': {'arr': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0100005, -0.0100005, -0.0100005, -0.013973799, -0.0189315247, -0.023184301, -0.0360728861, -0.0392619154, -0.0380269994, -0.0390143887, -0.0346046778, -0.0257765396, -0.0209733754, -0.0217809993, -0.0144984527, -0.0118807892, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0178081425, -0.0232058779, -0.0298662898, -0.0414395151, -0.0586512813, -0.0812643979, -0.105997038, -0.121704878, -0.134457288, -0.139756261, -0.141562422, -0.135229133, -0.120246727, -0.104490087, -0.0870044931, -0.0716699334, -0.0485892545, -0.0324260775, -0.0216926329, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0132956624, -0.0225936238, -0.0383702224, -0.0598206019, -0.0842014426, -0.118390816, -0.154266827, -0.188282524, -0.219803054, -0.242936317, -0.255020324, -0.259481423, -0.249404582, -0.226727106, -0.200418885, -0.16716117, -0.134317009, -0.0958717755, -0.0736565245, -0.0503983075, -0.0269783475, -0.0168919, -0.0100005, 0.0, 0.0, 0.0, 0.0, -0.0147795885, -0.025122101, -0.0381226487, -0.0786317321, -0.119593671, -0.165704529, -0.228814281, -0.288620224, -0.354491034, -0.421140618, -0.480243669, -0.527064646, -0.540807419, -0.521388017, -0.474446021, -0.403948632, -0.336571539, -0.271580657, -0.20666741, -0.154539645, -0.108856709, -0.0677589146, -0.0340327281, -0.0215091205, 0.0, 0.0, -0.0100005, -0.0107381289, -0.0260253876, -0.0570600482, -0.0914378767, -0.143000013, -0.199005834, -0.266034404, -0.353401549, -0.450251488, -0.551598332, -0.647939202, -0.743171364, -0.818162561, -0.851073275, -0.83112168, -0.763764496, -0.659992784, -0.547527626, -0.439376979, -0.33557659, -0.254856553, -0.183933732, -0.126755715, -0.0706477667, -0.0388818206, 0.0, 0.0, 0.0, -0.0134176155, -0.0390612132, -0.0873974922, -0.133107017, -0.194532142, -0.27478633, -0.369886454, -0.482920333, -0.605294063, -0.735621386, -0.869509827, -0.989564738, -1.09132506, -1.13182948, -1.09408349, -0.996373436, -0.868781173, -0.717778845, -0.570649327, -0.439021868, -0.326889344, -0.235934504, -0.167697996, -0.0995100269, 
-0.0479392976, -0.0187851186, 0.0, -0.0117322667, -0.0288274493, -0.0646532861, -0.118956716, -0.17783758, 1.53795878, 2.57176245, 1.53212043, 1.00392168, -0.179355647, -0.591732991, -1.05273662, -1.15378689, -1.22142979, -1.2388156, -1.21321586, -1.14302847, -1.02018313, -0.857098743, -0.676706697, -0.516203262, -0.379287244, -0.271402545, -0.189934521, -0.119940614, -0.0556340911, -0.0145752163, 0.0, -0.0206611389, -0.0437166621, -0.0808756237, -0.140488164, -0.207699245, 3.7747726, 3.14033146, 2.28939169, 1.76127332, 1.4318542, 1.1313135, 0.679164893, 0.665484747, 0.666043389, 0.680680095, 0.677305174, 0.665508286, 0.721340316, 0.883661589, 0.91751869, 0.0282541074, -0.401002939, -0.283099723, -0.194831338, -0.123075256, -0.066612686, -0.0161462821, -0.0112546885, -0.0293918605, -0.0484646663, -0.093178326, -0.146682925, -0.218121209, 0.830460131, 1.04725853, 0.147086928, 0.259684517, 0.495679969, 0.998953721, 1.29535061, 1.12204782, 1.41528197, 1.4259952, 1.36416372, 1.22805443, 1.03395727, 1.40874227, 1.73166837, 1.00260058, -0.401823716, -0.275049233, -0.181713744, -0.107567122, -0.0566041118, -0.0189159236, -0.0121427928, -0.0243168731, -0.050270377, -0.0887358114, -0.138806025, -0.212706019, -0.321729999, -0.462313723, -0.652442841, -0.845524923, -0.961258323, -0.793125052, -0.226359955, -0.640468216, -0.12372009, -0.167157468, -0.255843161, -0.441448335, -0.792766628, 1.30597044, 1.81460411, 0.691054579, -0.383665051, -0.26310513, -0.166473946, -0.0799663431, -0.0455007946, -0.0195541446, -0.0100005, -0.0186206584, -0.0414986832, -0.0722615997, -0.123238725, -0.212256343, -0.331309824, -0.491126078, -0.687704902, -0.86260267, -0.939124713, -0.869991467, -0.758168797, -0.722198511, -0.739826964, -0.809980626, -0.911188613, -1.00032001, -0.221550751, 1.53134484, 1.47605194, -0.273150738, -0.363157263, -0.252975575, -0.157152039, -0.0652009258, -0.0335283586, -0.0124209728, 0.0, -0.014849279, -0.0329699917, -0.0601451792, -0.118353377, -0.219271688, -0.354392407, -0.523006773, -0.71568287, -0.862626101, -0.90524289, -0.831592288, -0.751312636, -0.762948163, -0.825877849, -0.930232292, -1.04727288, -0.879016953, 1.11455708, 1.61660969, 0.264000765, -0.464282235, -0.354907482, -0.256014147, -0.158427696, -0.0620647188, -0.0242921899, 0.0, 0.0, -0.0117874599, -0.0252632841, -0.0502423656, -0.115068847, -0.235195531, -0.377531303, -0.547311188, -0.723069536, -0.848981953, -0.878897369, -0.826469482, -0.795496372, -0.883536617, -0.994814123, -1.13364619, -1.20871511, 5.60198157e-05, 1.28700658, 1.50082995, -0.122561277, -0.462110102, -0.360151562, -0.263898374, -0.166295096, -0.0568635009, -0.0105441394, 0.0, 0.0, 0.0, -0.016636779, -0.0423254862, -0.119931644, -0.252550583, -0.39191634, -0.556171069, -0.717849905, -0.829516019, -0.854549188, -0.84598967, -0.889246054, -1.03761315, -1.16457617, -1.30025654, -0.740699086, 1.05188993, 1.3036988, -0.163440609, -0.59058464, -0.474233049, -0.368789557, -0.274082099, -0.174264813, -0.0696188843, -0.018003151, 0.0, 0.0, 0.0, -0.0168610568, -0.0451688568, -0.131668459, -0.267838929, -0.398906806, -0.548202377, -0.690077015, -0.789823563, -0.831599129, -0.861314493, -0.95681566, -1.11036634, -1.22743073, -1.31006468, -0.02573686, 1.14239899, 0.761423491, -0.706825874, -0.608999426, -0.492457882, -0.380502867, -0.279282191, -0.173984018, -0.0767235054, -0.0195871373, -0.0100005, 0.0, -0.0100005, -0.024817808, -0.0552275065, -0.148243512, -0.283202341, -0.4022125, -0.534598048, -0.656007943, -0.738083794, -0.781657503, -0.824620535, -0.918824463, 
-1.04078449, -1.13391454, -1.09212795, 0.70592031, 1.17679031, -0.37378182, -0.758547572, -0.62868064, -0.501492113, -0.381043892, -0.270505206, -0.168251255, -0.0784168728, -0.022799968, -0.0157856413, 0.0, 0.0, -0.0269850288, -0.0676999793, -0.167498207, -0.298089736, -0.411096027, -0.522810883, -0.625838621, -0.693423683, -0.731704263, -0.767086709, -0.82998003, -0.921590434, -1.00562716, 0.0779492952, 1.22959017, 0.636500653, -0.901400043, -0.769630793, -0.635363773, -0.494618472, -0.369117095, -0.255794246, -0.156732083, -0.0783809414, -0.0267109338, -0.0148726634, 0.0, -0.0100005, -0.0348385687, -0.0869311199, -0.185622432, -0.311777198, -0.427690033, -0.530457702, -0.612837575, -0.669073252, -0.706628103, -0.737178903, -0.779583917, -0.866698428, -0.288157768, 1.2193059, 1.10500698, -0.50413989, -0.909137779, -0.774520432, -0.619405771, -0.472096102, -0.344822207, -0.235626373, -0.144455008, -0.0769092863, -0.0286146987, -0.0100005, 0.0, -0.0100005, -0.0342628198, -0.101174053, -0.195711272, -0.324606261, -0.442716711, -0.545960978, -0.637281741, -0.703742928, -0.753441795, -0.788772419, -0.829773267, -0.745526297, 0.949893727, 1.18293215, 0.385795002, -1.023299, -0.89872884, -0.736858006, -0.575258663, -0.430322485, -0.30912025, -0.209889823, -0.13189517, -0.0731506415, -0.0276674735, -0.0100005, 0.0, -0.0100005, -0.0400234981, -0.10709374, -0.194645695, -0.316981297, -0.440895564, -0.560086039, -0.667605659, -0.763806998, -0.843535003, -0.903604039, -0.938010529, 0.763887624, 1.12176928, 0.784111, -0.818046093, -0.991046672, -0.828340182, -0.652780006, -0.495325185, -0.364891317, -0.261772085, -0.17529887, -0.112966586, -0.0617374486, -0.0270715466, 0.0, 0.0, 0.0, -0.0406825662, -0.0978606438, -0.177848987, -0.287783481, -0.412614752, -0.543271605, -0.671018812, -0.798159188, -0.916686263, -1.02499517, -0.773682132, 1.09355574, 1.05041156, -0.498209852, -1.05256459, -0.870980804, -0.688431167, -0.523166414, -0.391308572, -0.282035183, -0.199071147, -0.13652517, -0.0893688913, -0.041317086, -0.016850831, 0.0, 0.0, 0.0, -0.0283386899, -0.0765120563, -0.141969555, -0.232658498, -0.341261378, -0.469723228, -0.606194512, -0.747366354, -0.880786554, -0.729389144, 0.895224865, 1.11943124, -0.105438374, -1.00783177, -0.859696548, -0.683890026, -0.531181637, -0.395889778, -0.289956123, -0.203267966, -0.14295145, -0.0963532989, -0.0643914026, -0.0337070214, -0.0111853003, 0.0, 0.0, -0.0100005, -0.0151722732, -0.0480051146, -0.0951161616, -0.160643556, -0.245453283, -0.353245922, -0.474265429, -0.598667391, -0.729305101, 0.389322873, 1.38694264, 1.37486731, -0.403963644, -0.77444593, -0.638730244, -0.502999283, -0.387339921, -0.279971294, -0.198381814, -0.135822721, -0.0965383286, -0.0633365644, -0.0427549534, -0.0257581657, -0.0100005, 0.0, 0.0, 0.0, 0.0, -0.0237543896, -0.0522032466, -0.0858749627, -0.140703979, -0.208515621, -0.290149335, -0.368567087, 0.334201602, 2.33307288, 2.27286258, 2.23777229, 0.0412218057, -0.494890333, -0.422342015, -0.339048837, -0.257069088, -0.185534152, -0.136577185, -0.0860242391, -0.0578259874, -0.033636416, -0.0181122384, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0136274661, -0.0285803164, -0.0474793553, -0.0779785591, -0.118532172, -0.167201555, -0.214787719, 2.22171299, 4.30500754, 4.03125111, 3.36505818, 0.379953648, -0.284269948, -0.247694588, -0.205869945, -0.155925102, -0.116435448, -0.0857647974, -0.0546508166, -0.0401800073, -0.023758997, -0.0165780693, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0115748833, -0.0284271584, -0.0506655656, 
-0.0740332846, -0.100455604, -0.124744578, 4.17363552, 7.81243004, 5.7896979, 0.322149281, -0.181506609, -0.160333393, -0.139182079, -0.118875455, -0.0873316648, -0.0700227708, -0.0540690537, -0.0384297037, -0.0265616274, -0.0161844507, -0.0119683967, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -0.0132918601, -0.0159980455, -0.0207236291, -0.0266997366, -0.0284703819, -0.0343035092, -0.0410336906, -0.0488886427, -0.0548357917, -0.0551988782, -0.0469971082, -0.0388769026, -0.0316010302, -0.0285226846, -0.021736589, -0.0100005, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'model': 'fedml-qa-customer-0219/endpoint_test1'}, 'key_token': '5d427244128c45f58a74f3ecdb09b1e0'}}
diff --git a/python/spotlight_prj/unitedllm/README.md b/python/spotlight_prj/unitedllm/README.md
index 5d5972cef1..5a300f2cfc 100644
--- a/python/spotlight_prj/unitedllm/README.md
+++ b/python/spotlight_prj/unitedllm/README.md
@@ -4,7 +4,7 @@
 
 # UnitedLLM: Training and Serving LLM Collaboratively on Decentralized GPU Clouds
 
-[FEDML® UnitedLLM](https://blog.fedml.ai/releasing-fedllm-build-your-own-large-language-models-on-proprietary-data-using-the-fedml-platform/)
+[TensorOpera® UnitedLLM](https://blog.fedml.ai/releasing-fedllm-build-your-own-large-language-models-on-proprietary-data-using-the-fedml-platform/)
 is an MLOps-supported training pipeline for decentralized pretraining and finetuning of large language models.
 
 ## Getting Started