Commit
Merge pull request #2160 from FedML-AI/dev/v0.7.0
Dev/v0.7.0
Showing 53 changed files with 861 additions and 9,055 deletions.
10 changes: 10 additions & 0 deletions
python/examples/deploy/debug/inference_timeout/config.yaml
@@ -0,0 +1,10 @@
workspace: "./src"
entry_point: "serve_main.py"
bootstrap: |
  echo "Bootstrap start..."
  sleep 5
  echo "Bootstrap finished"
auto_detect_public_ip: true
use_gpu: true

request_timeout_sec: 10
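This debug example pairs a deliberately slow bootstrap with request_timeout_sec: 10, so requests that run longer than ten seconds should be cut off on the serving side. A minimal client-side sketch for exercising that timeout is below; the endpoint URL and JSON payload are placeholders for illustration, not something defined by this commit.

import requests

# Hypothetical endpoint of the deployed inference_timeout example; the real URL
# comes from the FedML deployment output, so treat this as a placeholder.
ENDPOINT = "http://localhost:2203/predict"

try:
    # Give the client slightly more than the server-side request_timeout_sec (10)
    # so that any timeout observed is enforced by the server, not the client.
    resp = requests.post(ENDPOINT, json={"text": "ping"}, timeout=15)
    print(resp.status_code, resp.json())
except requests.exceptions.Timeout:
    print("Request timed out on the client side")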
32 changes: 32 additions & 0 deletions
python/examples/deploy/debug/inference_timeout/src/serve_main.py
@@ -0,0 +1,32 @@
from fedml.serving import FedMLPredictor
from fedml.serving import FedMLInferenceRunner
import uuid
import torch

# Calculate the number of elements
num_elements = 1_073_741_824 // 4  # using integer division for whole elements


class DummyPredictor(FedMLPredictor):
    def __init__(self):
        super().__init__()
        # Create a tensor with these many elements
        tensor = torch.empty(num_elements, dtype=torch.float32)

        # Move the tensor to GPU
        tensor_gpu = tensor.cuda()

        # for debug
        with open("/tmp/dummy_gpu_occupier.txt", "w") as f:
            f.write("GPU is occupied")

        self.worker_id = uuid.uuid4()

    def predict(self, request):
        return {f"AlohaV0From{self.worker_id}": request}


if __name__ == "__main__":
    predictor = DummyPredictor()
    fedml_inference_runner = FedMLInferenceRunner(predictor)
    fedml_inference_runner.run()
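The predictor above is a dummy that pins roughly 1 GiB of GPU memory: 1,073,741,824 bytes divided by 4 bytes per float32 element gives 268,435,456 elements. A small sketch for sanity-checking that allocation on a CUDA-capable machine (assuming torch and a visible GPU; not part of this commit) is:

import torch

num_elements = 1_073_741_824 // 4    # 268,435,456 float32 elements
expected_bytes = num_elements * 4    # back to ~1 GiB

tensor_gpu = torch.empty(num_elements, dtype=torch.float32).cuda()

# torch.cuda.memory_allocated() reports bytes currently held by tensors on the
# GPU; it should be at least the ~1 GiB just allocated above.
print(f"expected:  {expected_bytes / 2**30:.2f} GiB")
print(f"allocated: {torch.cuda.memory_allocated() / 2**30:.2f} GiB")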
Empty file.
@@ -1,21 +1,8 @@
-workspace: "./src"
+workspace: "."
 entry_point: "main_entry.py"

-# If you want to install some packages
-# Please write the command in the bootstrap.sh
 bootstrap: |
   echo "Bootstrap start..."
-  sh ./config/bootstrap.sh
   echo "Bootstrap finished"
-# If you do not have any GPU resource but want to serve the model
-# Try FedML® Nexus AI Platform, and Uncomment the following lines.
-# ------------------------------------------------------------
-computing:
-  minimum_num_gpus: 1                # minimum # of GPUs to provision
-  maximum_cost_per_hour: $3000       # max cost per hour for your job per gpu card
-  #allow_cross_cloud_resources: true # true, false
-  #device_type: CPU                  # options: GPU, CPU, hybrid
-  resource_type: A100-80G            # e.g., A100-80G,
-  # please check the resource type list by "fedml show-resource-type"
-  # or visiting URL: https://open.fedml.ai/accelerator_resource_type
-# ------------------------------------------------------------
+  echo "Install some packages..."
+  echo "Install finished!"
@@ -0,0 +1,27 @@
from fedml.serving import FedMLPredictor
from fedml.serving import FedMLInferenceRunner


class Bot(FedMLPredictor):  # Inherit FedMLClientPredictor
    def __init__(self):
        super().__init__()

        # --- Your model initialization code here ---

        # -------------------------------------------

    def predict(self, request: dict):
        input_dict = request
        question: str = input_dict.get("text", "").strip()

        # --- Your model inference code here ---
        response = "I do not know the answer to your question."
        # ---------------------------------------

        return {"generated_text": f"The answer to your question {question} is: {response}"}


if __name__ == "__main__":
    chatbot = Bot()
    fedml_inference_runner = FedMLInferenceRunner(chatbot)
    fedml_inference_runner.run()
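The Bot template reads the incoming request's "text" field and answers with a "generated_text" field, so a deployed instance can be smoke-tested with a plain HTTP POST. The sketch below assumes a locally reachable endpoint; the URL is a placeholder, not something defined by this commit.

import requests

# Placeholder URL; substitute the endpoint printed by the FedML deployment.
ENDPOINT = "http://localhost:2345/predict"

payload = {"text": "What is federated learning?"}
resp = requests.post(ENDPOINT, json=payload, timeout=30)

# Expected shape, based on Bot.predict above:
# {"generated_text": "The answer to your question ... is: ..."}
print(resp.json())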
Empty file.
Empty file.
Empty file.
68 changes: 0 additions & 68 deletions
python/examples/deploy/quick_start/src/app/pipe/constants.py
This file was deleted.