[heterps]remove api for heter pipeline ps (PaddlePaddle#37396)
* fix api. test=develop

* fix api. test=develop
zmxdream committed Nov 22, 2021
1 parent 7f020d9 commit 17a4b50
Showing 3 changed files with 0 additions and 97 deletions.
3 changes: 0 additions & 3 deletions python/paddle/distributed/fleet/__init__.py
@@ -68,14 +68,11 @@
 server_index = fleet.server_index
 server_endpoints = fleet.server_endpoints
 is_server = fleet.is_server
-is_heter_worker = fleet.is_heter_worker
 util = UtilBase()
 barrier_worker = fleet.barrier_worker
 init_worker = fleet.init_worker
-init_heter_worker = fleet.init_heter_worker
 init_server = fleet.init_server
 run_server = fleet.run_server
-run_heter_worker = fleet.run_heter_worker
 stop_worker = fleet.stop_worker
 distributed_optimizer = fleet.distributed_optimizer
 save_inference_model = fleet.save_inference_model
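
For downstream scripts, the effect of this file's change is that the three module-level aliases no longer exist on paddle.distributed.fleet. A minimal migration sketch, assuming a training script that previously branched on the heter-worker role (the removed usage is shown as comments; everything else uses only APIs that remain in this diff's context):

    import paddle.distributed.fleet as fleet

    fleet.init()

    # Removed by this commit; these calls now raise AttributeError:
    #   if fleet.is_heter_worker():
    #       fleet.init_heter_worker()
    #       fleet.run_heter_worker(dataset)

    # Retained roles: only servers and regular workers.
    if fleet.is_server():
        fleet.init_server()
        fleet.run_server()
    else:
        fleet.init_worker()
        # ... training loop ...
        fleet.stop_worker()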
67 changes: 0 additions & 67 deletions python/paddle/distributed/fleet/base/fleet_base.py
@@ -565,24 +565,6 @@ def is_server(self):
         """
         return self._role_maker._is_server()
 
-    def is_heter_worker(self):
-        """
-        Check whether the node is an instance of heter worker.
-
-        Returns:
-            bool: True if this is a node of heter worker,
-                  False if not.
-
-        Examples:
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-                fleet.is_heter_worker()
-
-        """
-        return self._role_maker._is_heter_worker()
-
     def barrier_worker(self):
         """
         barrier all workers
@@ -617,30 +599,6 @@ def init_worker(self):
         """
         self._runtime_handle._init_worker()
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def init_heter_worker(self):
-        """
-        init_heter_worker executor to initialize startup program,
-
-        Returns:
-            None
-
-        Examples:
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-
-                fleet.init_heter_worker()
-
-        """
-        self._runtime_handle._init_heter_worker()
-
     @is_non_distributed_check
     @inited_runtime_handler
     def init_server(self, *args, **kwargs):
@@ -690,31 +648,6 @@ def load_model(self, path, mode):
         """
         self._runtime_handle.load_model(path, mode)
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def run_heter_worker(self, dataset):
-        """
-        run_heter_worker will run heter trainer main program with executor.
-
-        Returns:
-            None
-
-        Examples:
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-
-                dataset = ""
-                if fleet.is_heter_worker():
-                    fleet.run_heter_worker(dataset)
-
-        """
-        self._runtime_handle._run_heter_worker(dataset)
-
     @is_non_distributed_check
     @inited_runtime_handler
     def run_server(self):
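
Code that has to work on Paddle builds from both before and after this commit cannot assume the Fleet methods above exist. A hedged compatibility sketch using a plain hasattr probe (the fallback branch is illustrative, not part of this change):

    import paddle.distributed.fleet as fleet

    fleet.init()

    # The heter-worker entry points were removed by this commit, so probe
    # for them rather than calling them unconditionally.
    if hasattr(fleet, "is_heter_worker") and fleet.is_heter_worker():
        fleet.init_heter_worker()  # only reachable on older builds
    else:
        fleet.init_worker()  # newer builds: every non-server node is a worker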
27 changes: 0 additions & 27 deletions python/paddle/distributed/fleet/runtime/the_one_ps.py
@@ -880,33 +880,6 @@ def _run_server(self):
         host, port = ep.split(":")
         self._server.run_server(host, int(port))
 
-    def _init_heter_worker(self):
-        executor = self._get_executor()
-        startup_program = fluid.default_startup_program()
-        #real_startup_program = startup_program._heter_pipeline_opt[
-        #    "startup_program"]
-        executor.run(startup_program)
-        self._init_worker()
-
-    def _run_heter_worker(self,
-                          dataset=None,
-                          scope=None,
-                          thread=0,
-                          debug=False,
-                          fetch_list=None,
-                          fetch_info=None,
-                          print_period=100,
-                          fetch_handler=None):
-        executor = self._get_executor()
-        # dataset is not needed for heter worker
-        executor.train_from_dataset(
-            program=fluid.default_main_program(),
-            dataset=None,
-            debug=debug,
-            fetch_list=fetch_list,
-            fetch_info=fetch_info,
-            print_period=print_period)
-
     def _stop_worker(self):
         self._communicator.stop()
         if self.role_maker._is_heter_parameter_server_mode:
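
The deleted _run_heter_worker drove executor.train_from_dataset with dataset=None, since a heter worker consumed no input of its own. A regular trainer supplies a real dataset; the following is a rough sketch in the fluid-era style this file uses, with a hypothetical input file and an empty use_var list that a real program would fill in:

    import paddle
    import paddle.fluid as fluid

    paddle.enable_static()

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    # Unlike the removed heter path, which hard-coded dataset=None, a
    # regular trainer feeds real data through an in-memory dataset.
    dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
    dataset.set_batch_size(32)
    dataset.set_thread(2)
    dataset.set_filelist(["train_data_part_0.txt"])  # hypothetical input
    dataset.set_use_var([])  # fill with the program's input variables
    dataset.load_into_memory()

    exe.train_from_dataset(
        program=fluid.default_main_program(),
        dataset=dataset,
        debug=False,
        print_period=100)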
