Skip to content

Commit

Permalink
Merge pull request PaddlePaddle#14 from sandyhouse/revert-12-lilong/moe
Browse files Browse the repository at this point in the history
Revert "upload assign pos op"
  • Loading branch information
lilong12 authored Sep 26, 2021
2 parents 0c459db + b3c1bd1 commit d81df39
Show file tree
Hide file tree
Showing 6 changed files with 0 additions and 362 deletions.
78 changes: 0 additions & 78 deletions paddle/fluid/operators/collective/assign_pos_op.cc

This file was deleted.

90 changes: 0 additions & 90 deletions paddle/fluid/operators/collective/assign_pos_op.cu

This file was deleted.

35 changes: 0 additions & 35 deletions paddle/fluid/operators/collective/assign_pos_op.h

This file was deleted.

59 changes: 0 additions & 59 deletions python/paddle/distributed/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,68 +50,9 @@
'pull_worker_log',
'global_scatter',
'global_gather',
'assign_pos',
]


def assign_pos(x,
               cum_count):
    """
    Assign a position to each token so that tokens belonging to the same
    expert are gathered contiguously, ordered by expert index.

    Args:
        x (Tensor): The expert indices (gate ids) of the tokens. Every element
            in the list must be a Tensor whose data type should be float16,
            float32, float64, int32 or int64.
        cum_count (Tensor): The cumulative sum of tokens per expert. Every
            element in the list must be a Tensor whose data type should be
            int64.

    Returns:
        out (Tensor): The positions that reorder the tokens by expert.

    Examples:
        .. code-block:: python

            # required: distributed
            import paddle
            local_expert_count = [2, 0, 2, 0]
            gate_idx = [
                [0, 2],
                [0, 2]
            ]
            local_expert_count = paddle.to_tensor(local_expert_count)
            gate_idx = paddle.to_tensor(gate_idx, dtype="int32")
            lec_cum = paddle.cumsum(local_expert_count)
            pos = paddle.distributed.utils.assign_pos(x=gate_idx, cum_count=lec_cum)
            print(pos) # the result: (2, 0, 3, 1)
    """
    if in_dygraph_mode():
        # cum_count[-1] is the total count of effective tokens across all
        # experts, passed as the "eff_gates_len" input of the op.
        return core.ops.assign_pos(x, cum_count, cum_count[-1])

    # Static-graph path: build the op through LayerHelper.
    # NOTE(review): input dtype validation (check_variable_and_dtype) is not
    # yet wired up for this op — the previous draft carried a commented-out
    # copy of global_scatter's checks, which has been removed.
    op_type = 'assign_pos'

    helper = LayerHelper(op_type, **locals())
    # Output positions index into cum_count's range, so share its dtype.
    out = helper.create_variable_for_type_inference(dtype=cum_count.dtype)

    helper.append_op(
        type=op_type,
        inputs={
            'X': [x],
            'cum_count': [cum_count],
            "eff_gates_len": [cum_count[-1]]
        },
        outputs={'Out': [out]})
    return out



def global_scatter(x,
local_count,
global_count,
Expand Down
100 changes: 0 additions & 100 deletions python/paddle/fluid/tests/unittests/test_assign_pos_op.py

This file was deleted.

Binary file not shown.

0 comments on commit d81df39

Please sign in to comment.