[functorch] Remove cross_entropy monkey patch (pytorch/functorch#901)
We don't need it anymore because we upstreamed the changes into
pytorch/pytorch a while ago.
zou3519 committed Jul 20, 2022
1 parent 2db868e commit aba69bb
Showing 1 changed file with 0 additions and 20 deletions.

functorch/functorch/_src/monkey_patching.py
@@ -62,26 +62,6 @@ def _functorch_str(tensor, *, tensor_contents=None):
 torch._tensor_str._str = _functorch_str
 
 
-_old_cross_entropy = torch.nn.functional.cross_entropy
-
-
-# **kwargs to handle the new label_smoothing arg
-def cross_entropy(input, target, weight=None, size_average=None,
-                  ignore_index=-100, reduce=None, reduction='mean', **kwargs):
-    if input.dim() == 1 and target.dim() == 0:
-        input = input.unsqueeze(0)
-        target = target.unsqueeze(0)
-
-    result = _old_cross_entropy(
-        input, target, weight, size_average,
-        ignore_index, reduce, reduction, **kwargs)
-    if reduction == 'none':
-        return result.squeeze(0)
-    return result
-
-
-torch.nn.functional.cross_entropy = cross_entropy
-
 # Monkeypatch .backward() to error out if any transforms are active.
 # TODO: remove the monkeypatching and add an extension point into PyTorch core
 _old_backward = torch.Tensor.backward
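For context on why the patch is obsolete: the deleted wrapper existed to unsqueeze unbatched inputs (1-D logits, 0-D target) into a batch of one before calling cross_entropy, then squeeze the result back out. That no-batch-dim handling was upstreamed into PyTorch itself, so the patch is redundant. A minimal sketch of the now-native behavior, assuming a PyTorch version recent enough to include the upstreamed change:

import torch
import torch.nn.functional as F

# Unbatched inputs: shape (C,) logits and a 0-dim class-index target.
logits = torch.randn(5)
target = torch.tensor(3)

# The removed patch handled this case by unsqueezing to a batch of one;
# upstream cross_entropy now accepts unbatched inputs directly and
# returns a 0-dim scalar loss.
loss = F.cross_entropy(logits, target)
print(loss.shape)  # torch.Size([])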
