From 8a5c3fbcf7274d39ecf4df61a1a07d2d03622656 Mon Sep 17 00:00:00 2001 From: Supadchaya Puangpontip Date: Thu, 8 Aug 2024 19:25:22 -0700 Subject: [PATCH] Add jit.ignore to prototype optimizers (#2958) Summary: X-link: https://github.com/facebookresearch/FBGEMM/pull/58 Pull Request resolved: https://github.com/pytorch/FBGEMM/pull/2958 `torch.compile` doesn't seem to cause errors if we deprecate an optimizer that is no longer used, but `torch.jit.script` will. `torch.jit.script` seems to check and ensure all decision branches are alive. To make prototype optimizers easily deprecated once included in production, we wrap the invoker function with `torch.jit.ignore`. This means that we need to always keep auto-generating the `lookup_{}.py` even when the optimizers are deprecated and their backends are removed. [simplified Bento example](https://fburl.com/anp/rbktkl08) Reviewed By: q10 Differential Revision: D60943180 --- .../python/split_embedding_codegen_lookup_invoker.template | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/fbgemm_gpu/codegen/training/python/split_embedding_codegen_lookup_invoker.template b/fbgemm_gpu/codegen/training/python/split_embedding_codegen_lookup_invoker.template index d6920777ea..c454f87271 100644 --- a/fbgemm_gpu/codegen/training/python/split_embedding_codegen_lookup_invoker.template +++ b/fbgemm_gpu/codegen/training/python/split_embedding_codegen_lookup_invoker.template @@ -38,7 +38,12 @@ torch.ops.load_library("//deeplearning/fbgemm/fbgemm_gpu:embedding_inplace_updat {%- endif %} - +{%- if is_prototype_optimizer %} +# Decorate the prototype optimizers which may be deprecated in the future with jit.ignore to avoid +# possible errors from torch.jit.script. +# Note that backends can be removed but the lookup invoker is still needed for backward compatibility +@torch.jit.ignore +{%- endif %} def invoke( common_args: CommonArgs, optimizer_args: OptimizerArgs,