Skip to content

Commit

Permalink
(uncommitted/untracked changes)
Browse files Browse the repository at this point in the history
Differential Revision: D60977713
  • Loading branch information
Benson Ma authored and facebook-github-bot committed Aug 8, 2024
1 parent 43ed8f4 commit ea6dbd9
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 20 deletions.
44 changes: 28 additions & 16 deletions fbgemm_gpu/codegen/training/python/__init__.template
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,34 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# All optimizers
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args as lookup_args # noqa: F401
{%- for optim in all_optimizers %}
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }} as lookup_{{optim}} # noqa: F401
{%- endfor %}
import warnings

# SSD optimizers (putting them under try-except for BC as they are
# experimental ops which can be removed/updated in the future)
{%- macro force_import(name) %}
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }} as {{ name }} # noqa: F401
{%- endmacro %}

{%- macro try_import(name) %}
try:
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args_ssd as lookup_args_ssd
{%- for optim in ssd_optimizers %}
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }}_ssd as lookup_{{ optim }}_ssd
{%- endfor %}
# Import is placed under a try-except bc the op is experimental and can be
# removed/updated in the future
import fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }} as {{ name }} # noqa: F401
except:
import logging
logging.warn("fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args_ssd import failed")
{%- for optim in ssd_optims %}
logging.warn("fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }}_ssd import failed")
{%- endfor %}
warnings.warn(
f"""\033[93m
Failed to import: fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }}
\033[0m""",
DeprecationWarning,
)
{%- endmacro %}

# TBE optimizers
{{- force_import("lookup_args") }}
{%- for optim in all_optimizers %}
{{ try_import("lookup_" + optim) }}
{%- endfor %}

# SSD TBE optimizers
{{- try_import("lookup_args_ssd") }}
{%- for optim in ssd_optimizers %}
{{ try_import("lookup_" + optim + "_ssd") }}
{%- endfor %}
2 changes: 1 addition & 1 deletion fbgemm_gpu/fbgemm_gpu/config/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@

# pyre-strict

from .feature_list import FeatureGateName # noqa F401
from .feature_list import FeatureGate, FeatureGateName # noqa F401
27 changes: 26 additions & 1 deletion fbgemm_gpu/fbgemm_gpu/config/feature_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,32 @@ def foo():
"""

# Enable TBE V2 APIs
TBE_V2 = auto()

# Enable Ensemble Rowwise Adagrad (D60189486 stack)
TBE_ENSEMBLE_ROWWISE_ADAGRAD = auto()

def is_enabled(self) -> bool:
return torch.ops.fbgemm.check_feature_gate_key(self.name)
return FeatureGate.is_enabled(self)


class FeatureGate:
    """
    FBGEMM_GPU feature gate.

    This class exists because methods defined on enums cannot be invoked when
    the enum is packaged into a model (the mechanism is unclear).

    **Code Example:**

    .. code-block:: python

        from deeplearning.fbgemm.fbgemm_gpu.config import FeatureGate, FeatureGateName

        FeatureGate.is_enabled(FeatureGateName.TBE_V2)
    """

    @classmethod
    def is_enabled(cls, feature: FeatureGateName) -> bool:
        # Delegates to the registered fbgemm operator, keyed by the enum
        # member's name (e.g. "TBE_V2"); returns whether the gate is enabled.
        return torch.ops.fbgemm.check_feature_gate_key(feature.name)
4 changes: 3 additions & 1 deletion fbgemm_gpu/include/fbgemm_gpu/config/feature_gates.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,9 @@ namespace fbgemm_gpu::config {
/// UI.
///
/// For OSS: The environment variable will be evaluated as f"FBGEMM_{ENUM}"
#define ENUMERATE_ALL_FEATURE_FLAGS X(TBE_V2)
#define ENUMERATE_ALL_FEATURE_FLAGS \
X(TBE_V2) \
X(TBE_ENSEMBLE_ROWWISE_ADAGRAD)
// X(EXAMPLE_FEATURE_FLAG)

/// @ingroup fbgemm-gpu-config
Expand Down
7 changes: 6 additions & 1 deletion fbgemm_gpu/test/config/feature_gate_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

# pyre-fixme[21]
import fbgemm_gpu
from fbgemm_gpu.config import FeatureGateName
from fbgemm_gpu.config import FeatureGate, FeatureGateName

# pyre-fixme[16]: Module `fbgemm_gpu` has no attribute `open_source`.
open_source: bool = getattr(fbgemm_gpu, "open_source", False)
Expand All @@ -38,6 +38,11 @@ def test_feature_gates(self) -> None:
with self.assertNotRaised(Exception):
print(f"\n[OSS] Feature {feature.name} enabled: {feature.is_enabled()}")

with self.assertNotRaised(Exception):
print(
f"\n[OSS] Feature {feature.name} enabled: {FeatureGate.is_enabled(feature)}"
)

@unittest.skipIf(open_source, "Not supported in open source")
def test_feature_gates_fb(self) -> None:
# pyre-fixme[16]
Expand Down

0 comments on commit ea6dbd9

Please sign in to comment.