Remove Gelu Fusion for TF Newapi (#1886)
Signed-off-by: zehao-intel <[email protected]>
zehao-intel authored Jul 2, 2024
1 parent 4372a76 commit 5592acc
Showing 2 changed files with 1 addition and 415 deletions.
@@ -22,15 +22,14 @@
 from neural_compressor.tensorflow.quantization.utils.graph_rewriter.graph_base import GraphRewriterBase
 from neural_compressor.tensorflow.quantization.utils.graph_util import GraphAnalyzer
 from neural_compressor.tensorflow.quantization.utils.graph_util import GraphRewriterHelper as Helper
-from neural_compressor.tensorflow.utils import SPR_BASE_VERSIONS


 class FuseGeluOptimizer(GraphRewriterBase):  # pragma: no cover
     """Fuse Sqrt + RealDiv + Erf + AddV2 + Mul + Mul into Gelu op."""

     def do_transformation(self):
         """Execute the fusion from small ops to Gelu."""
-        if not (tf.version.VERSION in ("1.15.0-up2", "1.15.0-up3") or tf.version.VERSION in SPR_BASE_VERSIONS):
+        if tf.version.VERSION not in ("1.15.0-up2", "1.15.0-up3"):
             return self.model

         cur_graph = GraphAnalyzer()
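For context, the pattern FuseGeluOptimizer matches is the erf-based GELU decomposition, gelu(x) = 0.5 * x * (1 + erf(x / sqrt(2))). The following is a minimal sketch, not part of this commit or repository, of how that expression appears as the small TensorFlow ops named in the docstring; the helper name gelu_small_ops is hypothetical and chosen only for illustration.

# Minimal sketch, assuming TF 2.x eager execution; gelu_small_ops is a made-up helper.
import tensorflow as tf

def gelu_small_ops(x):
    """erf-based GELU built from the small ops the rewriter fuses into one Gelu node."""
    sqrt_two = tf.math.sqrt(tf.constant(2.0, dtype=x.dtype))      # Sqrt
    scaled = tf.math.divide(x, sqrt_two)                          # RealDiv
    erf = tf.math.erf(scaled)                                     # Erf
    one_plus_erf = tf.math.add(erf, tf.constant(1.0, x.dtype))    # AddV2
    half_x = tf.math.multiply(x, tf.constant(0.5, x.dtype))       # Mul
    return tf.math.multiply(half_x, one_plus_erf)                 # Mul

x = tf.constant([-2.0, -1.0, 0.0, 1.0, 2.0])
print(gelu_small_ops(x).numpy())  # matches tf.nn.gelu(x, approximate=False)

The commit narrows the version gate so this small-op fusion only runs on the Intel TF 1.15.0-up2/up3 builds and no longer on the SPR base versions, which is why the SPR_BASE_VERSIONS import is dropped as well.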
