From 4675607aa82297990923e51128966643ccf019c5 Mon Sep 17 00:00:00 2001
From: Vladimir Loncar
Date: Mon, 29 Jan 2024 19:04:30 +0100
Subject: [PATCH 1/3] Fix docstring in ObjectiveEstimator

---
 hls4ml/optimization/objectives/__init__.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/hls4ml/optimization/objectives/__init__.py b/hls4ml/optimization/objectives/__init__.py
index 8ab4535060..fcbef305b6 100644
--- a/hls4ml/optimization/objectives/__init__.py
+++ b/hls4ml/optimization/objectives/__init__.py
@@ -31,8 +31,10 @@ def is_layer_optimizable(self, layer_attributes):
             layer_attributes (hls4ml.optimization.attributes.LayerAttributes): Layer attributes

         Returns:
-            optimizable (boolean): can optimizations be applied to this layer
-            optimization_attributes (hls4ml.optimization.attributes.OptimizationAttributes):
+            tuple containing
+
+            - optimizable (boolean): can optimizations be applied to this layer
+            - optimization_attributes (hls4ml.optimization.attributes.OptimizationAttributes):
                 Most suitable approach for optimization

         Examples:

From 8503c867c4301a5a29981a3b45689c7046bd3edd Mon Sep 17 00:00:00 2001
From: Vladimir Loncar
Date: Mon, 29 Jan 2024 19:04:54 +0100
Subject: [PATCH 2/3] Add optimization API paper to reference.rst

---
 docs/reference.rst | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/docs/reference.rst b/docs/reference.rst
index c9f3493437..1b25d12d65 100644
--- a/docs/reference.rst
+++ b/docs/reference.rst
@@ -86,6 +86,19 @@ binary/ternary networks:
        year = "2021"
    }

+optimization API:
+
+.. code-block:: bibtex
+
+   @article{Ramhorst:2023fpga,
+       author = "Benjamin Ramhorst and others",
+       title = "{FPGA Resource-aware Structured Pruning for Real-Time Neural Networks}",
+       eprint = "2308.05170",
+       archivePrefix = "arXiv",
+       primaryClass = "cs.AR",
+       year = "2023"
+   }
+
 Acknowledgments
 ===============
 If you benefited from participating in our community, we ask that you please acknowledge the Fast Machine Learning collaboration, and particular individuals who helped you, in any publications.
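The docstring fixed in patch 1/3 documents a two-element return from is_layer_optimizable. The sketch below is illustrative only and is not part of the patch: it assumes ParameterEstimator (mentioned later in this series) is importable from hls4ml.optimization.objectives and that it can be called on the class itself, and it takes a pre-built LayerAttributes object as given, since the patch does not show how one is constructed.

.. code-block:: python

    # Illustrative sketch, not part of the patch: unpack the tuple documented
    # in the corrected is_layer_optimizable docstring.
    # Assumptions: ParameterEstimator is exposed by hls4ml.optimization.objectives
    # and is_layer_optimizable can be invoked on the class.
    from hls4ml.optimization.objectives import ParameterEstimator


    def describe_layer(layer_attributes):
        # layer_attributes: an hls4ml.optimization.attributes.LayerAttributes
        # instance built elsewhere (its construction is outside this patch's scope)
        optimizable, optimization_attributes = ParameterEstimator.is_layer_optimizable(layer_attributes)
        if optimizable:
            print('Layer can be optimized; suggested approach:', optimization_attributes)
        else:
            print('Layer cannot be optimized by this objective')
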
From 6eb391f3cd8d20f1face2190017c0d47097e9578 Mon Sep 17 00:00:00 2001
From: Vladimir Loncar
Date: Mon, 29 Jan 2024 19:18:00 +0100
Subject: [PATCH 3/3] Rename optimize_keras_for_hls4ml

---
 docs/advanced/model_optimization.rst | 8 ++++----
 hls4ml/optimization/__init__.py      | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/advanced/model_optimization.rst b/docs/advanced/model_optimization.rst
index 14813f7217..a75224b8cc 100644
--- a/docs/advanced/model_optimization.rst
+++ b/docs/advanced/model_optimization.rst
@@ -100,9 +100,9 @@ Finally, optimizing Vivado DSPs is possible, given a hls4ml config:
     from hls4ml.utils.config import config_from_keras_model
     from hls4ml.optimization.objectives.vivado_objectives import VivadoDSPEstimator

-    # Note the change from optimize_model to optimize_keras_for_hls4ml
-    # The function optimize_keras_for_hls4ml acts as a wrapper for the function, parsing hls4ml config to model attributes
-    from hls4ml.optimization import optimize_keras_for_hls4ml
+    # Note the change from optimize_model to optimize_keras_model_for_hls4ml
+    # The function optimize_keras_model_for_hls4ml acts as a wrapper for the function, parsing hls4ml config to model attributes
+    from hls4ml.optimization import optimize_keras_model_for_hls4ml

     # Create hls4ml config
     default_reuse_factor = 4
@@ -113,7 +113,7 @@

     # Optimize model
     # Note the change from ParameterEstimator to VivadoDSPEstimator
-    optimized_model = optimize_keras_for_hls4ml(
+    optimized_model = optimize_keras_model_for_hls4ml(
         baseline_model, model_attributes, VivadoDSPEstimator, scheduler,
         X_train, y_train, X_val, y_val, batch_size, epochs,
         optimizer, loss_fn, metric, increasing, rtol
diff --git a/hls4ml/optimization/__init__.py b/hls4ml/optimization/__init__.py
index da913f7dcb..ab51ce1eb3 100644
--- a/hls4ml/optimization/__init__.py
+++ b/hls4ml/optimization/__init__.py
@@ -6,7 +6,7 @@
 default_regularization_range = np.logspace(-6, -2, num=16).tolist()


-def optimize_keras_for_hls4ml(
+def optimize_keras_model_for_hls4ml(
     keras_model,
     hls_config,
     objective,
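Because patch 3/3 renames the public entry point without keeping the old name, downstream scripts that import it need a one-line update. A minimal before/after sketch; the call signature itself is untouched, as the docs hunk above shows:

.. code-block:: python

    # Old import, removed by the rename in patch 3/3:
    # from hls4ml.optimization import optimize_keras_for_hls4ml

    # New import after the rename:
    from hls4ml.optimization import optimize_keras_model_for_hls4ml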