diff --git a/qiskit/algorithms/minimum_eigen_solvers/vqe.py b/qiskit/algorithms/minimum_eigen_solvers/vqe.py
index 29d10b7be31e..87ab8707b0f1 100755
--- a/qiskit/algorithms/minimum_eigen_solvers/vqe.py
+++ b/qiskit/algorithms/minimum_eigen_solvers/vqe.py
@@ -92,7 +92,7 @@ class VQE(VariationalAlgorithm, MinimumEigensolver):
         The callable _must_ have the argument names ``fun, x0, jac, bounds`` as indicated
         in the following code block.

-    .. code-block::python
+    .. code-block:: python

         from qiskit.algorithms.optimizers import OptimizerResult

@@ -111,7 +111,7 @@ def my_minimizer(fun, x0, jac=None, bounds=None) -> OptimizerResult:

     The above signature also allows to directly pass any SciPy minimizer, for instance as

-    .. code-block::python
+    .. code-block:: python

         from functools import partial
         from scipy.optimize import minimize
diff --git a/qiskit/algorithms/optimizers/gradient_descent.py b/qiskit/algorithms/optimizers/gradient_descent.py
index 5ca2e0f8cad8..38ed55048623 100644
--- a/qiskit/algorithms/optimizers/gradient_descent.py
+++ b/qiskit/algorithms/optimizers/gradient_descent.py
@@ -50,7 +50,7 @@ class GradientDescent(Optimizer):
     A minimum example that will use finite difference gradients with a default perturbation
     of 0.01 and a default learning rate of 0.01.

-    .. code-block::python
+    .. code-block:: python

         from qiskit.algorithms.optimizers import GradientDescent

@@ -70,7 +70,7 @@ def f(x):
     Note how much faster this convergences (i.e. less ``nfevs``) compared to the previous
     example.

-    .. code-block::python
+    .. code-block:: python

         from qiskit.algorithms.optimizers import GradientDescent
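
For context, the VQE docstring touched above describes passing any SciPy minimizer directly as the ``optimizer`` argument, wrapped with ``functools.partial`` so it exposes the required ``fun, x0, jac, bounds`` signature. The sketch below illustrates that pattern; it is not taken from the patch, and the toy observable, the TwoLocal ansatz, and the BasicAer statevector backend are illustrative assumptions.

    from functools import partial

    from scipy.optimize import minimize

    from qiskit import BasicAer
    from qiskit.algorithms import VQE
    from qiskit.circuit.library import TwoLocal
    from qiskit.opflow import I, Z
    from qiskit.utils import QuantumInstance

    # Wrap scipy.optimize.minimize so it exposes the ``fun, x0, jac, bounds``
    # signature the docstring requires, then pass it directly as the optimizer.
    optimizer = partial(minimize, method="L-BFGS-B")

    # Illustrative problem setup (assumptions, not part of the patch).
    hamiltonian = (Z ^ Z) + (I ^ Z)   # toy two-qubit observable
    ansatz = TwoLocal(2, "ry", "cz")  # simple hardware-efficient ansatz

    vqe = VQE(
        ansatz=ansatz,
        optimizer=optimizer,
        quantum_instance=QuantumInstance(BasicAer.get_backend("statevector_simulator")),
    )
    result = vqe.compute_minimum_eigenvalue(hamiltonian)
    print(result.eigenvalue)

The only point of the sketch is the shape of the ``optimizer`` argument: any callable matching the documented signature, including a partially applied ``scipy.optimize.minimize``, can stand in for a Qiskit optimizer class.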