diff --git a/doc/examples/derivative.ipynb b/doc/examples/derivative.ipynb
index 0fbd8e7..752ffd2 100644
--- a/doc/examples/derivative.ipynb
+++ b/doc/examples/derivative.ipynb
@@ -5,13 +5,13 @@
    "id": "1766a5fc",
    "metadata": {},
    "source": [
-    "# Demo\n",
+    "# Example: Gradient approximation\n",
     "\n",
-    "We will approximate the derivative of the Rosenbrock function at `(1,0,0)`, with the forward and backward difference methods, and with two different step sizes.\n",
+    "We will approximate the derivative of the Rosenbrock function at `(1,0,0)`, with the [forward and backward difference methods](https://en.wikipedia.org/wiki/Finite_difference#Basic_types), and with two different step sizes.\n",
     "\n",
     "We will also compute an approximation of the central difference, as the average of the forward and backward results.\n",
     "\n",
-    "Success will be determined by whether results between the different methods (forward, backward, central) are consistent (i.e. equal, within some tolerance).\n",
+    "Success will be determined by whether results between the different methods (forward, backward, central) are consistent (i.e., equal, within some tolerance).\n",
     "\n",
     "Function inputs and outputs are NumPy arrays of arbitrary positive dimension."
    ]
@@ -39,18 +39,21 @@
     "from fiddy.analysis import ApproximateCentral\n",
     "from fiddy.success import Consistency\n",
     "\n",
+    "# Point at which to compute the derivative\n",
     "point = np.array([1, 0, 0])\n",
+    "# Step sizes for finite differences\n",
     "sizes = [1e-10, 1e-5]\n",
+    "\n",
     "derivative = get_derivative(\n",
     "    function=rosen,\n",
     "    point=point,\n",
-    "    sizes=[1e-10, 1e-5],\n",
+    "    sizes=sizes,\n",
     "    method_ids=[MethodId.FORWARD, MethodId.BACKWARD],\n",
     "    direction_ids=[\"x\", \"y\", \"z\"],\n",
     "    analysis_classes=[ApproximateCentral],\n",
     "    success_checker=Consistency(rtol=1e-2, atol=1e-15),\n",
     ")\n",
-    "print(derivative.value)"
+    "print(\"Computed derivative:\", derivative.value)"
    ]
   },
   {
@@ -60,7 +63,7 @@
    "source": [
     "The full (`derivative.df_full`) or the concise (`derivative.df`) dataframe can be used for debugging gradients.\n",
     "\n",
-    "The IDs correspond to the directions in which finite differences were computed. These directions can be any vector in the function's parameter space. In this case directions were not specified, so the default directions were used, which is the standard basis."
+    "The IDs correspond to the directions in which finite differences were computed. These directions can be any vector in the function's parameter space. In this case, directions were not specified, so the default directions were used, which is the standard basis."
    ]
   },
   {
@@ -175,7 +178,7 @@
    "id": "14799145",
    "metadata": {},
    "source": [
-    "The `*_results` columns can be printed separatel to view the specific derivative values that were computed.\n",
+    "The `*_results` columns can be printed separately to view the specific derivative values that were computed.\n",
     "\n",
     "These values differ from the values reported in `derivative.values`. This is because the `success_checker` (`Consistency`) provides the derivative values as the average of all consistent derivative values. Consistency is checked on the level of `size`, so if any of the values for `1e-05` were inconsistent to the rest, they would not contribute to the average reported by the `Consistency` success checker."
    ]
@@ -346,7 +349,7 @@
    ],
    "source": [
     "expected_derivative = rosen_der(point)\n",
-    "print(expected_derivative)"
+    "print(f\"{expected_derivative=}\")"
    ]
   },
   {
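
Note (outside the patch): the snippet below is a minimal standalone sketch of the workflow the notebook walks through. It reproduces the get_derivative call from the hunk above and compares the approximated gradient against SciPy's analytic rosen_der. The top-level imports of get_derivative and MethodId from fiddy, and the verification via np.allclose, are assumptions not shown in the hunks.

# Standalone sketch (not part of the patch above); assumes `get_derivative` and
# `MethodId` are importable from the top-level `fiddy` package.
import numpy as np
from scipy.optimize import rosen, rosen_der

from fiddy import MethodId, get_derivative
from fiddy.analysis import ApproximateCentral
from fiddy.success import Consistency

# Point at which to compute the derivative, as in the notebook.
point = np.array([1, 0, 0])

# Approximate the gradient with forward/backward differences at two step sizes;
# the Consistency checker accepts a value only if the methods agree within tolerance.
derivative = get_derivative(
    function=rosen,
    point=point,
    sizes=[1e-10, 1e-5],
    method_ids=[MethodId.FORWARD, MethodId.BACKWARD],
    direction_ids=["x", "y", "z"],
    analysis_classes=[ApproximateCentral],
    success_checker=Consistency(rtol=1e-2, atol=1e-15),
)

# Cross-check against the analytic Rosenbrock gradient (assumed verification step).
expected_derivative = rosen_der(point)
print("approximate:", derivative.value)
print("analytic:   ", expected_derivative)
print("agree within rtol=1e-2:", np.allclose(derivative.value, expected_derivative, rtol=1e-2))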