diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index c010fb04..fc9258d4 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -14,6 +14,11 @@ jobs:
   main:
     runs-on: ubuntu-latest
     steps:
+      - uses: codespell-project/actions-codespell@v2
+        with:
+          skip: deps
+          ignore_words_list: whn,ehr
+
       - uses: actions/checkout@v3
       - name: Install Erlang & Elixir
         uses: erlef/setup-beam@v1
diff --git a/guides/guides.md b/guides/guides.md
index b7f92c3b..4f3a32ea 100644
--- a/guides/guides.md
+++ b/guides/guides.md
@@ -20,7 +20,7 @@ Axon is a library for creating and training neural networks in Elixir. The Axon
 
 * [Your first training loop](training_and_evaluation/your_first_training_loop.livemd)
 * [Instrumenting loops with metrics](training_and_evaluation/instrumenting_loops_with_metrics.livemd)
-* [Your first evalutaion loop](training_and_evaluation/your_first_evaluation_loop.livemd)
+* [Your first evaluation loop](training_and_evaluation/your_first_evaluation_loop.livemd)
 * [Using loop event handlers](training_and_evaluation/using_loop_event_handlers.livemd)
 * [Custom models, loss functions, and optimizers](training_and_evaluation/custom_models_loss_optimizers.livemd)
 * [Writing custom metrics](training_and_evaluation/writing_custom_metrics.livemd)
diff --git a/lib/axon/loop.ex b/lib/axon/loop.ex
index c95ab651..f7de74ae 100644
--- a/lib/axon/loop.ex
+++ b/lib/axon/loop.ex
@@ -935,7 +935,7 @@ defmodule Axon.Loop do
         only: N # Trigger on `N` event
 
   **Warning: If you modify the step state in an event handler, it will trigger
-  potentially excessive recompilation and result in significant additinal overhead
+  potentially excessive recompilation and result in significant additional overhead
   during loop execution.**
   """
   def handle_event(%Loop{handlers: handle_fns} = loop, event, handler, filter \\ :always) do
diff --git a/lib/axon/losses.ex b/lib/axon/losses.ex
index 2c433a43..b5068fa5 100644
--- a/lib/axon/losses.ex
+++ b/lib/axon/losses.ex
@@ -221,7 +221,7 @@ defmodule Axon.Losses do
 
   $$l_i = -\sum_i^C \hat{y_i} \cdot \log(y_i)$$
 
-  Categorical cross-entropy is typically used for multi-class classifcation problems.
+  Categorical cross-entropy is typically used for multi-class classification problems.
   By default, it expects `y_pred` to encode a probability distribution along the last
   axis. You can specify `from_logits: true` to indicate `y_pred` is a logits tensor.
 
@@ -962,7 +962,7 @@ defmodule Axon.Losses do
   @doc """
   Huber loss.
 
-  ## Argumet Shapes
+  ## Argument Shapes
 
     * `y_true` - $(d_0, d_1, ..., d_n)$
     * `y_pred` - $(d_0, d_1, ..., d_n)$
diff --git a/mix.exs b/mix.exs
index a3f074f4..4154f74d 100644
--- a/mix.exs
+++ b/mix.exs
@@ -116,7 +116,7 @@ defmodule Axon.MixProject do
       groups_for_extras: [
         "Guides: Model Creation": Path.wildcard("guides/model_creation/*.livemd"),
         "Guides: Model Execution": Path.wildcard("guides/model_execution/*.livemd"),
-        "Guides: Training and Evalutaion":
+        "Guides: Training and Evaluation":
           Path.wildcard("guides/training_and_evaluation/*.livemd"),
         "Guides: Serialization": Path.wildcard("guides/serialization/*.livemd"),
         "Examples: Basics": Path.wildcard("notebooks/basics/*.livemd"),
@@ -156,7 +156,7 @@ defmodule Axon.MixProject do
         Axon.MixedPrecision,
         Axon.None,
         Axon.StatefulOutput,
-        Axon.Initalizers
+        Axon.Initializers
       ],
       Summary: [
         Axon.Display
diff --git a/test/axon/compiler_test.exs b/test/axon/compiler_test.exs
index 8a839a6b..dbd021e6 100644
--- a/test/axon/compiler_test.exs
+++ b/test/axon/compiler_test.exs
@@ -4901,7 +4901,7 @@ defmodule CompilerTest do
       assert Nx.type(b) == {:f, 32}
     end
 
-    test "initializes correclty with single namespace no params" do
+    test "initializes correctly with single namespace no params" do
       model = Axon.input("input_0", shape: {nil, 1}) |> Axon.namespace("model")
 
       {init_fn, _} = Axon.build(model)
diff --git a/test/axon/loop_test.exs b/test/axon/loop_test.exs
index 84358f8c..05e8f0bb 100644
--- a/test/axon/loop_test.exs
+++ b/test/axon/loop_test.exs
@@ -203,7 +203,7 @@ defmodule Axon.LoopTest do
         end) =~ "Batch"
       end
 
-    test "eval_step/1 evalutes model on a single batch" do
+    test "eval_step/1 evaluates model on a single batch" do
       inp = Nx.tensor([0, 1, 0, 1, 0, 1]) |> Nx.new_axis(-1)
       tar = Nx.tensor([1, 0, 1, 0, 1, 0]) |> Nx.new_axis(-1)
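As context for the `Axon.Losses` docstring touched above, a minimal sketch of calling categorical cross-entropy on one-hot targets; the tensor values and the `:mean` reduction are illustrative and not taken from the patch:

```elixir
y_true = Nx.tensor([[0, 1, 0], [1, 0, 0]])
y_pred = Nx.tensor([[0.1, 0.8, 0.1], [0.6, 0.3, 0.1]])

# y_pred encodes a probability distribution along the last axis;
# pass from_logits: true if it holds raw logits instead.
Axon.Losses.categorical_cross_entropy(y_true, y_pred, reduction: :mean)
```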