Skip to content

Commit

Permalink
fix CI
Browse files Browse the repository at this point in the history
  • Loading branch information
pascalkuthe committed Jul 26, 2024
1 parent c52a259 commit 17bcaa8
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 24 deletions.
7 changes: 0 additions & 7 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,6 @@ jobs:
- name: Run cargo test
run: cargo test --workspace

- name: Install nightly toolchain
uses: dtolnay/rust-toolchain@nightly

- name: Run cargo miri test
run: cargo miri test --workspace


miri:
name: Miri
runs-on: ubuntu-latest
Expand Down
4 changes: 2 additions & 2 deletions src/intern.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use hashbrown::raw::RawTable;
/// of using the token data directly.
/// This allows for much better performance by amortizing the cost of hashing/equality.
///
/// While you can intern tokens yourself it is strongly recommended to use [`InternedInput`](crate::intern::InternedInput) module.
/// While you can intern tokens yourself it is strongly recommended to use the [`InternedInput`] module.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
#[repr(transparent)]
pub struct Token(pub u32);
Expand Down Expand Up @@ -44,7 +44,7 @@ pub trait TokenSource {
/// of using the token data directly.
/// This allows for much better performance by amortizing the cost of hashing/equality.
///
/// While you can intern tokens yourself it is strongly recommended to use [`InternedInput`](crate::intern::InternedInput) module.
/// While you can intern tokens yourself it is strongly recommended to use the [`InternedInput`] module.
#[derive(Default)]
pub struct InternedInput<T: Eq + Hash> {
pub before: Vec<Token>,
Expand Down
8 changes: 4 additions & 4 deletions src/sink.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ pub trait Sink: Sized {
/// to obtain the final diff result
fn finish(self) -> Self::Out;

/// Utility method that constructs a [`Counter`](crate::sink::Counter) that tracks the total number
/// Utility method that constructs a [`Counter`] that tracks the total number
/// of inserted and removed tokens in the changes passed to [`process_change`](crate::Sink::process_change).
fn with_counter(self) -> Counter<Self> {
Counter::new(self)
Expand All @@ -58,7 +58,7 @@ impl Sink for () {
fn finish(self) -> Self::Out {}
}

/// A [`Sink`](crate::Sink) which wraps a different sink
/// A [`Sink`] which wraps a different sink
/// and counts the number of `removed` and `inserted` [tokens](crate::intern::Token).
pub struct Counter<T> {
/// Total number of recorded inserted [`tokens`](crate::intern::Token).
Expand All @@ -67,10 +67,10 @@ pub struct Counter<T> {
/// Total number of recorded inserted [`tokens`](crate::intern::Token).
/// Computed by summing the lengths of the `after` subsequences passed to [`process_change`](crate::Sink::process_change).
pub insertions: u32,
/// The [`Sink`](crate::Sink) for which the counter records [`tokens`](crate::intern::Token).
/// The [`Sink`] for which the counter records [`tokens`](crate::intern::Token).
/// All calls to [`process_change`](crate::Sink::process_change) are forwarded to the `sink` by the counter.
/// After [`finish`](crate::Sink::finish) is called, this field contains the output returned by the [`finish`](crate::Sink::finish)
/// method of the wrapped [`Sink`](crate::Sink)
/// method of the wrapped [`Sink`].
pub wrapped: T,
}

Expand Down
16 changes: 8 additions & 8 deletions src/sources.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use std::str::from_utf8_unchecked;

use crate::TokenSource;

/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
/// Returns a [`TokenSource`] that uses
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
/// not included in the emitted tokens.
/// This means that changing the newline separator from `\r\n` to `\n`
Expand All @@ -12,7 +12,7 @@ pub fn lines(data: &str) -> Lines<'_, false> {
Lines(ByteLines(data.as_bytes()))
}

/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
/// Returns a [`TokenSource`] that uses
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
/// included in the emitted tokens.
/// This means that changing the newline separator from `\r\n` to `\n`
Expand All @@ -21,7 +21,7 @@ pub fn lines_with_terminator(data: &str) -> Lines<'_, true> {
Lines(ByteLines(data.as_bytes()))
}

/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
/// Returns a [`TokenSource`] that uses
/// the lines in `data` as Tokens. A line is a continuous subslice of
/// `data` which does not contain `\n` (or `\r\n`).
/// The newline separator (`\r\n` or `\n`) is not included in the emitted tokens.
Expand All @@ -31,7 +31,7 @@ pub fn byte_lines_with_terminator(data: &[u8]) -> ByteLines<'_, true> {
ByteLines(data)
}

/// Returns a [`TokenSource`](crate::intern::TokenSource) that uses
/// Returns a [`TokenSource`] that uses
/// the lines in `data` as Tokens. The newline separator (`\r\n` or `\n`) is
/// included in the emitted tokens.
/// This means that changing the newline separator from `\r\n` to `\n`
Expand Down Expand Up @@ -69,8 +69,8 @@ impl<'a> TokenSource for &'a [u8] {
}
}

/// A [`TokenSource`](crate::intern::TokenSource) that returns the lines of a `str` as tokens.
/// See [`lines`](crate::sources::lines) and [`lines_with_terminator`](crate::sources::lines_with_terminator) for details
/// A [`TokenSource`] that returns the lines of a `str` as tokens.
/// See [`lines`] and [`lines_with_terminator`] for details
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Lines<'a, const INCLUDE_LINE_TERMINATOR: bool>(ByteLines<'a, INCLUDE_LINE_TERMINATOR>);

Expand Down Expand Up @@ -99,8 +99,8 @@ impl<'a, const INCLUDE_LINE_TERMINATOR: bool> TokenSource for Lines<'a, INCLUDE_
}
}

/// A [`TokenSource`](crate::intern::TokenSource) that returns the lines of a byte slice as tokens.
/// See [`byte_lines`](crate::sources::lines) and [`byte_lines_with_terminator`](crate::sources::byte_lines_with_terminator) for details
/// A [`TokenSource`] that returns the lines of a byte slice as tokens.
/// See [`byte_lines`] and [`byte_lines_with_terminator`] for details
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ByteLines<'a, const INCLUDE_LINE_TERMINATOR: bool>(&'a [u8]);

Expand Down
6 changes: 3 additions & 3 deletions src/unified_diff.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use std::ops::Range;
use crate::intern::{InternedInput, Interner, Token};
use crate::Sink;

/// A [`Sink`](crate::sink::Sink) that creates a textual diff
/// A [`Sink`] that creates a textual diff
/// in the format typically output by git or gnu-diff if the `-u` option is used
pub struct UnifiedDiffBuilder<'a, W, T>
where
Expand All @@ -31,7 +31,7 @@ where
T: Hash + Eq + Display,
{
/// Create a new `UnifiedDiffBuilder` for the given `input`,
/// that will return a [`String`](std::string::String).
/// that will return a [`String`].
pub fn new(input: &'a InternedInput<T>) -> Self {
Self {
before_hunk_start: 0,
Expand All @@ -54,7 +54,7 @@ where
T: Hash + Eq + Display,
{
/// Create a new `UnifiedDiffBuilder` for the given `input`,
/// that will writes it output to the provided implementation of [`Write`](std::fmt::Write).
/// that will write its output to the provided implementation of [`Write`].
pub fn with_writer(input: &'a InternedInput<T>, writer: W) -> Self {
Self {
before_hunk_start: 0,
Expand Down

0 comments on commit 17bcaa8

Please sign in to comment.