Skip to content

Commit

Permalink
Clippy, cleanup, prepare for release
Browse files Browse the repository at this point in the history
  • Loading branch information
ecton committed Nov 16, 2023
1 parent 71b5685 commit 5d5f425
Show file tree
Hide file tree
Showing 7 changed files with 128 additions and 21 deletions.
3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
[package]
name = "rsn"
description = "A Rust-inspired, human-readable object notation."
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
repository = "https://github.com/khonsulabs/rsn"
readme = "./README.md"
rust-version = "1.65"
categories = ["no-std", "parser-implementations", "encoding"]
keywords = ["serde", "parser", "serialization"]

[features]
default = ["serde", "std"]
Expand Down
34 changes: 30 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
# Rsn - Rusty Notation

**This crate is very early in development and is not ready for consumption.**
**This crate is very early in development. Please report any issues [on our
GitHub](https://github.com/khonsulabs/rsn).**

![rsn forbids unsafe code](https://img.shields.io/badge/unsafe-forbid-success)
![rsn is considered alpha](https://img.shields.io/badge/status-alpha-orange)
[![crate version](https://img.shields.io/crates/v/rsn.svg)](https://crates.io/crates/rsn)
[![Live Build Status](https://img.shields.io/github/actions/workflow/status/khonsulabs/rsn/rust.yml?branch=main)](https://github.com/khonsulabs/rsn/actions?query=workflow:Tests)
[![HTML Coverage Report for `main`](https://khonsulabs.github.io/rsn/coverage/badge.svg)]($pages-base$/coverage/)
[![HTML Coverage Report for `main`](https://khonsulabs.github.io/rsn/coverage/badge.svg)](https://khonsulabs.github.io/rsn/coverage/)
[![Documentation for `main`](https://img.shields.io/badge/docs-main-informational)](https://khonsulabs.github.io/rsn/main/rsn/)

A UTF-8 based text format that looks very similar to valid Rust code. This format adheres closely to [Rust's lexical rules][rust-lexer]
Expand All @@ -17,15 +18,40 @@ This crate supports `no_std` targets that support the `alloc` crate.

## Data Types

```rsn
ExampleStruct {
integers: [42, 0xFF, 0o77, 0b101],
floats: [42., 3.14, 1e10],
bools: [true, false],
chars: ['a', '\''],
string: "Hello, World!",
raw_string: r#"I said, "Hello, World!""#,
bytes: [b'a', b'\''],
byte_string: b"Hello, World!",
raw_byte_string: br#"I said, "Hello, World!""#,
named_map: StructLike {
field: 42,
},
named_tuple: TupleLike(42),
r#raw_identifiers: true,
array: [1, 2, 3],
tuple: (1, 2, 3),
map: {
"a": 1,
"b": 2,
},
}
```

- Integers (`42`, `0xFF`, `0o77`, `0b101`)
- Floats (`42.`, `3.14`, `)
- Floats (`42.`, `3.14`)
- Bool (`true`, `false`)
- Character (`'a'`, `'\''`)
- Byte (`b'a'`, `b'\''`)
- String (`"hello, world"`)
- Raw Strings (`r#"They said, "Hello World!""#`)
- Byte Strings (`b"hello, world"`)
- Struct
- Named
- Ident or Raw Ident (`r#foo`)
- Map or Tuple
- Map
Expand Down
41 changes: 41 additions & 0 deletions examples/alltypes.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
use std::collections::HashMap;

use serde::{Deserialize, Serialize};
use serde_bytes::ByteBuf;

/// Mirrors the document in `examples/alltypes.rsn`: one field for each kind
/// of value the `rsn` format can represent, so deserializing it exercises
/// every supported data type.
#[derive(Serialize, Deserialize, Debug)]
struct ExampleStruct {
    /// Parsed from decimal, hex (`0xFF`), octal (`0o77`), and binary
    /// (`0b101`) integer literals.
    integers: Vec<usize>,
    /// Parsed from float literals, including exponent form (`1e10`).
    floats: Vec<f64>,
    /// `true` / `false` literals.
    bools: Vec<bool>,
    /// Character literals, including escapes (`'\''`).
    chars: Vec<char>,
    /// An ordinary double-quoted string.
    string: String,
    /// Fed from a raw string literal (`r#"..."#`) in the document; raw-ness
    /// only affects parsing, so an ordinary `String` holds the result.
    raw_string: String,
    /// Byte literals (`b'a'`, `b'\''`) collected into a byte vector.
    bytes: Vec<u8>,
    /// A byte-string literal (`b"..."`); `ByteBuf` tells serde to expect
    /// bytes rather than a `u8` sequence.
    byte_string: ByteBuf,
    /// A raw byte-string literal (`br#"..."#`).
    raw_byte_string: ByteBuf,
    /// The document spells this as `StructLike { field: 42 }`.
    named_map: NamedExample,
    /// The document spells this as `TupleLike(42)`.
    named_tuple: NamedExample,
    /// Written with raw-identifier syntax (`r#raw_identifiers`) in the
    /// document.
    r#raw_identifiers: bool,
    /// A bracketed sequence (`[1, 2, 3]`).
    array: Vec<usize>,
    /// A parenthesized sequence (`(1, 2, 3)`); tuples also deserialize into
    /// a `Vec`.
    tuple: Vec<usize>,
    /// A braced map with string keys (`{ "a": 1, ... }`).
    map: HashMap<String, usize>,
}

/// A named value that appears in the document in two shapes: struct-like
/// (`StructLike { field: 42 }`) and tuple-like (`TupleLike(42)`).
#[derive(Serialize, Deserialize, Debug)]
enum NamedExample {
    /// Deserialized from the braced, field-named form.
    StructLike { field: usize },
    /// Deserialized from the parenthesized, positional form.
    TupleLike(usize),
}

/// Deserializes `alltypes.rsn` — a document exercising every supported
/// `rsn` data type — into an [`ExampleStruct`] and prints the result.
///
/// # Panics
///
/// Panics (via `expect`) if the embedded document fails to deserialize,
/// which would indicate a parser regression.
fn main() {
    // `include_str!` embeds the document at compile time, so the example
    // needs no runtime file I/O and always finds its input.
    let example: ExampleStruct =
        rsn::from_str(include_str!("./alltypes.rsn")).expect("error deserializing alltypes.rsn");

    // Fixed copy-paste from another example: this loads the all-types
    // document, not blog posts.
    println!("Loaded example: {example:?}");
}

/// Smoke test: running the example end-to-end must not panic, which
/// verifies that `alltypes.rsn` stays in sync with `ExampleStruct`.
#[test]
fn runs() {
    main();
}
22 changes: 22 additions & 0 deletions examples/alltypes.rsn
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
ExampleStruct {
integers: [42, 0xFF, 0o77, 0b101],
floats: [42., 3.14, 1e10],
bools: [true, false],
chars: ['a', '\''],
string: "Hello, World!",
raw_string: r#"I said, "Hello, World!""#,
bytes: [b'a', b'\''],
byte_string: b"Hello, World!",
raw_byte_string: br#"I said, "Hello, World!""#,
named_map: StructLike {
field: 42,
},
named_tuple: TupleLike(42),
r#raw_identifiers: true,
array: [1, 2, 3],
tuple: (1, 2, 3),
map: {
"a": 1,
"b": 2,
},
}
4 changes: 2 additions & 2 deletions src/de.rs
Original file line number Diff line number Diff line change
Expand Up @@ -424,11 +424,11 @@ impl<'de> serde::de::Deserializer<'de> for &mut Deserializer<'de> {
Some(Ok(Event {
kind:
EventKind::BeginNested {
name,
name: Some(Name { name: "Some", .. }),
kind: Nested::Tuple,
},
..
})) if matches!(name, Some(Name { name: "Some", .. })) => {
})) => {
de.parser.next();
let result = visitor.visit_some(&mut *de)?;
match de.parser.next().transpose()? {
Expand Down
38 changes: 28 additions & 10 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,13 @@ impl<'s> Parser<'s> {
..
})
) {
let Some(Ok(Token { kind: TokenKind::Open(balanced), location: open_location })) = self.next_token() else { unreachable!("matched above") };
let Some(Ok(Token {
kind: TokenKind::Open(balanced),
location: open_location,
})) = self.next_token()
else {
unreachable!("matched above")
};

let kind = match balanced {
Balanced::Paren => {
Expand Down Expand Up @@ -251,7 +257,9 @@ impl<'s> Parser<'s> {
}

fn map_state_mut(&mut self) -> &mut MapState {
let Some((_,NestedState::Map(map_state))) = self.nested.last_mut() else { unreachable!("not a map state") };
let Some((_, NestedState::Map(map_state))) = self.nested.last_mut() else {
unreachable!("not a map state")
};
map_state
}

Expand Down Expand Up @@ -296,7 +304,7 @@ impl<'s> Parser<'s> {
)),
},
MapState::ExpectingComma => match self.next_token_parts()? {
(location, Some(TokenKind::Close(closed))) if closed == Balanced::Brace => {
(location, Some(TokenKind::Close(Balanced::Brace))) => {
self.nested.pop();
Ok(Event::new(location, EventKind::EndNested))
}
Expand Down Expand Up @@ -359,7 +367,7 @@ impl<'s> Parser<'s> {
)),
},
MapState::ExpectingComma => match self.next_token_parts()? {
(location, Some(TokenKind::Close(closed))) if closed == Balanced::Brace => {
(location, Some(TokenKind::Close(Balanced::Brace))) => {
self.root_state = State::Finished;
Ok(Event::new(location, EventKind::EndNested))
}
Expand Down Expand Up @@ -396,8 +404,9 @@ impl<'s> Parser<'s> {
};
match &token.kind {
TokenKind::Identifier(_) if self.config.allow_implicit_map => {
let TokenKind::Identifier(identifier) = token.kind
else { unreachable!("just matched")};
let TokenKind::Identifier(identifier) = token.kind else {
unreachable!("just matched")
};
match self.peek() {
Some(colon) if matches!(colon.kind, TokenKind::Colon) => {
// Switch to parsing an implicit map
Expand All @@ -417,8 +426,13 @@ impl<'s> Parser<'s> {
TokenKind::Open(Balanced::Brace | Balanced::Paren,)
) =>
{
let Some(Ok(Token{ kind: TokenKind::Open(kind), location: open_location})) = self.next_token()
else { unreachable!("just peeked") };
let Some(Ok(Token {
kind: TokenKind::Open(kind),
location: open_location,
})) = self.next_token()
else {
unreachable!("just peeked")
};
self.root_state = State::Finished;
Ok(Event::new(
token.location,
Expand Down Expand Up @@ -456,8 +470,12 @@ impl<'s> Parser<'s> {
}
}
State::StartingImplicitMap(_) => {
let State::StartingImplicitMap((location, identifier)) = mem::replace(&mut self.root_state, State::ImplicitMap(MapState::ExpectingColon))
else { unreachable!("just matched") };
let State::StartingImplicitMap((location, identifier)) = mem::replace(
&mut self.root_state,
State::ImplicitMap(MapState::ExpectingColon),
) else {
unreachable!("just matched")
};
Ok(Event::new(
location,
EventKind::Primitive(Primitive::Identifier(identifier)),
Expand Down
7 changes: 2 additions & 5 deletions src/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -449,11 +449,8 @@ impl<'a, const INCLUDE_ALL: bool> Tokenizer<'a, INCLUDE_ALL> {
self.scratch.clear();
let already_read_chars = self.chars.marked_str();
if had_underscores {
self.scratch.extend(
already_read_chars
.chars()
.filter_map(|ch| (ch != '_').then_some(ch)),
);
self.scratch
.extend(already_read_chars.chars().filter(|ch| ch != &'_'));
} else {
self.scratch.push_str(already_read_chars);
}
Expand Down

0 comments on commit 5d5f425

Please sign in to comment.