diff --git a/crates/wit-component/src/printing.rs b/crates/wit-component/src/printing.rs index 449d7e0373..6a4074c274 100644 --- a/crates/wit-component/src/printing.rs +++ b/crates/wit-component/src/printing.rs @@ -6,7 +6,7 @@ use wit_parser::*; // NB: keep in sync with `crates/wit-parser/src/ast/lex.rs` const PRINT_SEMICOLONS_DEFAULT: bool = true; -const PRINT_F32_F64_DEFAULT: bool = true; +const PRINT_F32_F64_DEFAULT: bool = false; /// A utility for printing WebAssembly interface definitions to a string. pub struct WitPrinter { diff --git a/crates/wit-parser/src/ast.rs b/crates/wit-parser/src/ast.rs index 3df0a7c57b..694519b3c3 100644 --- a/crates/wit-parser/src/ast.rs +++ b/crates/wit-parser/src/ast.rs @@ -1160,6 +1160,7 @@ pub struct SourceMap { sources: Vec<Source>, offset: u32, require_semicolons: Option<bool>, + require_f32_f64: Option<bool>, } #[derive(Clone)] @@ -1180,6 +1181,11 @@ impl SourceMap { self.require_semicolons = Some(enable); } + #[doc(hidden)] // NB: only here for a transitionary period + pub fn set_require_f32_f64(&mut self, enable: bool) { + self.require_f32_f64 = Some(enable); + } + /// Reads the file `path` on the filesystem and appends its contents to this /// [`SourceMap`].
pub fn push_file(&mut self, path: &Path) -> Result<()> { @@ -1214,8 +1220,13 @@ impl SourceMap { let mut srcs = self.sources.iter().collect::<Vec<_>>(); srcs.sort_by_key(|src| &src.path); for src in srcs { - let mut tokens = Tokenizer::new(&src.contents, src.offset, self.require_semicolons) - .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?; + let mut tokens = Tokenizer::new( + &src.contents, + src.offset, + self.require_semicolons, + self.require_f32_f64, + ) + .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?; let ast = Ast::parse(&mut tokens)?; resolver.push(ast).with_context(|| { format!("failed to start resolving path: {}", src.path.display()) @@ -1324,7 +1335,7 @@ pub(crate) enum AstUsePath { } pub(crate) fn parse_use_path(s: &str) -> Result<AstUsePath> { - let mut tokens = Tokenizer::new(s, 0, Some(true))?; + let mut tokens = Tokenizer::new(s, 0, Some(true), None)?; let path = UsePath::parse(&mut tokens)?; if tokens.next()?.is_some() { bail!("trailing tokens in path specifier"); diff --git a/crates/wit-parser/src/ast/lex.rs b/crates/wit-parser/src/ast/lex.rs index 8bcead10d7..78483c57d6 100644 --- a/crates/wit-parser/src/ast/lex.rs +++ b/crates/wit-parser/src/ast/lex.rs @@ -119,7 +119,7 @@ pub enum Error { // NB: keep in sync with `crates/wit-component/src/printing.rs`. const REQUIRE_SEMICOLONS_BY_DEFAULT: bool = true; -const REQUIRE_F32_F64_BY_DEFAULT: bool = true; +const REQUIRE_F32_F64_BY_DEFAULT: bool = false; impl<'a> Tokenizer<'a> { pub fn new( @@ -665,7 +665,7 @@ fn test_validate_id() { #[test] fn test_tokenizer() { fn collect(s: &str) -> Result<Vec<Token>> { - let mut t = Tokenizer::new(s, 0, Some(true))?; + let mut t = Tokenizer::new(s, 0, Some(true), None)?; let mut tokens = Vec::new(); while let Some(token) = t.next()? { tokens.push(token.1);