diff --git a/derive/src/decode.rs b/derive/src/decode.rs index 593305c2..e7431625 100644 --- a/derive/src/decode.rs +++ b/derive/src/decode.rs @@ -100,8 +100,9 @@ pub fn quote( } } }, - Data::Union(_) => - Error::new(Span::call_site(), "Union types are not supported.").to_compile_error(), + Data::Union(_) => { + Error::new(Span::call_site(), "Union types are not supported.").to_compile_error() + }, } } @@ -120,8 +121,8 @@ pub fn quote_decode_into( let fields = match data { Data::Struct(syn::DataStruct { fields: - Fields::Named(syn::FieldsNamed { named: fields, .. }) | - Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }), + Fields::Named(syn::FieldsNamed { named: fields, .. }) + | Fields::Unnamed(syn::FieldsUnnamed { unnamed: fields, .. }), .. }) => fields, _ => return None, @@ -133,9 +134,9 @@ pub fn quote_decode_into( // Bail if there are any extra attributes which could influence how the type is decoded. if fields.iter().any(|field| { - utils::get_encoded_as_type(field).is_some() || - utils::is_compact(field) || - utils::should_skip(&field.attrs) + utils::get_encoded_as_type(field).is_some() + || utils::is_compact(field) + || utils::should_skip(&field.attrs) }) { return None; } diff --git a/derive/src/encode.rs b/derive/src/encode.rs index 9140b033..966d26c8 100644 --- a/derive/src/encode.rs +++ b/derive/src/encode.rs @@ -345,7 +345,7 @@ fn impl_encode(data: &Data, type_name: &Ident, crate_path: &syn::Path) -> TokenS } }; - [hinting, encoding] + [hinting, encoding, quote! { #index }] }, Fields::Unnamed(ref fields) => { let fields = &fields.unnamed; @@ -378,7 +378,7 @@ fn impl_encode(data: &Data, type_name: &Ident, crate_path: &syn::Path) -> TokenS } }; - [hinting, encoding] + [hinting, encoding, quote! { #index }] }, Fields::Unit => { let hinting = quote_spanned! { f.span() => @@ -394,15 +394,15 @@ fn impl_encode(data: &Data, type_name: &Ident, crate_path: &syn::Path) -> TokenS } }; - [hinting, encoding] + [hinting, encoding, quote! 
{ #index }] }, }; items.push(item) } - let recurse_hinting = items.iter().map(|[hinting, _]| hinting); - let recurse_encoding = items.iter().map(|[_, encoding]| encoding); - + let recurse_hinting = items.iter().map(|[hinting, _, _]| hinting); + let recurse_encoding = items.iter().map(|[_, encoding, _]| encoding); + let recurse_indices = items.iter().map(|[_, _, index]| index); let hinting = quote! { // The variant index uses 1 byte. 1_usize + match *#self_ { @@ -412,6 +412,23 @@ fn impl_encode(data: &Data, type_name: &Ident, crate_path: &syn::Path) -> TokenS }; let encoding = quote! { + const _: () = { + let indices = [#( #recurse_indices ,)*]; + let len = indices.len(); + + // Check each pair for uniqueness + let mut index = 0; + while index < len { + let mut next_index = index + 1; + while next_index < len { + if indices[index] == indices[next_index] { + panic!("Found variants that have duplicate indexes. Both the implicit indexes (variant position / discriminant) and the explicit `#[codec(index = ...)]` indexes count; every variant must have a unique index."); + } + next_index += 1; + } + index += 1; + } + }; match *#self_ { #( #recurse_encoding )*, _ => (), diff --git a/derive/src/lib.rs b/derive/src/lib.rs index 8ba6d3de..929d7081 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -367,17 +367,19 @@ pub fn compact_as_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStr let constructor = quote!( #name(#( #recurse, )*)); (&field.ty, quote!(&self.#id), constructor) }, - _ => + _ => { return Error::new( data.fields.span(), "Only structs with a single non-skipped field can derive CompactAs", ) .to_compile_error() - .into(), + .into() + }, + }, + Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. }) + | Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, .. }) => { + return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into() + }, - Data::Enum(syn::DataEnum { enum_token: syn::token::Enum { span }, .. }) | - Data::Union(syn::DataUnion { union_token: syn::token::Union { span }, ..
}) => - return Error::new(span, "Only structs can derive CompactAs").to_compile_error().into(), }; let impl_block = quote! { diff --git a/derive/src/max_encoded_len.rs b/derive/src/max_encoded_len.rs index b9bcf580..4bb9099b 100644 --- a/derive/src/max_encoded_len.rs +++ b/derive/src/max_encoded_len.rs @@ -66,10 +66,12 @@ pub fn derive_max_encoded_len(input: proc_macro::TokenStream) -> proc_macro::Tok /// generate an expression to sum up the max encoded length from several fields fn fields_length_expr(fields: &Fields, crate_path: &syn::Path) -> proc_macro2::TokenStream { let fields_iter: Box> = match fields { - Fields::Named(ref fields) => - Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs))), - Fields::Unnamed(ref fields) => - Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs))), + Fields::Named(ref fields) => { + Box::new(fields.named.iter().filter(|field| !should_skip(&field.attrs))) + }, + Fields::Unnamed(ref fields) => { + Box::new(fields.unnamed.iter().filter(|field| !should_skip(&field.attrs))) + }, Fields::Unit => Box::new(std::iter::empty()), }; // expands to an expression like diff --git a/derive/src/trait_bounds.rs b/derive/src/trait_bounds.rs index 40cb97db..b5930228 100644 --- a/derive/src/trait_bounds.rs +++ b/derive/src/trait_bounds.rs @@ -121,8 +121,9 @@ pub fn add( generics.make_where_clause().predicates.extend(bounds); return Ok(()); }, - Some(CustomTraitBound::SkipTypeParams { type_names, .. }) => - type_names.into_iter().collect::>(), + Some(CustomTraitBound::SkipTypeParams { type_names, .. 
}) => { + type_names.into_iter().collect::>() + }, None => Vec::new(), }; @@ -189,9 +190,9 @@ fn get_types_to_add_trait_bound( Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect()) } else { let needs_codec_bound = |f: &syn::Field| { - !utils::is_compact(f) && - utils::get_encoded_as_type(f).is_none() && - !utils::should_skip(&f.attrs) + !utils::is_compact(f) + && utils::get_encoded_as_type(f).is_none() + && !utils::should_skip(&f.attrs) }; let res = collect_types(data, needs_codec_bound)? .into_iter() @@ -222,9 +223,10 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu let types = match *data { Data::Struct(ref data) => match &data.fields { - | Fields::Named(FieldsNamed { named: fields, .. }) | - Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => - fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(), + | Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => { + fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect() + }, Fields::Unit => Vec::new(), }, @@ -234,16 +236,18 @@ fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Resu .iter() .filter(|variant| !utils::should_skip(&variant.attrs)) .flat_map(|variant| match &variant.fields { - | Fields::Named(FieldsNamed { named: fields, .. }) | - Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => - fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(), + | Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. 
}) => { + fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect() + }, Fields::Unit => Vec::new(), }) .collect(), - Data::Union(ref data) => - return Err(Error::new(data.union_token.span(), "Union types are not supported.")), + Data::Union(ref data) => { + return Err(Error::new(data.union_token.span(), "Union types are not supported.")) + }, }; Ok(types) diff --git a/derive/src/utils.rs b/derive/src/utils.rs index 2812fc6a..ebc0c0aa 100644 --- a/derive/src/utils.rs +++ b/derive/src/utils.rs @@ -43,45 +43,14 @@ where pub fn check_indexes<'a, I: Iterator>(values: I) -> syn::Result<()> { let mut map: HashMap = HashMap::new(); for (i, v) in values.enumerate() { - if let Some(index) = find_meta_item(v.attrs.iter(), |meta| { - if let Meta::NameValue(ref nv) = meta { - if nv.path.is_ident("index") { - if let Expr::Lit(ExprLit { lit: Lit::Int(ref v), .. }) = nv.value { - let byte = v - .base10_parse::() - .expect("Internal error, index attribute must have been checked"); - return Some(byte); - } - } - } - None - }) { - if let Some(span) = map.insert(index, v.span()) { - let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed"); - error.combine(syn::Error::new(span, "Variant index already defined here.")); - return Err(error) - } - } else { - match v.discriminant.as_ref() { - Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(lit_int), .. }))) => { - let index = lit_int - .base10_parse::() - .expect("Internal error, index attribute must have been checked"); - if let Some(span) = map.insert(index, v.span()) { - let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed"); - error.combine(syn::Error::new(span, "Variant index already defined here.")); - return Err(error) - } - }, - Some((_, _)) => return Err(syn::Error::new(v.span(), "Invalid discriminant. qed")), - None => - if let Some(span) = map.insert(i.try_into().unwrap(), v.span()) { - let mut error = - syn::Error::new(span, "Custom variant index is duplicated later. 
qed"); - error.combine(syn::Error::new(v.span(), "Variant index derived here.")); - return Err(error) - }, - } + let index = variant_index(v, i)?; + if let Some(span) = map.insert(index, v.span()) { + let mut error = syn::Error::new( + v.span(), + "scale codec error: Invalid variant index, the variant index is duplicated.", + ); + error.combine(syn::Error::new(span, "Variant index used here.")); + return Err(error); } } Ok(()) @@ -89,7 +58,7 @@ pub fn check_indexes<'a, I: Iterator>(values: I) -> syn: /// Look for a `#[scale(index = $int)]` attribute on a variant. If no attribute /// is found, fall back to the discriminant or just the variant index. -pub fn variant_index(v: &Variant, index: usize) -> syn::Result { +pub fn variant_index(v: &Variant, index: usize) -> syn::Result { // first look for an attribute let codec_index = find_meta_item(v.attrs.iter(), |meta| { if let Meta::NameValue(ref nv) = meta { @@ -106,13 +75,27 @@ pub fn variant_index(v: &Variant, index: usize) -> syn::Result { None }); if let Some(index) = codec_index { - Ok(quote! { #index }) + Ok(index) } else { match v.discriminant.as_ref() { - Some((_, expr @ syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(_), .. }))) => - Ok(quote! { #expr }), - Some((_, expr)) => Err(syn::Error::new(expr.span(), "Invalid discriminant. qed")), - None => Ok(quote! { #index }), + Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(v), .. }))) => { + let byte = v.base10_parse::().expect( + "scale codec error: Invalid variant index, discriminant doesn't fit u8.", + ); + Ok(byte) + }, + Some((_, expr)) => Err(syn::Error::new( + expr.span(), + "scale codec error: Invalid discriminant, only int literal are accepted, e.g. 
\ + `= 32`.", + )), + None => index.try_into().map_err(|_| { + syn::Error::new( + v.span(), + "scale codec error: Variant index is too large, only 256 variants are \ + supported.", + ) + }), } } } @@ -363,16 +346,17 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> { match input.data { Data::Struct(ref data) => match &data.fields { - | Fields::Named(FieldsNamed { named: fields, .. }) | - Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => + | Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) => { for field in fields { for attr in &field.attrs { check_field_attribute(attr)?; } - }, + } + }, Fields::Unit => (), }, - Data::Enum(ref data) => + Data::Enum(ref data) => { for variant in data.variants.iter() { for attr in &variant.attrs { check_variant_attribute(attr)?; @@ -382,7 +366,8 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> { check_field_attribute(attr)?; } } - }, + } + }, Data::Union(_) => (), } Ok(()) @@ -390,10 +375,10 @@ pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> { // Check if the attribute is `#[allow(..)]`, `#[deny(..)]`, `#[forbid(..)]` or `#[warn(..)]`. pub fn is_lint_attribute(attr: &Attribute) -> bool { - attr.path().is_ident("allow") || - attr.path().is_ident("deny") || - attr.path().is_ident("forbid") || - attr.path().is_ident("warn") + attr.path().is_ident("allow") + || attr.path().is_ident("deny") + || attr.path().is_ident("forbid") + || attr.path().is_ident("warn") } // Ensure a field is decorated only with the following attributes: @@ -418,10 +403,11 @@ fn check_field_attribute(attr: &Attribute) -> syn::Result<()> { path, value: Expr::Lit(ExprLit { lit: Lit::Str(lit_str), .. }), .. 
- }) if path.get_ident().map_or(false, |i| i == "encoded_as") => + }) if path.get_ident().map_or(false, |i| i == "encoded_as") => { TokenStream::from_str(&lit_str.value()) .map(|_| ()) - .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream")), + .map_err(|_e| syn::Error::new(lit_str.span(), "Invalid token stream")) + }, elt => Err(syn::Error::new(elt.span(), field_error)), } @@ -468,20 +454,21 @@ fn check_top_attribute(attr: &Attribute) -> syn::Result<()> { `#[codec(decode_bound(T: Decode))]`, \ `#[codec(decode_bound_with_mem_tracking_bound(T: DecodeWithMemTracking))]` or \ `#[codec(mel_bound(T: MaxEncodedLen))]` are accepted as top attribute"; - if attr.path().is_ident("codec") && - attr.parse_args::>().is_err() && - attr.parse_args::>().is_err() && - attr.parse_args::>().is_err() && - attr.parse_args::>().is_err() && - codec_crate_path_inner(attr).is_none() + if attr.path().is_ident("codec") + && attr.parse_args::>().is_err() + && attr.parse_args::>().is_err() + && attr.parse_args::>().is_err() + && attr.parse_args::>().is_err() + && codec_crate_path_inner(attr).is_none() { let nested = attr.parse_args_with(Punctuated::::parse_terminated)?; if nested.len() != 1 { return Err(syn::Error::new(attr.meta.span(), top_error)); } match nested.first().expect("Just checked that there is one item; qed") { - Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") => - Ok(()), + Meta::Path(path) if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") => { + Ok(()) + }, elt => Err(syn::Error::new(elt.span(), top_error)), } diff --git a/fuzzer/src/main.rs b/fuzzer/src/main.rs index 198058e6..10c48520 100644 --- a/fuzzer/src/main.rs +++ b/fuzzer/src/main.rs @@ -269,12 +269,13 @@ fn fuzz_encode(data: T let mut obj: &[u8] = &data.encode(); let decoded = ::decode(&mut obj); match decoded { - Ok(object) => + Ok(object) => { if object != original { println!("original object: {:?}", original); println!("decoded object: {:?}", object); 
panic!("Original object differs from decoded object") - }, + } + }, Err(e) => { println!("original object: {:?}", original); println!("decoding error: {:?}", e); diff --git a/git.pach b/git.pach new file mode 100644 index 00000000..04d2232a --- /dev/null +++ b/git.pach @@ -0,0 +1,97 @@ +index 2812fc6..c803ff7 100644 +--- a/derive/src/utils.rs ++++ b/derive/src/utils.rs +@@ -43,45 +43,14 @@ where + pub fn check_indexes<'a, I: Iterator>(values: I) -> syn::Result<()> { + let mut map: HashMap = HashMap::new(); + for (i, v) in values.enumerate() { +- if let Some(index) = find_meta_item(v.attrs.iter(), |meta| { +- if let Meta::NameValue(ref nv) = meta { +- if nv.path.is_ident("index") { +- if let Expr::Lit(ExprLit { lit: Lit::Int(ref v), .. }) = nv.value { +- let byte = v +- .base10_parse::() +- .expect("Internal error, index attribute must have been checked"); +- return Some(byte); +- } +- } +- } +- None +- }) { +- if let Some(span) = map.insert(index, v.span()) { +- let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed"); +- error.combine(syn::Error::new(span, "Variant index already defined here.")); +- return Err(error) +- } +- } else { +- match v.discriminant.as_ref() { +- Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(lit_int), .. }))) => { +- let index = lit_int +- .base10_parse::() +- .expect("Internal error, index attribute must have been checked"); +- if let Some(span) = map.insert(index, v.span()) { +- let mut error = syn::Error::new(v.span(), "Duplicate variant index. qed"); +- error.combine(syn::Error::new(span, "Variant index already defined here.")); +- return Err(error) +- } +- }, +- Some((_, _)) => return Err(syn::Error::new(v.span(), "Invalid discriminant. qed")), +- None => +- if let Some(span) = map.insert(i.try_into().unwrap(), v.span()) { +- let mut error = +- syn::Error::new(span, "Custom variant index is duplicated later. 
qed"); +- error.combine(syn::Error::new(v.span(), "Variant index derived here.")); +- return Err(error) +- }, +- } ++ let index = variant_index(v, i)?; ++ if let Some(span) = map.insert(index, v.span()) { ++ let mut error = syn::Error::new( ++ v.span(), ++ "scale codec error: Invalid variant index, the variant index is duplicated.", ++ ); ++ error.combine(syn::Error::new(span, "Variant index used here.")); ++ return Err(error) + } + } + Ok(()) +@@ -89,7 +58,7 @@ pub fn check_indexes<'a, I: Iterator>(values: I) -> syn: + + /// Look for a `#[scale(index = $int)]` attribute on a variant. If no attribute + /// is found, fall back to the discriminant or just the variant index. +-pub fn variant_index(v: &Variant, index: usize) -> syn::Result { ++pub fn variant_index(v: &Variant, index: usize) -> syn::Result { + // first look for an attribute + let codec_index = find_meta_item(v.attrs.iter(), |meta| { + if let Meta::NameValue(ref nv) = meta { +@@ -106,13 +75,25 @@ pub fn variant_index(v: &Variant, index: usize) -> syn::Result { + None + }); + if let Some(index) = codec_index { +- Ok(quote! { #index }) ++ Ok(index) + } else { + match v.discriminant.as_ref() { +- Some((_, expr @ syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(_), .. }))) => +- Ok(quote! { #expr }), +- Some((_, expr)) => Err(syn::Error::new(expr.span(), "Invalid discriminant. qed")), +- None => Ok(quote! { #index }), ++ Some((_, syn::Expr::Lit(ExprLit { lit: syn::Lit::Int(v), .. }))) => { ++ let byte = v ++ .base10_parse::() ++ .expect("scale codec error: Invalid variant index, discriminant doesn't fit u8."); ++ Ok(byte) ++ }, ++ Some((_, expr)) => Err(syn::Error::new( ++ expr.span(), ++ "scale codec error: Invalid discriminant, only int literal are accepted, e.g. 
\ ++ `= 32`.", ++ )), ++ None => index.try_into().map_err(|_| syn::Error::new( ++ v.span(), ++ "scale codec error: Variant index is too large, only 256 variants are \ ++ supported.", ++ )), + } + } + } diff --git a/src/codec.rs b/src/codec.rs index 1dce353a..9307a43e 100644 --- a/src/codec.rs +++ b/src/codec.rs @@ -918,16 +918,16 @@ impl Decode for [T; N] { ) -> Result { let is_primitive = match ::TYPE_INFO { | TypeInfo::U8 | TypeInfo::I8 => true, - | TypeInfo::U16 | - TypeInfo::I16 | - TypeInfo::U32 | - TypeInfo::I32 | - TypeInfo::U64 | - TypeInfo::I64 | - TypeInfo::U128 | - TypeInfo::I128 | - TypeInfo::F32 | - TypeInfo::F64 => cfg!(target_endian = "little"), + | TypeInfo::U16 + | TypeInfo::I16 + | TypeInfo::U32 + | TypeInfo::I32 + | TypeInfo::U64 + | TypeInfo::I64 + | TypeInfo::U128 + | TypeInfo::I128 + | TypeInfo::F32 + | TypeInfo::F64 => cfg!(target_endian = "little"), TypeInfo::Unknown => false, }; diff --git a/src/compact.rs b/src/compact.rs index ffcd2ae4..c74b432c 100644 --- a/src/compact.rs +++ b/src/compact.rs @@ -364,8 +364,9 @@ impl<'a> Encode for CompactRef<'a, u32> { match self.0 { 0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2), 0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest), - 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => - ((*self.0 << 2) | 0b10).encode_to(dest), + 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => { + ((*self.0 << 2) | 0b10).encode_to(dest) + }, _ => { dest.push_byte(0b11); self.0.encode_to(dest); @@ -400,8 +401,9 @@ impl<'a> Encode for CompactRef<'a, u64> { match self.0 { 0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2), 0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest), - 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => - (((*self.0 as u32) << 2) | 0b10).encode_to(dest), + 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => { + (((*self.0 as u32) << 2) | 0b10).encode_to(dest) + }, _ => { let bytes_needed = 8 - self.0.leading_zeros() / 8; assert!( @@ -446,8 
+448,9 @@ impl<'a> Encode for CompactRef<'a, u128> { match self.0 { 0..=0b0011_1111 => dest.push_byte((*self.0 as u8) << 2), 0..=0b0011_1111_1111_1111 => (((*self.0 as u16) << 2) | 0b01).encode_to(dest), - 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => - (((*self.0 as u32) << 2) | 0b10).encode_to(dest), + 0..=0b0011_1111_1111_1111_1111_1111_1111_1111 => { + (((*self.0 as u32) << 2) | 0b10).encode_to(dest) + }, _ => { let bytes_needed = 16 - self.0.leading_zeros() / 8; assert!( diff --git a/tests/scale_codec_ui.rs b/tests/scale_codec_ui.rs index e0100b1f..663217f6 100644 --- a/tests/scale_codec_ui.rs +++ b/tests/scale_codec_ui.rs @@ -19,5 +19,4 @@ fn scale_codec_ui_tests() { let t = trybuild::TestCases::new(); t.compile_fail("tests/scale_codec_ui/*.rs"); t.pass("tests/scale_codec_ui/pass/*.rs"); - t.compile_fail("tests/scale_codec_ui/fail/*.rs"); } diff --git a/tests/scale_codec_ui/fail/codec_duplicate_index.rs b/tests/scale_codec_ui/codec_duplicate_index.rs similarity index 100% rename from tests/scale_codec_ui/fail/codec_duplicate_index.rs rename to tests/scale_codec_ui/codec_duplicate_index.rs diff --git a/tests/scale_codec_ui/fail/codec_duplicate_index.stderr b/tests/scale_codec_ui/codec_duplicate_index.stderr similarity index 100% rename from tests/scale_codec_ui/fail/codec_duplicate_index.stderr rename to tests/scale_codec_ui/codec_duplicate_index.stderr diff --git a/tests/scale_codec_ui/fail/discriminant_variant_counted_in_default_index.rs b/tests/scale_codec_ui/discriminant_variant_counted_in_default_index.rs similarity index 100% rename from tests/scale_codec_ui/fail/discriminant_variant_counted_in_default_index.rs rename to tests/scale_codec_ui/discriminant_variant_counted_in_default_index.rs diff --git a/tests/scale_codec_ui/fail/discriminant_variant_counted_in_default_index.stderr b/tests/scale_codec_ui/discriminant_variant_counted_in_default_index.stderr similarity index 100% rename from 
tests/scale_codec_ui/fail/discriminant_variant_counted_in_default_index.stderr rename to tests/scale_codec_ui/discriminant_variant_counted_in_default_index.stderr