diff --git a/crates/dump/src/lib.rs b/crates/dump/src/lib.rs index 00cfd8ff60..e5c0efcfce 100644 --- a/crates/dump/src/lib.rs +++ b/crates/dump/src/lib.rs @@ -541,7 +541,7 @@ impl<'a> Dump<'a> { fn print_ops(&mut self, mut i: OperatorsReader) -> Result<()> { while !i.eof() { - match i.visit_with_offset(self) { + match i.visit_operator(self) { Ok(()) => {} Err(_) => write!(self.state, "??")?, } @@ -610,7 +610,7 @@ fn inc(spot: &mut u32) -> u32 { macro_rules! define_visit_operator { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( - fn $visit(&mut self, _offset: usize $($(,$arg: $argty)*)?) { + fn $visit(&mut self $($(,$arg: $argty)*)?) { write!( self.state, concat!( diff --git a/crates/wasmparser/benches/benchmark.rs b/crates/wasmparser/benches/benchmark.rs index 7da82f9ec2..c853c7c48d 100644 --- a/crates/wasmparser/benches/benchmark.rs +++ b/crates/wasmparser/benches/benchmark.rs @@ -314,7 +314,7 @@ struct NopVisit; macro_rules! define_visit_operator { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( - fn $visit(&mut self, _offset: usize $($(,$arg: $argty)*)?) { + fn $visit(&mut self $($(,$arg: $argty)*)?) { define_visit_operator!(@visit $op $( $($arg)* )?); } )* diff --git a/crates/wasmparser/src/binary_reader.rs b/crates/wasmparser/src/binary_reader.rs index 1987b27b32..a7a3256d81 100644 --- a/crates/wasmparser/src/binary_reader.rs +++ b/crates/wasmparser/src/binary_reader.rs @@ -1358,12 +1358,51 @@ impl<'a> BinaryReader<'a> { Ok(BlockType::FuncType(idx as u32)) } - /// Reads the next available `Operator` and calls the respective visit method. + /// Visit the next available operator with the specified [`VisitOperator`] instance. + /// + /// Note that this does not implicitly propagate any additional information such as instruction + /// offsets. If such information is needed, store it within the visitor and update it before each visit. /// /// # Errors /// - /// If `BinaryReader` has less bytes remaining than required to parse - /// the `Operator`. + /// If `BinaryReader` has fewer bytes remaining than required to parse the `Operator`. + /// + /// # Examples + /// + /// Store an offset for use in diagnostics or other purposes: + /// + /// ``` + /// # use wasmparser::{BinaryReader, VisitOperator, Result, for_each_operator}; + /// + /// pub fn dump(mut reader: BinaryReader) -> Result<()> { + /// let mut visitor = Dumper { offset: 0 }; + /// while !reader.eof() { + /// visitor.offset = reader.original_position(); + /// reader.visit_operator(&mut visitor)?; + /// } + /// Ok(()) + /// } + /// + /// struct Dumper { + /// offset: usize + /// } + /// + /// macro_rules! define_visit_operator { + /// ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { + /// $( + /// fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output { + /// println!("{}: {}", self.offset, stringify!($visit)); + /// } + /// )* + /// } + /// } + /// + /// impl<'a> VisitOperator<'a> for Dumper { + /// type Output = (); + /// for_each_operator!(define_visit_operator); + /// } + /// + /// ``` pub fn visit_operator<T>(&mut self, visitor: &mut T) -> Result<<T as VisitOperator<'a>>::Output> where T: VisitOperator<'a>, @@ -1371,35 +1410,33 @@ impl<'a> BinaryReader<'a> { let pos = self.original_position(); let code = self.read_u8()?
as u8; Ok(match code { - 0x00 => visitor.visit_unreachable(pos), - 0x01 => visitor.visit_nop(pos), - 0x02 => visitor.visit_block(pos, self.read_block_type()?), - 0x03 => visitor.visit_loop(pos, self.read_block_type()?), - 0x04 => visitor.visit_if(pos, self.read_block_type()?), - 0x05 => visitor.visit_else(pos), - 0x06 => visitor.visit_try(pos, self.read_block_type()?), - 0x07 => visitor.visit_catch(pos, self.read_var_u32()?), - 0x08 => visitor.visit_throw(pos, self.read_var_u32()?), - 0x09 => visitor.visit_rethrow(pos, self.read_var_u32()?), - 0x0b => visitor.visit_end(pos), - 0x0c => visitor.visit_br(pos, self.read_var_u32()?), - 0x0d => visitor.visit_br_if(pos, self.read_var_u32()?), - 0x0e => visitor.visit_br_table(pos, self.read_br_table()?), - 0x0f => visitor.visit_return(pos), - 0x10 => visitor.visit_call(pos, self.read_var_u32()?), + 0x00 => visitor.visit_unreachable(), + 0x01 => visitor.visit_nop(), + 0x02 => visitor.visit_block(self.read_block_type()?), + 0x03 => visitor.visit_loop(self.read_block_type()?), + 0x04 => visitor.visit_if(self.read_block_type()?), + 0x05 => visitor.visit_else(), + 0x06 => visitor.visit_try(self.read_block_type()?), + 0x07 => visitor.visit_catch(self.read_var_u32()?), + 0x08 => visitor.visit_throw(self.read_var_u32()?), + 0x09 => visitor.visit_rethrow(self.read_var_u32()?), + 0x0b => visitor.visit_end(), + 0x0c => visitor.visit_br(self.read_var_u32()?), + 0x0d => visitor.visit_br_if(self.read_var_u32()?), + 0x0e => visitor.visit_br_table(self.read_br_table()?), + 0x0f => visitor.visit_return(), + 0x10 => visitor.visit_call(self.read_var_u32()?), 0x11 => { let index = self.read_var_u32()?; let (table_byte, table_index) = self.read_first_byte_and_var_u32()?; - visitor.visit_call_indirect(pos, index, table_index, table_byte) - } - 0x12 => visitor.visit_return_call(pos, self.read_var_u32()?), - 0x13 => { - visitor.visit_return_call_indirect(pos, self.read_var_u32()?, self.read_var_u32()?) + visitor.visit_call_indirect(index, table_index, table_byte) } - 0x18 => visitor.visit_delegate(pos, self.read_var_u32()?), - 0x19 => visitor.visit_catch_all(pos), - 0x1a => visitor.visit_drop(pos), - 0x1b => visitor.visit_select(pos), + 0x12 => visitor.visit_return_call(self.read_var_u32()?), + 0x13 => visitor.visit_return_call_indirect(self.read_var_u32()?, self.read_var_u32()?), + 0x18 => visitor.visit_delegate(self.read_var_u32()?), + 0x19 => visitor.visit_catch_all(), + 0x1a => visitor.visit_drop(), + 0x1b => visitor.visit_select(), 0x1c => { let results = self.read_var_u32()?; if results != 1 { @@ -1408,187 +1445,187 @@ impl<'a> BinaryReader<'a> { self.position, )); } - visitor.visit_typed_select(pos, self.read_val_type()?) + visitor.visit_typed_select(self.read_val_type()?) 
} - 0x20 => visitor.visit_local_get(pos, self.read_var_u32()?), - 0x21 => visitor.visit_local_set(pos, self.read_var_u32()?), - 0x22 => visitor.visit_local_tee(pos, self.read_var_u32()?), - 0x23 => visitor.visit_global_get(pos, self.read_var_u32()?), - 0x24 => visitor.visit_global_set(pos, self.read_var_u32()?), - 0x25 => visitor.visit_table_get(pos, self.read_var_u32()?), - 0x26 => visitor.visit_table_set(pos, self.read_var_u32()?), - - 0x28 => visitor.visit_i32_load(pos, self.read_memarg(2)?), - 0x29 => visitor.visit_i64_load(pos, self.read_memarg(3)?), - 0x2a => visitor.visit_f32_load(pos, self.read_memarg(2)?), - 0x2b => visitor.visit_f64_load(pos, self.read_memarg(3)?), - 0x2c => visitor.visit_i32_load8_s(pos, self.read_memarg(0)?), - 0x2d => visitor.visit_i32_load8_u(pos, self.read_memarg(0)?), - 0x2e => visitor.visit_i32_load16_s(pos, self.read_memarg(1)?), - 0x2f => visitor.visit_i32_load16_u(pos, self.read_memarg(1)?), - 0x30 => visitor.visit_i64_load8_s(pos, self.read_memarg(0)?), - 0x31 => visitor.visit_i64_load8_u(pos, self.read_memarg(0)?), - 0x32 => visitor.visit_i64_load16_s(pos, self.read_memarg(1)?), - 0x33 => visitor.visit_i64_load16_u(pos, self.read_memarg(1)?), - 0x34 => visitor.visit_i64_load32_s(pos, self.read_memarg(2)?), - 0x35 => visitor.visit_i64_load32_u(pos, self.read_memarg(2)?), - 0x36 => visitor.visit_i32_store(pos, self.read_memarg(2)?), - 0x37 => visitor.visit_i64_store(pos, self.read_memarg(3)?), - 0x38 => visitor.visit_f32_store(pos, self.read_memarg(2)?), - 0x39 => visitor.visit_f64_store(pos, self.read_memarg(3)?), - 0x3a => visitor.visit_i32_store8(pos, self.read_memarg(0)?), - 0x3b => visitor.visit_i32_store16(pos, self.read_memarg(1)?), - 0x3c => visitor.visit_i64_store8(pos, self.read_memarg(0)?), - 0x3d => visitor.visit_i64_store16(pos, self.read_memarg(1)?), - 0x3e => visitor.visit_i64_store32(pos, self.read_memarg(2)?), + 0x20 => visitor.visit_local_get(self.read_var_u32()?), + 0x21 => visitor.visit_local_set(self.read_var_u32()?), + 0x22 => visitor.visit_local_tee(self.read_var_u32()?), + 0x23 => visitor.visit_global_get(self.read_var_u32()?), + 0x24 => visitor.visit_global_set(self.read_var_u32()?), + 0x25 => visitor.visit_table_get(self.read_var_u32()?), + 0x26 => visitor.visit_table_set(self.read_var_u32()?), + + 0x28 => visitor.visit_i32_load(self.read_memarg(2)?), + 0x29 => visitor.visit_i64_load(self.read_memarg(3)?), + 0x2a => visitor.visit_f32_load(self.read_memarg(2)?), + 0x2b => visitor.visit_f64_load(self.read_memarg(3)?), + 0x2c => visitor.visit_i32_load8_s(self.read_memarg(0)?), + 0x2d => visitor.visit_i32_load8_u(self.read_memarg(0)?), + 0x2e => visitor.visit_i32_load16_s(self.read_memarg(1)?), + 0x2f => visitor.visit_i32_load16_u(self.read_memarg(1)?), + 0x30 => visitor.visit_i64_load8_s(self.read_memarg(0)?), + 0x31 => visitor.visit_i64_load8_u(self.read_memarg(0)?), + 0x32 => visitor.visit_i64_load16_s(self.read_memarg(1)?), + 0x33 => visitor.visit_i64_load16_u(self.read_memarg(1)?), + 0x34 => visitor.visit_i64_load32_s(self.read_memarg(2)?), + 0x35 => visitor.visit_i64_load32_u(self.read_memarg(2)?), + 0x36 => visitor.visit_i32_store(self.read_memarg(2)?), + 0x37 => visitor.visit_i64_store(self.read_memarg(3)?), + 0x38 => visitor.visit_f32_store(self.read_memarg(2)?), + 0x39 => visitor.visit_f64_store(self.read_memarg(3)?), + 0x3a => visitor.visit_i32_store8(self.read_memarg(0)?), + 0x3b => visitor.visit_i32_store16(self.read_memarg(1)?), + 0x3c => visitor.visit_i64_store8(self.read_memarg(0)?), + 0x3d => 
visitor.visit_i64_store16(self.read_memarg(1)?), + 0x3e => visitor.visit_i64_store32(self.read_memarg(2)?), 0x3f => { let (mem_byte, mem) = self.read_first_byte_and_var_u32()?; - visitor.visit_memory_size(pos, mem, mem_byte) + visitor.visit_memory_size(mem, mem_byte) } 0x40 => { let (mem_byte, mem) = self.read_first_byte_and_var_u32()?; - visitor.visit_memory_grow(pos, mem, mem_byte) + visitor.visit_memory_grow(mem, mem_byte) } - 0x41 => visitor.visit_i32_const(pos, self.read_var_i32()?), - 0x42 => visitor.visit_i64_const(pos, self.read_var_i64()?), - 0x43 => visitor.visit_f32_const(pos, self.read_f32()?), - 0x44 => visitor.visit_f64_const(pos, self.read_f64()?), - - 0x45 => visitor.visit_i32_eqz(pos), - 0x46 => visitor.visit_i32_eq(pos), - 0x47 => visitor.visit_i32_ne(pos), - 0x48 => visitor.visit_i32_lt_s(pos), - 0x49 => visitor.visit_i32_lt_u(pos), - 0x4a => visitor.visit_i32_gt_s(pos), - 0x4b => visitor.visit_i32_gt_u(pos), - 0x4c => visitor.visit_i32_le_s(pos), - 0x4d => visitor.visit_i32_le_u(pos), - 0x4e => visitor.visit_i32_ge_s(pos), - 0x4f => visitor.visit_i32_ge_u(pos), - 0x50 => visitor.visit_i64_eqz(pos), - 0x51 => visitor.visit_i64_eq(pos), - 0x52 => visitor.visit_i64_ne(pos), - 0x53 => visitor.visit_i64_lt_s(pos), - 0x54 => visitor.visit_i64_lt_u(pos), - 0x55 => visitor.visit_i64_gt_s(pos), - 0x56 => visitor.visit_i64_gt_u(pos), - 0x57 => visitor.visit_i64_le_s(pos), - 0x58 => visitor.visit_i64_le_u(pos), - 0x59 => visitor.visit_i64_ge_s(pos), - 0x5a => visitor.visit_i64_ge_u(pos), - 0x5b => visitor.visit_f32_eq(pos), - 0x5c => visitor.visit_f32_ne(pos), - 0x5d => visitor.visit_f32_lt(pos), - 0x5e => visitor.visit_f32_gt(pos), - 0x5f => visitor.visit_f32_le(pos), - 0x60 => visitor.visit_f32_ge(pos), - 0x61 => visitor.visit_f64_eq(pos), - 0x62 => visitor.visit_f64_ne(pos), - 0x63 => visitor.visit_f64_lt(pos), - 0x64 => visitor.visit_f64_gt(pos), - 0x65 => visitor.visit_f64_le(pos), - 0x66 => visitor.visit_f64_ge(pos), - 0x67 => visitor.visit_i32_clz(pos), - 0x68 => visitor.visit_i32_ctz(pos), - 0x69 => visitor.visit_i32_popcnt(pos), - 0x6a => visitor.visit_i32_add(pos), - 0x6b => visitor.visit_i32_sub(pos), - 0x6c => visitor.visit_i32_mul(pos), - 0x6d => visitor.visit_i32_div_s(pos), - 0x6e => visitor.visit_i32_div_u(pos), - 0x6f => visitor.visit_i32_rem_s(pos), - 0x70 => visitor.visit_i32_rem_u(pos), - 0x71 => visitor.visit_i32_and(pos), - 0x72 => visitor.visit_i32_or(pos), - 0x73 => visitor.visit_i32_xor(pos), - 0x74 => visitor.visit_i32_shl(pos), - 0x75 => visitor.visit_i32_shr_s(pos), - 0x76 => visitor.visit_i32_shr_u(pos), - 0x77 => visitor.visit_i32_rotl(pos), - 0x78 => visitor.visit_i32_rotr(pos), - 0x79 => visitor.visit_i64_clz(pos), - 0x7a => visitor.visit_i64_ctz(pos), - 0x7b => visitor.visit_i64_popcnt(pos), - 0x7c => visitor.visit_i64_add(pos), - 0x7d => visitor.visit_i64_sub(pos), - 0x7e => visitor.visit_i64_mul(pos), - 0x7f => visitor.visit_i64_div_s(pos), - 0x80 => visitor.visit_i64_div_u(pos), - 0x81 => visitor.visit_i64_rem_s(pos), - 0x82 => visitor.visit_i64_rem_u(pos), - 0x83 => visitor.visit_i64_and(pos), - 0x84 => visitor.visit_i64_or(pos), - 0x85 => visitor.visit_i64_xor(pos), - 0x86 => visitor.visit_i64_shl(pos), - 0x87 => visitor.visit_i64_shr_s(pos), - 0x88 => visitor.visit_i64_shr_u(pos), - 0x89 => visitor.visit_i64_rotl(pos), - 0x8a => visitor.visit_i64_rotr(pos), - 0x8b => visitor.visit_f32_abs(pos), - 0x8c => visitor.visit_f32_neg(pos), - 0x8d => visitor.visit_f32_ceil(pos), - 0x8e => visitor.visit_f32_floor(pos), - 0x8f => 
visitor.visit_f32_trunc(pos), - 0x90 => visitor.visit_f32_nearest(pos), - 0x91 => visitor.visit_f32_sqrt(pos), - 0x92 => visitor.visit_f32_add(pos), - 0x93 => visitor.visit_f32_sub(pos), - 0x94 => visitor.visit_f32_mul(pos), - 0x95 => visitor.visit_f32_div(pos), - 0x96 => visitor.visit_f32_min(pos), - 0x97 => visitor.visit_f32_max(pos), - 0x98 => visitor.visit_f32_copysign(pos), - 0x99 => visitor.visit_f64_abs(pos), - 0x9a => visitor.visit_f64_neg(pos), - 0x9b => visitor.visit_f64_ceil(pos), - 0x9c => visitor.visit_f64_floor(pos), - 0x9d => visitor.visit_f64_trunc(pos), - 0x9e => visitor.visit_f64_nearest(pos), - 0x9f => visitor.visit_f64_sqrt(pos), - 0xa0 => visitor.visit_f64_add(pos), - 0xa1 => visitor.visit_f64_sub(pos), - 0xa2 => visitor.visit_f64_mul(pos), - 0xa3 => visitor.visit_f64_div(pos), - 0xa4 => visitor.visit_f64_min(pos), - 0xa5 => visitor.visit_f64_max(pos), - 0xa6 => visitor.visit_f64_copysign(pos), - 0xa7 => visitor.visit_i32_wrap_i64(pos), - 0xa8 => visitor.visit_i32_trunc_f32_s(pos), - 0xa9 => visitor.visit_i32_trunc_f32_u(pos), - 0xaa => visitor.visit_i32_trunc_f64_s(pos), - 0xab => visitor.visit_i32_trunc_f64_u(pos), - 0xac => visitor.visit_i64_extend_i32_s(pos), - 0xad => visitor.visit_i64_extend_i32_u(pos), - 0xae => visitor.visit_i64_trunc_f32_s(pos), - 0xaf => visitor.visit_i64_trunc_f32_u(pos), - 0xb0 => visitor.visit_i64_trunc_f64_s(pos), - 0xb1 => visitor.visit_i64_trunc_f64_u(pos), - 0xb2 => visitor.visit_f32_convert_i32_s(pos), - 0xb3 => visitor.visit_f32_convert_i32_u(pos), - 0xb4 => visitor.visit_f32_convert_i64_s(pos), - 0xb5 => visitor.visit_f32_convert_i64_u(pos), - 0xb6 => visitor.visit_f32_demote_f64(pos), - 0xb7 => visitor.visit_f64_convert_i32_s(pos), - 0xb8 => visitor.visit_f64_convert_i32_u(pos), - 0xb9 => visitor.visit_f64_convert_i64_s(pos), - 0xba => visitor.visit_f64_convert_i64_u(pos), - 0xbb => visitor.visit_f64_promote_f32(pos), - 0xbc => visitor.visit_i32_reinterpret_f32(pos), - 0xbd => visitor.visit_i64_reinterpret_f64(pos), - 0xbe => visitor.visit_f32_reinterpret_i32(pos), - 0xbf => visitor.visit_f64_reinterpret_i64(pos), - - 0xc0 => visitor.visit_i32_extend8_s(pos), - 0xc1 => visitor.visit_i32_extend16_s(pos), - 0xc2 => visitor.visit_i64_extend8_s(pos), - 0xc3 => visitor.visit_i64_extend16_s(pos), - 0xc4 => visitor.visit_i64_extend32_s(pos), - - 0xd0 => visitor.visit_ref_null(pos, self.read_val_type()?), - 0xd1 => visitor.visit_ref_is_null(pos), - 0xd2 => visitor.visit_ref_func(pos, self.read_var_u32()?), + 0x41 => visitor.visit_i32_const(self.read_var_i32()?), + 0x42 => visitor.visit_i64_const(self.read_var_i64()?), + 0x43 => visitor.visit_f32_const(self.read_f32()?), + 0x44 => visitor.visit_f64_const(self.read_f64()?), + + 0x45 => visitor.visit_i32_eqz(), + 0x46 => visitor.visit_i32_eq(), + 0x47 => visitor.visit_i32_ne(), + 0x48 => visitor.visit_i32_lt_s(), + 0x49 => visitor.visit_i32_lt_u(), + 0x4a => visitor.visit_i32_gt_s(), + 0x4b => visitor.visit_i32_gt_u(), + 0x4c => visitor.visit_i32_le_s(), + 0x4d => visitor.visit_i32_le_u(), + 0x4e => visitor.visit_i32_ge_s(), + 0x4f => visitor.visit_i32_ge_u(), + 0x50 => visitor.visit_i64_eqz(), + 0x51 => visitor.visit_i64_eq(), + 0x52 => visitor.visit_i64_ne(), + 0x53 => visitor.visit_i64_lt_s(), + 0x54 => visitor.visit_i64_lt_u(), + 0x55 => visitor.visit_i64_gt_s(), + 0x56 => visitor.visit_i64_gt_u(), + 0x57 => visitor.visit_i64_le_s(), + 0x58 => visitor.visit_i64_le_u(), + 0x59 => visitor.visit_i64_ge_s(), + 0x5a => visitor.visit_i64_ge_u(), + 0x5b => visitor.visit_f32_eq(), + 0x5c => 
visitor.visit_f32_ne(), + 0x5d => visitor.visit_f32_lt(), + 0x5e => visitor.visit_f32_gt(), + 0x5f => visitor.visit_f32_le(), + 0x60 => visitor.visit_f32_ge(), + 0x61 => visitor.visit_f64_eq(), + 0x62 => visitor.visit_f64_ne(), + 0x63 => visitor.visit_f64_lt(), + 0x64 => visitor.visit_f64_gt(), + 0x65 => visitor.visit_f64_le(), + 0x66 => visitor.visit_f64_ge(), + 0x67 => visitor.visit_i32_clz(), + 0x68 => visitor.visit_i32_ctz(), + 0x69 => visitor.visit_i32_popcnt(), + 0x6a => visitor.visit_i32_add(), + 0x6b => visitor.visit_i32_sub(), + 0x6c => visitor.visit_i32_mul(), + 0x6d => visitor.visit_i32_div_s(), + 0x6e => visitor.visit_i32_div_u(), + 0x6f => visitor.visit_i32_rem_s(), + 0x70 => visitor.visit_i32_rem_u(), + 0x71 => visitor.visit_i32_and(), + 0x72 => visitor.visit_i32_or(), + 0x73 => visitor.visit_i32_xor(), + 0x74 => visitor.visit_i32_shl(), + 0x75 => visitor.visit_i32_shr_s(), + 0x76 => visitor.visit_i32_shr_u(), + 0x77 => visitor.visit_i32_rotl(), + 0x78 => visitor.visit_i32_rotr(), + 0x79 => visitor.visit_i64_clz(), + 0x7a => visitor.visit_i64_ctz(), + 0x7b => visitor.visit_i64_popcnt(), + 0x7c => visitor.visit_i64_add(), + 0x7d => visitor.visit_i64_sub(), + 0x7e => visitor.visit_i64_mul(), + 0x7f => visitor.visit_i64_div_s(), + 0x80 => visitor.visit_i64_div_u(), + 0x81 => visitor.visit_i64_rem_s(), + 0x82 => visitor.visit_i64_rem_u(), + 0x83 => visitor.visit_i64_and(), + 0x84 => visitor.visit_i64_or(), + 0x85 => visitor.visit_i64_xor(), + 0x86 => visitor.visit_i64_shl(), + 0x87 => visitor.visit_i64_shr_s(), + 0x88 => visitor.visit_i64_shr_u(), + 0x89 => visitor.visit_i64_rotl(), + 0x8a => visitor.visit_i64_rotr(), + 0x8b => visitor.visit_f32_abs(), + 0x8c => visitor.visit_f32_neg(), + 0x8d => visitor.visit_f32_ceil(), + 0x8e => visitor.visit_f32_floor(), + 0x8f => visitor.visit_f32_trunc(), + 0x90 => visitor.visit_f32_nearest(), + 0x91 => visitor.visit_f32_sqrt(), + 0x92 => visitor.visit_f32_add(), + 0x93 => visitor.visit_f32_sub(), + 0x94 => visitor.visit_f32_mul(), + 0x95 => visitor.visit_f32_div(), + 0x96 => visitor.visit_f32_min(), + 0x97 => visitor.visit_f32_max(), + 0x98 => visitor.visit_f32_copysign(), + 0x99 => visitor.visit_f64_abs(), + 0x9a => visitor.visit_f64_neg(), + 0x9b => visitor.visit_f64_ceil(), + 0x9c => visitor.visit_f64_floor(), + 0x9d => visitor.visit_f64_trunc(), + 0x9e => visitor.visit_f64_nearest(), + 0x9f => visitor.visit_f64_sqrt(), + 0xa0 => visitor.visit_f64_add(), + 0xa1 => visitor.visit_f64_sub(), + 0xa2 => visitor.visit_f64_mul(), + 0xa3 => visitor.visit_f64_div(), + 0xa4 => visitor.visit_f64_min(), + 0xa5 => visitor.visit_f64_max(), + 0xa6 => visitor.visit_f64_copysign(), + 0xa7 => visitor.visit_i32_wrap_i64(), + 0xa8 => visitor.visit_i32_trunc_f32_s(), + 0xa9 => visitor.visit_i32_trunc_f32_u(), + 0xaa => visitor.visit_i32_trunc_f64_s(), + 0xab => visitor.visit_i32_trunc_f64_u(), + 0xac => visitor.visit_i64_extend_i32_s(), + 0xad => visitor.visit_i64_extend_i32_u(), + 0xae => visitor.visit_i64_trunc_f32_s(), + 0xaf => visitor.visit_i64_trunc_f32_u(), + 0xb0 => visitor.visit_i64_trunc_f64_s(), + 0xb1 => visitor.visit_i64_trunc_f64_u(), + 0xb2 => visitor.visit_f32_convert_i32_s(), + 0xb3 => visitor.visit_f32_convert_i32_u(), + 0xb4 => visitor.visit_f32_convert_i64_s(), + 0xb5 => visitor.visit_f32_convert_i64_u(), + 0xb6 => visitor.visit_f32_demote_f64(), + 0xb7 => visitor.visit_f64_convert_i32_s(), + 0xb8 => visitor.visit_f64_convert_i32_u(), + 0xb9 => visitor.visit_f64_convert_i64_s(), + 0xba => visitor.visit_f64_convert_i64_u(), + 0xbb => 
visitor.visit_f64_promote_f32(), + 0xbc => visitor.visit_i32_reinterpret_f32(), + 0xbd => visitor.visit_i64_reinterpret_f64(), + 0xbe => visitor.visit_f32_reinterpret_i32(), + 0xbf => visitor.visit_f64_reinterpret_i64(), + + 0xc0 => visitor.visit_i32_extend8_s(), + 0xc1 => visitor.visit_i32_extend16_s(), + 0xc2 => visitor.visit_i64_extend8_s(), + 0xc3 => visitor.visit_i64_extend16_s(), + 0xc4 => visitor.visit_i64_extend32_s(), + + 0xd0 => visitor.visit_ref_null(self.read_val_type()?), + 0xd1 => visitor.visit_ref_is_null(), + 0xd2 => visitor.visit_ref_func(self.read_var_u32()?), 0xfc => self.visit_0xfc_operator(pos, visitor)?, 0xfd => self.visit_0xfd_operator(pos, visitor)?, @@ -1608,60 +1645,60 @@ impl<'a> BinaryReader<'a> { { let code = self.read_var_u32()?; Ok(match code { - 0x00 => visitor.visit_i32_trunc_sat_f32_s(pos), - 0x01 => visitor.visit_i32_trunc_sat_f32_u(pos), - 0x02 => visitor.visit_i32_trunc_sat_f64_s(pos), - 0x03 => visitor.visit_i32_trunc_sat_f64_u(pos), - 0x04 => visitor.visit_i64_trunc_sat_f32_s(pos), - 0x05 => visitor.visit_i64_trunc_sat_f32_u(pos), - 0x06 => visitor.visit_i64_trunc_sat_f64_s(pos), - 0x07 => visitor.visit_i64_trunc_sat_f64_u(pos), + 0x00 => visitor.visit_i32_trunc_sat_f32_s(), + 0x01 => visitor.visit_i32_trunc_sat_f32_u(), + 0x02 => visitor.visit_i32_trunc_sat_f64_s(), + 0x03 => visitor.visit_i32_trunc_sat_f64_u(), + 0x04 => visitor.visit_i64_trunc_sat_f32_s(), + 0x05 => visitor.visit_i64_trunc_sat_f32_u(), + 0x06 => visitor.visit_i64_trunc_sat_f64_s(), + 0x07 => visitor.visit_i64_trunc_sat_f64_u(), 0x08 => { let segment = self.read_var_u32()?; let mem = self.read_var_u32()?; - visitor.visit_memory_init(pos, segment, mem) + visitor.visit_memory_init(segment, mem) } 0x09 => { let segment = self.read_var_u32()?; - visitor.visit_data_drop(pos, segment) + visitor.visit_data_drop(segment) } 0x0a => { let dst = self.read_var_u32()?; let src = self.read_var_u32()?; - visitor.visit_memory_copy(pos, dst, src) + visitor.visit_memory_copy(dst, src) } 0x0b => { let mem = self.read_var_u32()?; - visitor.visit_memory_fill(pos, mem) + visitor.visit_memory_fill(mem) } 0x0c => { let segment = self.read_var_u32()?; let table = self.read_var_u32()?; - visitor.visit_table_init(pos, segment, table) + visitor.visit_table_init(segment, table) } 0x0d => { let segment = self.read_var_u32()?; - visitor.visit_elem_drop(pos, segment) + visitor.visit_elem_drop(segment) } 0x0e => { let dst_table = self.read_var_u32()?; let src_table = self.read_var_u32()?; - visitor.visit_table_copy(pos, dst_table, src_table) + visitor.visit_table_copy(dst_table, src_table) } 0x0f => { let table = self.read_var_u32()?; - visitor.visit_table_grow(pos, table) + visitor.visit_table_grow(table) } 0x10 => { let table = self.read_var_u32()?; - visitor.visit_table_size(pos, table) + visitor.visit_table_size(table) } 0x11 => { let table = self.read_var_u32()?; - visitor.visit_table_fill(pos, table) + visitor.visit_table_fill(table) } _ => bail!(pos, "unknown 0xfc subopcode: 0x{code:x}"), @@ -1678,308 +1715,308 @@ impl<'a> BinaryReader<'a> { { let code = self.read_var_u32()?; Ok(match code { - 0x00 => visitor.visit_v128_load(pos, self.read_memarg(4)?), - 0x01 => visitor.visit_v128_load8x8_s(pos, self.read_memarg(3)?), - 0x02 => visitor.visit_v128_load8x8_u(pos, self.read_memarg(3)?), - 0x03 => visitor.visit_v128_load16x4_s(pos, self.read_memarg(3)?), - 0x04 => visitor.visit_v128_load16x4_u(pos, self.read_memarg(3)?), - 0x05 => visitor.visit_v128_load32x2_s(pos, self.read_memarg(3)?), - 0x06 => 
visitor.visit_v128_load32x2_u(pos, self.read_memarg(3)?), - 0x07 => visitor.visit_v128_load8_splat(pos, self.read_memarg(0)?), - 0x08 => visitor.visit_v128_load16_splat(pos, self.read_memarg(1)?), - 0x09 => visitor.visit_v128_load32_splat(pos, self.read_memarg(2)?), - 0x0a => visitor.visit_v128_load64_splat(pos, self.read_memarg(3)?), - - 0x0b => visitor.visit_v128_store(pos, self.read_memarg(4)?), - 0x0c => visitor.visit_v128_const(pos, self.read_v128()?), + 0x00 => visitor.visit_v128_load(self.read_memarg(4)?), + 0x01 => visitor.visit_v128_load8x8_s(self.read_memarg(3)?), + 0x02 => visitor.visit_v128_load8x8_u(self.read_memarg(3)?), + 0x03 => visitor.visit_v128_load16x4_s(self.read_memarg(3)?), + 0x04 => visitor.visit_v128_load16x4_u(self.read_memarg(3)?), + 0x05 => visitor.visit_v128_load32x2_s(self.read_memarg(3)?), + 0x06 => visitor.visit_v128_load32x2_u(self.read_memarg(3)?), + 0x07 => visitor.visit_v128_load8_splat(self.read_memarg(0)?), + 0x08 => visitor.visit_v128_load16_splat(self.read_memarg(1)?), + 0x09 => visitor.visit_v128_load32_splat(self.read_memarg(2)?), + 0x0a => visitor.visit_v128_load64_splat(self.read_memarg(3)?), + + 0x0b => visitor.visit_v128_store(self.read_memarg(4)?), + 0x0c => visitor.visit_v128_const(self.read_v128()?), 0x0d => { let mut lanes: [u8; 16] = [0; 16]; for lane in &mut lanes { *lane = self.read_lane_index(32)? } - visitor.visit_i8x16_shuffle(pos, lanes) + visitor.visit_i8x16_shuffle(lanes) } - 0x0e => visitor.visit_i8x16_swizzle(pos), - 0x0f => visitor.visit_i8x16_splat(pos), - 0x10 => visitor.visit_i16x8_splat(pos), - 0x11 => visitor.visit_i32x4_splat(pos), - 0x12 => visitor.visit_i64x2_splat(pos), - 0x13 => visitor.visit_f32x4_splat(pos), - 0x14 => visitor.visit_f64x2_splat(pos), - - 0x15 => visitor.visit_i8x16_extract_lane_s(pos, self.read_lane_index(16)?), - 0x16 => visitor.visit_i8x16_extract_lane_u(pos, self.read_lane_index(16)?), - 0x17 => visitor.visit_i8x16_replace_lane(pos, self.read_lane_index(16)?), - 0x18 => visitor.visit_i16x8_extract_lane_s(pos, self.read_lane_index(8)?), - 0x19 => visitor.visit_i16x8_extract_lane_u(pos, self.read_lane_index(8)?), - 0x1a => visitor.visit_i16x8_replace_lane(pos, self.read_lane_index(8)?), - 0x1b => visitor.visit_i32x4_extract_lane(pos, self.read_lane_index(4)?), - - 0x1c => visitor.visit_i32x4_replace_lane(pos, self.read_lane_index(4)?), - 0x1d => visitor.visit_i64x2_extract_lane(pos, self.read_lane_index(2)?), - 0x1e => visitor.visit_i64x2_replace_lane(pos, self.read_lane_index(2)?), - 0x1f => visitor.visit_f32x4_extract_lane(pos, self.read_lane_index(4)?), - 0x20 => visitor.visit_f32x4_replace_lane(pos, self.read_lane_index(4)?), - 0x21 => visitor.visit_f64x2_extract_lane(pos, self.read_lane_index(2)?), - 0x22 => visitor.visit_f64x2_replace_lane(pos, self.read_lane_index(2)?), - - 0x23 => visitor.visit_i8x16_eq(pos), - 0x24 => visitor.visit_i8x16_ne(pos), - 0x25 => visitor.visit_i8x16_lt_s(pos), - 0x26 => visitor.visit_i8x16_lt_u(pos), - 0x27 => visitor.visit_i8x16_gt_s(pos), - 0x28 => visitor.visit_i8x16_gt_u(pos), - 0x29 => visitor.visit_i8x16_le_s(pos), - 0x2a => visitor.visit_i8x16_le_u(pos), - 0x2b => visitor.visit_i8x16_ge_s(pos), - 0x2c => visitor.visit_i8x16_ge_u(pos), - 0x2d => visitor.visit_i16x8_eq(pos), - 0x2e => visitor.visit_i16x8_ne(pos), - 0x2f => visitor.visit_i16x8_lt_s(pos), - 0x30 => visitor.visit_i16x8_lt_u(pos), - 0x31 => visitor.visit_i16x8_gt_s(pos), - 0x32 => visitor.visit_i16x8_gt_u(pos), - 0x33 => visitor.visit_i16x8_le_s(pos), - 0x34 => visitor.visit_i16x8_le_u(pos), - 
0x35 => visitor.visit_i16x8_ge_s(pos), - 0x36 => visitor.visit_i16x8_ge_u(pos), - 0x37 => visitor.visit_i32x4_eq(pos), - 0x38 => visitor.visit_i32x4_ne(pos), - 0x39 => visitor.visit_i32x4_lt_s(pos), - 0x3a => visitor.visit_i32x4_lt_u(pos), - 0x3b => visitor.visit_i32x4_gt_s(pos), - 0x3c => visitor.visit_i32x4_gt_u(pos), - 0x3d => visitor.visit_i32x4_le_s(pos), - 0x3e => visitor.visit_i32x4_le_u(pos), - 0x3f => visitor.visit_i32x4_ge_s(pos), - 0x40 => visitor.visit_i32x4_ge_u(pos), - 0x41 => visitor.visit_f32x4_eq(pos), - 0x42 => visitor.visit_f32x4_ne(pos), - 0x43 => visitor.visit_f32x4_lt(pos), - 0x44 => visitor.visit_f32x4_gt(pos), - 0x45 => visitor.visit_f32x4_le(pos), - 0x46 => visitor.visit_f32x4_ge(pos), - 0x47 => visitor.visit_f64x2_eq(pos), - 0x48 => visitor.visit_f64x2_ne(pos), - 0x49 => visitor.visit_f64x2_lt(pos), - 0x4a => visitor.visit_f64x2_gt(pos), - 0x4b => visitor.visit_f64x2_le(pos), - 0x4c => visitor.visit_f64x2_ge(pos), - 0x4d => visitor.visit_v128_not(pos), - 0x4e => visitor.visit_v128_and(pos), - 0x4f => visitor.visit_v128_andnot(pos), - 0x50 => visitor.visit_v128_or(pos), - 0x51 => visitor.visit_v128_xor(pos), - 0x52 => visitor.visit_v128_bitselect(pos), - 0x53 => visitor.visit_v128_any_true(pos), + 0x0e => visitor.visit_i8x16_swizzle(), + 0x0f => visitor.visit_i8x16_splat(), + 0x10 => visitor.visit_i16x8_splat(), + 0x11 => visitor.visit_i32x4_splat(), + 0x12 => visitor.visit_i64x2_splat(), + 0x13 => visitor.visit_f32x4_splat(), + 0x14 => visitor.visit_f64x2_splat(), + + 0x15 => visitor.visit_i8x16_extract_lane_s(self.read_lane_index(16)?), + 0x16 => visitor.visit_i8x16_extract_lane_u(self.read_lane_index(16)?), + 0x17 => visitor.visit_i8x16_replace_lane(self.read_lane_index(16)?), + 0x18 => visitor.visit_i16x8_extract_lane_s(self.read_lane_index(8)?), + 0x19 => visitor.visit_i16x8_extract_lane_u(self.read_lane_index(8)?), + 0x1a => visitor.visit_i16x8_replace_lane(self.read_lane_index(8)?), + 0x1b => visitor.visit_i32x4_extract_lane(self.read_lane_index(4)?), + + 0x1c => visitor.visit_i32x4_replace_lane(self.read_lane_index(4)?), + 0x1d => visitor.visit_i64x2_extract_lane(self.read_lane_index(2)?), + 0x1e => visitor.visit_i64x2_replace_lane(self.read_lane_index(2)?), + 0x1f => visitor.visit_f32x4_extract_lane(self.read_lane_index(4)?), + 0x20 => visitor.visit_f32x4_replace_lane(self.read_lane_index(4)?), + 0x21 => visitor.visit_f64x2_extract_lane(self.read_lane_index(2)?), + 0x22 => visitor.visit_f64x2_replace_lane(self.read_lane_index(2)?), + + 0x23 => visitor.visit_i8x16_eq(), + 0x24 => visitor.visit_i8x16_ne(), + 0x25 => visitor.visit_i8x16_lt_s(), + 0x26 => visitor.visit_i8x16_lt_u(), + 0x27 => visitor.visit_i8x16_gt_s(), + 0x28 => visitor.visit_i8x16_gt_u(), + 0x29 => visitor.visit_i8x16_le_s(), + 0x2a => visitor.visit_i8x16_le_u(), + 0x2b => visitor.visit_i8x16_ge_s(), + 0x2c => visitor.visit_i8x16_ge_u(), + 0x2d => visitor.visit_i16x8_eq(), + 0x2e => visitor.visit_i16x8_ne(), + 0x2f => visitor.visit_i16x8_lt_s(), + 0x30 => visitor.visit_i16x8_lt_u(), + 0x31 => visitor.visit_i16x8_gt_s(), + 0x32 => visitor.visit_i16x8_gt_u(), + 0x33 => visitor.visit_i16x8_le_s(), + 0x34 => visitor.visit_i16x8_le_u(), + 0x35 => visitor.visit_i16x8_ge_s(), + 0x36 => visitor.visit_i16x8_ge_u(), + 0x37 => visitor.visit_i32x4_eq(), + 0x38 => visitor.visit_i32x4_ne(), + 0x39 => visitor.visit_i32x4_lt_s(), + 0x3a => visitor.visit_i32x4_lt_u(), + 0x3b => visitor.visit_i32x4_gt_s(), + 0x3c => visitor.visit_i32x4_gt_u(), + 0x3d => visitor.visit_i32x4_le_s(), + 0x3e => 
visitor.visit_i32x4_le_u(), + 0x3f => visitor.visit_i32x4_ge_s(), + 0x40 => visitor.visit_i32x4_ge_u(), + 0x41 => visitor.visit_f32x4_eq(), + 0x42 => visitor.visit_f32x4_ne(), + 0x43 => visitor.visit_f32x4_lt(), + 0x44 => visitor.visit_f32x4_gt(), + 0x45 => visitor.visit_f32x4_le(), + 0x46 => visitor.visit_f32x4_ge(), + 0x47 => visitor.visit_f64x2_eq(), + 0x48 => visitor.visit_f64x2_ne(), + 0x49 => visitor.visit_f64x2_lt(), + 0x4a => visitor.visit_f64x2_gt(), + 0x4b => visitor.visit_f64x2_le(), + 0x4c => visitor.visit_f64x2_ge(), + 0x4d => visitor.visit_v128_not(), + 0x4e => visitor.visit_v128_and(), + 0x4f => visitor.visit_v128_andnot(), + 0x50 => visitor.visit_v128_or(), + 0x51 => visitor.visit_v128_xor(), + 0x52 => visitor.visit_v128_bitselect(), + 0x53 => visitor.visit_v128_any_true(), 0x54 => { let memarg = self.read_memarg(0)?; let lane = self.read_lane_index(16)?; - visitor.visit_v128_load8_lane(pos, memarg, lane) + visitor.visit_v128_load8_lane(memarg, lane) } 0x55 => { let memarg = self.read_memarg(1)?; let lane = self.read_lane_index(8)?; - visitor.visit_v128_load16_lane(pos, memarg, lane) + visitor.visit_v128_load16_lane(memarg, lane) } 0x56 => { let memarg = self.read_memarg(2)?; let lane = self.read_lane_index(4)?; - visitor.visit_v128_load32_lane(pos, memarg, lane) + visitor.visit_v128_load32_lane(memarg, lane) } 0x57 => { let memarg = self.read_memarg(3)?; let lane = self.read_lane_index(2)?; - visitor.visit_v128_load64_lane(pos, memarg, lane) + visitor.visit_v128_load64_lane(memarg, lane) } 0x58 => { let memarg = self.read_memarg(0)?; let lane = self.read_lane_index(16)?; - visitor.visit_v128_store8_lane(pos, memarg, lane) + visitor.visit_v128_store8_lane(memarg, lane) } 0x59 => { let memarg = self.read_memarg(1)?; let lane = self.read_lane_index(8)?; - visitor.visit_v128_store16_lane(pos, memarg, lane) + visitor.visit_v128_store16_lane(memarg, lane) } 0x5a => { let memarg = self.read_memarg(2)?; let lane = self.read_lane_index(4)?; - visitor.visit_v128_store32_lane(pos, memarg, lane) + visitor.visit_v128_store32_lane(memarg, lane) } 0x5b => { let memarg = self.read_memarg(3)?; let lane = self.read_lane_index(2)?; - visitor.visit_v128_store64_lane(pos, memarg, lane) + visitor.visit_v128_store64_lane(memarg, lane) } - 0x5c => visitor.visit_v128_load32_zero(pos, self.read_memarg(2)?), - 0x5d => visitor.visit_v128_load64_zero(pos, self.read_memarg(3)?), - 0x5e => visitor.visit_f32x4_demote_f64x2_zero(pos), - 0x5f => visitor.visit_f64x2_promote_low_f32x4(pos), - 0x60 => visitor.visit_i8x16_abs(pos), - 0x61 => visitor.visit_i8x16_neg(pos), - 0x62 => visitor.visit_i8x16_popcnt(pos), - 0x63 => visitor.visit_i8x16_all_true(pos), - 0x64 => visitor.visit_i8x16_bitmask(pos), - 0x65 => visitor.visit_i8x16_narrow_i16x8_s(pos), - 0x66 => visitor.visit_i8x16_narrow_i16x8_u(pos), - 0x67 => visitor.visit_f32x4_ceil(pos), - 0x68 => visitor.visit_f32x4_floor(pos), - 0x69 => visitor.visit_f32x4_trunc(pos), - 0x6a => visitor.visit_f32x4_nearest(pos), - 0x6b => visitor.visit_i8x16_shl(pos), - 0x6c => visitor.visit_i8x16_shr_s(pos), - 0x6d => visitor.visit_i8x16_shr_u(pos), - 0x6e => visitor.visit_i8x16_add(pos), - 0x6f => visitor.visit_i8x16_add_sat_s(pos), - 0x70 => visitor.visit_i8x16_add_sat_u(pos), - 0x71 => visitor.visit_i8x16_sub(pos), - 0x72 => visitor.visit_i8x16_sub_sat_s(pos), - 0x73 => visitor.visit_i8x16_sub_sat_u(pos), - 0x74 => visitor.visit_f64x2_ceil(pos), - 0x75 => visitor.visit_f64x2_floor(pos), - 0x76 => visitor.visit_i8x16_min_s(pos), - 0x77 => visitor.visit_i8x16_min_u(pos), 
- 0x78 => visitor.visit_i8x16_max_s(pos), - 0x79 => visitor.visit_i8x16_max_u(pos), - 0x7a => visitor.visit_f64x2_trunc(pos), - 0x7b => visitor.visit_i8x16_avgr_u(pos), - 0x7c => visitor.visit_i16x8_extadd_pairwise_i8x16_s(pos), - 0x7d => visitor.visit_i16x8_extadd_pairwise_i8x16_u(pos), - 0x7e => visitor.visit_i32x4_extadd_pairwise_i16x8_s(pos), - 0x7f => visitor.visit_i32x4_extadd_pairwise_i16x8_u(pos), - 0x80 => visitor.visit_i16x8_abs(pos), - 0x81 => visitor.visit_i16x8_neg(pos), - 0x82 => visitor.visit_i16x8_q15mulr_sat_s(pos), - 0x83 => visitor.visit_i16x8_all_true(pos), - 0x84 => visitor.visit_i16x8_bitmask(pos), - 0x85 => visitor.visit_i16x8_narrow_i32x4_s(pos), - 0x86 => visitor.visit_i16x8_narrow_i32x4_u(pos), - 0x87 => visitor.visit_i16x8_extend_low_i8x16_s(pos), - 0x88 => visitor.visit_i16x8_extend_high_i8x16_s(pos), - 0x89 => visitor.visit_i16x8_extend_low_i8x16_u(pos), - 0x8a => visitor.visit_i16x8_extend_high_i8x16_u(pos), - 0x8b => visitor.visit_i16x8_shl(pos), - 0x8c => visitor.visit_i16x8_shr_s(pos), - 0x8d => visitor.visit_i16x8_shr_u(pos), - 0x8e => visitor.visit_i16x8_add(pos), - 0x8f => visitor.visit_i16x8_add_sat_s(pos), - 0x90 => visitor.visit_i16x8_add_sat_u(pos), - 0x91 => visitor.visit_i16x8_sub(pos), - 0x92 => visitor.visit_i16x8_sub_sat_s(pos), - 0x93 => visitor.visit_i16x8_sub_sat_u(pos), - 0x94 => visitor.visit_f64x2_nearest(pos), - 0x95 => visitor.visit_i16x8_mul(pos), - 0x96 => visitor.visit_i16x8_min_s(pos), - 0x97 => visitor.visit_i16x8_min_u(pos), - 0x98 => visitor.visit_i16x8_max_s(pos), - 0x99 => visitor.visit_i16x8_max_u(pos), - 0x9b => visitor.visit_i16x8_avgr_u(pos), - 0x9c => visitor.visit_i16x8_extmul_low_i8x16_s(pos), - 0x9d => visitor.visit_i16x8_extmul_high_i8x16_s(pos), - 0x9e => visitor.visit_i16x8_extmul_low_i8x16_u(pos), - 0x9f => visitor.visit_i16x8_extmul_high_i8x16_u(pos), - 0xa0 => visitor.visit_i32x4_abs(pos), - 0xa1 => visitor.visit_i32x4_neg(pos), - 0xa3 => visitor.visit_i32x4_all_true(pos), - 0xa4 => visitor.visit_i32x4_bitmask(pos), - 0xa7 => visitor.visit_i32x4_extend_low_i16x8_s(pos), - 0xa8 => visitor.visit_i32x4_extend_high_i16x8_s(pos), - 0xa9 => visitor.visit_i32x4_extend_low_i16x8_u(pos), - 0xaa => visitor.visit_i32x4_extend_high_i16x8_u(pos), - 0xab => visitor.visit_i32x4_shl(pos), - 0xac => visitor.visit_i32x4_shr_s(pos), - 0xad => visitor.visit_i32x4_shr_u(pos), - 0xae => visitor.visit_i32x4_add(pos), - 0xb1 => visitor.visit_i32x4_sub(pos), - 0xb5 => visitor.visit_i32x4_mul(pos), - 0xb6 => visitor.visit_i32x4_min_s(pos), - 0xb7 => visitor.visit_i32x4_min_u(pos), - 0xb8 => visitor.visit_i32x4_max_s(pos), - 0xb9 => visitor.visit_i32x4_max_u(pos), - 0xba => visitor.visit_i32x4_dot_i16x8_s(pos), - 0xbc => visitor.visit_i32x4_extmul_low_i16x8_s(pos), - 0xbd => visitor.visit_i32x4_extmul_high_i16x8_s(pos), - 0xbe => visitor.visit_i32x4_extmul_low_i16x8_u(pos), - 0xbf => visitor.visit_i32x4_extmul_high_i16x8_u(pos), - 0xc0 => visitor.visit_i64x2_abs(pos), - 0xc1 => visitor.visit_i64x2_neg(pos), - 0xc3 => visitor.visit_i64x2_all_true(pos), - 0xc4 => visitor.visit_i64x2_bitmask(pos), - 0xc7 => visitor.visit_i64x2_extend_low_i32x4_s(pos), - 0xc8 => visitor.visit_i64x2_extend_high_i32x4_s(pos), - 0xc9 => visitor.visit_i64x2_extend_low_i32x4_u(pos), - 0xca => visitor.visit_i64x2_extend_high_i32x4_u(pos), - 0xcb => visitor.visit_i64x2_shl(pos), - 0xcc => visitor.visit_i64x2_shr_s(pos), - 0xcd => visitor.visit_i64x2_shr_u(pos), - 0xce => visitor.visit_i64x2_add(pos), - 0xd1 => visitor.visit_i64x2_sub(pos), - 0xd5 => 
visitor.visit_i64x2_mul(pos), - 0xd6 => visitor.visit_i64x2_eq(pos), - 0xd7 => visitor.visit_i64x2_ne(pos), - 0xd8 => visitor.visit_i64x2_lt_s(pos), - 0xd9 => visitor.visit_i64x2_gt_s(pos), - 0xda => visitor.visit_i64x2_le_s(pos), - 0xdb => visitor.visit_i64x2_ge_s(pos), - 0xdc => visitor.visit_i64x2_extmul_low_i32x4_s(pos), - 0xdd => visitor.visit_i64x2_extmul_high_i32x4_s(pos), - 0xde => visitor.visit_i64x2_extmul_low_i32x4_u(pos), - 0xdf => visitor.visit_i64x2_extmul_high_i32x4_u(pos), - 0xe0 => visitor.visit_f32x4_abs(pos), - 0xe1 => visitor.visit_f32x4_neg(pos), - 0xe3 => visitor.visit_f32x4_sqrt(pos), - 0xe4 => visitor.visit_f32x4_add(pos), - 0xe5 => visitor.visit_f32x4_sub(pos), - 0xe6 => visitor.visit_f32x4_mul(pos), - 0xe7 => visitor.visit_f32x4_div(pos), - 0xe8 => visitor.visit_f32x4_min(pos), - 0xe9 => visitor.visit_f32x4_max(pos), - 0xea => visitor.visit_f32x4_pmin(pos), - 0xeb => visitor.visit_f32x4_pmax(pos), - 0xec => visitor.visit_f64x2_abs(pos), - 0xed => visitor.visit_f64x2_neg(pos), - 0xef => visitor.visit_f64x2_sqrt(pos), - 0xf0 => visitor.visit_f64x2_add(pos), - 0xf1 => visitor.visit_f64x2_sub(pos), - 0xf2 => visitor.visit_f64x2_mul(pos), - 0xf3 => visitor.visit_f64x2_div(pos), - 0xf4 => visitor.visit_f64x2_min(pos), - 0xf5 => visitor.visit_f64x2_max(pos), - 0xf6 => visitor.visit_f64x2_pmin(pos), - 0xf7 => visitor.visit_f64x2_pmax(pos), - 0xf8 => visitor.visit_i32x4_trunc_sat_f32x4_s(pos), - 0xf9 => visitor.visit_i32x4_trunc_sat_f32x4_u(pos), - 0xfa => visitor.visit_f32x4_convert_i32x4_s(pos), - 0xfb => visitor.visit_f32x4_convert_i32x4_u(pos), - 0xfc => visitor.visit_i32x4_trunc_sat_f64x2_s_zero(pos), - 0xfd => visitor.visit_i32x4_trunc_sat_f64x2_u_zero(pos), - 0xfe => visitor.visit_f64x2_convert_low_i32x4_s(pos), - 0xff => visitor.visit_f64x2_convert_low_i32x4_u(pos), - 0x100 => visitor.visit_i8x16_relaxed_swizzle(pos), - 0x101 => visitor.visit_i32x4_relaxed_trunc_sat_f32x4_s(pos), - 0x102 => visitor.visit_i32x4_relaxed_trunc_sat_f32x4_u(pos), - 0x103 => visitor.visit_i32x4_relaxed_trunc_sat_f64x2_s_zero(pos), - 0x104 => visitor.visit_i32x4_relaxed_trunc_sat_f64x2_u_zero(pos), - 0x105 => visitor.visit_f32x4_relaxed_fma(pos), - 0x106 => visitor.visit_f32x4_relaxed_fnma(pos), - 0x107 => visitor.visit_f64x2_relaxed_fma(pos), - 0x108 => visitor.visit_f64x2_relaxed_fnma(pos), - 0x109 => visitor.visit_i8x16_relaxed_laneselect(pos), - 0x10a => visitor.visit_i16x8_relaxed_laneselect(pos), - 0x10b => visitor.visit_i32x4_relaxed_laneselect(pos), - 0x10c => visitor.visit_i64x2_relaxed_laneselect(pos), - 0x10d => visitor.visit_f32x4_relaxed_min(pos), - 0x10e => visitor.visit_f32x4_relaxed_max(pos), - 0x10f => visitor.visit_f64x2_relaxed_min(pos), - 0x110 => visitor.visit_f64x2_relaxed_max(pos), - 0x111 => visitor.visit_i16x8_relaxed_q15mulr_s(pos), - 0x112 => visitor.visit_i16x8_dot_i8x16_i7x16_s(pos), - 0x113 => visitor.visit_i32x4_dot_i8x16_i7x16_add_s(pos), - 0x114 => visitor.visit_f32x4_relaxed_dot_bf16x8_add_f32x4(pos), + 0x5c => visitor.visit_v128_load32_zero(self.read_memarg(2)?), + 0x5d => visitor.visit_v128_load64_zero(self.read_memarg(3)?), + 0x5e => visitor.visit_f32x4_demote_f64x2_zero(), + 0x5f => visitor.visit_f64x2_promote_low_f32x4(), + 0x60 => visitor.visit_i8x16_abs(), + 0x61 => visitor.visit_i8x16_neg(), + 0x62 => visitor.visit_i8x16_popcnt(), + 0x63 => visitor.visit_i8x16_all_true(), + 0x64 => visitor.visit_i8x16_bitmask(), + 0x65 => visitor.visit_i8x16_narrow_i16x8_s(), + 0x66 => visitor.visit_i8x16_narrow_i16x8_u(), + 0x67 => visitor.visit_f32x4_ceil(), + 
0x68 => visitor.visit_f32x4_floor(), + 0x69 => visitor.visit_f32x4_trunc(), + 0x6a => visitor.visit_f32x4_nearest(), + 0x6b => visitor.visit_i8x16_shl(), + 0x6c => visitor.visit_i8x16_shr_s(), + 0x6d => visitor.visit_i8x16_shr_u(), + 0x6e => visitor.visit_i8x16_add(), + 0x6f => visitor.visit_i8x16_add_sat_s(), + 0x70 => visitor.visit_i8x16_add_sat_u(), + 0x71 => visitor.visit_i8x16_sub(), + 0x72 => visitor.visit_i8x16_sub_sat_s(), + 0x73 => visitor.visit_i8x16_sub_sat_u(), + 0x74 => visitor.visit_f64x2_ceil(), + 0x75 => visitor.visit_f64x2_floor(), + 0x76 => visitor.visit_i8x16_min_s(), + 0x77 => visitor.visit_i8x16_min_u(), + 0x78 => visitor.visit_i8x16_max_s(), + 0x79 => visitor.visit_i8x16_max_u(), + 0x7a => visitor.visit_f64x2_trunc(), + 0x7b => visitor.visit_i8x16_avgr_u(), + 0x7c => visitor.visit_i16x8_extadd_pairwise_i8x16_s(), + 0x7d => visitor.visit_i16x8_extadd_pairwise_i8x16_u(), + 0x7e => visitor.visit_i32x4_extadd_pairwise_i16x8_s(), + 0x7f => visitor.visit_i32x4_extadd_pairwise_i16x8_u(), + 0x80 => visitor.visit_i16x8_abs(), + 0x81 => visitor.visit_i16x8_neg(), + 0x82 => visitor.visit_i16x8_q15mulr_sat_s(), + 0x83 => visitor.visit_i16x8_all_true(), + 0x84 => visitor.visit_i16x8_bitmask(), + 0x85 => visitor.visit_i16x8_narrow_i32x4_s(), + 0x86 => visitor.visit_i16x8_narrow_i32x4_u(), + 0x87 => visitor.visit_i16x8_extend_low_i8x16_s(), + 0x88 => visitor.visit_i16x8_extend_high_i8x16_s(), + 0x89 => visitor.visit_i16x8_extend_low_i8x16_u(), + 0x8a => visitor.visit_i16x8_extend_high_i8x16_u(), + 0x8b => visitor.visit_i16x8_shl(), + 0x8c => visitor.visit_i16x8_shr_s(), + 0x8d => visitor.visit_i16x8_shr_u(), + 0x8e => visitor.visit_i16x8_add(), + 0x8f => visitor.visit_i16x8_add_sat_s(), + 0x90 => visitor.visit_i16x8_add_sat_u(), + 0x91 => visitor.visit_i16x8_sub(), + 0x92 => visitor.visit_i16x8_sub_sat_s(), + 0x93 => visitor.visit_i16x8_sub_sat_u(), + 0x94 => visitor.visit_f64x2_nearest(), + 0x95 => visitor.visit_i16x8_mul(), + 0x96 => visitor.visit_i16x8_min_s(), + 0x97 => visitor.visit_i16x8_min_u(), + 0x98 => visitor.visit_i16x8_max_s(), + 0x99 => visitor.visit_i16x8_max_u(), + 0x9b => visitor.visit_i16x8_avgr_u(), + 0x9c => visitor.visit_i16x8_extmul_low_i8x16_s(), + 0x9d => visitor.visit_i16x8_extmul_high_i8x16_s(), + 0x9e => visitor.visit_i16x8_extmul_low_i8x16_u(), + 0x9f => visitor.visit_i16x8_extmul_high_i8x16_u(), + 0xa0 => visitor.visit_i32x4_abs(), + 0xa1 => visitor.visit_i32x4_neg(), + 0xa3 => visitor.visit_i32x4_all_true(), + 0xa4 => visitor.visit_i32x4_bitmask(), + 0xa7 => visitor.visit_i32x4_extend_low_i16x8_s(), + 0xa8 => visitor.visit_i32x4_extend_high_i16x8_s(), + 0xa9 => visitor.visit_i32x4_extend_low_i16x8_u(), + 0xaa => visitor.visit_i32x4_extend_high_i16x8_u(), + 0xab => visitor.visit_i32x4_shl(), + 0xac => visitor.visit_i32x4_shr_s(), + 0xad => visitor.visit_i32x4_shr_u(), + 0xae => visitor.visit_i32x4_add(), + 0xb1 => visitor.visit_i32x4_sub(), + 0xb5 => visitor.visit_i32x4_mul(), + 0xb6 => visitor.visit_i32x4_min_s(), + 0xb7 => visitor.visit_i32x4_min_u(), + 0xb8 => visitor.visit_i32x4_max_s(), + 0xb9 => visitor.visit_i32x4_max_u(), + 0xba => visitor.visit_i32x4_dot_i16x8_s(), + 0xbc => visitor.visit_i32x4_extmul_low_i16x8_s(), + 0xbd => visitor.visit_i32x4_extmul_high_i16x8_s(), + 0xbe => visitor.visit_i32x4_extmul_low_i16x8_u(), + 0xbf => visitor.visit_i32x4_extmul_high_i16x8_u(), + 0xc0 => visitor.visit_i64x2_abs(), + 0xc1 => visitor.visit_i64x2_neg(), + 0xc3 => visitor.visit_i64x2_all_true(), + 0xc4 => visitor.visit_i64x2_bitmask(), + 0xc7 => 
visitor.visit_i64x2_extend_low_i32x4_s(), + 0xc8 => visitor.visit_i64x2_extend_high_i32x4_s(), + 0xc9 => visitor.visit_i64x2_extend_low_i32x4_u(), + 0xca => visitor.visit_i64x2_extend_high_i32x4_u(), + 0xcb => visitor.visit_i64x2_shl(), + 0xcc => visitor.visit_i64x2_shr_s(), + 0xcd => visitor.visit_i64x2_shr_u(), + 0xce => visitor.visit_i64x2_add(), + 0xd1 => visitor.visit_i64x2_sub(), + 0xd5 => visitor.visit_i64x2_mul(), + 0xd6 => visitor.visit_i64x2_eq(), + 0xd7 => visitor.visit_i64x2_ne(), + 0xd8 => visitor.visit_i64x2_lt_s(), + 0xd9 => visitor.visit_i64x2_gt_s(), + 0xda => visitor.visit_i64x2_le_s(), + 0xdb => visitor.visit_i64x2_ge_s(), + 0xdc => visitor.visit_i64x2_extmul_low_i32x4_s(), + 0xdd => visitor.visit_i64x2_extmul_high_i32x4_s(), + 0xde => visitor.visit_i64x2_extmul_low_i32x4_u(), + 0xdf => visitor.visit_i64x2_extmul_high_i32x4_u(), + 0xe0 => visitor.visit_f32x4_abs(), + 0xe1 => visitor.visit_f32x4_neg(), + 0xe3 => visitor.visit_f32x4_sqrt(), + 0xe4 => visitor.visit_f32x4_add(), + 0xe5 => visitor.visit_f32x4_sub(), + 0xe6 => visitor.visit_f32x4_mul(), + 0xe7 => visitor.visit_f32x4_div(), + 0xe8 => visitor.visit_f32x4_min(), + 0xe9 => visitor.visit_f32x4_max(), + 0xea => visitor.visit_f32x4_pmin(), + 0xeb => visitor.visit_f32x4_pmax(), + 0xec => visitor.visit_f64x2_abs(), + 0xed => visitor.visit_f64x2_neg(), + 0xef => visitor.visit_f64x2_sqrt(), + 0xf0 => visitor.visit_f64x2_add(), + 0xf1 => visitor.visit_f64x2_sub(), + 0xf2 => visitor.visit_f64x2_mul(), + 0xf3 => visitor.visit_f64x2_div(), + 0xf4 => visitor.visit_f64x2_min(), + 0xf5 => visitor.visit_f64x2_max(), + 0xf6 => visitor.visit_f64x2_pmin(), + 0xf7 => visitor.visit_f64x2_pmax(), + 0xf8 => visitor.visit_i32x4_trunc_sat_f32x4_s(), + 0xf9 => visitor.visit_i32x4_trunc_sat_f32x4_u(), + 0xfa => visitor.visit_f32x4_convert_i32x4_s(), + 0xfb => visitor.visit_f32x4_convert_i32x4_u(), + 0xfc => visitor.visit_i32x4_trunc_sat_f64x2_s_zero(), + 0xfd => visitor.visit_i32x4_trunc_sat_f64x2_u_zero(), + 0xfe => visitor.visit_f64x2_convert_low_i32x4_s(), + 0xff => visitor.visit_f64x2_convert_low_i32x4_u(), + 0x100 => visitor.visit_i8x16_relaxed_swizzle(), + 0x101 => visitor.visit_i32x4_relaxed_trunc_sat_f32x4_s(), + 0x102 => visitor.visit_i32x4_relaxed_trunc_sat_f32x4_u(), + 0x103 => visitor.visit_i32x4_relaxed_trunc_sat_f64x2_s_zero(), + 0x104 => visitor.visit_i32x4_relaxed_trunc_sat_f64x2_u_zero(), + 0x105 => visitor.visit_f32x4_relaxed_fma(), + 0x106 => visitor.visit_f32x4_relaxed_fnma(), + 0x107 => visitor.visit_f64x2_relaxed_fma(), + 0x108 => visitor.visit_f64x2_relaxed_fnma(), + 0x109 => visitor.visit_i8x16_relaxed_laneselect(), + 0x10a => visitor.visit_i16x8_relaxed_laneselect(), + 0x10b => visitor.visit_i32x4_relaxed_laneselect(), + 0x10c => visitor.visit_i64x2_relaxed_laneselect(), + 0x10d => visitor.visit_f32x4_relaxed_min(), + 0x10e => visitor.visit_f32x4_relaxed_max(), + 0x10f => visitor.visit_f64x2_relaxed_min(), + 0x110 => visitor.visit_f64x2_relaxed_max(), + 0x111 => visitor.visit_i16x8_relaxed_q15mulr_s(), + 0x112 => visitor.visit_i16x8_dot_i8x16_i7x16_s(), + 0x113 => visitor.visit_i32x4_dot_i8x16_i7x16_add_s(), + 0x114 => visitor.visit_f32x4_relaxed_dot_bf16x8_add_f32x4(), _ => bail!(pos, "unknown 0xfd subopcode: 0x{code:x}"), }) @@ -1995,78 +2032,78 @@ impl<'a> BinaryReader<'a> { { let code = self.read_var_u32()?; Ok(match code { - 0x00 => visitor.visit_memory_atomic_notify(pos, self.read_memarg(2)?), - 0x01 => visitor.visit_memory_atomic_wait32(pos, self.read_memarg(2)?), - 0x02 => 
visitor.visit_memory_atomic_wait64(pos, self.read_memarg(3)?), + 0x00 => visitor.visit_memory_atomic_notify(self.read_memarg(2)?), + 0x01 => visitor.visit_memory_atomic_wait32(self.read_memarg(2)?), + 0x02 => visitor.visit_memory_atomic_wait64(self.read_memarg(3)?), 0x03 => { if self.read_u8()? != 0 { bail!(pos, "nonzero byte after `atomic.fence`"); } - visitor.visit_atomic_fence(pos) + visitor.visit_atomic_fence() } - 0x10 => visitor.visit_i32_atomic_load(pos, self.read_memarg(2)?), - 0x11 => visitor.visit_i64_atomic_load(pos, self.read_memarg(3)?), - 0x12 => visitor.visit_i32_atomic_load8_u(pos, self.read_memarg(0)?), - 0x13 => visitor.visit_i32_atomic_load16_u(pos, self.read_memarg(1)?), - 0x14 => visitor.visit_i64_atomic_load8_u(pos, self.read_memarg(0)?), - 0x15 => visitor.visit_i64_atomic_load16_u(pos, self.read_memarg(1)?), - 0x16 => visitor.visit_i64_atomic_load32_u(pos, self.read_memarg(2)?), - 0x17 => visitor.visit_i32_atomic_store(pos, self.read_memarg(2)?), - 0x18 => visitor.visit_i64_atomic_store(pos, self.read_memarg(3)?), - 0x19 => visitor.visit_i32_atomic_store8(pos, self.read_memarg(0)?), - 0x1a => visitor.visit_i32_atomic_store16(pos, self.read_memarg(1)?), - 0x1b => visitor.visit_i64_atomic_store8(pos, self.read_memarg(0)?), - 0x1c => visitor.visit_i64_atomic_store16(pos, self.read_memarg(1)?), - 0x1d => visitor.visit_i64_atomic_store32(pos, self.read_memarg(2)?), - 0x1e => visitor.visit_i32_atomic_rmw_add(pos, self.read_memarg(2)?), - 0x1f => visitor.visit_i64_atomic_rmw_add(pos, self.read_memarg(3)?), - 0x20 => visitor.visit_i32_atomic_rmw8_add_u(pos, self.read_memarg(0)?), - 0x21 => visitor.visit_i32_atomic_rmw16_add_u(pos, self.read_memarg(1)?), - 0x22 => visitor.visit_i64_atomic_rmw8_add_u(pos, self.read_memarg(0)?), - 0x23 => visitor.visit_i64_atomic_rmw16_add_u(pos, self.read_memarg(1)?), - 0x24 => visitor.visit_i64_atomic_rmw32_add_u(pos, self.read_memarg(2)?), - 0x25 => visitor.visit_i32_atomic_rmw_sub(pos, self.read_memarg(2)?), - 0x26 => visitor.visit_i64_atomic_rmw_sub(pos, self.read_memarg(3)?), - 0x27 => visitor.visit_i32_atomic_rmw8_sub_u(pos, self.read_memarg(0)?), - 0x28 => visitor.visit_i32_atomic_rmw16_sub_u(pos, self.read_memarg(1)?), - 0x29 => visitor.visit_i64_atomic_rmw8_sub_u(pos, self.read_memarg(0)?), - 0x2a => visitor.visit_i64_atomic_rmw16_sub_u(pos, self.read_memarg(1)?), - 0x2b => visitor.visit_i64_atomic_rmw32_sub_u(pos, self.read_memarg(2)?), - 0x2c => visitor.visit_i32_atomic_rmw_and(pos, self.read_memarg(2)?), - 0x2d => visitor.visit_i64_atomic_rmw_and(pos, self.read_memarg(3)?), - 0x2e => visitor.visit_i32_atomic_rmw8_and_u(pos, self.read_memarg(0)?), - 0x2f => visitor.visit_i32_atomic_rmw16_and_u(pos, self.read_memarg(1)?), - 0x30 => visitor.visit_i64_atomic_rmw8_and_u(pos, self.read_memarg(0)?), - 0x31 => visitor.visit_i64_atomic_rmw16_and_u(pos, self.read_memarg(1)?), - 0x32 => visitor.visit_i64_atomic_rmw32_and_u(pos, self.read_memarg(2)?), - 0x33 => visitor.visit_i32_atomic_rmw_or(pos, self.read_memarg(2)?), - 0x34 => visitor.visit_i64_atomic_rmw_or(pos, self.read_memarg(3)?), - 0x35 => visitor.visit_i32_atomic_rmw8_or_u(pos, self.read_memarg(0)?), - 0x36 => visitor.visit_i32_atomic_rmw16_or_u(pos, self.read_memarg(1)?), - 0x37 => visitor.visit_i64_atomic_rmw8_or_u(pos, self.read_memarg(0)?), - 0x38 => visitor.visit_i64_atomic_rmw16_or_u(pos, self.read_memarg(1)?), - 0x39 => visitor.visit_i64_atomic_rmw32_or_u(pos, self.read_memarg(2)?), - 0x3a => visitor.visit_i32_atomic_rmw_xor(pos, self.read_memarg(2)?), - 0x3b => 
visitor.visit_i64_atomic_rmw_xor(pos, self.read_memarg(3)?), - 0x3c => visitor.visit_i32_atomic_rmw8_xor_u(pos, self.read_memarg(0)?), - 0x3d => visitor.visit_i32_atomic_rmw16_xor_u(pos, self.read_memarg(1)?), - 0x3e => visitor.visit_i64_atomic_rmw8_xor_u(pos, self.read_memarg(0)?), - 0x3f => visitor.visit_i64_atomic_rmw16_xor_u(pos, self.read_memarg(1)?), - 0x40 => visitor.visit_i64_atomic_rmw32_xor_u(pos, self.read_memarg(2)?), - 0x41 => visitor.visit_i32_atomic_rmw_xchg(pos, self.read_memarg(2)?), - 0x42 => visitor.visit_i64_atomic_rmw_xchg(pos, self.read_memarg(3)?), - 0x43 => visitor.visit_i32_atomic_rmw8_xchg_u(pos, self.read_memarg(0)?), - 0x44 => visitor.visit_i32_atomic_rmw16_xchg_u(pos, self.read_memarg(1)?), - 0x45 => visitor.visit_i64_atomic_rmw8_xchg_u(pos, self.read_memarg(0)?), - 0x46 => visitor.visit_i64_atomic_rmw16_xchg_u(pos, self.read_memarg(1)?), - 0x47 => visitor.visit_i64_atomic_rmw32_xchg_u(pos, self.read_memarg(2)?), - 0x48 => visitor.visit_i32_atomic_rmw_cmpxchg(pos, self.read_memarg(2)?), - 0x49 => visitor.visit_i64_atomic_rmw_cmpxchg(pos, self.read_memarg(3)?), - 0x4a => visitor.visit_i32_atomic_rmw8_cmpxchg_u(pos, self.read_memarg(0)?), - 0x4b => visitor.visit_i32_atomic_rmw16_cmpxchg_u(pos, self.read_memarg(1)?), - 0x4c => visitor.visit_i64_atomic_rmw8_cmpxchg_u(pos, self.read_memarg(0)?), - 0x4d => visitor.visit_i64_atomic_rmw16_cmpxchg_u(pos, self.read_memarg(1)?), - 0x4e => visitor.visit_i64_atomic_rmw32_cmpxchg_u(pos, self.read_memarg(2)?), + 0x10 => visitor.visit_i32_atomic_load(self.read_memarg(2)?), + 0x11 => visitor.visit_i64_atomic_load(self.read_memarg(3)?), + 0x12 => visitor.visit_i32_atomic_load8_u(self.read_memarg(0)?), + 0x13 => visitor.visit_i32_atomic_load16_u(self.read_memarg(1)?), + 0x14 => visitor.visit_i64_atomic_load8_u(self.read_memarg(0)?), + 0x15 => visitor.visit_i64_atomic_load16_u(self.read_memarg(1)?), + 0x16 => visitor.visit_i64_atomic_load32_u(self.read_memarg(2)?), + 0x17 => visitor.visit_i32_atomic_store(self.read_memarg(2)?), + 0x18 => visitor.visit_i64_atomic_store(self.read_memarg(3)?), + 0x19 => visitor.visit_i32_atomic_store8(self.read_memarg(0)?), + 0x1a => visitor.visit_i32_atomic_store16(self.read_memarg(1)?), + 0x1b => visitor.visit_i64_atomic_store8(self.read_memarg(0)?), + 0x1c => visitor.visit_i64_atomic_store16(self.read_memarg(1)?), + 0x1d => visitor.visit_i64_atomic_store32(self.read_memarg(2)?), + 0x1e => visitor.visit_i32_atomic_rmw_add(self.read_memarg(2)?), + 0x1f => visitor.visit_i64_atomic_rmw_add(self.read_memarg(3)?), + 0x20 => visitor.visit_i32_atomic_rmw8_add_u(self.read_memarg(0)?), + 0x21 => visitor.visit_i32_atomic_rmw16_add_u(self.read_memarg(1)?), + 0x22 => visitor.visit_i64_atomic_rmw8_add_u(self.read_memarg(0)?), + 0x23 => visitor.visit_i64_atomic_rmw16_add_u(self.read_memarg(1)?), + 0x24 => visitor.visit_i64_atomic_rmw32_add_u(self.read_memarg(2)?), + 0x25 => visitor.visit_i32_atomic_rmw_sub(self.read_memarg(2)?), + 0x26 => visitor.visit_i64_atomic_rmw_sub(self.read_memarg(3)?), + 0x27 => visitor.visit_i32_atomic_rmw8_sub_u(self.read_memarg(0)?), + 0x28 => visitor.visit_i32_atomic_rmw16_sub_u(self.read_memarg(1)?), + 0x29 => visitor.visit_i64_atomic_rmw8_sub_u(self.read_memarg(0)?), + 0x2a => visitor.visit_i64_atomic_rmw16_sub_u(self.read_memarg(1)?), + 0x2b => visitor.visit_i64_atomic_rmw32_sub_u(self.read_memarg(2)?), + 0x2c => visitor.visit_i32_atomic_rmw_and(self.read_memarg(2)?), + 0x2d => visitor.visit_i64_atomic_rmw_and(self.read_memarg(3)?), + 0x2e => 
visitor.visit_i32_atomic_rmw8_and_u(self.read_memarg(0)?), + 0x2f => visitor.visit_i32_atomic_rmw16_and_u(self.read_memarg(1)?), + 0x30 => visitor.visit_i64_atomic_rmw8_and_u(self.read_memarg(0)?), + 0x31 => visitor.visit_i64_atomic_rmw16_and_u(self.read_memarg(1)?), + 0x32 => visitor.visit_i64_atomic_rmw32_and_u(self.read_memarg(2)?), + 0x33 => visitor.visit_i32_atomic_rmw_or(self.read_memarg(2)?), + 0x34 => visitor.visit_i64_atomic_rmw_or(self.read_memarg(3)?), + 0x35 => visitor.visit_i32_atomic_rmw8_or_u(self.read_memarg(0)?), + 0x36 => visitor.visit_i32_atomic_rmw16_or_u(self.read_memarg(1)?), + 0x37 => visitor.visit_i64_atomic_rmw8_or_u(self.read_memarg(0)?), + 0x38 => visitor.visit_i64_atomic_rmw16_or_u(self.read_memarg(1)?), + 0x39 => visitor.visit_i64_atomic_rmw32_or_u(self.read_memarg(2)?), + 0x3a => visitor.visit_i32_atomic_rmw_xor(self.read_memarg(2)?), + 0x3b => visitor.visit_i64_atomic_rmw_xor(self.read_memarg(3)?), + 0x3c => visitor.visit_i32_atomic_rmw8_xor_u(self.read_memarg(0)?), + 0x3d => visitor.visit_i32_atomic_rmw16_xor_u(self.read_memarg(1)?), + 0x3e => visitor.visit_i64_atomic_rmw8_xor_u(self.read_memarg(0)?), + 0x3f => visitor.visit_i64_atomic_rmw16_xor_u(self.read_memarg(1)?), + 0x40 => visitor.visit_i64_atomic_rmw32_xor_u(self.read_memarg(2)?), + 0x41 => visitor.visit_i32_atomic_rmw_xchg(self.read_memarg(2)?), + 0x42 => visitor.visit_i64_atomic_rmw_xchg(self.read_memarg(3)?), + 0x43 => visitor.visit_i32_atomic_rmw8_xchg_u(self.read_memarg(0)?), + 0x44 => visitor.visit_i32_atomic_rmw16_xchg_u(self.read_memarg(1)?), + 0x45 => visitor.visit_i64_atomic_rmw8_xchg_u(self.read_memarg(0)?), + 0x46 => visitor.visit_i64_atomic_rmw16_xchg_u(self.read_memarg(1)?), + 0x47 => visitor.visit_i64_atomic_rmw32_xchg_u(self.read_memarg(2)?), + 0x48 => visitor.visit_i32_atomic_rmw_cmpxchg(self.read_memarg(2)?), + 0x49 => visitor.visit_i64_atomic_rmw_cmpxchg(self.read_memarg(3)?), + 0x4a => visitor.visit_i32_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?), + 0x4b => visitor.visit_i32_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?), + 0x4c => visitor.visit_i64_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?), + 0x4d => visitor.visit_i64_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?), + 0x4e => visitor.visit_i64_atomic_rmw32_cmpxchg_u(self.read_memarg(2)?), _ => bail!(pos, "unknown 0xfe subopcode: 0x{code:x}"), }) @@ -2289,7 +2326,7 @@ impl<'a> OperatorFactory<'a> { macro_rules! define_visit_operator { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( - fn $visit(&mut self, _offset: usize $($(,$arg: $argty)*)?) -> Operator<'a> { + fn $visit(&mut self $($(,$arg: $argty)*)?) -> Operator<'a> { Operator::$op $({ $($arg),* })? } )* diff --git a/crates/wasmparser/src/lib.rs b/crates/wasmparser/src/lib.rs index d29fa9e32a..7505f8b3b0 100644 --- a/crates/wasmparser/src/lib.rs +++ b/crates/wasmparser/src/lib.rs @@ -97,7 +97,7 @@ /// // `VisitOperator` trait that this corresponds to. /// ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { /// $( -/// fn $visit(&mut self, _offset: usize $($(,$arg: $argty)*)?) { +/// fn $visit(&mut self $($(,$arg: $argty)*)?) 
{ /// // do nothing for this example /// } /// )* diff --git a/crates/wasmparser/src/readers/core/operators.rs b/crates/wasmparser/src/readers/core/operators.rs index 2b9726d0fb..9d9dbd8c62 100644 --- a/crates/wasmparser/src/readers/core/operators.rs +++ b/crates/wasmparser/src/readers/core/operators.rs @@ -198,11 +198,10 @@ impl<'a> OperatorsReader<'a> { Ok((self.read()?, pos)) } - /// Visits an operator with its offset. - pub fn visit_with_offset<T>( - &mut self, - visitor: &mut T, - ) -> Result<<T as VisitOperator<'a>>::Output> + /// Visit a single operator with the specified [`VisitOperator`] instance. + /// + /// See [`BinaryReader::visit_operator`] for more information. + pub fn visit_operator<T>(&mut self, visitor: &mut T) -> Result<<T as VisitOperator<'a>>::Output> where T: VisitOperator<'a>, { @@ -306,7 +305,7 @@ impl<'a> Iterator for OperatorsIteratorWithOffsets<'a> { macro_rules! define_visit_operator { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( - fn $visit(&mut self, offset: usize $($(,$arg: $argty)*)?) -> Self::Output; + fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output; )* } } @@ -325,12 +324,12 @@ pub trait VisitOperator<'a> { /// critical use cases. For performance critical implementations users /// are recommended to directly use the respective `visit` methods or /// implement [`VisitOperator`] on their own. - fn visit_operator(&mut self, offset: usize, op: &Operator<'a>) -> Self::Output { + fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output { macro_rules! visit_operator { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { match op { $( - Operator::$op $({ $($arg),* })? => self.$visit(offset, $($($arg.clone()),*)?), + Operator::$op $({ $($arg),* })? => self.$visit($($($arg.clone()),*)?), )* } } diff --git a/crates/wasmparser/src/validator/core.rs b/crates/wasmparser/src/validator/core.rs index 956e50ee05..1e35c57684 100644 --- a/crates/wasmparser/src/validator/core.rs +++ b/crates/wasmparser/src/validator/core.rs @@ -241,6 +241,7 @@ impl ModuleState { types: &TypeList, ) -> Result<()> { let mut validator = VisitConstOperator { + offset: 0, order: self.order, uninserted_funcref: false, ops: OperatorValidator::new_const_expr( @@ -256,7 +257,8 @@ impl ModuleState { let mut ops = expr.get_operators_reader(); while !ops.eof() { - ops.visit_with_offset(&mut validator)??; + validator.offset = ops.original_position(); + ops.visit_operator(&mut validator)??; } validator.ops.finish(ops.original_position())?; @@ -268,6 +270,7 @@ impl ModuleState { return Ok(()); struct VisitConstOperator<'a> { + offset: usize, uninserted_funcref: bool, ops: OperatorValidator, resources: OperatorValidatorResources<'a>, @@ -276,33 +279,33 @@ impl ModuleState { impl VisitConstOperator<'_> { fn validator(&mut self) -> impl VisitOperator<'_, Output = Result<()>> { - self.ops.with_resources(&self.resources) + self.ops.with_resources(&self.resources, self.offset) } - fn validate_extended_const(&mut self, offset: usize) -> Result<()> { + fn validate_extended_const(&mut self) -> Result<()> { if self.ops.features.extended_const { Ok(()) } else { Err(BinaryReaderError::new( "constant expression required: non-constant operator", - offset, + self.offset, )) } } - fn validate_global(&mut self, offset: usize, index: u32) -> Result<()> { + fn validate_global(&mut self, index: u32) -> Result<()> { let module = &self.resources.module; - let global = module.global_at(index, offset)?; + let global = module.global_at(index, self.offset)?; if index >= 
module.num_imported_globals { return Err(BinaryReaderError::new( "constant expression required: global.get of locally defined global", - offset, + self.offset, )); } if global.mutable { return Err(BinaryReaderError::new( "constant expression required: global.get of mutable global", - offset, + self.offset, )); } Ok(()) @@ -342,79 +345,79 @@ impl ModuleState { ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( #[allow(unused_variables)] - fn $visit(&mut self, pos: usize $($(,$arg: $argty)*)?) -> Self::Output { - define_visit_operator!(@visit self $visit pos $($($arg)*)?) + fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output { + define_visit_operator!(@visit self $visit $($($arg)*)?) } )* }; // These are always valid in const expressions - (@visit $self:ident visit_i32_const $pos:ident $val:ident) => {{ - $self.validator().visit_i32_const($pos, $val) + (@visit $self:ident visit_i32_const $val:ident) => {{ + $self.validator().visit_i32_const($val) }}; - (@visit $self:ident visit_i64_const $pos:ident $val:ident) => {{ - $self.validator().visit_i64_const($pos, $val) + (@visit $self:ident visit_i64_const $val:ident) => {{ + $self.validator().visit_i64_const($val) }}; - (@visit $self:ident visit_f32_const $pos:ident $val:ident) => {{ - $self.validator().visit_f32_const($pos, $val) + (@visit $self:ident visit_f32_const $val:ident) => {{ + $self.validator().visit_f32_const($val) }}; - (@visit $self:ident visit_f64_const $pos:ident $val:ident) => {{ - $self.validator().visit_f64_const($pos, $val) + (@visit $self:ident visit_f64_const $val:ident) => {{ + $self.validator().visit_f64_const($val) }}; - (@visit $self:ident visit_v128_const $pos:ident $val:ident) => {{ - $self.validator().visit_v128_const($pos, $val) + (@visit $self:ident visit_v128_const $val:ident) => {{ + $self.validator().visit_v128_const($val) }}; - (@visit $self:ident visit_ref_null $pos:ident $val:ident) => {{ - $self.validator().visit_ref_null($pos, $val) + (@visit $self:ident visit_ref_null $val:ident) => {{ + $self.validator().visit_ref_null($val) }}; - (@visit $self:ident visit_end $pos:ident) => {{ - $self.validator().visit_end($pos) + (@visit $self:ident visit_end) => {{ + $self.validator().visit_end() }}; // These are valid const expressions when the extended-const proposal is enabled. 
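// For instance, with `extended_const` enabled a global initializer such as // `(global i32 (i32.add (i32.const 1) (i32.const 2)))` becomes valid: each arm // below first checks the feature flag and then defers to the normal validator.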
- (@visit $self:ident visit_i32_add $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i32_add($pos) + (@visit $self:ident visit_i32_add) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i32_add() }}; - (@visit $self:ident visit_i32_sub $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i32_sub($pos) + (@visit $self:ident visit_i32_sub) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i32_sub() }}; - (@visit $self:ident visit_i32_mul $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i32_mul($pos) + (@visit $self:ident visit_i32_mul) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i32_mul() }}; - (@visit $self:ident visit_i64_add $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i64_add($pos) + (@visit $self:ident visit_i64_add) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i64_add() }}; - (@visit $self:ident visit_i64_sub $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i64_sub($pos) + (@visit $self:ident visit_i64_sub) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i64_sub() }}; - (@visit $self:ident visit_i64_mul $pos:ident) => {{ - $self.validate_extended_const($pos)?; - $self.validator().visit_i64_mul($pos) + (@visit $self:ident visit_i64_mul) => {{ + $self.validate_extended_const()?; + $self.validator().visit_i64_mul() }}; // `global.get` is a valid const expression for imported, immutable // globals. - (@visit $self:ident visit_global_get $pos:ident $idx:ident) => {{ - $self.validate_global($pos, $idx)?; - $self.validator().visit_global_get($pos, $idx) + (@visit $self:ident visit_global_get $idx:ident) => {{ + $self.validate_global($idx)?; + $self.validator().visit_global_get($idx) }}; // `ref.func`, if it's in a `global` initializer, will insert into // the set of referenced functions so it's processed here. - (@visit $self:ident visit_ref_func $pos:ident $idx:ident) => {{ + (@visit $self:ident visit_ref_func $idx:ident) => {{ $self.insert_ref_func($idx); - $self.validator().visit_ref_func($pos, $idx) + $self.validator().visit_ref_func($idx) }}; - (@visit $self:ident $op:ident $pos:ident $($args:tt)*) => {{ + (@visit $self:ident $op:ident $($args:tt)*) => {{ Err(BinaryReaderError::new( "constant expression required: non-constant operator", - $pos, + $self.offset, )) }} } diff --git a/crates/wasmparser/src/validator/func.rs b/crates/wasmparser/src/validator/func.rs index 354ca4cbe8..2e87ca2f6e 100644 --- a/crates/wasmparser/src/validator/func.rs +++ b/crates/wasmparser/src/validator/func.rs @@ -94,7 +94,7 @@ impl<T: WasmModuleResources> FuncValidator<T> { self.read_locals(&mut reader)?; reader.allow_memarg64(self.validator.features.memory64); while !reader.eof() { - reader.visit_operator(self)??; + reader.visit_operator(&mut self.visitor(reader.original_position()))??; } self.finish(reader.original_position()) } @@ -129,9 +129,33 @@ impl<T: WasmModuleResources> FuncValidator<T> { /// the operator itself are passed to this function to provide more useful /// error messages. pub fn op(&mut self, offset: usize, operator: &Operator<'_>) -> Result<()> { - self.validator - .with_resources(&self.resources) - .visit_operator(offset, operator) + self.visitor(offset).visit_operator(operator) + } + + /// Get the operator visitor for the next operator in the function. + /// + /// The returned visitor is intended to visit just one instruction at the given `offset`. 
+ /// + /// # Example + /// + /// ``` + /// # use wasmparser::{WasmModuleResources, FuncValidator, FunctionBody, Result}; + /// pub fn validate<R>(validator: &mut FuncValidator<R>, body: &FunctionBody<'_>) -> Result<()> + /// where R: WasmModuleResources + /// { + /// let mut operator_reader = body.get_binary_reader(); + /// while !operator_reader.eof() { + /// let mut visitor = validator.visitor(operator_reader.original_position()); + /// operator_reader.visit_operator(&mut visitor)??; + /// } + /// validator.finish(operator_reader.original_position()) + /// } + /// ``` + pub fn visitor<'this, 'a: 'this>( + &'this mut self, + offset: usize, + ) -> impl VisitOperator<'a, Output = Result<()>> + 'this { + self.validator.with_resources(&self.resources, offset) } /// Function that must be called after the last opcode has been processed. @@ -307,24 +331,3 @@ mod tests { assert_eq!(v.operand_stack_height(), 2); } } - -macro_rules! define_visit_operator { - ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { - $( - fn $visit(&mut self, offset: usize $($(,$arg: $argty)*)?) -> Result<()> { - self.validator.with_resources(&self.resources) - .$visit(offset $($(,$arg)*)?) - } - )* - } -} - -#[allow(unused_variables)] -impl<'a, T> VisitOperator<'a> for FuncValidator<T> -where - T: WasmModuleResources, -{ - type Output = Result<()>; - - for_each_operator!(define_visit_operator); -} diff --git a/crates/wasmparser/src/validator/operators.rs b/crates/wasmparser/src/validator/operators.rs index 311322d30a..50fffc026e 100644 --- a/crates/wasmparser/src/validator/operators.rs +++ b/crates/wasmparser/src/validator/operators.rs @@ -126,6 +126,7 @@ pub enum FrameKind { } struct OperatorValidatorTemp<'validator, 'resources, T> { + offset: usize, inner: &'validator mut OperatorValidator, resources: &'resources T, } @@ -190,10 +191,12 @@ impl OperatorValidator { unreachable: false, }); let params = OperatorValidatorTemp { + // This offset is used by the `func_type_at` and `inputs` calls below. + offset, inner: &mut ret, resources, } - .func_type_at(ty, offset)? + .func_type_at(ty)? .inputs(); for ty in params { ret.locals.define(1, ty); @@ -264,15 +267,17 @@ impl OperatorValidator { } /// Create a temporary [`OperatorValidatorTemp`] for validation. - pub fn with_resources<'validator, 'resources, T>( + pub fn with_resources<'a, 'validator, 'resources, T>( &'validator mut self, resources: &'resources T, - ) -> impl VisitOperator<Output = Result<()>> + 'validator + offset: usize, + ) -> impl VisitOperator<'a, Output = Result<()>> + 'validator where T: WasmModuleResources, 'resources: 'validator, { WasmProposalValidator(OperatorValidatorTemp { + offset, inner: self, resources, }) @@ -361,7 +366,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// matches `expected`. If `None` is returned then it means that `None` was /// expected and a type was successfully popped, but its exact type is /// indeterminate because the current block is unreachable. 
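/// /// With the explicit `offset` parameter gone, any type-mismatch error reported /// below is attributed to `self.offset`, which callers refresh before each visit.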
- fn pop_operand(&mut self, offset: usize, expected: Option<ValType>) -> Result<Option<ValType>> { + fn pop_operand(&mut self, expected: Option<ValType>) -> Result<Option<ValType>> { // This method is one of the hottest methods in the validator so to // improve codegen this method contains a fast-path success case where // if the top operand on the stack is as expected it's returned // directly @@ -386,7 +391,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R None }; - self._pop_operand(offset, expected, popped) + self._pop_operand(expected, popped) } // This is the "real" implementation of `pop_operand` which is 100% @@ -395,14 +400,13 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R #[cold] fn _pop_operand( &mut self, - offset: usize, expected: Option<ValType>, popped: Option<Option<ValType>>, ) -> Result<Option<ValType>> { self.operands.extend(popped); let control = match self.control.last() { Some(c) => c, - None => return Err(self.err_beyond_end(offset)), + None => return Err(self.err_beyond_end(self.offset)), }; let actual = if self.operands.len() == control.height { if control.unreachable { @@ -413,7 +417,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R None => "a type", }; bail!( - offset, + self.offset, "type mismatch: expected {desc} but nothing on stack" ) } @@ -423,7 +427,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R if let (Some(actual_ty), Some(expected_ty)) = (actual, expected) { if actual_ty != expected_ty { bail!( - offset, + self.offset, "type mismatch: expected {}, found {}", ty_to_str(expected_ty), ty_to_str(actual_ty) @@ -435,19 +439,23 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// Fetches the type for the local at `idx`, returning an error if it's out /// of bounds. - fn local(&self, offset: usize, idx: u32) -> Result<ValType> { + fn local(&self, idx: u32) -> Result<ValType> { match self.locals.get(idx) { Some(ty) => Ok(ty), - None => bail!(offset, "unknown local {}: local index out of bounds", idx), + None => bail!( + self.offset, + "unknown local {}: local index out of bounds", + idx + ), } } /// Flags the current control frame as unreachable, additionally truncating /// the currently active operand stack. - fn unreachable(&mut self, offset: usize) -> Result<()> { + fn unreachable(&mut self) -> Result<()> { let control = match self.control.last_mut() { Some(frame) => frame, - None => return Err(self.err_beyond_end(offset)), + None => return Err(self.err_beyond_end(self.offset)), }; control.unreachable = true; let new_height = control.height; @@ -461,7 +469,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// or block itself. The `kind` of block is specified which indicates how /// breaks interact with this block's type. Additionally the type signature /// of the block is specified by `ty`. - fn push_ctrl(&mut self, offset: usize, kind: FrameKind, ty: BlockType) -> Result<()> { + fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> { // Push a new frame which has a snapshot of the height of the current // operand stack. let height = self.operands.len(); @@ -473,7 +481,7 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R }); // All of the parameters are now also available in this control frame, // so we push them here in order. - for ty in self.params(offset, ty)? { + for ty in self.params(ty)? 
{ self.push_operand(ty)?; } Ok(()) } @@ -483,27 +491,27 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// /// This function is used when exiting a block and leaves a block scope. /// Internally this will validate that blocks have the correct result type. - fn pop_ctrl(&mut self, offset: usize) -> Result<Frame> { + fn pop_ctrl(&mut self) -> Result<Frame> { // Read the expected type and expected height of the operand stack at the // end of the frame. let frame = match self.control.last() { Some(f) => f, - None => return Err(self.err_beyond_end(offset)), + None => return Err(self.err_beyond_end(self.offset)), }; let ty = frame.block_type; let height = frame.height; // Pop all the result types, in reverse order, from the operand stack. // These types will, possibly, be transferred to the next frame. - for ty in self.results(offset, ty)?.rev() { - self.pop_operand(offset, Some(ty))?; + for ty in self.results(ty)?.rev() { + self.pop_operand(Some(ty))?; } // Make sure that the operand stack has returned to its original // height... if self.operands.len() != height { bail!( - offset, + self.offset, "type mismatch: values remaining on stack at end of block" ); } @@ -516,83 +524,83 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// /// Returns the type signature of the block that we're jumping to as well /// as the kind of block if the jump is valid. Otherwise returns an error. - fn jump(&self, offset: usize, depth: u32) -> Result<(BlockType, FrameKind)> { + fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> { if self.control.is_empty() { - return Err(self.err_beyond_end(offset)); + return Err(self.err_beyond_end(self.offset)); } match (self.control.len() - 1).checked_sub(depth as usize) { Some(i) => { let frame = &self.control[i]; Ok((frame.block_type, frame.kind)) } - None => bail!(offset, "unknown label: branch depth too large"), + None => bail!(self.offset, "unknown label: branch depth too large"), } } /// Validates that `memory_index` is valid in this module, and returns the /// type of address used to index the memory specified. - fn check_memory_index(&self, offset: usize, memory_index: u32) -> Result<ValType> { + fn check_memory_index(&self, memory_index: u32) -> Result<ValType> { match self.resources.memory_at(memory_index) { Some(mem) => Ok(mem.index_type()), - None => bail!(offset, "unknown memory {}", memory_index), + None => bail!(self.offset, "unknown memory {}", memory_index), } } /// Validates a `memarg` for alignment and such (also the memory it /// references), and returns the type of index used to address the memory. 
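/// /// The `align` field is a power-of-two exponent, so the check below compares /// exponents: an `i64.load`, for example, has a `max_align` of 3, i.e. a natural /// alignment of 8 bytes.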
- fn check_memarg(&self, memarg: MemArg, offset: usize) -> Result<ValType> { - let index_ty = self.check_memory_index(offset, memarg.memory)?; + fn check_memarg(&self, memarg: MemArg) -> Result<ValType> { + let index_ty = self.check_memory_index(memarg.memory)?; if memarg.align > memarg.max_align { - bail!(offset, "alignment must not be larger than natural"); + bail!(self.offset, "alignment must not be larger than natural"); } if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) { - bail!(offset, "offset out of range: must be <= 2**32"); + bail!(self.offset, "offset out of range: must be <= 2**32"); } Ok(index_ty) } #[cfg_attr(not(feature = "deterministic"), inline(always))] - fn check_non_deterministic_enabled(&self, offset: usize) -> Result<()> { + fn check_non_deterministic_enabled(&self) -> Result<()> { if cfg!(feature = "deterministic") && !self.features.deterministic_only { - bail!(offset, "deterministic_only support is not enabled"); + bail!(self.offset, "deterministic_only support is not enabled"); } Ok(()) } - fn check_shared_memarg(&self, offset: usize, memarg: MemArg) -> Result<ValType> { + fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> { if memarg.align != memarg.max_align { bail!( - offset, + self.offset, "atomic instructions must always specify maximum alignment" ); } - self.check_memory_index(offset, memarg.memory) + self.check_memory_index(memarg.memory) } - fn check_simd_lane_index(&self, offset: usize, index: u8, max: u8) -> Result<()> { + fn check_simd_lane_index(&self, index: u8, max: u8) -> Result<()> { if index >= max { - bail!(offset, "SIMD index out of bounds"); + bail!(self.offset, "SIMD index out of bounds"); } Ok(()) } /// Validates a block type, primarily with various in-flight proposals. - fn check_block_type(&self, offset: usize, ty: BlockType) -> Result<()> { + fn check_block_type(&self, ty: BlockType) -> Result<()> { match ty { BlockType::Empty => Ok(()), BlockType::Type(ty) => self .features .check_value_type(ty) - .map_err(|e| BinaryReaderError::new(e, offset)), + .map_err(|e| BinaryReaderError::new(e, self.offset)), BlockType::FuncType(idx) => { if !self.features.multi_value { bail!( - offset, + self.offset, "blocks, loops, and ifs may only produce a resulttype \ when multi-value is not enabled", ); } - self.func_type_at(idx, offset)?; + self.func_type_at(idx)?; Ok(()) } } @@ -600,19 +608,19 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// Validates a `call` instruction, ensuring that the function index is /// in-bounds and the right types are on the stack to call the function. - fn check_call(&mut self, offset: usize, function_index: u32) -> Result<()> { + fn check_call(&mut self, function_index: u32) -> Result<()> { let ty = match self.resources.type_of_function(function_index) { Some(i) => i, None => { bail!( - offset, + self.offset, "unknown function {}: function index out of bounds", function_index ); } }; for ty in ty.inputs().rev() { - self.pop_operand(offset, Some(ty))?; + self.pop_operand(Some(ty))?; } for ty in ty.outputs() { self.push_operand(ty)?; @@ -621,21 +629,24 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R } /// Validates a call to an indirect function, very similar to `check_call`. 
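/// In addition to checking the callee's signature, this pops the `i32` index /// into the table and requires the table's element type to be `funcref`.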
- fn check_call_indirect(&mut self, offset: usize, index: u32, table_index: u32) -> Result<()> { + fn check_call_indirect(&mut self, index: u32, table_index: u32) -> Result<()> { match self.resources.table_at(table_index) { None => { - bail!(offset, "unknown table: table index out of bounds"); + bail!(self.offset, "unknown table: table index out of bounds"); } Some(tab) => { if tab.element_type != ValType::FuncRef { - bail!(offset, "indirect calls must go through a table of funcref"); + bail!( + self.offset, + "indirect calls must go through a table of funcref" + ); } } } - let ty = self.func_type_at(index, offset)?; - self.pop_operand(offset, Some(ValType::I32))?; + let ty = self.func_type_at(index)?; + self.pop_operand(Some(ValType::I32))?; for ty in ty.inputs().rev() { - self.pop_operand(offset, Some(ty))?; + self.pop_operand(Some(ty))?; } for ty in ty.outputs() { self.push_operand(ty)?; @@ -645,250 +656,226 @@ impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R /// Validates a `return` instruction, popping types from the operand /// stack that the function needs. - fn check_return(&mut self, offset: usize) -> Result<()> { + fn check_return(&mut self) -> Result<()> { if self.control.is_empty() { - return Err(self.err_beyond_end(offset)); + return Err(self.err_beyond_end(self.offset)); } - for ty in self.results(offset, self.control[0].block_type)?.rev() { - self.pop_operand(offset, Some(ty))?; + for ty in self.results(self.control[0].block_type)?.rev() { + self.pop_operand(Some(ty))?; } - self.unreachable(offset)?; + self.unreachable()?; Ok(()) } /// Checks the validity of a common comparison operator. - fn check_cmp_op(&mut self, offset: usize, ty: ValType) -> Result<()> { - self.pop_operand(offset, Some(ty))?; - self.pop_operand(offset, Some(ty))?; + fn check_cmp_op(&mut self, ty: ValType) -> Result<()> { + self.pop_operand(Some(ty))?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I32)?; Ok(()) } /// Checks the validity of a common float comparison operator. - fn check_fcmp_op(&mut self, offset: usize, ty: ValType) -> Result<()> { + fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> { debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); - self.check_non_deterministic_enabled(offset)?; - self.check_cmp_op(offset, ty) + self.check_non_deterministic_enabled()?; + self.check_cmp_op(ty) } /// Checks the validity of a common unary operator. - fn check_unary_op(&mut self, offset: usize, ty: ValType) -> Result<()> { - self.pop_operand(offset, Some(ty))?; + fn check_unary_op(&mut self, ty: ValType) -> Result<()> { + self.pop_operand(Some(ty))?; self.push_operand(ty)?; Ok(()) } /// Checks the validity of a common unary float operator. - fn check_funary_op(&mut self, offset: usize, ty: ValType) -> Result<()> { + fn check_funary_op(&mut self, ty: ValType) -> Result<()> { debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); - self.check_non_deterministic_enabled(offset)?; - self.check_unary_op(offset, ty) + self.check_non_deterministic_enabled()?; + self.check_unary_op(ty) } /// Checks the validity of a common conversion operator. - fn check_conversion_op(&mut self, offset: usize, into: ValType, from: ValType) -> Result<()> { - self.pop_operand(offset, Some(from))?; + fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> { + self.pop_operand(Some(from))?; self.push_operand(into)?; Ok(()) } /// Checks the validity of a common float conversion operator. 
- fn check_fconversion_op(&mut self, offset: usize, into: ValType, from: ValType) -> Result<()> { + fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> { debug_assert!(matches!(into, ValType::F32 | ValType::F64)); - self.check_non_deterministic_enabled(offset)?; - self.check_conversion_op(offset, into, from) + self.check_non_deterministic_enabled()?; + self.check_conversion_op(into, from) } /// Checks the validity of a common binary operator. - fn check_binary_op(&mut self, offset: usize, ty: ValType) -> Result<()> { - self.pop_operand(offset, Some(ty))?; - self.pop_operand(offset, Some(ty))?; + fn check_binary_op(&mut self, ty: ValType) -> Result<()> { + self.pop_operand(Some(ty))?; + self.pop_operand(Some(ty))?; self.push_operand(ty)?; Ok(()) } /// Checks the validity of a common binary float operator. - fn check_fbinary_op(&mut self, offset: usize, ty: ValType) -> Result<()> { + fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> { debug_assert!(matches!(ty, ValType::F32 | ValType::F64)); - self.check_non_deterministic_enabled(offset)?; - self.check_binary_op(offset, ty) + self.check_non_deterministic_enabled()?; + self.check_binary_op(ty) } /// Checks the validity of an atomic load operator. - fn check_atomic_load(&mut self, offset: usize, memarg: MemArg, load_ty: ValType) -> Result<()> { - let ty = self.check_shared_memarg(offset, memarg)?; - self.pop_operand(offset, Some(ty))?; + fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> { + let ty = self.check_shared_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(load_ty)?; Ok(()) } /// Checks the validity of an atomic store operator. - fn check_atomic_store( - &mut self, - offset: usize, - memarg: MemArg, - store_ty: ValType, - ) -> Result<()> { - let ty = self.check_shared_memarg(offset, memarg)?; - self.pop_operand(offset, Some(store_ty))?; - self.pop_operand(offset, Some(ty))?; + fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> { + let ty = self.check_shared_memarg(memarg)?; + self.pop_operand(Some(store_ty))?; + self.pop_operand(Some(ty))?; Ok(()) } /// Checks the validity of a common atomic binary operator. - fn check_atomic_binary_op( - &mut self, - offset: usize, - memarg: MemArg, - op_ty: ValType, - ) -> Result<()> { - let ty = self.check_shared_memarg(offset, memarg)?; - self.pop_operand(offset, Some(op_ty))?; - self.pop_operand(offset, Some(ty))?; + fn check_atomic_binary_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> { + let ty = self.check_shared_memarg(memarg)?; + self.pop_operand(Some(op_ty))?; + self.pop_operand(Some(ty))?; self.push_operand(op_ty)?; Ok(()) } /// Checks the validity of an atomic compare exchange operator. - fn check_atomic_binary_cmpxchg( - &mut self, - offset: usize, - memarg: MemArg, - op_ty: ValType, - ) -> Result<()> { - let ty = self.check_shared_memarg(offset, memarg)?; - self.pop_operand(offset, Some(op_ty))?; - self.pop_operand(offset, Some(op_ty))?; - self.pop_operand(offset, Some(ty))?; + fn check_atomic_binary_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> { + let ty = self.check_shared_memarg(memarg)?; + self.pop_operand(Some(op_ty))?; + self.pop_operand(Some(op_ty))?; + self.pop_operand(Some(ty))?; self.push_operand(op_ty)?; Ok(()) } /// Checks a [`V128`] splat operator. 
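/// A splat pops one scalar operand of `src_ty` and pushes a `v128` with that /// value replicated into every lane (e.g. `f32x4.splat` consumes an `f32`).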
- fn check_v128_splat(&mut self, offset: usize, src_ty: ValType) -> Result<()> { - self.pop_operand(offset, Some(src_ty))?; + fn check_v128_splat(&mut self, src_ty: ValType) -> Result<()> { + self.pop_operand(Some(src_ty))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] binary operator. - fn check_v128_binary_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_binary_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] binary float operator. - fn check_v128_fbinary_op(&mut self, offset: usize) -> Result<()> { - self.check_non_deterministic_enabled(offset)?; - self.check_v128_binary_op(offset) + fn check_v128_fbinary_op(&mut self) -> Result<()> { + self.check_non_deterministic_enabled()?; + self.check_v128_binary_op() } /// Checks a [`V128`] relaxed binary operator. - fn check_v128_relaxed_binary_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_relaxed_binary_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] unary operator. - fn check_v128_unary_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_unary_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] unary float operator. - fn check_v128_funary_op(&mut self, offset: usize) -> Result<()> { - self.check_non_deterministic_enabled(offset)?; - self.check_v128_unary_op(offset) + fn check_v128_funary_op(&mut self) -> Result<()> { + self.check_non_deterministic_enabled()?; + self.check_v128_unary_op() } /// Checks a [`V128`] relaxed unary operator. - fn check_v128_relaxed_unary_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_relaxed_unary_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] relaxed ternary operator. - fn check_v128_relaxed_ternary_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; - self.pop_operand(offset, Some(ValType::V128))?; - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_relaxed_ternary_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; + self.pop_operand(Some(ValType::V128))?; + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] bitmask operator. - fn check_v128_bitmask_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_bitmask_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::I32)?; Ok(()) } /// Checks a [`V128`] shift operator. 
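/// A shift pops the `i32` shift amount first, then the `v128` operand, and /// pushes the shifted `v128` (e.g. `i32x4.shl`).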
- fn check_v128_shift_op(&mut self, offset: usize) -> Result<()> { - self.pop_operand(offset, Some(ValType::I32))?; - self.pop_operand(offset, Some(ValType::V128))?; + fn check_v128_shift_op(&mut self) -> Result<()> { + self.pop_operand(Some(ValType::I32))?; + self.pop_operand(Some(ValType::V128))?; self.push_operand(ValType::V128)?; Ok(()) } /// Checks a [`V128`] common load operator. - fn check_v128_load_op(&mut self, offset: usize, memarg: MemArg) -> Result<()> { - let idx = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(idx))?; + fn check_v128_load_op(&mut self, memarg: MemArg) -> Result<()> { + let idx = self.check_memarg(memarg)?; + self.pop_operand(Some(idx))?; self.push_operand(ValType::V128)?; Ok(()) } - fn func_type_at(&self, at: u32, offset: usize) -> Result<&'resources R::FuncType> { + fn func_type_at(&self, at: u32) -> Result<&'resources R::FuncType> { self.resources .func_type_at(at) - .ok_or_else(|| format_err!(offset, "unknown type: type index out of bounds")) + .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds")) } - fn tag_at(&self, at: u32, offset: usize) -> Result<&'resources R::FuncType> { + fn tag_at(&self, at: u32) -> Result<&'resources R::FuncType> { self.resources .tag_at(at) - .ok_or_else(|| format_err!(offset, "unknown tag {}: tag index out of bounds", at)) + .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at)) } - fn params( - &self, - offset: usize, - ty: BlockType, - ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> { + fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> { Ok(match ty { BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()), - BlockType::FuncType(t) => Either::A(self.func_type_at(t, offset)?.inputs()), + BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.inputs()), }) } - fn results( - &self, - offset: usize, - ty: BlockType, - ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> { + fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> { Ok(match ty { BlockType::Empty => Either::B(None.into_iter()), BlockType::Type(t) => Either::B(Some(t).into_iter()), - BlockType::FuncType(t) => Either::A(self.func_type_at(t, offset)?.outputs()), + BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.outputs()), }) } fn label_types( &self, - offset: usize, ty: BlockType, kind: FrameKind, ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> { Ok(match kind { - FrameKind::Loop => Either::A(self.params(offset, ty)?), - _ => Either::B(self.results(offset, ty)?), + FrameKind::Loop => Either::A(self.params(ty)?), + _ => Either::B(self.results(ty)?), }) } } @@ -918,27 +905,27 @@ struct WasmProposalValidator<'validator, 'resources, T>( ); impl<T: WasmModuleResources> WasmProposalValidator<'_, '_, T> { - fn check_enabled(&self, offset: usize, flag: bool, desc: &str) -> Result<()> { + fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> { if flag { return Ok(()); } - bail!(offset, "{desc} support is not enabled"); + bail!(self.0.offset, "{desc} support is not enabled"); } } macro_rules! validate_proposal { ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => { $( - fn $visit(&mut self, offset: usize $($(,$arg: $argty)*)?) -> Result<()> { - validate_proposal!(validate self offset $proposal); - self.0.$visit(offset $( $(,$arg)* )?) + fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> { + validate_proposal!(validate self $proposal); + self.0.$visit($( $($arg),* )?) 
} )* }; - (validate self offset mvp) => {}; - (validate $self:ident $offset:ident $proposal:ident) => { - $self.check_enabled($offset, $self.0.features.$proposal, validate_proposal!(desc $proposal))? + (validate self mvp) => {}; + (validate $self:ident $proposal:ident) => { + $self.check_enabled($self.0.features.$proposal, validate_proposal!(desc $proposal))? }; (desc simd) => ("SIMD"); @@ -967,58 +954,58 @@ where { type Output = Result<()>; - fn visit_nop(&mut self, _: usize) -> Self::Output { + fn visit_nop(&mut self) -> Self::Output { Ok(()) } - fn visit_unreachable(&mut self, offset: usize) -> Self::Output { - self.unreachable(offset)?; + fn visit_unreachable(&mut self) -> Self::Output { + self.unreachable()?; Ok(()) } - fn visit_block(&mut self, offset: usize, ty: BlockType) -> Self::Output { - self.check_block_type(offset, ty)?; - for ty in self.params(offset, ty)?.rev() { - self.pop_operand(offset, Some(ty))?; + fn visit_block(&mut self, ty: BlockType) -> Self::Output { + self.check_block_type(ty)?; + for ty in self.params(ty)?.rev() { + self.pop_operand(Some(ty))?; } - self.push_ctrl(offset, FrameKind::Block, ty)?; + self.push_ctrl(FrameKind::Block, ty)?; Ok(()) } - fn visit_loop(&mut self, offset: usize, ty: BlockType) -> Self::Output { - self.check_block_type(offset, ty)?; - for ty in self.params(offset, ty)?.rev() { - self.pop_operand(offset, Some(ty))?; + fn visit_loop(&mut self, ty: BlockType) -> Self::Output { + self.check_block_type(ty)?; + for ty in self.params(ty)?.rev() { + self.pop_operand(Some(ty))?; } - self.push_ctrl(offset, FrameKind::Loop, ty)?; + self.push_ctrl(FrameKind::Loop, ty)?; Ok(()) } - fn visit_if(&mut self, offset: usize, ty: BlockType) -> Self::Output { - self.check_block_type(offset, ty)?; - self.pop_operand(offset, Some(ValType::I32))?; - for ty in self.params(offset, ty)?.rev() { - self.pop_operand(offset, Some(ty))?; + fn visit_if(&mut self, ty: BlockType) -> Self::Output { + self.check_block_type(ty)?; + self.pop_operand(Some(ValType::I32))?; + for ty in self.params(ty)?.rev() { + self.pop_operand(Some(ty))?; } - self.push_ctrl(offset, FrameKind::If, ty)?; + self.push_ctrl(FrameKind::If, ty)?; Ok(()) } - fn visit_else(&mut self, offset: usize) -> Self::Output { - let frame = self.pop_ctrl(offset)?; + fn visit_else(&mut self) -> Self::Output { + let frame = self.pop_ctrl()?; if frame.kind != FrameKind::If { - bail!(offset, "else found outside of an `if` block"); + bail!(self.offset, "else found outside of an `if` block"); } - self.push_ctrl(offset, FrameKind::Else, frame.block_type)?; + self.push_ctrl(FrameKind::Else, frame.block_type)?; Ok(()) } - fn visit_try(&mut self, offset: usize, ty: BlockType) -> Self::Output { - self.check_block_type(offset, ty)?; - for ty in self.params(offset, ty)?.rev() { - self.pop_operand(offset, Some(ty))?; + fn visit_try(&mut self, ty: BlockType) -> Self::Output { + self.check_block_type(ty)?; + for ty in self.params(ty)?.rev() { + self.pop_operand(Some(ty))?; } - self.push_ctrl(offset, FrameKind::Try, ty)?; + self.push_ctrl(FrameKind::Try, ty)?; Ok(()) } - fn visit_catch(&mut self, offset: usize, index: u32) -> Self::Output { - let frame = self.pop_ctrl(offset)?; + fn visit_catch(&mut self, index: u32) -> Self::Output { + let frame = self.pop_ctrl()?; if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch { - bail!(offset, "catch found outside of an `try` block"); + bail!(self.offset, "catch found outside of an `try` block"); } // Start a new frame and push `exnref` value. 
let height = self.operands.len(); @@ -1029,56 +1016,59 @@ where unreachable: false, }); // Push exception argument types. - let ty = self.tag_at(index, offset)?; + let ty = self.tag_at(index)?; for ty in ty.inputs() { self.push_operand(ty)?; } Ok(()) } - fn visit_throw(&mut self, offset: usize, index: u32) -> Self::Output { + fn visit_throw(&mut self, index: u32) -> Self::Output { // Check values associated with the exception. - let ty = self.tag_at(index, offset)?; + let ty = self.tag_at(index)?; for ty in ty.inputs().rev() { - self.pop_operand(offset, Some(ty))?; + self.pop_operand(Some(ty))?; } if ty.outputs().len() > 0 { - bail!(offset, "result type expected to be empty for exception"); + bail!( + self.offset, + "result type expected to be empty for exception" + ); } - self.unreachable(offset)?; + self.unreachable()?; Ok(()) } - fn visit_rethrow(&mut self, offset: usize, relative_depth: u32) -> Self::Output { + fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output { // This is not a jump, but we need to check that the `rethrow` // targets an actual `catch` to get the exception. - let (_, kind) = self.jump(offset, relative_depth)?; + let (_, kind) = self.jump(relative_depth)?; if kind != FrameKind::Catch && kind != FrameKind::CatchAll { bail!( - offset, + self.offset, "invalid rethrow label: target was not a `catch` block" ); } - self.unreachable(offset)?; + self.unreachable()?; Ok(()) } - fn visit_delegate(&mut self, offset: usize, relative_depth: u32) -> Self::Output { - let frame = self.pop_ctrl(offset)?; + fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output { + let frame = self.pop_ctrl()?; if frame.kind != FrameKind::Try { - bail!(offset, "delegate found outside of an `try` block"); + bail!(self.offset, "delegate found outside of an `try` block"); } // This operation is not a jump, but we need to check the // depth for validity - let _ = self.jump(offset, relative_depth)?; - for ty in self.results(offset, frame.block_type)? { + let _ = self.jump(relative_depth)?; + for ty in self.results(frame.block_type)? { self.push_operand(ty)?; } Ok(()) } - fn visit_catch_all(&mut self, offset: usize) -> Self::Output { - let frame = self.pop_ctrl(offset)?; + fn visit_catch_all(&mut self) -> Self::Output { + let frame = self.pop_ctrl()?; if frame.kind == FrameKind::CatchAll { - bail!(offset, "only one catch_all allowed per `try` block"); + bail!(self.offset, "only one catch_all allowed per `try` block"); } else if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch { - bail!(offset, "catch_all found outside of a `try` block"); + bail!(self.offset, "catch_all found outside of a `try` block"); } let height = self.operands.len(); self.control.push(Frame { @@ -1089,63 +1079,64 @@ where }); Ok(()) } - fn visit_end(&mut self, offset: usize) -> Self::Output { - let mut frame = self.pop_ctrl(offset)?; + fn visit_end(&mut self) -> Self::Output { + let mut frame = self.pop_ctrl()?; // Note that this `if` isn't included in the appendix right // now, but it's used to allow for `if` statements that are // missing an `else` block which have the same parameter/return // types on the block (since that's valid). if frame.kind == FrameKind::If { - self.push_ctrl(offset, FrameKind::Else, frame.block_type)?; - frame = self.pop_ctrl(offset)?; + self.push_ctrl(FrameKind::Else, frame.block_type)?; + frame = self.pop_ctrl()?; } - for ty in self.results(offset, frame.block_type)? { + for ty in self.results(frame.block_type)? 
{ self.push_operand(ty)?; } if self.control.is_empty() && self.end_which_emptied_control.is_none() { - self.end_which_emptied_control = Some(offset); + assert_ne!(self.offset, 0); + self.end_which_emptied_control = Some(self.offset); } Ok(()) } - fn visit_br(&mut self, offset: usize, relative_depth: u32) -> Self::Output { - let (ty, kind) = self.jump(offset, relative_depth)?; - for ty in self.label_types(offset, ty, kind)?.rev() { - self.pop_operand(offset, Some(ty))?; + fn visit_br(&mut self, relative_depth: u32) -> Self::Output { + let (ty, kind) = self.jump(relative_depth)?; + for ty in self.label_types(ty, kind)?.rev() { + self.pop_operand(Some(ty))?; } - self.unreachable(offset)?; + self.unreachable()?; Ok(()) } - fn visit_br_if(&mut self, offset: usize, relative_depth: u32) -> Self::Output { - self.pop_operand(offset, Some(ValType::I32))?; - let (ty, kind) = self.jump(offset, relative_depth)?; - let types = self.label_types(offset, ty, kind)?; + fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output { + self.pop_operand(Some(ValType::I32))?; + let (ty, kind) = self.jump(relative_depth)?; + let types = self.label_types(ty, kind)?; for ty in types.clone().rev() { - self.pop_operand(offset, Some(ty))?; + self.pop_operand(Some(ty))?; } for ty in types { self.push_operand(ty)?; } Ok(()) } - fn visit_br_table(&mut self, offset: usize, table: BrTable) -> Self::Output { - self.pop_operand(offset, Some(ValType::I32))?; - let default = self.jump(offset, table.default())?; - let default_types = self.label_types(offset, default.0, default.1)?; + fn visit_br_table(&mut self, table: BrTable) -> Self::Output { + self.pop_operand(Some(ValType::I32))?; + let default = self.jump(table.default())?; + let default_types = self.label_types(default.0, default.1)?; for element in table.targets() { let relative_depth = element?; - let block = self.jump(offset, relative_depth)?; - let tys = self.label_types(offset, block.0, block.1)?; + let block = self.jump(relative_depth)?; + let tys = self.label_types(block.0, block.1)?; if tys.len() != default_types.len() { bail!( - offset, + self.offset, "type mismatch: br_table target labels have different number of types" ); } debug_assert!(self.br_table_tmp.is_empty()); for ty in tys.rev() { - let ty = self.pop_operand(offset, Some(ty))?; + let ty = self.pop_operand(Some(ty))?; self.br_table_tmp.push(ty); } for ty in self.inner.br_table_tmp.drain(..).rev() { @@ -1153,55 +1144,52 @@ where } } for ty in default_types.rev() { - self.pop_operand(offset, Some(ty))?; + self.pop_operand(Some(ty))?; } - self.unreachable(offset)?; + self.unreachable()?; Ok(()) } - fn visit_return(&mut self, offset: usize) -> Self::Output { - self.check_return(offset)?; + fn visit_return(&mut self) -> Self::Output { + self.check_return()?; Ok(()) } - fn visit_call(&mut self, offset: usize, function_index: u32) -> Self::Output { - self.check_call(offset, function_index)?; + fn visit_call(&mut self, function_index: u32) -> Self::Output { + self.check_call(function_index)?; Ok(()) } - fn visit_return_call(&mut self, offset: usize, function_index: u32) -> Self::Output { - self.check_call(offset, function_index)?; - self.check_return(offset)?; + fn visit_return_call(&mut self, function_index: u32) -> Self::Output { + self.check_call(function_index)?; + self.check_return()?; Ok(()) } fn visit_call_indirect( &mut self, - offset: usize, index: u32, table_index: u32, table_byte: u8, ) -> Self::Output { if table_byte != 0 && !self.features.reference_types { - bail!(offset, "reference-types not 
enabled: zero byte expected"); + bail!( + self.offset, + "reference-types not enabled: zero byte expected" + ); } - self.check_call_indirect(offset, index, table_index)?; + self.check_call_indirect(index, table_index)?; Ok(()) } - fn visit_return_call_indirect( - &mut self, - offset: usize, - index: u32, - table_index: u32, - ) -> Self::Output { - self.check_call_indirect(offset, index, table_index)?; - self.check_return(offset)?; + fn visit_return_call_indirect(&mut self, index: u32, table_index: u32) -> Self::Output { + self.check_call_indirect(index, table_index)?; + self.check_return()?; Ok(()) } - fn visit_drop(&mut self, offset: usize) -> Self::Output { - self.pop_operand(offset, None)?; + fn visit_drop(&mut self) -> Self::Output { + self.pop_operand(None)?; Ok(()) } - fn visit_select(&mut self, offset: usize) -> Self::Output { - self.pop_operand(offset, Some(ValType::I32))?; - let ty1 = self.pop_operand(offset, None)?; - let ty2 = self.pop_operand(offset, None)?; + fn visit_select(&mut self) -> Self::Output { + self.pop_operand(Some(ValType::I32))?; + let ty1 = self.pop_operand(None)?; + let ty2 = self.pop_operand(None)?; fn is_num(ty: Option<ValType>) -> bool { matches!( ty, @@ -1214,867 +1202,870 @@ where ) } if !is_num(ty1) || !is_num(ty2) { - bail!(offset, "type mismatch: select only takes integral types") + bail!( + self.offset, + "type mismatch: select only takes integral types" + ) } if ty1 != ty2 && ty1 != None && ty2 != None { bail!( - offset, + self.offset, "type mismatch: select operands have different types" ) } self.push_operand(ty1.or(ty2))?; Ok(()) } - fn visit_typed_select(&mut self, offset: usize, ty: ValType) -> Self::Output { + fn visit_typed_select(&mut self, ty: ValType) -> Self::Output { self.features .check_value_type(ty) - .map_err(|e| BinaryReaderError::new(e, offset))?; - self.pop_operand(offset, Some(ValType::I32))?; - self.pop_operand(offset, Some(ty))?; - self.pop_operand(offset, Some(ty))?; + .map_err(|e| BinaryReaderError::new(e, self.offset))?; + self.pop_operand(Some(ValType::I32))?; + self.pop_operand(Some(ty))?; + self.pop_operand(Some(ty))?; self.push_operand(ty)?; Ok(()) } - fn visit_local_get(&mut self, offset: usize, local_index: u32) -> Self::Output { - let ty = self.local(offset, local_index)?; + fn visit_local_get(&mut self, local_index: u32) -> Self::Output { + let ty = self.local(local_index)?; self.push_operand(ty)?; Ok(()) } - fn visit_local_set(&mut self, offset: usize, local_index: u32) -> Self::Output { - let ty = self.local(offset, local_index)?; - self.pop_operand(offset, Some(ty))?; + fn visit_local_set(&mut self, local_index: u32) -> Self::Output { + let ty = self.local(local_index)?; + self.pop_operand(Some(ty))?; Ok(()) } - fn visit_local_tee(&mut self, offset: usize, local_index: u32) -> Self::Output { - let ty = self.local(offset, local_index)?; - self.pop_operand(offset, Some(ty))?; + fn visit_local_tee(&mut self, local_index: u32) -> Self::Output { + let ty = self.local(local_index)?; + self.pop_operand(Some(ty))?; self.push_operand(ty)?; Ok(()) } - fn visit_global_get(&mut self, offset: usize, global_index: u32) -> Self::Output { + fn visit_global_get(&mut self, global_index: u32) -> Self::Output { if let Some(ty) = self.resources.global_at(global_index) { self.push_operand(ty.content_type)?; } else { - bail!(offset, "unknown global: global index out of bounds"); + bail!(self.offset, "unknown global: global index out of bounds"); }; Ok(()) } - fn visit_global_set(&mut self, offset: usize, global_index: u32) -> Self::Output { + fn 
visit_global_set(&mut self, global_index: u32) -> Self::Output { if let Some(ty) = self.resources.global_at(global_index) { if !ty.mutable { bail!( - offset, + self.offset, "global is immutable: cannot modify it with `global.set`" ); } - self.pop_operand(offset, Some(ty.content_type))?; + self.pop_operand(Some(ty.content_type))?; } else { - bail!(offset, "unknown global: global index out of bounds"); + bail!(self.offset, "unknown global: global index out of bounds"); }; Ok(()) } - fn visit_i32_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output { + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I32)?; Ok(()) } - fn visit_i64_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output { + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I64)?; Ok(()) } - fn visit_f32_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - self.check_non_deterministic_enabled(offset)?; - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output { + self.check_non_deterministic_enabled()?; + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::F32)?; Ok(()) } - fn visit_f64_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - self.check_non_deterministic_enabled(offset)?; - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output { + self.check_non_deterministic_enabled()?; + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::F64)?; Ok(()) } - fn visit_i32_load8_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output { + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I32)?; Ok(()) } - fn visit_i32_load8_u(&mut self, input: usize, memarg: MemArg) -> Self::Output { - self.visit_i32_load8_s(input, memarg) + fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output { + self.visit_i32_load8_s(memarg) } - fn visit_i32_load16_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output { + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I32)?; Ok(()) } - fn visit_i32_load16_u(&mut self, input: usize, memarg: MemArg) -> Self::Output { - self.visit_i32_load16_s(input, memarg) + fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output { + self.visit_i32_load16_s(memarg) } - fn visit_i64_load8_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output { - let ty = self.check_memarg(memarg, offset)?; - self.pop_operand(offset, Some(ty))?; + fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output { + let ty = self.check_memarg(memarg)?; + self.pop_operand(Some(ty))?; self.push_operand(ValType::I64)?; Ok(()) } - fn 
visit_i64_load8_u(&mut self, input: usize, memarg: MemArg) -> Self::Output {
-        self.visit_i64_load8_s(input, memarg)
+    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.visit_i64_load8_s(memarg)
     }
-    fn visit_i64_load16_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::I64)?;
         Ok(())
     }
-    fn visit_i64_load16_u(&mut self, input: usize, memarg: MemArg) -> Self::Output {
-        self.visit_i64_load16_s(input, memarg)
+    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.visit_i64_load16_s(memarg)
     }
-    fn visit_i64_load32_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::I64)?;
         Ok(())
     }
-    fn visit_i64_load32_u(&mut self, input: usize, memarg: MemArg) -> Self::Output {
-        self.visit_i64_load32_s(input, memarg)
+    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.visit_i64_load32_s(memarg)
     }
-    fn visit_i32_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i64_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_f32_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::F32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::F32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_f64_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::F64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::F64))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i32_store8(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i32_store16(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i64_store8(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i64_store16(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_i64_store32(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_memory_size(&mut self, offset: usize, mem: u32, mem_byte: u8) -> Self::Output {
+    fn visit_memory_size(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
         if mem_byte != 0 && !self.features.multi_memory {
-            bail!(offset, "multi-memory not enabled: zero byte expected");
+            bail!(self.offset, "multi-memory not enabled: zero byte expected");
         }
-        let index_ty = self.check_memory_index(offset, mem)?;
+        let index_ty = self.check_memory_index(mem)?;
         self.push_operand(index_ty)?;
         Ok(())
     }
-    fn visit_memory_grow(&mut self, offset: usize, mem: u32, mem_byte: u8) -> Self::Output {
+    fn visit_memory_grow(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
         if mem_byte != 0 && !self.features.multi_memory {
-            bail!(offset, "multi-memory not enabled: zero byte expected");
+            bail!(self.offset, "multi-memory not enabled: zero byte expected");
         }
-        let index_ty = self.check_memory_index(offset, mem)?;
-        self.pop_operand(offset, Some(index_ty))?;
+        let index_ty = self.check_memory_index(mem)?;
+        self.pop_operand(Some(index_ty))?;
         self.push_operand(index_ty)?;
         Ok(())
     }
-    fn visit_i32_const(&mut self, _offset: usize, _value: i32) -> Self::Output {
+    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i64_const(&mut self, _offset: usize, _value: i64) -> Self::Output {
+    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
         self.push_operand(ValType::I64)?;
         Ok(())
     }
-    fn visit_f32_const(&mut self, offset: usize, _value: Ieee32) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
+    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
         self.push_operand(ValType::F32)?;
         Ok(())
     }
-    fn visit_f64_const(&mut self, offset: usize, _value: Ieee64) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
+    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
         self.push_operand(ValType::F64)?;
         Ok(())
     }
-    fn visit_i32_eqz(&mut self, offset: usize) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::I32))?;
+    fn visit_i32_eqz(&mut self) -> Self::Output {
+        self.pop_operand(Some(ValType::I32))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i32_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_eq(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_ne(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_lt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_lt_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_lt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_lt_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_gt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_gt_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_gt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_gt_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_le_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_le_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_le_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_le_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_ge_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_ge_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i32_ge_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I32)
+    fn visit_i32_ge_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I32)
     }
-    fn visit_i64_eqz(&mut self, offset: usize) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::I64))?;
+    fn visit_i64_eqz(&mut self) -> Self::Output {
+        self.pop_operand(Some(ValType::I64))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i64_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_eq(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_ne(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_lt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_lt_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
    }
-    fn visit_i64_lt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_lt_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_gt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_gt_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_gt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_gt_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_le_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_le_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_le_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_le_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_ge_s(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_ge_s(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_i64_ge_u(&mut self, offset: usize) -> Self::Output {
-        self.check_cmp_op(offset, ValType::I64)
+    fn visit_i64_ge_u(&mut self) -> Self::Output {
+        self.check_cmp_op(ValType::I64)
     }
-    fn visit_f32_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_eq(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f32_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_ne(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f32_lt(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_lt(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f32_gt(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_gt(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f32_le(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_le(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f32_ge(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F32)
+    fn visit_f32_ge(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F32)
     }
-    fn visit_f64_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_eq(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_f64_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_ne(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_f64_lt(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_lt(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_f64_gt(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_gt(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_f64_le(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_le(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_f64_ge(&mut self, offset: usize) -> Self::Output {
-        self.check_fcmp_op(offset, ValType::F64)
+    fn visit_f64_ge(&mut self) -> Self::Output {
+        self.check_fcmp_op(ValType::F64)
     }
-    fn visit_i32_clz(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I32)
+    fn visit_i32_clz(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I32)
     }
-    fn visit_i32_ctz(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I32)
+    fn visit_i32_ctz(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I32)
     }
-    fn visit_i32_popcnt(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I32)
+    fn visit_i32_popcnt(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I32)
     }
-    fn visit_i32_add(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_add(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_sub(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_mul(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_div_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_div_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_div_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_div_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_rem_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_rem_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_rem_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_rem_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_and(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_and(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_or(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_or(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_xor(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_xor(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_shl(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_shr_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_shr_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_rotl(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_rotl(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i32_rotr(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I32)
+    fn visit_i32_rotr(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I32)
     }
-    fn visit_i64_clz(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_clz(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i64_ctz(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_ctz(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i64_popcnt(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_popcnt(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i64_add(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_add(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_sub(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_mul(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_div_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_div_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_div_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_div_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_rem_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_rem_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_rem_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_rem_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_and(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_and(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_or(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_or(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_xor(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_xor(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_shl(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_shr_s(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_shr_u(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_rotl(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_rotl(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_i64_rotr(&mut self, offset: usize) -> Self::Output {
-        self.check_binary_op(offset, ValType::I64)
+    fn visit_i64_rotr(&mut self) -> Self::Output {
+        self.check_binary_op(ValType::I64)
     }
-    fn visit_f32_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_abs(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_neg(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_ceil(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_ceil(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_floor(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_floor(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_trunc(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_trunc(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_nearest(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_nearest(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
    }
-    fn visit_f32_sqrt(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F32)
+    fn visit_f32_sqrt(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F32)
     }
-    fn visit_f32_add(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_add(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_sub(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_mul(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_div(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_div(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_min(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_min(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_max(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_max(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f32_copysign(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F32)
+    fn visit_f32_copysign(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F32)
     }
-    fn visit_f64_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_abs(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_neg(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_ceil(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_ceil(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_floor(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_floor(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_trunc(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_trunc(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_nearest(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_nearest(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_sqrt(&mut self, offset: usize) -> Self::Output {
-        self.check_funary_op(offset, ValType::F64)
+    fn visit_f64_sqrt(&mut self) -> Self::Output {
+        self.check_funary_op(ValType::F64)
     }
-    fn visit_f64_add(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_add(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_sub(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_mul(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_div(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_div(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_min(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_min(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_max(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_max(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_f64_copysign(&mut self, offset: usize) -> Self::Output {
-        self.check_fbinary_op(offset, ValType::F64)
+    fn visit_f64_copysign(&mut self) -> Self::Output {
+        self.check_fbinary_op(ValType::F64)
     }
-    fn visit_i32_wrap_i64(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::I64)
+    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::I64)
     }
-    fn visit_i32_trunc_f32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F32)
+    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F32)
     }
-    fn visit_i32_trunc_f32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F32)
+    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F32)
     }
-    fn visit_i32_trunc_f64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F64)
+    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F64)
     }
-    fn visit_i32_trunc_f64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F64)
+    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F64)
     }
-    fn visit_i64_extend_i32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::I32)
+    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::I32)
     }
-    fn visit_i64_extend_i32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::I32)
+    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::I32)
     }
-    fn visit_i64_trunc_f32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F32)
+    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F32)
     }
-    fn visit_i64_trunc_f32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F32)
+    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F32)
     }
-    fn visit_i64_trunc_f64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F64)
+    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F64)
     }
-    fn visit_i64_trunc_f64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F64)
+    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F64)
     }
-    fn visit_f32_convert_i32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::I32)
+    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::I32)
     }
-    fn visit_f32_convert_i32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::I32)
+    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::I32)
     }
-    fn visit_f32_convert_i64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::I64)
+    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::I64)
     }
-    fn visit_f32_convert_i64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::I64)
+    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::I64)
     }
-    fn visit_f32_demote_f64(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::F64)
+    fn visit_f32_demote_f64(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::F64)
     }
-    fn visit_f64_convert_i32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::I32)
+    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::I32)
     }
-    fn visit_f64_convert_i32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::I32)
+    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::I32)
     }
-    fn visit_f64_convert_i64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::I64)
+    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::I64)
     }
-    fn visit_f64_convert_i64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::I64)
+    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::I64)
     }
-    fn visit_f64_promote_f32(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::F32)
+    fn visit_f64_promote_f32(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::F32)
     }
-    fn visit_i32_reinterpret_f32(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F32)
+    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
     }
-    fn visit_i64_reinterpret_f64(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F64)
+    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F64)
     }
-    fn visit_f32_reinterpret_i32(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F32, ValType::I32)
+    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F32, ValType::I32)
     }
-    fn visit_f64_reinterpret_i64(&mut self, offset: usize) -> Self::Output {
-        self.check_fconversion_op(offset, ValType::F64, ValType::I64)
+    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
+        self.check_fconversion_op(ValType::F64, ValType::I64)
     }
-    fn visit_i32_trunc_sat_f32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F32)
+    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F32)
     }
-    fn visit_i32_trunc_sat_f32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F32)
+    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F32)
     }
-    fn visit_i32_trunc_sat_f64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F64)
+    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F64)
     }
-    fn visit_i32_trunc_sat_f64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I32, ValType::F64)
+    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I32, ValType::F64)
     }
-    fn visit_i64_trunc_sat_f32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F32)
+    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F32)
     }
-    fn visit_i64_trunc_sat_f32_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F32)
+    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F32)
     }
-    fn visit_i64_trunc_sat_f64_s(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F64)
+    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F64)
     }
-    fn visit_i64_trunc_sat_f64_u(&mut self, offset: usize) -> Self::Output {
-        self.check_conversion_op(offset, ValType::I64, ValType::F64)
+    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
+        self.check_conversion_op(ValType::I64, ValType::F64)
     }
-    fn visit_i32_extend8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I32)
+    fn visit_i32_extend8_s(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I32)
     }
-    fn visit_i32_extend16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I32)
+    fn visit_i32_extend16_s(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I32)
     }
-    fn visit_i64_extend8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_extend8_s(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i64_extend16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_extend16_s(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i64_extend32_s(&mut self, offset: usize) -> Self::Output {
-        self.check_unary_op(offset, ValType::I64)
+    fn visit_i64_extend32_s(&mut self) -> Self::Output {
+        self.check_unary_op(ValType::I64)
     }
-    fn visit_i32_atomic_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_load16_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_load8_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I32)
     }
-    fn visit_i64_atomic_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_load32_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_load16_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_load8_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_load(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_load(memarg, ValType::I64)
     }
-    fn visit_i32_atomic_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_store16(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_store8(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I32)
     }
-    fn visit_i64_atomic_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_store32(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_store16(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_store8(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_store(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_store(memarg, ValType::I64)
     }
-    fn visit_i32_atomic_rmw_add(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw_sub(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw_and(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw_or(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw_xor(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_add_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_sub_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_and_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_or_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_xor_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_add_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_sub_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_and_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_or_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_xor_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i64_atomic_rmw_add(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw_sub(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw_and(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw_or(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw_xor(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_add_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_sub_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_and_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_or_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_xor_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_add_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_sub_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_and_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_or_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_xor_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_add_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_sub_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_and_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_or_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_xor_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i32_atomic_rmw_xchg(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_xchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_xchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw_cmpxchg(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
     }
-    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I32)
+    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
     }
-    fn visit_i64_atomic_rmw_xchg(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_xchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_xchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_xchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw_cmpxchg(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
     }
-    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_cmpxchg(offset, memarg, ValType::I64)
+    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
     }
-    fn visit_memory_atomic_notify(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_atomic_binary_op(offset, memarg, ValType::I32)
+    fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_atomic_binary_op(memarg, ValType::I32)
     }
-    fn visit_memory_atomic_wait32(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_shared_memarg(offset, memarg)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_shared_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_memory_atomic_wait64(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_shared_memarg(offset, memarg)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_shared_memarg(memarg)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_atomic_fence(&mut self, _offset: usize) -> Self::Output {
+    fn visit_atomic_fence(&mut self) -> Self::Output {
         Ok(())
     }
-    fn visit_ref_null(&mut self, offset: usize, ty: ValType) -> Self::Output {
+    fn visit_ref_null(&mut self, ty: ValType) -> Self::Output {
         self.features
             .check_value_type(ty)
-            .map_err(|e| BinaryReaderError::new(e, offset))?;
+            .map_err(|e| BinaryReaderError::new(e, self.offset))?;
         if !ty.is_reference_type() {
-            bail!(offset, "invalid non-reference type in ref.null");
+            bail!(self.offset, "invalid non-reference type in ref.null");
         }
         self.push_operand(ty)?;
         Ok(())
     }
-    fn visit_ref_is_null(&mut self, offset: usize) -> Self::Output {
-        match self.pop_operand(offset, None)? {
+    fn visit_ref_is_null(&mut self) -> Self::Output {
+        match self.pop_operand(None)? {
             None => {}
             Some(t) => {
                 if !t.is_reference_type() {
                     bail!(
-                        offset,
+                        self.offset,
                         "type mismatch: invalid reference type in ref.is_null"
                     );
                 }
@@ -2083,1046 +2074,1047 @@ where
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_ref_func(&mut self, offset: usize, function_index: u32) -> Self::Output {
+    fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
         if self.resources.type_of_function(function_index).is_none() {
             bail!(
-                offset,
+                self.offset,
                 "unknown function {}: function index out of bounds",
                 function_index,
             );
         }
         if !self.resources.is_function_referenced(function_index) {
-            bail!(offset, "undeclared function reference");
+            bail!(self.offset, "undeclared function reference");
         }
         self.push_operand(ValType::FuncRef)?;
         Ok(())
     }
-    fn visit_v128_load(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_store(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_v128_const(&mut self, _offset: usize, _value: V128) -> Self::Output {
+    fn visit_v128_const(&mut self, _value: V128) -> Self::Output {
        self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i8x16_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_splat(offset, ValType::I32)
+    fn visit_i8x16_splat(&mut self) -> Self::Output {
+        self.check_v128_splat(ValType::I32)
     }
-    fn visit_i16x8_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_splat(offset, ValType::I32)
+    fn visit_i16x8_splat(&mut self) -> Self::Output {
+        self.check_v128_splat(ValType::I32)
     }
-    fn visit_i32x4_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_splat(offset, ValType::I32)
+    fn visit_i32x4_splat(&mut self) -> Self::Output {
+        self.check_v128_splat(ValType::I32)
     }
-    fn visit_i64x2_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_splat(offset, ValType::I64)
+    fn visit_i64x2_splat(&mut self) -> Self::Output {
+        self.check_v128_splat(ValType::I64)
     }
-    fn visit_f32x4_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_v128_splat(offset, ValType::F32)
+    fn visit_f32x4_splat(&mut self) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_v128_splat(ValType::F32)
     }
-    fn visit_f64x2_splat(&mut self, offset: usize) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_v128_splat(offset, ValType::F64)
+    fn visit_f64x2_splat(&mut self) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_v128_splat(ValType::F64)
     }
-    fn visit_i8x16_extract_lane_s(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 16)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 16)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i8x16_extract_lane_u(&mut self, input: usize, lane: u8) -> Self::Output {
-        self.visit_i8x16_extract_lane_s(input, lane)
+    fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+        self.visit_i8x16_extract_lane_s(lane)
     }
-    fn visit_i16x8_extract_lane_s(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 8)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 8)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i16x8_extract_lane_u(&mut self, input: usize, lane: u8) -> Self::Output {
-        self.visit_i16x8_extract_lane_s(input, lane)
+    fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+        self.visit_i16x8_extract_lane_s(lane)
     }
-    fn visit_i32x4_extract_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_i8x16_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 16)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 16)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i16x8_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 8)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 8)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i32x4_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i64x2_extract_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::I64)?;
         Ok(())
     }
-    fn visit_i64x2_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::I64))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::I64))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_f32x4_extract_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::F32)?;
         Ok(())
     }
-    fn visit_f32x4_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::F32))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::F32))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_f64x2_extract_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::F64)?;
         Ok(())
     }
-    fn visit_f64x2_replace_lane(&mut self, offset: usize, lane: u8) -> Self::Output {
-        self.check_non_deterministic_enabled(offset)?;
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::F64))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+        self.check_non_deterministic_enabled()?;
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::F64))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_f32x4_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_eq(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_ne(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_lt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_lt(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_gt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_gt(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_le(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_le(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_ge(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_ge(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_eq(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_ne(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_lt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_lt(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_gt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_gt(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_le(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_le(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_ge(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_ge(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_add(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_sub(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_mul(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_div(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_div(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_min(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_min(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_max(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_max(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_pmin(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_pmin(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_pmax(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f32x4_pmax(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_add(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_sub(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_mul(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_div(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_div(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_min(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_min(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_max(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_max(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_pmin(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_pmin(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f64x2_pmax(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_fbinary_op(offset)
+    fn visit_f64x2_pmax(&mut self) -> Self::Output {
+        self.check_v128_fbinary_op()
     }
-    fn visit_f32x4_relaxed_min(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_f32x4_relaxed_min(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_f32x4_relaxed_max(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_f32x4_relaxed_max(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_f64x2_relaxed_min(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_f64x2_relaxed_min(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_f64x2_relaxed_max(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_f64x2_relaxed_max(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_i16x8_relaxed_q15mulr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_i16x8_relaxed_q15mulr_s(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_i16x8_dot_i8x16_i7x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_binary_op(offset)
+    fn visit_i16x8_dot_i8x16_i7x16_s(&mut self) -> Self::Output {
+        self.check_v128_relaxed_binary_op()
     }
-    fn visit_i8x16_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_eq(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_ne(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_lt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_lt_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_lt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_lt_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_gt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_gt_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_gt_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_gt_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_le_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_le_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
    }
-    fn visit_i8x16_le_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_le_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_ge_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_ge_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_ge_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_ge_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_eq(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_eq(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_ne(&mut self) -> Self::Output {
+
self.check_v128_binary_op() } - fn visit_i16x8_lt_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_lt_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_lt_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_lt_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_gt_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_gt_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_gt_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_gt_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_le_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_le_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_le_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_le_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_ge_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_ge_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i16x8_ge_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i16x8_ge_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_eq(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_eq(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_ne(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_ne(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_lt_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_lt_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_lt_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_lt_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_gt_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_gt_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_gt_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_gt_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_le_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_le_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_le_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_le_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_ge_s(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_ge_s(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i32x4_ge_u(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i32x4_ge_u(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn visit_i64x2_eq(&mut self, offset: usize) -> Self::Output { - self.check_v128_binary_op(offset) + fn visit_i64x2_eq(&mut self) -> Self::Output { + self.check_v128_binary_op() } - fn 
-    fn visit_i64x2_ne(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_ne(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_lt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_lt_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_gt_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_gt_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_le_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_le_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_ge_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_ge_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_v128_and(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_v128_and(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_v128_andnot(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_v128_andnot(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_v128_or(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_v128_or(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_v128_xor(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_v128_xor(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_add(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_add_sat_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_add_sat_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_sub(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_sub_sat_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_sub_sat_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_min_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_min_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_min_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_min_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_max_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_max_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_max_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_max_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_add(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_add_sat_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_add_sat_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_sub(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_sub_sat_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_sub_sat_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_mul(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_min_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_min_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_min_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_min_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_max_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_max_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_max_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_max_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_add(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_sub(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_mul(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_min_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_min_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_min_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_min_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_max_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_max_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_max_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_max_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_dot_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_add(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_add(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_sub(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_sub(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_mul(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_mul(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_avgr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_avgr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_narrow_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i8x16_narrow_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_narrow_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_narrow_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_extmul_low_i8x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_extmul_high_i8x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_extmul_low_i8x16_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_extmul_high_i8x16_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_extmul_low_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_extmul_high_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_extmul_low_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i32x4_extmul_high_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_extmul_low_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_extmul_high_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_extmul_low_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i64x2_extmul_high_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_i16x8_q15mulr_sat_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_binary_op(offset)
+    fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
+        self.check_v128_binary_op()
     }
-    fn visit_f32x4_ceil(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_ceil(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_floor(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_floor(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_trunc(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_trunc(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_nearest(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_nearest(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_ceil(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_ceil(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_floor(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_floor(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_trunc(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_trunc(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_nearest(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_nearest(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_abs(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_neg(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_sqrt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_sqrt(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_abs(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_neg(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_sqrt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_sqrt(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_demote_f64x2_zero(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_promote_low_f32x4(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_convert_low_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f64x2_convert_low_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_i32x4_trunc_sat_f32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_i32x4_trunc_sat_f32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_convert_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_f32x4_convert_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_funary_op(offset)
+    fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_funary_op()
     }
-    fn visit_v128_not(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_v128_not(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i8x16_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i8x16_abs(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i8x16_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i8x16_neg(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i8x16_popcnt(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i8x16_popcnt(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_abs(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_neg(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_abs(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_neg(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_abs(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_abs(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_neg(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_neg(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extend_low_i8x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extend_high_i8x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extend_low_i8x16_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extend_high_i8x16_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extend_low_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extend_high_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extend_low_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extend_high_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_extend_low_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_extend_high_i32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_extend_low_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i64x2_extend_high_i32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_unary_op(offset)
+    fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
+        self.check_v128_unary_op()
     }
-    fn visit_i32x4_relaxed_trunc_sat_f32x4_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_unary_op(offset)
+    fn visit_i32x4_relaxed_trunc_sat_f32x4_s(&mut self) -> Self::Output {
+        self.check_v128_relaxed_unary_op()
     }
-    fn visit_i32x4_relaxed_trunc_sat_f32x4_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_unary_op(offset)
+    fn visit_i32x4_relaxed_trunc_sat_f32x4_u(&mut self) -> Self::Output {
+        self.check_v128_relaxed_unary_op()
     }
-    fn visit_i32x4_relaxed_trunc_sat_f64x2_s_zero(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_unary_op(offset)
+    fn visit_i32x4_relaxed_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
+        self.check_v128_relaxed_unary_op()
     }
-    fn visit_i32x4_relaxed_trunc_sat_f64x2_u_zero(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_unary_op(offset)
+    fn visit_i32x4_relaxed_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
+        self.check_v128_relaxed_unary_op()
     }
-    fn visit_v128_bitselect(&mut self, offset: usize) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_v128_bitselect(&mut self) -> Self::Output {
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_f32x4_relaxed_fma(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_f32x4_relaxed_fma(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_f32x4_relaxed_fnma(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_f32x4_relaxed_fnma(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_f64x2_relaxed_fma(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_f64x2_relaxed_fma(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_f64x2_relaxed_fnma(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_f64x2_relaxed_fnma(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_i8x16_relaxed_laneselect(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_i8x16_relaxed_laneselect(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_i16x8_relaxed_laneselect(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_i16x8_relaxed_laneselect(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_i32x4_relaxed_laneselect(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_i32x4_relaxed_laneselect(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_i64x2_relaxed_laneselect(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_i64x2_relaxed_laneselect(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_i32x4_dot_i8x16_i7x16_add_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_i32x4_dot_i8x16_i7x16_add_s(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_f32x4_relaxed_dot_bf16x8_add_f32x4(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_relaxed_ternary_op(offset)
+    fn visit_f32x4_relaxed_dot_bf16x8_add_f32x4(&mut self) -> Self::Output {
+        self.check_v128_relaxed_ternary_op()
     }
-    fn visit_v128_any_true(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_v128_any_true(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i8x16_all_true(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i8x16_all_true(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i8x16_bitmask(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i8x16_bitmask(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i16x8_all_true(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i16x8_all_true(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i16x8_bitmask(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i16x8_bitmask(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i32x4_all_true(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i32x4_all_true(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i32x4_bitmask(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i32x4_bitmask(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i64x2_all_true(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i64x2_all_true(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i64x2_bitmask(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_bitmask_op(offset)
+    fn visit_i64x2_bitmask(&mut self) -> Self::Output {
+        self.check_v128_bitmask_op()
     }
-    fn visit_i8x16_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i8x16_shl(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i8x16_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i8x16_shr_s(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i8x16_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i8x16_shr_u(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i16x8_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i16x8_shl(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i16x8_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i16x8_shr_s(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i16x8_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i16x8_shr_u(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i32x4_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i32x4_shl(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i32x4_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i32x4_shr_s(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i32x4_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i32x4_shr_u(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i64x2_shl(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i64x2_shl(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i64x2_shr_s(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i64x2_shr_s(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i64x2_shr_u(&mut self, offset: usize) -> Self::Output {
-        self.check_v128_shift_op(offset)
+    fn visit_i64x2_shr_u(&mut self) -> Self::Output {
+        self.check_v128_shift_op()
     }
-    fn visit_i8x16_swizzle(&mut self, offset: usize) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i8x16_swizzle(&mut self) -> Self::Output {
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i8x16_relaxed_swizzle(&mut self, offset: usize) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i8x16_relaxed_swizzle(&mut self) -> Self::Output {
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ValType::V128))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_i8x16_shuffle(&mut self, offset: usize, lanes: [u8; 16]) -> Self::Output {
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(ValType::V128))?;
+    fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(ValType::V128))?;
         for i in lanes {
-            self.check_simd_lane_index(offset, i, 32)?;
+            self.check_simd_lane_index(i, 32)?;
         }
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load8_splat(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load16_splat(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load32_splat(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        let ty = self.check_memarg(memarg, offset)?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
+        let ty = self.check_memarg(memarg)?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load32_zero(&mut self, input: usize, memarg: MemArg) -> Self::Output {
-        self.visit_v128_load32_splat(input, memarg)
+    fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
+        self.visit_v128_load32_splat(memarg)
     }
-    fn visit_v128_load64_splat(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load64_zero(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load8x8_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load8x8_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load16x4_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load16x4_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load32x2_s(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
    }
-    fn visit_v128_load32x2_u(&mut self, offset: usize, memarg: MemArg) -> Self::Output {
-        self.check_v128_load_op(offset, memarg)
+    fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
+        self.check_v128_load_op(memarg)
     }
-    fn visit_v128_load8_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 16)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_load8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 16)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load16_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 8)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_load16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 8)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load32_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_load32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_load64_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_load64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         self.push_operand(ValType::V128)?;
         Ok(())
     }
-    fn visit_v128_store8_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 16)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_store8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 16)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         Ok(())
     }
-    fn visit_v128_store16_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 8)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_store16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 8)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         Ok(())
     }
-    fn visit_v128_store32_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 4)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_store32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 4)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         Ok(())
     }
-    fn visit_v128_store64_lane(&mut self, offset: usize, memarg: MemArg, lane: u8) -> Self::Output {
-        let idx = self.check_memarg(memarg, offset)?;
-        self.check_simd_lane_index(offset, lane, 2)?;
-        self.pop_operand(offset, Some(ValType::V128))?;
-        self.pop_operand(offset, Some(idx))?;
+    fn visit_v128_store64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+        let idx = self.check_memarg(memarg)?;
+        self.check_simd_lane_index(lane, 2)?;
+        self.pop_operand(Some(ValType::V128))?;
+        self.pop_operand(Some(idx))?;
         Ok(())
     }
-    fn visit_memory_init(&mut self, offset: usize, segment: u32, mem: u32) -> Self::Output {
-        let ty = self.check_memory_index(offset, mem)?;
+    fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
+        let ty = self.check_memory_index(mem)?;
         match self.resources.data_count() {
-            None => bail!(offset, "data count section required"),
+            None => bail!(self.offset, "data count section required"),
             Some(count) if segment < count => {}
-            Some(_) => bail!(offset, "unknown data segment {}", segment),
+            Some(_) => bail!(self.offset, "unknown data segment {}", segment),
         }
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_data_drop(&mut self, offset: usize, segment: u32) -> Self::Output {
+    fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
         match self.resources.data_count() {
-            None => bail!(offset, "data count section required"),
+            None => bail!(self.offset, "data count section required"),
             Some(count) if segment < count => {}
-            Some(_) => bail!(offset, "unknown data segment {}", segment),
+            Some(_) => bail!(self.offset, "unknown data segment {}", segment),
         }
         Ok(())
     }
-    fn visit_memory_copy(&mut self, offset: usize, dst: u32, src: u32) -> Self::Output {
-        let dst_ty = self.check_memory_index(offset, dst)?;
-        let src_ty = self.check_memory_index(offset, src)?;
+    fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
+        let dst_ty = self.check_memory_index(dst)?;
+        let src_ty = self.check_memory_index(src)?;
         // The length operand here is the smaller of src/dst, which is
         // i32 if one is i32
-        self.pop_operand(
-            offset,
-            Some(match src_ty {
-                ValType::I32 => ValType::I32,
-                _ => dst_ty,
-            }),
-        )?;
+        self.pop_operand(Some(match src_ty {
+            ValType::I32 => ValType::I32,
+            _ => dst_ty,
+        }))?;
         // ... and the offset into each memory is required to be
         // whatever the indexing type is for that memory
-        self.pop_operand(offset, Some(src_ty))?;
-        self.pop_operand(offset, Some(dst_ty))?;
+        self.pop_operand(Some(src_ty))?;
+        self.pop_operand(Some(dst_ty))?;
         Ok(())
     }
-    fn visit_memory_fill(&mut self, offset: usize, mem: u32) -> Self::Output {
-        let ty = self.check_memory_index(offset, mem)?;
-        self.pop_operand(offset, Some(ty))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
+        let ty = self.check_memory_index(mem)?;
+        self.pop_operand(Some(ty))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         Ok(())
     }
-    fn visit_table_init(&mut self, offset: usize, segment: u32, table: u32) -> Self::Output {
+    fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
         if table > 0 {}
         let table = match self.resources.table_at(table) {
             Some(table) => table,
-            None => bail!(offset, "unknown table {}: table index out of bounds", table),
+            None => bail!(
+                self.offset,
+                "unknown table {}: table index out of bounds",
+                table
+            ),
         };
         let segment_ty = match self.resources.element_type_at(segment) {
             Some(ty) => ty,
             None => bail!(
-                offset,
+                self.offset,
                 "unknown elem segment {}: segment index out of bounds",
                 segment
             ),
         };
         if segment_ty != table.element_type {
-            bail!(offset, "type mismatch");
+            bail!(self.offset, "type mismatch");
         }
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
         Ok(())
     }
-    fn visit_elem_drop(&mut self, offset: usize, segment: u32) -> Self::Output {
+    fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
         if segment >= self.resources.element_count() {
             bail!(
-                offset,
+                self.offset,
                 "unknown elem segment {}: segment index out of bounds",
                 segment
             );
         }
         Ok(())
     }
-    fn visit_table_copy(&mut self, offset: usize, dst_table: u32, src_table: u32) -> Self::Output {
+    fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
         if src_table > 0 || dst_table > 0 {}
         let (src, dst) = match (
             self.resources.table_at(src_table),
             self.resources.table_at(dst_table),
         ) {
             (Some(a), Some(b)) => (a, b),
-            _ => bail!(offset, "table index out of bounds"),
+            _ => bail!(self.offset, "table index out of bounds"),
         };
         if src.element_type != dst.element_type {
-            bail!(offset, "type mismatch");
+            bail!(self.offset, "type mismatch");
         }
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
         Ok(())
     }
-    fn visit_table_get(&mut self, offset: usize, table: u32) -> Self::Output {
+    fn visit_table_get(&mut self, table: u32) -> Self::Output {
         let ty = match self.resources.table_at(table) {
             Some(ty) => ty.element_type,
-            None => bail!(offset, "table index out of bounds"),
+            None => bail!(self.offset, "table index out of bounds"),
         };
-        self.pop_operand(offset, Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
         self.push_operand(ty)?;
         Ok(())
     }
-    fn visit_table_set(&mut self, offset: usize, table: u32) -> Self::Output {
+    fn visit_table_set(&mut self, table: u32) -> Self::Output {
         let ty = match self.resources.table_at(table) {
             Some(ty) => ty.element_type,
-            None => bail!(offset, "table index out of bounds"),
+            None => bail!(self.offset, "table index out of bounds"),
         };
-        self.pop_operand(offset, Some(ty))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
+        self.pop_operand(Some(ValType::I32))?;
         Ok(())
     }
-    fn visit_table_grow(&mut self, offset: usize, table: u32) -> Self::Output {
+    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
         let ty = match self.resources.table_at(table) {
             Some(ty) => ty.element_type,
-            None => bail!(offset, "table index out of bounds"),
+            None => bail!(self.offset, "table index out of bounds"),
         };
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_table_size(&mut self, offset: usize, table: u32) -> Self::Output {
+    fn visit_table_size(&mut self, table: u32) -> Self::Output {
         if self.resources.table_at(table).is_none() {
-            bail!(offset, "table index out of bounds");
+            bail!(self.offset, "table index out of bounds");
         }
         self.push_operand(ValType::I32)?;
         Ok(())
     }
-    fn visit_table_fill(&mut self, offset: usize, table: u32) -> Self::Output {
+    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
         let ty = match self.resources.table_at(table) {
             Some(ty) => ty.element_type,
-            None => bail!(offset, "table index out of bounds"),
+            None => bail!(self.offset, "table index out of bounds"),
         };
-        self.pop_operand(offset, Some(ValType::I32))?;
-        self.pop_operand(offset, Some(ty))?;
-        self.pop_operand(offset, Some(ValType::I32))?;
+        self.pop_operand(Some(ValType::I32))?;
+        self.pop_operand(Some(ty))?;
+        self.pop_operand(Some(ValType::I32))?;
         Ok(())
     }
 }
diff --git a/crates/wasmprinter/src/lib.rs b/crates/wasmprinter/src/lib.rs
index 404d8b8f36..3a9d4b6c89 100644
--- a/crates/wasmprinter/src/lib.rs
+++ b/crates/wasmprinter/src/lib.rs
@@ -1141,7 +1141,7 @@ impl Printer {
             if reader.eof() {
                 break;
             }
-            match reader.visit_with_offset(&mut op_printer)?? {
+            match reader.visit_operator(&mut op_printer)?? {
                 operator::OpKind::End if reader.eof() => {}
                 _ if i == 0 => first_op = Some(mem::take(&mut op_printer.printer.result)),
@@ -1184,7 +1184,7 @@
             } else {
                 op_printer.printer.result.push(' ');
             }
-            match reader.visit_with_offset(&mut op_printer)?? {
+            match reader.visit_operator(&mut op_printer)?? {
                 operator::OpKind::End if reader.eof() => {}
                 _ => {
                     result.push_str(&op_printer.printer.result);
diff --git a/crates/wasmprinter/src/operator.rs b/crates/wasmprinter/src/operator.rs
index d7596b5b4c..c87ccf212e 100644
--- a/crates/wasmprinter/src/operator.rs
+++ b/crates/wasmprinter/src/operator.rs
@@ -183,7 +183,7 @@
     // * Print any payload, as necessary
    // * Return the `OpKind`, as defined by this macro
    ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident )*) => ($(
-        fn $visit(&mut self, _pos: usize $( , $($arg: $argty),* )?) -> Self::Output {
+        fn $visit(&mut self $( , $($arg: $argty),* )?) -> Self::Output {
             self.push_str(define_visit!(name $op));
             $(
                 define_visit!(payload self $op $($arg)*);
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 29c3b67099..58f6d426a3 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -30,35 +30,42 @@ doctest = false
 name = "text-parser"
 path = "fuzz_targets/text-parser.rs"
 test = false
+bench = false
 
 [[bin]]
 name = "validate"
 path = "fuzz_targets/validate.rs"
 test = false
+bench = false
 
 [[bin]]
 name = "print"
 path = "fuzz_targets/print.rs"
 test = false
+bench = false
 
 [[bin]]
 name = "roundtrip"
 path = "fuzz_targets/roundtrip.rs"
 test = false
+bench = false
 
 [[bin]]
 name = "incremental-parse"
 path = "fuzz_targets/incremental-parse.rs"
 test = false
+bench = false
 
 [[bin]]
 name = "validate-valid-module"
 path = "fuzz_targets/validate-valid-module.rs"
 test = false
 doc = false
+bench = false
 
 [[bin]]
 name = "mutate"
 path = "fuzz_targets/mutate.rs"
 test = false
 doc = false
+bench = false
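The validator hunks above are repetitive, but they all encode a single pattern: the `offset: usize` parameter disappears from every `visit_*` method and helper, and error paths such as `bail!(self.offset, ...)` read the current instruction offset from a field on the checker instead. A minimal standalone sketch of that pattern, using hypothetical names (`Checker`, `set_offset`, `check_table_index`) rather than the crate's actual private API:

```rust
// Illustrative only: the offset is stored once per instruction instead of
// being threaded through every call, mirroring the `bail!(self.offset, ...)`
// changes above. All names here are hypothetical.
struct Checker {
    // Updated by the driving loop before each instruction is checked.
    offset: usize,
    num_tables: u32,
}

impl Checker {
    fn set_offset(&mut self, offset: usize) {
        self.offset = offset;
    }

    // Post-change shape: no `offset` parameter, yet the error still
    // carries the instruction's offset, read from `self`.
    fn check_table_index(&self, table: u32) -> Result<(), String> {
        if table >= self.num_tables {
            return Err(format!(
                "table index out of bounds (at offset {})",
                self.offset
            ));
        }
        Ok(())
    }
}

fn main() {
    let mut checker = Checker { offset: 0, num_tables: 1 };
    checker.set_offset(42);
    assert!(checker.check_table_index(0).is_ok());
    assert!(checker.check_table_index(7).unwrap_err().contains("42"));
}
```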