wazevo: fuzz, fix load_splat (#1810)

Signed-off-by: Edoardo Vacchi <evacchi@users.noreply.github.com>
Co-authored-by: Takeshi Yoneda <t.y.mathetake@gmail.com>
Author: Edoardo Vacchi
Date: 2023-10-23 09:17:37 +02:00
Committed by: GitHub
Parent: 95a240370c
Commit: 1748dbcfe8

6 changed files with 159 additions and 17 deletions


@@ -692,23 +692,8 @@ func (m *machine) LowerInstr(instr *ssa.Instruction) {
 		m.insert(dup)
 	case ssa.OpcodeLoadSplat:
-		x, offset, lane := instr.LoadSplatData()
-		rd := operandNR(m.compiler.VRegOf(instr.Return()))
-		arr := ssaLaneToArrangement(lane)
-		rn := m.getOperand_NR(m.compiler.ValueDefinition(x), extModeNone)
-		tmpReg := m.compiler.AllocateVReg(ssa.TypeI64)
-		// vecLoad1R has offset address mode (base+imm) for post index, so the only addressing mode
-		// we can use here is "no-offset" register addressing mode. Thus, we need to add the const offset to the base address.
-		add := m.allocateInstr()
-		add.asALU(aluOpAdd, operandNR(tmpReg), rn, operandImm12(uint16(offset), 0), true)
-		m.insert(add)
-		ld1r := m.allocateInstr()
-		ld1r.asVecLoad1R(rd, operandNR(tmpReg), arr)
-		m.insert(ld1r)
+		ptr, offset, lane := instr.LoadSplatData()
+		m.lowerLoadSplat(ptr, offset, lane, instr.Return())
 	default:
 		panic("TODO: lowering " + op.String())
 	}
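
The removed path above feeds the constant offset straight into operandImm12(uint16(offset), 0). An AArch64 ADD immediate with shift 0 can only represent values 0..4095, so a larger offset, such as the 32768 used by the regression test below, is not representable that way. A minimal standalone sketch of the arithmetic (illustrative only, not part of the commit):

package main

import "fmt"

func main() {
	const offset = 32768 // the v128.load32_splat offset in the repro below

	// An imm12 field with shift 0 holds values 0..4095.
	fmt.Println(offset <= 0xfff) // false: 32768 is not encodable unshifted

	// An encoder that kept only the low 12 bits would add a
	// displacement of 0 instead of 32768.
	fmt.Println(offset & 0xfff) // 0
}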


@@ -231,6 +231,49 @@ func (m *machine) lowerLoad(ptr ssa.Value, offset uint32, typ ssa.Type, ret ssa.Value) {
 	m.insert(load)
 }
 
+func (m *machine) lowerLoadSplat(ptr ssa.Value, offset uint32, lane ssa.VecLane, ret ssa.Value) {
+	var opSize byte
+	switch lane {
+	case ssa.VecLaneI8x16:
+		opSize = 8
+	case ssa.VecLaneI16x8:
+		opSize = 16
+	case ssa.VecLaneI32x4:
+		opSize = 32
+	case ssa.VecLaneI64x2:
+		opSize = 64
+	}
+	amode := m.lowerToAddressMode(ptr, offset, opSize)
+	rd := operandNR(m.compiler.VRegOf(ret))
+	m.lowerLoadSplatFromAddressMode(rd, amode, lane)
+}
+
+// lowerLoadSplatFromAddressMode is extracted from lowerLoadSplat for testing.
+func (m *machine) lowerLoadSplatFromAddressMode(rd operand, amode addressMode, lane ssa.VecLane) {
+	tmpReg := operandNR(m.compiler.AllocateVReg(ssa.TypeI64))
+	// vecLoad1R has offset address mode (base+imm) only for post index, so the only addressing mode
+	// we can use here is "no-offset" register addressing mode, i.e. `addressModeKindRegReg`.
+	switch amode.kind {
+	case addressModeKindRegReg:
+		add := m.allocateInstr()
+		add.asALU(aluOpAdd, tmpReg, operandNR(amode.rn), operandNR(amode.rm), true)
+		m.insert(add)
+	case addressModeKindRegSignedImm9, addressModeKindRegUnsignedImm12:
+		add := m.allocateInstr()
+		add.asALU(aluOpAdd, tmpReg, operandNR(amode.rn), operandImm12(uint16(amode.imm), 0), true)
+		m.insert(add)
+	default:
+		panic("unsupported address mode for LoadSplat")
+	}
+	arr := ssaLaneToArrangement(lane)
+	ld1r := m.allocateInstr()
+	ld1r.asVecLoad1R(rd, tmpReg, arr)
+	m.insert(ld1r)
+}
+
 func (m *machine) lowerStore(si *ssa.Instruction) {
 	// TODO: merge consecutive stores into a single pair store instruction.
 	value, ptr, offset, storeSizeInBits := si.StoreData()
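
As the comment in lowerLoadSplatFromAddressMode notes, LD1R takes no displacement, so every address shape is first folded into a temporary register with an ADD before the load. A rough standalone sketch of the instruction pairs this produces (the kind enum and helper names below are illustrative, not wazero's; only the switch shape mirrors the diff above):

package main

import "fmt"

// kind mirrors the three addressModeKind cases accepted above; the
// enum values here are made up for the example.
type kind int

const (
	regReg kind = iota
	regUnsignedImm12
	regSignedImm9
)

// loadSplatSeq sketches the pair lowerLoadSplatFromAddressMode emits:
// LD1R has no base+imm form, so the address is always folded into a
// temporary register first.
func loadSplatSeq(k kind, rn, rm string, imm int64) []string {
	var add string
	switch k {
	case regReg:
		add = fmt.Sprintf("add tmp, %s, %s", rn, rm)
	case regUnsignedImm12, regSignedImm9:
		add = fmt.Sprintf("add tmp, %s, #%#x", rn, imm)
	default:
		panic("unsupported address mode for LoadSplat")
	}
	return []string{add, "ld1r {vd.4s}, [tmp]"}
}

func main() {
	fmt.Println(loadSplatSeq(regReg, "x0", "x1", 0))
	fmt.Println(loadSplatSeq(regUnsignedImm12, "x0", "", 42))
}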


@@ -3,6 +3,7 @@ package arm64
 import (
 	"fmt"
 	"math"
+	"strconv"
 	"strings"
 	"testing"
@@ -847,3 +848,72 @@ func Test_extLoadSizeSign(t *testing.T) {
 			require.Equal(t, tc.signed, signed)
 	}
 }
+
+func Test_lowerLoadSplatFromAddressMode(t *testing.T) {
+	positiveTests := make(map[addressModeKind]bool)
+	nextVReg := regalloc.VReg(100).SetRegType(regalloc.RegTypeInt)
+	for _, tc := range []struct {
+		amode       addressMode
+		expected    string
+		expectPanic bool
+	}{
+		{
+			amode: addressMode{kind: addressModeKindRegReg, rn: v0VReg, rm: v1VReg},
+			expected: `
+add x100?, d0, d1
+ld1r {x10.4s}, [x100?]
+`,
+		},
+		{
+			amode: addressMode{kind: addressModeKindRegUnsignedImm12, rn: v0VReg, imm: 42},
+			expected: `
+add x100?, d0, #0x2a
+ld1r {x10.4s}, [x100?]
+`,
+		},
+		{
+			amode: addressMode{kind: addressModeKindRegSignedImm9, rn: v0VReg, imm: 42},
+			expected: `
+add x100?, d0, #0x2a
+ld1r {x10.4s}, [x100?]
+`,
+		},
+	} {
+		tc := tc
+		t.Run("address mode "+strconv.Itoa(int(tc.amode.kind)), func(t *testing.T) {
+			ctx, _, m := newSetupWithMockContext()
+			ctx.vRegCounter = int(nextVReg.ID()) - 1
+			positiveTests[tc.amode.kind] = true
+			m.lowerLoadSplatFromAddressMode(operandNR(x10VReg), tc.amode, ssa.VecLaneI32x4)
+			require.Equal(t, tc.expected, "\n"+formatEmittedInstructionsInCurrentBlock(m)+"\n")
+		})
+	}
+
+	// Must panic for all other addressModeKinds.
+	for k := 0; k <= int(addressModeKindResultStackSpace); k++ {
+		amk := addressModeKind(k)
+		if positiveTests[amk] {
+			continue
+		}
+		ctx, _, m := newSetupWithMockContext()
+		ctx.vRegCounter = int(nextVReg.ID()) - 1
+		t.Run("address mode "+strconv.Itoa(k), func(t *testing.T) {
+			err := require.CapturePanic(func() {
+				m.lowerLoadSplatFromAddressMode(operandNR(x10VReg), addressMode{kind: amk}, ssa.VecLaneI32x4)
+			})
+			require.Contains(t, err.Error(), "unsupported address mode for LoadSplat")
+		})
+	}
+}


@@ -610,6 +610,21 @@ func Test1797d(t *testing.T) {
 	})
 }
 
+// Test1802 tests that load32_splat computes the load from the right offset
+// when a nonzero value is on the stack.
+func Test1802(t *testing.T) {
+	if !platform.CompilerSupported() {
+		return
+	}
+	run(t, func(t *testing.T, r wazero.Runtime) {
+		mod, err := r.Instantiate(ctx, getWasmBinary(t, "1802"))
+		require.NoError(t, err, "wasm binary should build successfully")
+		m := mod.(*wasm.ModuleInstance)
+		_, err = m.ExportedFunction("").Call(ctx)
+		require.Contains(t, err.Error(), "wasm error: unreachable")
+	})
+}
+
 // Test1812 tests that many constant block params work fine.
 func Test1812(t *testing.T) {
 	if !platform.CompilerSupported() {
Binary file not shown.


@@ -0,0 +1,29 @@
+(module
+  (func (;0;)
+    (local i32)
+    loop ;; label = @4
+      loop ;; label = @5
+        global.get 0
+        i32.eqz
+        if ;; label = @6
+          unreachable
+        end
+        global.get 0
+        i32.const 1
+        i32.sub
+        global.set 0
+        block ;; label = @6
+          i32.const 0
+          v128.load32_splat offset=32768
+          i32x4.extract_lane 2
+          i64.load offset=32884
+          br 2 (;@5;)
+        end
+      end
+    end
+  )
+  (memory (;0;) 1 6)
+  (global (;0;) (mut i32) i32.const 1000)
+  (export "" (func 0))
+  (data (;5;) (i32.const 0) "\8b\8b\8b\8b\8b\96")
+)
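
One reading of how this module separates a correct build from a miscompiled one: only the first six bytes of memory are initialized, so offset 32768 lies in the zero-filled rest of the single 64 KiB page. The arithmetic below is illustrative; the miscompiled branch assumes the out-of-range imm12 displacement was effectively dropped by the old lowering.

package main

import "fmt"

func main() {
	// Correct lowering: load32_splat reads zeros at offset 32768, so
	// i32x4.extract_lane 2 yields 0, the following i64.load stays in
	// bounds, and the loop runs until global 0 reaches zero and hits
	// `unreachable`, which is exactly what Test1802 asserts.
	var lane uint32 = 0
	fmt.Println(uint64(lane) + 32884) // 32884: within the 65536-byte page

	// Miscompiled lowering (offset dropped): the splat reads 0x8b8b8b8b
	// from offset 0, that lane becomes the i64.load base, and the
	// effective address leaves the page, trapping out of bounds instead
	// of reaching `unreachable`.
	lane = 0x8b8b8b8b
	fmt.Println(uint64(lane) + 32884) // 2341211135: far past 65536
}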