/*
**test_vld2_bf16:
** ...
-** vld2.16 {d0-d1}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+-d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x2_t
test_vld2_bf16 (bfloat16_t * ptr)
/*
**test_vld2q_bf16:
** ...
-** vld2.16 {d0-d3}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+-d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x2_t
test_vld2q_bf16 (bfloat16_t * ptr)
/*
**test_vld2_dup_bf16:
** ...
-** vld2.16 {d0\[\], d1\[\]}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+\[\], d[0-9]+\[\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x2_t
test_vld2_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld2q_dup_bf16:
** ...
-** vld2.16 {d0, d1, d2, d3}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+, d[0-9]+, d[0-9]+, d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x2_t
test_vld2q_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld3_bf16:
** ...
-** vld3.16 {d0-d2}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+-d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x3_t
test_vld3_bf16 (bfloat16_t * ptr)
/*
**test_vld3q_bf16:
** ...
-** vld3.16 {d1, d3, d5}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+, d[0-9]+, d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x3_t
test_vld3q_bf16 (bfloat16_t * ptr)
/*
**test_vld3_dup_bf16:
** ...
-** vld3.16 {d0\[\], d1\[\], d2\[\]}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x3_t
test_vld3_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld3q_dup_bf16:
** ...
-** vld3.16 {d0\[\], d1\[\], d2\[\]}, \[r0\]
-** bx lr
+** vld[0-9]+.16 {d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x3_t
test_vld3q_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld4_bf16:
** ...
-** vld4.16 {d0-d3}, \[r0\]
-** bx lr
+** vld4.16 {d[0-9]+-d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x4_t
test_vld4_bf16 (bfloat16_t * ptr)
/*
**test_vld4q_bf16:
** ...
-** vld4.16 {d1, d3, d5, d7}, \[r0\]
-** bx lr
+** vld4.16 {d[0-9]+, d[0-9]+, d[0-9]+, d[0-9]+}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x4_t
test_vld4q_bf16 (bfloat16_t * ptr)
/*
**test_vld4_dup_bf16:
** ...
-** vld4.16 {d0\[\], d1\[\], d2\[\], d3\[\]}, \[r0\]
-** bx lr
+** vld4.16 {d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x4_t
test_vld4_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld4q_dup_bf16:
** ...
-** vld4.16 {d0\[\], d1\[\], d2\[\], d3\[\]}, \[r0\]
-** bx lr
+** vld4.16 {d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\], d[0-9]+\[\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x4_t
test_vld4q_dup_bf16 (bfloat16_t * ptr)
/*
**test_vld2_lane_bf16:
-** vld2.16 {d0\[2\], d1\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld2.16 {d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x2_t
test_vld2_lane_bf16 (const bfloat16_t *a, bfloat16x4x2_t b)
/*
**test_vld2q_lane_bf16:
-** vld2.16 {d0\[2\], d2\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld2.16 {d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x2_t
test_vld2q_lane_bf16 (const bfloat16_t *a, bfloat16x8x2_t b)
/*
**test_vld3_lane_bf16:
-** vld3.16 {d0\[2\], d1\[2\], d2\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld3.16 {d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x3_t
test_vld3_lane_bf16 (const bfloat16_t *a, bfloat16x4x3_t b)
/*
**test_vld3q_lane_bf16:
-** vld3.16 {d0\[2\], d2\[2\], d4\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld3.16 {d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x3_t
test_vld3q_lane_bf16 (const bfloat16_t *a, bfloat16x8x3_t b)
/*
**test_vld4_lane_bf16:
-** vld4.16 {d0\[2\], d1\[2\], d2\[2\], d3\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld4.16 {d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x4x4_t
test_vld4_lane_bf16 (const bfloat16_t *a, bfloat16x4x4_t b)
/*
**test_vld4q_lane_bf16:
-** vld4.16 {d0\[2\], d2\[2\], d4\[2\], d6\[2\]}, \[r0\]
-** bx lr
+** ...
+** vld4.16 {d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\], d[0-9]+\[2\]}, \[r[0-9]+\]
+** ...
*/
bfloat16x8x4_t
test_vld4q_lane_bf16 (const bfloat16_t *a, bfloat16x8x4_t b)