endef
# Register the RV32 riscv-tests suites via compile_template (defined above).
# The second argument is the per-suite compiler flag set. Newer riscv-gnu-
# toolchain builds reject a bare -m32, so pass an explicit -march/-mabi pair
# instead; rv32g keeps the full general-purpose ISA (incl. F for rv32uf) and
# ilp32 is the matching soft-float-free 32-bit ABI.
# NOTE: stale diff markers (-/+ line prefixes) were removed from this block —
# they are not valid make syntax.
$(eval $(call compile_template,rv32ui,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32uc,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32um,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32ua,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32uf,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32si,-march=rv32g -mabi=ilp32))
$(eval $(call compile_template,rv32mi,-march=rv32g -mabi=ilp32))
# RV64 suites are only meaningful when targeting a 64-bit core.
# Pass an explicit -march/-mabi pair (the original calls relied on the
# toolchain default, which is not stable across riscv-gnu-toolchain builds);
# rv64g/lp64 matches the rv32g/ilp32 convention used for the RV32 suites.
# NOTE: stale diff markers (-/+ line prefixes) were removed from this block —
# they are not valid make syntax.
ifeq ($(XLEN),64)
$(eval $(call compile_template,rv64ui,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64uc,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64um,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64ua,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64uf,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64ud,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64si,-march=rv64g -mabi=lp64))
$(eval $(call compile_template,rv64mi,-march=rv64g -mabi=lp64))
endif
tests_dump = $(addsuffix .dump, $(tests))