A .gitmodules => .gitmodules +3 -0
@@ 0,0 1,3 @@
+[submodule "tests/official/riscv-tests"]
+ path = tests/official/riscv-tests
+ url = https://github.com/riscv-software-src/riscv-tests.git
M Makefile => Makefile +3 -1
@@ 87,7 87,7 @@ OBJCOPY=riscv32-none-elf-objcopy
$(OBJCOPY) $< -O binary $@
./programs/bin/%.dat: ./programs/bin/%.bin
- od $< -t x4 -A n > $@
+ od $< -t x4 -A n -v > $@
objdump/%: ./programs/bin/start-%.o
$(OBJDUMP) -d -M no-aliases $<
@@ 98,3 98,5 @@ clean:
rm -rf ./programs/bin
rm -rf ./obj_dir
rm -rf waveform.vcd
+ rm -rf tests/out
+ make -C tests/official clean
M src/file_program_memory.sv => src/file_program_memory.sv +1 -1
@@ 4,7 4,7 @@ module file_program_memory
output [31:0] instruction
);
parameter string FILE_NAME;
- parameter WIDTH = 12;
+ parameter WIDTH = 15;
parameter MEM_SIZE = 1 << (WIDTH - 2) - 1;
reg [31:0] imem[0:MEM_SIZE];
M testbench/tb_cpu_program.sv => testbench/tb_cpu_program.sv +2 -2
@@ 56,12 56,12 @@ module tb_cpu_program();
file_program_memory #(
.FILE_NAME(CPU_PROGRAM_PATH)
) prog_mem_inst(
- .addr(pc[11:0]),
+ .addr(pc[14:0]),
.instruction(instruction)
);
always_ff @ (posedge ebreak) begin
- $display("ebreak!");
+ $display("ebreak at %d", pc);
#15 $finish;
end
A tests/custom/custom_tests.py => tests/custom/custom_tests.py +57 -0
@@ 0,0 1,57 @@
+import sys
+import subprocess
+
+sys.path.append('../')
+
+from test_types import *
+from pathlib import Path
+
+def find_tests(groups_dir: Path, programs_dir: Path, out_dir: Path, group_name: str|None, test_name: str|None) -> list[TestGroup]:
+    group_names: list[Path] = []
+    if group_name is None:
+        group_names = [f for f in groups_dir.iterdir() if f.is_dir()]
+    else:
+        group_names = [groups_dir / group_name]
+
+    groups: list[TestGroup] = []
+    for group_dir in group_names:
+        tests: list[Test] = []
+        group = TestGroup(
+            tests = tests,
+            directory = group_dir,
+            name = group_dir.name,
+            c_test_file = programs_dir / f"{group_dir.name}.c",
+            dat_test_file = programs_dir / "bin" / f"{group_dir.name}.dat",
+        )
+
+        test_names = []
+        if test_name is None:
+            test_names = [f.name[:-len("-input.dat")] for f in group_dir.iterdir() if f.is_file() and f.name.endswith("-input.dat")]
+        else:
+            test_names = [test_name]
+
+        # Distinct loop variable: rebinding the test_name parameter here would
+        # corrupt the `test_name is None` check on later group iterations.
+        for name in test_names:
+            test = Test(
+                group,
+                name,
+                group_dir / f"{name}-input.dat",
+                out_dir / f"{name}-output.dat",
+                group_dir / f"{name}-expected.dat",
+            )
+
+            if not test.input_file.exists() or not test.expected_file.exists():
+                continue
+
+            tests.append(test)
+
+        groups.append(group)
+
+    return groups
+
+def compile_program(make_dir: Path, group: TestGroup) -> bool:
+    # True when make succeeds; compare .returncode, not the CompletedProcess.
+    return subprocess.run(
+        ["make", "-C", make_dir, group.dat_test_file.relative_to(make_dir)],
+        stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL,
+    ).returncode == 0
A tests/official/Makefile => tests/official/Makefile +31 -0
@@ 0,0 1,31 @@
+XLEN = 32
+src_dir = .
+isa_dir = ./riscv-tests/isa
+
+CFLAGS=-march=rv32i -mabi=ilp32 -c
+
+CC=riscv32-none-elf-gcc
+LD=riscv32-none-elf-ld
+OBJCOPY=riscv32-none-elf-objcopy
+
+include ${isa_dir}/rv32ui/Makefrag
+
+./out/rv32ui_%.o: ${isa_dir}/rv32ui/%.S | ./out
+	$(CC) $(CFLAGS) -I${isa_dir}/macros/scalar -I${src_dir}/env/p $< -o $@.out
+	$(LD) -T${src_dir}/env/p/link.ld $@.out -o $@
+
+./out/rv32ui_%.bin: ./out/rv32ui_%.o | ./out
+	$(OBJCOPY) $< -O binary $@
+
+./out/rv32ui_%.dat: ./out/rv32ui_%.bin | ./out
+	od $< -t x4 -A n -v > $@
+
+./out:
+	mkdir -p $@
+
+.PHONY: clean list
+list:
+ @echo ${rv32ui_sc_tests}
+
+clean:
+ rm -rf ./out
A tests/official/env/p/link.ld => tests/official/env/p/link.ld +4 -0
@@ 0,0 1,4 @@
+SECTIONS
+{
+ .text.init = 0x0;
+}
A tests/official/env/p/riscv_test.h => tests/official/env/p/riscv_test.h +162 -0
@@ 0,0 1,162 @@
+/*Copyright (c) 2012-2015, The Regents of the University of California (Regents).*/
+/* All Rights Reserved. */
+
+/* Redistribution and use in source and binary forms, with or without */
+/* modification, are permitted provided that the following conditions are met: */
+/* 1. Redistributions of source code must retain the above copyright */
+/* notice, this list of conditions and the following disclaimer. */
+/* 2. Redistributions in binary form must reproduce the above copyright */
+/* notice, this list of conditions and the following disclaimer in the */
+/* documentation and/or other materials provided with the distribution. */
+/* 3. Neither the name of the Regents nor the */
+/* names of its contributors may be used to endorse or promote products */
+/* derived from this software without specific prior written permission. */
+
+/* IN NO EVENT SHALL REGENTS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, */
+/* SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, ARISING */
+/* OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF REGENTS HAS */
+/* BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
+
+/* REGENTS SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, */
+/* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR */
+/* PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, PROVIDED */
+/* HEREUNDER IS PROVIDED "AS IS". REGENTS HAS NO OBLIGATION TO PROVIDE */
+/* MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. */
+
+#ifndef _ENV_PHYSICAL_SINGLE_CORE_H
+#define _ENV_PHYSICAL_SINGLE_CORE_H
+
+#include "../../riscv-tests/env/encoding.h"
+
+//-----------------------------------------------------------------------
+// Begin Macro
+//-----------------------------------------------------------------------
+
+#define RVTEST_RV64U \
+ .macro init; \
+ .endm
+
+#define RVTEST_RV64UF \
+ .macro init; \
+ RVTEST_FP_ENABLE; \
+ .endm
+
+#define RVTEST_RV64UV \
+ .macro init; \
+ RVTEST_VECTOR_ENABLE; \
+ .endm
+
+#define RVTEST_RV32U \
+ .macro init; \
+ .endm
+
+#define RVTEST_RV32UF \
+ .macro init; \
+ RVTEST_FP_ENABLE; \
+ .endm
+
+#define RVTEST_RV32UV \
+ .macro init; \
+ RVTEST_VECTOR_ENABLE; \
+ .endm
+
+#define RVTEST_RV64M \
+ .macro init; \
+ RVTEST_ENABLE_MACHINE; \
+ .endm
+
+#define RVTEST_RV64S \
+ .macro init; \
+ RVTEST_ENABLE_SUPERVISOR; \
+ .endm
+
+#define RVTEST_RV32M \
+ .macro init; \
+ RVTEST_ENABLE_MACHINE; \
+ .endm
+
+#define RVTEST_RV32S \
+ .macro init; \
+ RVTEST_ENABLE_SUPERVISOR; \
+ .endm
+
+#if __riscv_xlen == 64
+# define CHECK_XLEN li a0, 1; slli a0, a0, 31; bgez a0, 1f; RVTEST_PASS; 1:
+#else
+# define CHECK_XLEN li a0, 1; slli a0, a0, 31; bltz a0, 1f; RVTEST_PASS; 1:
+#endif
+
+#define INIT_XREG \
+ li x1, 0; \
+ li x2, 0; \
+ li x3, 0; \
+ li x4, 0; \
+ li x5, 0; \
+ li x6, 0; \
+ li x7, 0; \
+ li x8, 0; \
+ li x9, 0; \
+ li x10, 0; \
+ li x11, 0; \
+ li x12, 0; \
+ li x13, 0; \
+ li x14, 0; \
+ li x15, 0; \
+ li x16, 0; \
+ li x17, 0; \
+ li x18, 0; \
+ li x19, 0; \
+ li x20, 0; \
+ li x21, 0; \
+ li x22, 0; \
+ li x23, 0; \
+ li x24, 0; \
+ li x25, 0; \
+ li x26, 0; \
+ li x27, 0; \
+ li x28, 0; \
+ li x29, 0; \
+ li x30, 0; \
+ li x31, 0;
+
+#define RVTEST_CODE_BEGIN \
+ .global _start; \
+ .section .text.init; \
+ .align 6; \
+_start: \
+ INIT_XREG; \
+ CHECK_XLEN; \
+ .align 2; \
+
+//-----------------------------------------------------------------------
+// End Macro
+//-----------------------------------------------------------------------
+
+#define RVTEST_CODE_END
+
+//-----------------------------------------------------------------------
+// Pass/Fail Macro
+//-----------------------------------------------------------------------
+
+#define RVTEST_PASS \
+ addi x0, zero, 0; \
+ addi x1, zero, 0xFF; \
+ sw x1, 0(x0); \
+ ebreak;
+
+#define TESTNUM gp
+#define RVTEST_FAIL \
+ addi x0, zero, 0; \
+ addi x1, zero, 0; \
+ sw x1, 0(x0); \
+ ebreak;
+
+//-----------------------------------------------------------------------
+// Data Section Macro
+//-----------------------------------------------------------------------
+
+#define EXTRA_DATA
+#define RVTEST_DATA_BEGIN
+#define RVTEST_DATA_END
+
+#endif
A tests/official/official_tests.py => tests/official/official_tests.py +47 -0
@@ 0,0 1,47 @@
+import sys
+import subprocess
+
+sys.path.append('../')
+
+from test_types import *
+from pathlib import Path
+
+def find_tests(out_dir: Path) -> list[TestGroup]:
+    here = Path(__file__).parent
+    result = subprocess.run(
+        [ "make", "-C", here, "-s", "list" ],
+        check = True,
+        capture_output = True,
+    )
+
+    test_names = result.stdout.decode("utf-8").strip().split(' ')
+
+    groups = []
+    for test_name in test_names:
+        tests = []
+        group = TestGroup(
+            tests = tests,
+            directory = here,
+            name = "rv32ui",
+            c_test_file = out_dir / f"rv32ui_{test_name}.c",
+            dat_test_file = out_dir / f"rv32ui_{test_name}.dat",
+        )
+        tests.append(Test(
+            group = group,
+            name = test_name,
+            input_file = here / "input.dat",
+            output_file = out_dir / f"{group.name}_{test_name}-output.dat", # unique per test (every group is named "rv32ui")
+            expected_file = here / "expected.dat",
+        ))
+
+        groups.append(group)
+
+    return groups
+
+def compile_program(make_dir: Path, test: Test) -> bool:
+    # No check=True: a build failure is reported through the boolean result
+    # (compare .returncode -- the CompletedProcess object never equals 0).
+    return subprocess.run(
+        ["make", "-C", Path(__file__).parent, f"./out/{test.group.name}_{test.name}.dat"],
+        stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL,
+    ).returncode == 0
A tests/official/riscv-tests => tests/official/riscv-tests +1 -0
@@ 0,0 1,1 @@
+Subproject commit bd0a19c136927eaa3b7296a591a896c141affb6b
M tests/run.py => tests/run.py +54 -117
@@ 1,98 1,19 @@
#!/usr/bin/env python3
+import sys
import argparse
import sys
import subprocess
import re
from pathlib import Path
-from dataclasses import dataclass
-
-class bcolors:
- HEADER = '\033[95m'
- OKBLUE = '\033[94m'
- OKCYAN = '\033[96m'
- OKGREEN = '\033[92m'
- WARNING = '\033[93m'
- FAIL = '\033[91m'
- ENDC = '\033[0m'
- BOLD = '\033[1m'
- UNDERLINE = '\033[4m'
-
-class Test:
- pass
-
-@dataclass
-class TestGroup:
- tests: list[Test]
- directory: Path
- name: str
- c_test_file: Path # The C file to compile and use for this test
- dat_test_file: Path # The C file to compile and use for this test
-
- def __str__(self):
- return self.name
-
-@dataclass
-class Test:
- group: TestGroup
- name: str
-
- input_file: Path
- output_file: Path
- expected_file: Path
-
- def __str__(self):
- return f"{self.group.name}.{self.name}"
-
-@dataclass
-class Validation:
- test: Test
- expected: list[str]
- actual: list[str]
- matches: bool
-
-def find_tests(groups_dir: Path, programs_dir: Path, out_dir: Path, group_name: str|None, test_name: str|None) -> list[TestGroup]:
- group_names: list[Path] = []
- if group_name is None:
- group_names = [f for f in groups_dir.iterdir() if f.is_dir()]
- else:
- group_names = [groups_dir / group_name]
-
- groups: list[TestGroup] = []
- for group_dir in group_names:
- tests: list[Test] = []
- group = TestGroup(
- tests = tests,
- directory = group_dir,
- name = group_dir.name,
- c_test_file = programs_dir / f"{group_dir.name}.c",
- dat_test_file = programs_dir / "bin" / f"{group_dir.name}.dat",
- )
-
- test_names = []
- if test_name is None:
- test_names = [f.name[:-len("-input.dat")] for f in group_dir.iterdir() if f.is_file() and f.name.endswith("-input.dat")]
- else:
- test_names = [test_name]
-
- for test_name in test_names:
- test = Test(
- group,
- test_name,
- group_dir / f"{test_name}-input.dat",
- out_dir / f"{test_name}-output.dat",
- group_dir / f"{test_name}-expected.dat",
- )
-
- if not test.input_file.exists() or not test.expected_file.exists():
- continue
-
- tests.append(test)
-
- groups.append(group)
-
-
- return groups
+
+from test_types import bcolors, TestGroup, Test, Validation
+
+sys.path.append('./custom')
+sys.path.append('./official')
+
+import custom_tests
+import official_tests
def validate_test(test: Test) -> Validation:
expected = test.expected_file.read_text()
@@ 151,13 72,6 @@ def run_test(out_dir: Path, test: Test) -> bool:
check = True,
).returncode == 0
-def compile_program(make_dir: Path, group: TestGroup) -> bool:
- return subprocess.run(
- ["make", "-C", make_dir, group.dat_test_file.relative_to(make_dir)],
- stdout = subprocess.DEVNULL,
- stderr = subprocess.DEVNULL,
- ) == 0
-
# Program
parser = argparse.ArgumentParser("Test simple RISC-V processor written in Verilog.")
parser.add_argument(
@@ 191,29 105,52 @@ groups_dir = here / "custom"
# TODO support multiple tests
group_name, test_name = args.filter[0].split('.') if args.filter is not None else (None, None)
-test_groups: list[TestGroup] = find_tests(groups_dir, programs_dir, out_dir, group_name, test_name)
+if args.type == "custom":
+ test_groups: list[TestGroup] = custom_tests.find_tests(
+ groups_dir, programs_dir, out_dir, group_name, test_name
+ )
+ if args.command == "list":
+ print("Found these tests:")
+ for group in test_groups:
+ for test in group.tests:
+ print(f" {test}")
+ sys.exit(0)
-# Official
-# TODO
+ for group in test_groups:
+ custom_tests.compile_program(project_dir, group)
+ for test in group.tests:
+ compile_test(project_dir, here / "comp_list.lst", out_dir, test)
+ run_test(out_dir, test)
+
+ validation = validate_test(test)
+
+ if validation.matches:
+ print(f"{test.group.name}.{test.name} {bcolors.OKGREEN}passed{bcolors.ENDC}")
+ else:
+ print(f"{test.group.name}.{test.name} {bcolors.FAIL}failed{bcolors.ENDC}")
+ print(f" Got {validation.actual}. Expected {validation.expected}")
+else: # official
+ test_groups: list[TestGroup] = official_tests.find_tests(
+ here / "official" / "out"
+ )
+
+ if args.command == "list":
+ print("Found these tests:")
+ for group in test_groups:
+ for test in group.tests:
+ print(f" {test}")
+ sys.exit(0)
-# Custom
-if args.command == "list":
- print("Found these tests:")
for group in test_groups:
for test in group.tests:
- print(f" {test}")
- sys.exit(0)
-
-for group in test_groups:
- compile_program(project_dir, group)
- for test in group.tests:
- compile_test(project_dir, here / "comp_list.lst", out_dir, test)
- run_test(out_dir, test)
-
- validation = validate_test(test)
-
- if validation.matches:
- print(f"{test.group.name}.{test.name} {bcolors.OKGREEN}passed{bcolors.ENDC}")
- else:
- print(f"{test.group.name}.{test.name} {bcolors.FAIL}failed{bcolors.ENDC}")
- print(f" Got {validation.actual}. Expected {validation.expected}")
+ official_tests.compile_program(project_dir, test)
+ compile_test(project_dir, here / "comp_list.lst", out_dir, test)
+ run_test(out_dir, test)
+
+ validation = validate_test(test)
+
+ if validation.matches:
+ print(f"{test.group.name}.{test.name} {bcolors.OKGREEN}passed{bcolors.ENDC}")
+ else:
+ print(f"{test.group.name}.{test.name} {bcolors.FAIL}failed{bcolors.ENDC}")
+ print(f" Got {validation.actual}. Expected {validation.expected}")
A tests/test_types.py => tests/test_types.py +46 -0
@@ 0,0 1,46 @@
+from dataclasses import dataclass
+from pathlib import Path
+
+class bcolors:
+ HEADER = '\033[95m'
+ OKBLUE = '\033[94m'
+ OKCYAN = '\033[96m'
+ OKGREEN = '\033[92m'
+ WARNING = '\033[93m'
+ FAIL = '\033[91m'
+ ENDC = '\033[0m'
+ BOLD = '\033[1m'
+ UNDERLINE = '\033[4m'
+
+class Test:
+ pass
+
+@dataclass
+class TestGroup:
+    tests: list[Test]
+    directory: Path
+    name: str
+    c_test_file: Path # The C file to compile and use for this test
+    dat_test_file: Path # The .dat memory image built for this test
+
+    def __str__(self):
+        return self.name
+
+@dataclass
+class Test:
+ group: TestGroup
+ name: str
+
+ input_file: Path
+ output_file: Path
+ expected_file: Path
+
+ def __str__(self):
+ return f"{self.group.name}.{self.name}"
+
+@dataclass
+class Validation:
+ test: Test
+ expected: list[str]
+ actual: list[str]
+ matches: bool