Merge remote-tracking branch 'upstream/main' into fetch_buffer

This commit is contained in:
Jordan Carlin 2024-10-31 12:21:03 -07:00
commit 7c9edffee7
No known key found for this signature in database
132 changed files with 2228 additions and 1183 deletions

65
.github/cli-space-cleanup.sh vendored Executable file

@ -0,0 +1,65 @@
#!/bin/bash
###########################################
## GitHub runner space cleanup
##
## Written: Jordan Carlin, jcarlin@hmc.edu
## Created: 30 June 2024
## Modified:
##
## Purpose: Remove unnecessary packages/directories from GitHub Actions runner
## A component of the CORE-V-WALLY configurable RISC-V project.
## https://github.com/openhwgroup/cvw
##
## Copyright (C) 2021-23 Harvey Mudd College & Oklahoma State University
##
## SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
##
## Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
## except in compliance with the License, or, at your option, the Apache License version 2.0. You
## may obtain a copy of the License at
##
## https://solderpad.org/licenses/SHL-2.1/
##
## Unless required by applicable law or agreed to in writing, any work distributed under the
## License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
## either express or implied. See the License for the specific language governing permissions
## and limitations under the License.
################################################################################################
# Remove unnecessary packages
removePacks=( '^llvm-.*' 'php.*' '^mongodb-.*' '^mysql-.*' '^dotnet-sdk-.*' 'azure-cli' 'google-cloud-cli' 'google-chrome-stable' 'firefox' '^powershell*' 'microsoft-edge-stable' 'mono-devel' 'hhvm' )
for pack in "${removePacks[@]}"; do
sudo apt-get purge -y "$pack" || true
done
sudo apt-get autoremove -y || true
sudo apt-get clean || true
# Remove unnecessary directories
sudo rm -rf /usr/local/lib/android
sudo rm -rf /usr/share/dotnet
sudo rm -rf /usr/share/swift
sudo rm -rf /usr/share/miniconda
sudo rm -rf /usr/share/az*
sudo rm -rf /usr/share/gradle-*
sudo rm -rf /usr/share/sbt
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /usr/local/share/powershell
sudo rm -rf /usr/local/lib/node_modules
sudo rm -rf /usr/local/julia*
sudo rm -rf /usr/local/share/chromium
sudo rm -rf /usr/local/share/vcpkg
sudo rm -rf /usr/local/games
sudo rm -rf /usr/local/sqlpackage
sudo rm -rf /usr/lib/google-cloud-sdk
sudo rm -rf /usr/lib/jvm
sudo rm -rf /usr/lib/mono
sudo rm -rf /usr/lib/R
sudo rm -rf /usr/lib/postgresql
sudo rm -rf /usr/lib/heroku
sudo rm -rf /usr/lib/firefox
sudo rm -rf /opt/hostedtoolcache
# Clean up docker images
sudo docker image prune --all --force
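# This script is run by the Installation CI workflow either directly on an Ubuntu runner or,
# when running inside a container, by re-executing it in the host's namespaces, e.g.
#   nsenter -t 1 -m -u -n -i bash -c "$(cat .github/cli-space-cleanup.sh)"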

160
.github/workflows/install.yml vendored Normal file

@ -0,0 +1,160 @@
##################################
# install.yml
# jcarlin@hmc.edu October 2024
# SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
#
# GitHub Action to test the wally-tool-chain-install.sh script for all supported linux distributions
# and configurations. Runs weekly and on pull requests that modify the installation scripts.
##################################
name: Installation
# Run on PR that modifies the installation scripts, weekly, or manually
on:
workflow_dispatch:
pull_request:
branches:
- main
paths:
- 'bin/wally-tool-chain-install.sh'
- 'bin/wally-distro-check.sh'
- 'bin/wally-package-install.sh'
schedule:
- cron: "0 7 * * 3" # Run at 12:00 AM Pacific Time on Wednesdays
# Use bash shell with extra GitHub Actions options for all jobs
defaults:
run:
shell: bash
jobs:
installation_test:
name: Test installation for ${{ matrix.name }}
strategy:
fail-fast: false
matrix:
include:
# Ubuntu Installations
- name: ubuntu-20.04
os: ubuntu-20.04
container: null
- name: ubuntu-22.04
os: ubuntu-22.04
container: null
- name: ubuntu-24.04
os: ubuntu-24.04
container: null
# Red Hat Installations
- name: rocky-8
os: ubuntu-latest
image: rockylinux:8
- name: rocky-9
os: ubuntu-latest
image: rockylinux:9
- name: almalinux-8
os: ubuntu-latest
image: almalinux:8
- name: almalinux-9
os: ubuntu-latest
image: almalinux:9
# User level installation
- name: user-install
os: ubuntu-latest
image: null
user: true
# Custom location installation
- name: custom-install
os: ubuntu-latest
image: null
riscv_path: /home/riscv
# Custom location user level installation
- name: custom-user-install
os: ubuntu-latest
image: null
user: true
riscv_path: $HOME/riscv-toolchain
# run on selected version of ubuntu or on ubuntu-latest with docker image
runs-on: ${{ matrix.os }}
container:
image: ${{ matrix.image }}
options: --privileged --mount type=bind,source=/,target=/host --pid=host --entrypoint /bin/bash # Allow for connection with host
steps:
# Docker images need git installed or the checkout action fails
- name: Install Dependencies for Red Hat
if: ${{ matrix.image != null }}
run: |
dnf install -y sudo git
dnf install curl -y --allowerasing || true
# Only clone submodules needed for standard tests/regression to save space
- uses: actions/checkout@v4
- name: Clone Necessary Submodules
run: |
git config --global --add safe.directory '*'
git submodule update --init addins/riscv-arch-test addins/verilog-ethernet
# Free up space on the host machine, either from the container or the host
- name: Free Up Storage
run: |
df -h
if [ -z ${{ matrix.image }} ]; then
./.github/cli-space-cleanup.sh
else
nsenter -t 1 -m -u -n -i bash -c "$(cat .github/cli-space-cleanup.sh)"
fi
df -h
# Run main tool chain installation script, either as a user or system wide
- name: Install
run: |
if [ -z ${{ matrix.user }} ]; then
sudo ./bin/wally-tool-chain-install.sh --clean ${{ matrix.riscv_path }}
else
sudo ./bin/wally-package-install.sh
./bin/wally-tool-chain-install.sh --clean ${{ matrix.riscv_path }}
fi
# Set environment variables for the rest of the job
- name: Set Environment Variables
if: always()
run: |
if [ ! -z ${{ matrix.riscv_path }} ]; then
sed -i 's,exit 1,export RISCV=${{ matrix.riscv_path }},g' setup.sh
fi
source setup.sh
echo "RISCV=$RISCV" >> "$GITHUB_ENV"
# Upload installation logs for debugging
- name: Upload Installation Logs
uses: actions/upload-artifact@v4
if: always()
with:
name: installation-logs-${{ matrix.name }}
path: ${{ env.RISCV }}/logs/
# Make only the riscof and zsbl targets, as those are the only test suites used by standard regression
- name: make tests
run: |
source setup.sh
make riscof zsbl --jobs $(nproc --ignore 1)
# Only the linux-testvectors are needed, so remove the rest of the buildroot to save space
- name: Remove Buildroot to Save Space
run: |
sudo rm -rf $RISCV/buildroot/output/build
df -h
# Run standard regression, skipping distros that are known to be broken with Verilator
- name: Regression
if: ${{ matrix.name != 'ubuntu-20.04' && matrix.name != 'rocky-8' && matrix.name != 'almalinux-8'}}
run: |
source setup.sh
regression-wally
- name: Lint + wsim Test Only (for distros with broken Verilator sim)
if: ${{ matrix.name == 'ubuntu-20.04' || matrix.name == 'rocky-8' || matrix.name == 'almalinux-8'}}
run: |
source setup.sh
mkdir -p $WALLY/sim/verilator/logs/
lint-wally
wsim rv32i arch32i --sim verilator | tee $WALLY/sim/verilator/logs/rv32i_arch32i.log
# Upload regression logs for debugging
- name: Upload regression logs
uses: actions/upload-artifact@v4
if: always()
with:
name: regression-logs-${{ matrix.name }}
path: ${{ github.workspace }}/sim/verilator/logs/
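# In addition to the weekly schedule and pull-request triggers above, the workflow_dispatch event
# lets this workflow be started manually from the GitHub Actions tab (or, assuming the GitHub CLI
# is installed and authenticated, with: gh workflow run install.yml).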

1
.gitignore vendored

@ -17,6 +17,7 @@
.vscode/
__pycache__/
**/work*
!.github/*
/**/obj_dir*
/**/gmon*

7
.gitmodules vendored

@ -20,14 +20,11 @@
branch = dev
[submodule "addins/branch-predictor-simulator"]
path = addins/branch-predictor-simulator
url = https://github.com/ross144/branch-predictor-simulator
[submodule "addins/ahbsdc"]
path = addins/ahbsdc
url = https://github.com/JacobPease/ahbsdc.git
url = https://github.com/rosethompson/branch-predictor-simulator
[submodule "addins/verilog-ethernet"]
sparseCheckout = true
path = addins/verilog-ethernet
url = https://github.com/ross144/verilog-ethernet.git
url = https://github.com/rosethompson/verilog-ethernet.git
[submodule "cvw-arch-verif"]
path = addins/cvw-arch-verif
url = https://github.com/openhwgroup/cvw-arch-verif

200
README.md

@ -1,3 +1,5 @@
![Installation CI](https://github.com/openhwgroup/cvw/actions/workflows/install.yml/badge.svg?branch=main)
# core-v-wally
Wally is a 5-stage pipelined processor configurable to support all the standard RISC-V options, including RV32/64, A, B, C, D, F, M, Q, and Zk* extensions, virtual memory, PMP, and the various privileged modes and CSRs. It provides optional caches, branch prediction, and standard RISC-V peripherals (CLINT, PLIC, UART, GPIO). Wally is written in SystemVerilog. It passes the [RISC-V Arch Tests](https://github.com/riscv-non-isa/riscv-arch-test) and boots Linux on an FPGA. Configurations range from a minimal RV32E core to a fully featured RV64GC application processor with all of the RVA22S64 profile extensions. Wally is part of the OpenHWGroup family of robust open RISC-V cores.
@ -14,58 +16,66 @@ Wally is presently at Technology Readiness Level 4, passing the RISC-V compatibi
New users may wish to do the following setup to access the server via a GUI and use a text editor.
Git started with Git configuration and authentication: B.1 (replace with your name and email)
- Git started with Git configuration and authentication: C.1 (replace with your name and email)
```bash
$ git config --global user.name "Ben Bitdiddle"
$ git config --global user.email "ben_bitdiddle@wally.edu"
$ git config --global pull.rebase false
Optional: Download and install x2go - A.1.1
Optional: Download and install VSCode - A.4.2
Optional: Make sure you can log into your server via x2go and via a terminal
Terminal on Mac, cmd on Windows, xterm on Linux
See A.1 about ssh -Y login from a terminal
```
- Optional: Download and install x2go - B.1.1
- Optional: Download and install VSCode - B.4.2
- Optional: Make sure you can log into your server via x2go and via a terminal
- Terminal on Mac, cmd on Windows, xterm on Linux
- See B.1 about ssh -Y login from a terminal
Then fork and clone the repo, source setup, make the tests and run regression
If you don't already have a Github account, create one
In a web browser, visit https://github.com/openhwgroup/cvw
In the upper right part of the screen, click on Fork
Create a fork, choosing the owner as your github account
and the repository as cvw.
On the Linux computer where you will be working, log in
Clone your fork of the repo. Change `<yourgithubid>` to your github id.
1. If you don't already have a Github account, create one
2. In a web browser, visit https://github.com/openhwgroup/cvw
3. In the upper right part of the screen, click on Fork
4. Create a fork, choosing the owner as your github account and the repository as cvw.
5. On the Linux computer where you will be working, log in.
6. Clone your fork of the repo. Change `<yourgithubid>` to your github id.
```bash
$ git clone --recurse-submodules https://github.com/<yourgithubid>/cvw
$ cd cvw
$ git remote add upstream https://github.com/openhwgroup/cvw
```
If you are installing on a new system without any tools installed, please jump to the next section, Toolchain Installation then come back here.
> [!NOTE]
> If you are installing on a new system without any tools installed, please jump to the next section, [Toolchain Installation](#toolchain-installation-and-configuration-sys-admin), then come back here.
Run the setup script to update your `PATH` and activate the python virtual environment.
7. Run the setup script to update your `PATH` and activate the python virtual environment.
```bash
$ source ./setup.sh
```
Add the following lines to your `.bashrc` or `.bash_profile` to run the setup script each time you log in.
8. Add the following lines to your `.bashrc` or `.bash_profile` to run the setup script each time you log in.
```bash
if [ -f ~/cvw/setup.sh ]; then
source ~/cvw/setup.sh
fi
```
9. Build the tests and run a regression simulation to prove everything is installed. Building tests may take a while.
Build the tests and run a regression simulation to prove everything is installed. Building tests will take a while.
```bash
$ make --jobs
$ regression-wally
```
# Toolchain Installation and Configuration (Sys Admin)
This section describes the open source toolchain installation.
> This section describes the open source toolchain installation.
### Compatibility
The current version of the toolchain has been tested on Ubuntu (versions 20.04 LTS, 22.04 LTS, and 24.04 LTS) and on Red Hat/Rocky/AlmaLinux (versions 8 and 9).
NOTE: Ubuntu 22.04LTS is incompatible with Synopsys Design Compiler.
> [!WARNING]
> - Ubuntu 22.04 LTS is incompatible with Synopsys Design Compiler.
> - Verilator currently fails to simulate correctly on Ubuntu 20.04 LTS and Red Hat/Rocky/AlmaLinux 8.
### Overview
The toolchain installation script installs the following tools:
@ -74,32 +84,37 @@ The toolchain installation script installs the following tools:
- [QEMU](https://www.qemu.org/docs/master/system/target-riscv.html): emulator
- [Spike](https://github.com/riscv-software-src/riscv-isa-sim): functional RISC-V model
- [Verilator](https://github.com/verilator/verilator): open-source Verilog simulator
- NOTE: Verilator does not currently work reliably for simulating Wally on Ubuntu 20.04 LTS and Red Hat 8
- [RISC-V Sail Model](https://github.com/riscv/sail-riscv): golden reference model for RISC-V
- [OSU Skywater 130 cell library](https://foss-eda-tools.googlesource.com/skywater-pdk/libs/sky130_osu_sc_t12): standard cell library
- [RISCOF](https://github.com/riscv-software-src/riscof.git): RISC-V compliance test framework
Additionally, Buildroot Linux is built for Wally and linux test-vectors are generated for simulation. See the [Linux README](linux/README.md) for more details.
Additionally, Buildroot Linux is built for Wally and linux test-vectors are generated for simulation. See the [Linux README](linux/README.md) for more details. This can be skipped using the `--no-buildroot` flag.
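For example, to install the toolchain while skipping Buildroot and the Linux test-vectors (a minimal sketch using the flag described above):
```bash
$ $WALLY/bin/wally-tool-chain-install.sh --no-buildroot
```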
### Installation
The tools can be installed by running
$ $WALLY/bin/wally-tool-chain-install.sh
```bash
$ $WALLY/bin/wally-tool-chain-install.sh
```
If this script is run as root or using `sudo`, it will also install all of the prerequisite packages using the system package manager. The default installation directory when run in this manner is `/opt/riscv`.
If a user-level installation is desired, the script can instead be run by any user without `sudo` and the installation directory will be `~/riscv`. In this case, the prerequisite packages must first be installed by running
$ sudo $WALLY/bin/wally-package-install.sh
```bash
$ sudo $WALLY/bin/wally-package-install.sh
```
In either case, the installation directory can be overridden by passing the desired directory as the last argument to the installation script. For example,
$ sudo $WALLY/bin/wally-tool-chain-install.sh /home/riscv
```bash
$ sudo $WALLY/bin/wally-tool-chain-install.sh /home/riscv
```
See `wally-tool-chain-install.sh` for a detailed description of each component, or to issue the commands one at a time to install on the command line.
**NOTE:** The complete installation process requires ~55 GB of free space. If the `--clean` flag is passed as the first argument to the installation script then the final consumed space is only ~26 GB, but upgrading the tools requires reinstalling from scratch.
> [!NOTE]
> The complete installation process requires ~55 GB of free space. If the `--clean` flag is passed to the installation script then the final consumed space is only ~26 GB, but upgrading the tools will reinstall everything from scratch.
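For example, a space-conscious installation to a custom directory can combine the `--clean` flag with the target path (a sketch; choose the path appropriate for your system):
```bash
$ sudo $WALLY/bin/wally-tool-chain-install.sh --clean /home/riscv
```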
### Configuration
`$WALLY/setup.sh` sources `$RISCV/site-setup.sh`. If the toolchain was installed in either of the default locations (`/opt/riscv` or `~/riscv`), `$RISCV` will automatically be set to the correct path when `setup.sh` is run. If a custom installation directory was used, then `$WALLY/setup.sh` must be modified to set the correct path.
@ -108,12 +123,13 @@ See `wally-tool-chain-install.sh` for a detailed description of each component,
Change the following lines to point to the paths and license servers for your Siemens Questa, Synopsys Design Compiler, and Synopsys VCS installations. If you only have Questa or VCS, you can still simulate but cannot run logic synthesis. If Questa, VCS, or Design Compiler are already set up on this system, then don't set these variables.
export MGLS_LICENSE_FILE=.. # Change this to your Siemens license server
export SNPSLMD_LICENSE_FILE=.. # Change this to your Synopsys license server
export QUESTA_HOME=.. # Change this for your path to Questa
export DC_HOME=.. # Change this for your path to Synopsys Design Compiler
export VCS_HOME=.. # Change this for your path to Synopsys VCS
```bash
export MGLS_LICENSE_FILE=.. # Change this to your Siemens license server
export SNPSLMD_LICENSE_FILE=.. # Change this to your Synopsys license server
export QUESTA_HOME=.. # Change this for your path to Questa
export DC_HOME=.. # Change this for your path to Synopsys Design Compiler
export VCS_HOME=.. # Change this for your path to Synopsys VCS
```
# Installing EDA Tools
@ -127,39 +143,48 @@ Although most EDA tools are Linux-friendly, they tend to have issues when not in
### Siemens Questa
Siemens Questa simulates behavioral, RTL and gate-level HDL. To install Siemens Questa first go to a web browser and navigate to
https://eda.sw.siemens.com/en-US/ic/questa/simulation/advanced-simulator/. Click Sign In and log in with your credentials and the product can easily be downloaded and installed. Some Windows-based installations also require gcc libraries that are typically provided as a compressed zip download through Siemens.
Siemens Questa simulates behavioral, RTL, and gate-level HDL. To install Siemens Questa, first navigate to https://eda.sw.siemens.com/en-US/ic/questa/simulation/advanced-simulator/ in a web browser. Click Sign In and log in with your credentials; the product can then be downloaded and installed. Some Windows-based installations also require gcc libraries, which are typically provided as a compressed zip download through Siemens.
### Synopsys Design Compiler (DC)
Many commercial synthesis and place and route tools require a common installer. These installers are provided by the EDA vendor and Synopsys has one called Synopsys Installer. To use Synopsys Installer, you will need to acquire a license through Synopsys that is typically Called Synopsys Common Licensing (SCL). Both the Synopsys Installer, license key file, and Design Compiler can all be downloaded through Synopsys Solvnet. First open a web browser, log into Synsopsy Solvnet, and download the installer and Design Compiler installation files. Then, install the Installer
Many commercial synthesis and place-and-route tools require a common installer provided by the EDA vendor; Synopsys calls its installer Synopsys Installer. To use Synopsys Installer, you will need to acquire a license through Synopsys, typically called Synopsys Common Licensing (SCL). The Synopsys Installer, the license key file, and Design Compiler can all be downloaded through Synopsys SolvNet. First open a web browser, log into Synopsys SolvNet, and download the installer and Design Compiler installation files. Then, install the Installer.
$ firefox &
Navigate to https://solvnet.synopsys.com
Log in with your institutions username and password
Click on Downloads, then scroll down to Synopsys Installer
Select the latest version (currently 5.4). Click Download Here, agree,
Click on SynopsysInstaller_v5.4.run
Return to downloads and also get Design Compiler (synthesis) latest version, and any others you want.
Click on all parts and the .spf file, then click Download Files near the top
move the SynopsysInstaller into /cad/synopsys/Installer_5.4 with 755 permission for cad,
move other files into /cad/synopsys/downloads and work as user cad from here on
```bash
$ firefox &
```
$ cd /cad/synopsys/installer_5.4
$ ./SynopsysInstaller_v5.4.run
Accept default installation directory
$ ./installer
Enter source path as /cad/synopsys/downloads, and installation path as /cad/synopsys
When prompted, enter your site ID
Follow prompts
- Navigate to https://solvnet.synopsys.com
- Log in with your institution's username and password
- Click on Downloads, then scroll down to Synopsys Installer
- Select the latest version (currently 5.4). Click Download Here, agree,
- Click on SynopsysInstaller_v5.4.run
- Return to downloads and also get Design Compiler (synthesis) latest version, and any others you want.
- Click on all parts and the .spf file, then click Download Files near the top
- Move the SynopsysInstaller into `/cad/synopsys/Installer_5.4` with 755 permissions for the cad user
- Move the other files into `/cad/synopsys/downloads` and work as user cad from here on
```bash
$ cd /cad/synopsys/installer_5.4
$ ./SynopsysInstaller_v5.4.run
```
- Accept default installation directory
```bash
$ ./installer
```
- Enter source path as `/cad/synopsys/downloads`, and installation path as `/cad/synopsys`
- When prompted, enter your site ID
- Follow prompts
The Installer can be used in graphical or text-based mode; the text-based mode is far easier. To install DC, navigate to the location of your downloaded DC files and run `installer`. You will be prompted with questions about where you wish to install the files.
The Synopsys Installer automatically installs all downloaded product files into a single top-level target directory. You do not need to specify the installation directory for each product. For example, if you specify /import/programs/synopsys as the target directory, your installation directory structure might look like this after installation:
The Synopsys Installer automatically installs all downloaded product files into a single top-level target directory. You do not need to specify the installation directory for each product. For example, if you specify `/import/programs/synopsys` as the target directory, your installation directory structure might look like this after installation:
/import/programs/synopsys/syn/S-2021.06-SP1
```bash
/import/programs/synopsys/syn/S-2021.06-SP1
```
Note: Although most parts of Wally, including the Questa simulator, will work on most modern Linux platforms, as of 2022, the Synopsys CAD tools for SoC design are only supported on RedHat Enterprise Linux 7.4 or 8 or SUSE Linux Enterprise Server (SLES) 12 or 15. Moreover, the RISC-V formal specification (sail-riscv) does not build gracefully on RHEL7.
> [!Note]
> Although most parts of Wally, including the Questa simulator, will work on most modern Linux platforms, as of 2024, the Synopsys CAD tools for SoC design are only supported on Red Hat Enterprise Linux (or AlmaLinux/Rocky) 8.4+ or 9.1+ or SUSE Linux Enterprise Server (SLES) 15.
The Verilog simulation has been tested with Siemens Questa/ModelSim. This package is available to universities worldwide as part of the Design Verification Bundle through the Siemens Academic Partner Program for $990/year.
@ -174,7 +199,7 @@ If you want to add a cronjob you can do the following:
1) Set up the email client `mutt` for your distribution
2) Enter `crontab -e` into a terminal
3) add this code to test building CVW and then running `regression-wally --nightly` at 9:30 PM each day
```
```bash
30 21 * * * bash -l -c "source ~/PATH/TO/CVW/setup.sh; PATH_TO_CVW/cvw/bin/wrapper_nightly_runs.sh --path {PATH_TO_TEST_LOCATION} --target all --tests nightly --send_email harris@hmc.edu,kaitlin.verilog@gmail.com"
```
@ -182,44 +207,57 @@ If you want to add a cronjob you can do the following:
wsim runs one of several simulators (Questa, VCS, or Verilator) with a specific configuration and either a suite of tests or a specific elf file.
The general syntax is
wsim <config> <suite or elf file or directory> [--options]
`wsim <config> <suite or elf file or directory> [--options]`
Parameters and options:
-h, --help show this help message and exit
--sim {questa,verilator,vcs}, -s {questa,verilator,vcs} Simulator
--tb {testbench,testbench_fp}, -t {testbench,testbench_fp} Testbench
--gui, -g Simulate with GUI
--coverage, -c Code & Functional Coverage
--fcov, -f Code & Functional Coverage
--args ARGS, -a ARGS Optional arguments passed to simulator via $value$plusargs
--vcd, -v Generate testbench.vcd
--lockstep, -l Run ImperasDV lock, step, and compare.
--locksteplog LOCKSTEPLOG, -b LOCKSTEPLOG Retired instruction number to be begin logging.
--covlog COVLOG, -d COVLOG Log coverage after n instructions.
--elfext ELFEXT, -e ELFEXT When searching for elf files only includes ones which end in this extension
```
-h, --help show this help message and exit
--sim {questa,verilator,vcs}, -s {questa,verilator,vcs} Simulator
--tb {testbench,testbench_fp}, -t {testbench,testbench_fp} Testbench
--gui, -g Simulate with GUI
--coverage, -c Code & Functional Coverage
--fcov, -f Code & Functional Coverage
--args ARGS, -a ARGS Optional arguments passed to simulator via $value$plusargs
--vcd, -v Generate testbench.vcd
--lockstep, -l Run ImperasDV lock, step, and compare.
--locksteplog LOCKSTEPLOG, -b LOCKSTEPLOG Retired instruction number to be begin logging.
--covlog COVLOG, -d COVLOG Log coverage after n instructions.
--elfext ELFEXT, -e ELFEXT When searching for elf files only includes ones which end in this extension
```
Run a basic test with Questa
wsim rv64gc arch64i
```bash
wsim rv64gc arch64i
```
Run Questa with the GUI
wsim rv64gc wally64priv --gui
```bash
wsim rv64gc wally64priv --gui
```
Run lockstep against ImperasDV with a single elf file in the --gui. Lockstep requires single elf.
Run lockstep against ImperasDV with a single elf file in the GUI. Lockstep requires a single elf file.
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/add-01.S/ref/ref.elf --lockstep --gui
```bash
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/add-01.S/ref/ref.elf --lockstep --gui
```
Run lockstep against ImperasDV with a single elf file. Compute coverage.
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/add-01.S/ref/ref.elf --lockstep --coverage
```bash
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/add-01.S/ref/ref.elf --lockstep --coverage
```
Run lockstep against ImperasDV with a directory of elf files.
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/ --lockstep
```bash
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/ --lockstep
```
Run lockstep against ImperasDV with a directory of elf files, including only files with a specific extension.
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/ --lockstep --elfext ref.elf
```bash
wsim rv64gc ../../tests/riscof/work/riscv-arch-test/rv64i_m/I/src/ --lockstep --elfext ref.elf
```
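Run a test with Verilator instead of the default simulator by selecting it with the `--sim` option
```bash
wsim rv32i arch32i --sim verilator
```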

@ -1 +0,0 @@
Subproject commit 33418c8dc11baf63e843b0d35f57d22c1e3182e3

@ -1 +1 @@
Subproject commit 80cdee231f924b3045054594d4a7769e6eddcdcc
Subproject commit bbcba78647080dee82e96bc1b8ff9cd9a3cf7fa1


@ -1,7 +1,7 @@
#!/bin/bash
###########################################
## Written: ross1728@gmail.com
## Written: rose@rosethompson.net
## Created: 23 October 2023
## Modified:
##


@ -1,7 +1,7 @@
#!/bin/bash
###########################################
## Written: ross1728@gmail.com
## Written: rose@rosethompson.net
## Created: 12 March 2023
## Modified:
##


@ -1,7 +1,7 @@
#!/bin/bash
###########################################
## Written: ross1728@gmail.com
## Written: rose@rosethompson.net
## Created: 12 March 2023
## Modified:
##


@ -4,7 +4,7 @@
## extractFunctionRadix.sh
##
## Written: Rose Thompson
## email: ross1728@gmail.com
## email: rose@rosethompson.net
## Created: March 1, 2021
## Modified: March 10, 2021
##


@ -84,7 +84,7 @@ from pathlib import Path
class FolderManager:
"""A class for managing folders and repository cloning."""
def __init__(self):
def __init__(self, basedir):
"""
Initialize the FolderManager instance.
@ -92,8 +92,12 @@ class FolderManager:
base_dir (str): The base directory where folders will be managed and repository will be cloned.
"""
env_extract_var = 'WALLY'
if os.environ.get(env_extract_var):
self.base_dir = os.environ.get(env_extract_var)
self.base_parent_dir = os.path.dirname(self.base_dir)
else:
self.base_dir = basedir
self.base_parent_dir = self.base_dir
# logger.info(f"Base directory: {self.base_dir}")
# logger.info(f"Parent Base directory: {self.base_parent_dir}")
@ -284,36 +288,37 @@ class TestRunner:
makefile_location = self.cvw.joinpath(makefile_path)
os.chdir(makefile_location)
output_file = self.log_dir.joinpath(f"make-{target}-output.log")
command = ["make"]
# Add target to the command if specified
if target:
command.append(target)
self.logger.info(f"Command used in directory {makefile_location}: {command[0]} {command[1]}")
output_file = self.log_dir.joinpath(f"make-{target}-output.log")
else: output_file = self.log_dir.joinpath(f"make-output.log")
# Execute make with target and cores/2
if target:
command = ["make", target, "--jobs=$(($(nproc)/2))"]
else:
self.logger.info(f"Command used in directory {makefile_location}: {command[0]}")
command = ["make", "--jobs=$(($(nproc)/2))"]
self.logger.info(f"Command used in directory {makefile_location}: {' '.join(command)}")
# Execute the command using subprocess and save the output into a file
with open(output_file, "w") as f:
formatted_datetime = self.current_datetime.strftime("%Y-%m-%d %H:%M:%S")
f.write(formatted_datetime)
f.write("\n\n")
result = subprocess.run(command, stdout=f, stderr=subprocess.STDOUT, text=True)
result = subprocess.run(command, stdout=f, stderr=subprocess.STDOUT, text=True, shell=True)
# Execute the command using a subprocess and not save the output
#result = subprocess.run(command, text=True)
# Check the result
if result.returncode == 0:
self.logger.info(f"Tests have been made with target: {target}")
self.logger.info(f"Tests have been made with target: {' '.join(command)}")
return True
else:
self.logger.error(f"Error making the tests. Target: {target}")
self.logger.error(f"Error making the tests. Command: {' '.join(command)}")
return False
def run_tests(self, test_type=None, test_name=None, test_extension=None):
def run_tests(self, test_type=None, test_name=None, test_extensions=None):
"""
Run a script through the terminal and save the output to a file.
@ -329,12 +334,12 @@ class TestRunner:
output_file = self.log_dir.joinpath(f"{test_name}-output.log")
os.chdir(self.sim_dir)
if test_extension:
command = [test_type, test_name, test_extension]
self.logger.info(f"Command used to run tests: {test_type} {test_name} {test_extension}")
if test_extensions:
command = [test_type, test_name] + test_extensions
self.logger.info(f"Command used to run tests in directory {self.sim_dir}: {test_type} {test_name} {' '.join(test_extensions)}")
else:
command = [test_type, test_name]
self.logger.info(f"Command used to run tests: {test_type} {test_name}")
self.logger.info(f"Command used to run tests in directory {self.sim_dir}: {test_type} {test_name}")
# Execute the command using subprocess and save the output into a file
@ -348,10 +353,10 @@ class TestRunner:
self.logger.error("There was an error in running the tests in the run_tests function: {e}")
# Check if the command executed successfuly
if result.returncode or result.returncode == 0:
self.logger.info(f"Test ran successfuly. Test type: {test_type}, test name: {test_name}, test extention: {test_extension}")
self.logger.info(f"Test ran successfuly. Test type: {test_type}, test name: {test_name}, test extension: {' '.join(test_extensions)}")
return True, output_file
else:
self.logger.error(f"Error making test. Test type: {test_type}, test name: {test_name}, test extention: {test_extension}")
self.logger.error(f"Error making test. Test type: {test_type}, test name: {test_name}, test extension: {' '.join(test_extensions)}")
return False, output_file
@ -406,23 +411,31 @@ class TestRunner:
# Remove ANSI escape codes
line = re.sub(r'\x1b\[[0-9;]*[mGK]', '', lines[index])
if "Success" in line:
if "Success" in line: # test succeeds
passed_configs.append(line.split(':')[0].strip())
elif "passed lint" in line:
passed_configs.append(line.split(' ')[0].strip())
passed_configs.append(f"Lint: {line.split(' ')[0].strip()}")
#passed_configs.append(line) # potentially use a space
elif "failed lint" in line:
failed_configs.append(line.split(' ')[0].strip(), "no log file")
failed_configs.append([f"Lint: {line.split(' ')[0].strip()}", "No Log File"])
#failed_configs.append(line)
elif "Failures detected in output" in line:
elif "Failures detected in output" in line: # Text explicitly fails
try:
config_name = line.split(':')[0].strip()
log_file = os.path.abspath("logs/"+config_name+".log")
log_file = os.path.abspath(os.path.join("logs", config_name))
failed_configs.append((config_name, log_file))
except:
failed_configs.append((config_name, "Log file not found"))
elif "Timeout" in line: # Test times out
try:
config_name = line.split(':')[0].strip()
log_file = os.path.abspath("logs/"+config_name+".log")
failed_configs.append((f"Timeout: {config_name}", log_file))
except:
failed_configs.append((f"Timeout: {config_name}", "No Log File"))
index += 1
@ -535,7 +548,7 @@ class TestRunner:
md_file.write(f"\n**Total failed tests: {total_number_failures}**")
for (test_item, item) in zip(test_list, failed_tests):
md_file.write(f"\n\n### {test_item[1]} test")
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {test_item[2]}\n\n")
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {' '.join(test_item[2])}\n\n")
md_file.write(f"**Failed Tests:**\n")
@ -558,7 +571,7 @@ class TestRunner:
md_file.write(f"\n**Total successful tests: {total_number_success}**")
for (test_item, item) in zip(test_list, passed_tests):
md_file.write(f"\n\n### {test_item[1]} test")
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {test_item[2]}\n\n")
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {' '.join(test_item[2])}\n\n")
md_file.write(f"\n**Successful Tests:**\n")
@ -619,7 +632,7 @@ class TestRunner:
# check if there are any emails
if not receiver_emails:
self.logger.ERROR("No receiver emails provided.")
self.logger.error("No receiver emails provided.")
return
# grab the html file
@ -660,7 +673,7 @@ def main():
parser.add_argument('--path',default = "nightly", help='specify the path for where the nightly repositories will be cloned ex: "nightly-runs')
parser.add_argument('--repository',default = "https://github.com/openhwgroup/cvw", help='specify which github repository you want to clone')
parser.add_argument('--target', default = "all", help='types of tests you can make are: all, wally-riscv-arch-test, no')
parser.add_argument('--target', default = "", help='types of tests you can make are: all, wally-riscv-arch-test, no')
parser.add_argument('--tests', default = "nightly", help='types of tests you can run are: nightly, test, test_lint')
parser.add_argument('--send_email',default = "", nargs="+", help='What emails to send test results to. Example: "[email1],[email2],..."')
@ -682,7 +695,7 @@ def main():
log_file_path = log_path.joinpath("nightly_build.log")
previous_cvw_path = Path.home().joinpath(args.path,f"{yesterday}/cvw")
# creates the object
folder_manager = FolderManager()
folder_manager = FolderManager(basedir=args.path)
# setting the path on where to clone new repositories of cvw
folder_manager.create_folders([cvw_path, results_path, log_path])
@ -691,14 +704,18 @@ def main():
folder_manager.clone_repository(cvw_path, args.repository)
# Define tests that we can run
if (args.tests == "nightly"):
test_list = [["python", "regression-wally", "--nightly --buildroot"]]
elif (args.tests == "test"):
test_list = [["python", "regression-wally", ""]]
elif (args.tests == "test_lint"):
test_list = [["bash", "lint-wally", "-nightly"]]
#
# flags are a list
if (args.tests == "all"):
test_list = [["python", "./regression-wally", ["--nightly", "--buildroot"]]]
elif (args.tests == "nightly"):
test_list = [["python", "./regression-wally", ["--nightly"]]]
elif (args.tests == "regression"):
test_list = [["python", "./regression-wally", []]]
elif (args.tests == "lint"):
test_list = [["bash", "./lint-wally", ["--nightly"]]]
else:
print(f"Error: Invalid test '"+args.test+"' specified")
print(f"Error: Invalid test {args.tests} specified")
raise SystemExit
#############################################
@ -747,12 +764,12 @@ def main():
if args.target != "no":
test_runner.execute_makefile(target = args.target, makefile_path=test_runner.cvw)
if args.target == "all":
# Compile Linux for local testing
test_runner.set_env_var("RISCV",str(test_runner.cvw))
linux_path = test_runner.cvw / "linux"
test_runner.execute_makefile(target = "all_nosudo", makefile_path=linux_path)
test_runner.execute_makefile(target = "dumptvs_nosudo", makefile_path=linux_path)
# TODO: remove vestigial code if no longer wanted
# if args.target == "all":
# # Compile Linux for local testing
# test_runner.set_env_var("RISCV",str(test_runner.cvw))
# linux_path = test_runner.cvw / "linux"
# test_runner.execute_makefile(target = "all", makefile_path=linux_path)
#############################################
# RUN TESTS #
@ -766,9 +783,9 @@ def main():
total_failures = []
total_success = []
for test_type, test_name, test_extension in test_list:
for test_type, test_name, test_extensions in test_list:
check, output_location = test_runner.run_tests(test_type=test_type, test_name=test_name, test_extension=test_extension)
check, output_location = test_runner.run_tests(test_type=test_type, test_name=test_name, test_extensions=test_extensions)
try:
if check: # this checks if the test actually ran successfuly
output_log_list.append(output_location)
@ -778,7 +795,7 @@ def main():
passed, failed = test_runner.clean_format_output(input_file = output_location)
logger.info(f"{test_name} has been formatted to markdown")
except:
logger.ERROR(f"Error occured with formatting {test_name}")
logger.error(f"Error occured with formatting {test_name}")
logger.info(f"The # of failures are for {test_name}: {len(failed)}")
total_number_failures+= len(failed)
@ -789,14 +806,18 @@ def main():
total_success.append(passed)
test_runner.rewrite_to_markdown(test_name, passed, failed)
newlinechar = "\n"
logger.info(f"Failed tests: \n{newlinechar.join([x[0] for x in failed])}")
except Exception as e:
logger.error("There was an error in running the tests: {e}")
logger.error(f"There was an error in running the tests: {e}")
logger.info(f"The total sucesses for all tests ran are: {total_number_success}")
logger.info(f"The total failures for all tests ran are: {total_number_failures}")
# Copy actual test logs from sim/questa, sim/verilator
test_runner.copy_sim_logs([test_runner.cvw / "sim/questa/logs", test_runner.cvw / "sim/verilator/logs"])
# Copy actual test logs from sim/questa, sim/verilator, sim/vcs
if not args.tests == "test_lint":
test_runner.copy_sim_logs([test_runner.cvw / "sim/questa/logs", test_runner.cvw / "sim/verilator/logs", test_runner.cvw / "sim/vcs/logs"])
#############################################
# FORMAT TESTS #


@ -1,7 +1,7 @@
#!/usr/bin/env python3
###########################################
## Written: Rose Thompson ross1728@gmail.com
## Written: Rose Thompson rose@rosethompson.net
## Created: 20 September 2023
## Modified:
##


@ -233,6 +233,7 @@ bpredtests = [
# list of tests not supported by ImperasDV yet that should be waived during lockstep testing
lockstepwaivers = [
"WALLY-q-01.S_ref.elf", # Q extension is not supported by ImperasDV
"coverage_tlbMisaligned.elf", # Issue 976: ImperasDV bug disagrees with Wally related to misaligned pages when PBMT makes page uncachable
"WALLY-cbom-01.S_ref.elf" #, # cbom extension is not supported by ImperasDV because there is no cache model in ImperasDV
]
@ -333,19 +334,27 @@ def search_log_for_text(text, grepfile):
# print(" search_log_for_text invoking %s" % grepcmd)
return os.system(grepcmd) == 0
def run_test_case(config):
"""Run the given test case, and return 0 if the test suceeds and 1 if it fails"""
def run_test_case(config, dryrun: bool = False):
"""
Run the given test case, and return 0 if the test succeeds and 1 if it fails
Do not execute commands if dryrun
"""
grepfile = config.grepfile
cmd = config.cmd
os.chdir(regressionDir)
# print(" run_test_case invoking %s" % cmd)
os.system(cmd)
if search_log_for_text(config.grepstr, grepfile):
# print(f"{bcolors.OKGREEN}%s_%s: Success{bcolors.ENDC}" % (config.variant, config.name))
print(f"{bcolors.OKGREEN}%s: Success{bcolors.ENDC}" % (config.cmd))
if dryrun:
print(f"Executing {cmd}", flush=True)
return 0
else:
print(f"{bcolors.FAIL}%s: Failures detected in output{bcolors.ENDC}" % (config.cmd))
os.system(cmd)
if search_log_for_text(config.grepstr, grepfile):
# Flush is needed to flush output to stdout when running in multiprocessing Pool
# print(f"{bcolors.OKGREEN}%s_%s: Success{bcolors.ENDC}" % (config.variant, config.name), flush=True)
print(f"{bcolors.OKGREEN}%s: Success{bcolors.ENDC}" % (config.cmd), flush=True)
return 0
else:
print(f"{bcolors.FAIL}%s: Failures detected in output{bcolors.ENDC}" % (config.cmd), flush=True)
print(" Check %s" % grepfile)
return 1
@ -361,15 +370,16 @@ os.chdir(regressionDir)
coveragesim = "questa" # Questa is required for code/functional coverage
#defaultsim = "questa" # Default simulator for all other tests; change to Verilator when flow is ready
defaultsim = "verilator" # Default simulator for all other tests
lockstepsim = "questa"
parser = argparse.ArgumentParser()
parser.add_argument("--ccov", help="Code Coverage", action="store_true")
parser.add_argument("--fcov", help="Functional Coverage", action="store_true")
parser.add_argument("--fcovrvvi", help="Functional Coverage RVVI", action="store_true")
parser.add_argument("--nightly", help="Run large nightly regression", action="store_true")
parser.add_argument("--buildroot", help="Include Buildroot Linux boot test (takes many hours, done along with --nightly)", action="store_true")
parser.add_argument("--testfloat", help="Include Testfloat floating-point unit tests", action="store_true")
parser.add_argument("--fp", help="Include floating-point tests in coverage (slower runtime)", action="store_true")
parser.add_argument("--dryrun", help="Print commands invoked to console without running regression", action="store_true")
args = parser.parse_args()
if (args.nightly):
@ -384,8 +394,6 @@ if (args.ccov): # only run RV64GC tests in coverage mode
coverStr = '--ccov'
elif (args.fcov): # only run RV64GC tests in lockstep in coverage mode
coverStr = '--fcov'
elif (args.fcovrvvi): # only run RV64GC tests in rvvi coverage mode
coverStr = '--fcovrvvi'
else:
coverStr = ''
@ -405,7 +413,7 @@ configs = [
# run full buildroot boot simulation (slow) if buildroot flag is set. Start it early to overlap with other tests
if (args.buildroot):
# addTests(tests_buildrootboot, defaultsim) # non-lockstep with Verilator runs in about 2 hours
addTests(tests_buildrootbootlockstep, "questa") # lockstep with Questa and ImperasDV runs overnight
addTests(tests_buildrootbootlockstep, lockstepsim) # lockstep with Questa and ImperasDV runs overnight
if (args.ccov): # only run RV64GC tests on Questa in code coverage mode
addTests(tests64gc_nofp, coveragesim)
@ -414,14 +422,11 @@ if (args.ccov): # only run RV64GC tests on Questa in code coverage mode
elif (args.fcov): # only run RV64GC tests on Questa in lockstep in functional coverage mode
addLockstepTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/rv32/", "rv32gc", coveragesim, 1)
addLockstepTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/rv64/", "rv64gc", coveragesim, 1)
# addLockstepTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m/privilege/src/", "rv64gc", coveragesim, 0)
elif (args.fcovrvvi): # only run RV64GC tests on Questa in rvvi coverage mode
addTests(tests64gc_nofp, coveragesim)
if (args.fp):
addTests(tests64gc_fp, coveragesim)
#addLockstepTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m/privilege/src/", "rv64gc", coveragesim, 0)
else:
for sim in sims:
if (not (args.buildroot and sim == defaultsim)): # skip short buildroot sim if running long one
if (not (args.buildroot and sim == lockstepsim)): # skip short buildroot sim if running long one
addTests(tests_buildrootshort, sim)
addTests(tests, sim)
addTests(tests64gc_nofp, sim)
@ -429,9 +434,10 @@ else:
# run derivative configurations and lockstep tests in nightly regression
if (args.nightly):
addLockstepTestsByDir(WALLY+"/tests/coverage", "rv64gc", "questa", 0)
addLockstepTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m", "rv64gc", "questa", 0)
addLockstepTestsByDir(WALLY+"/tests/coverage", "rv64gc", lockstepsim, 0)
addLockstepTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m", "rv64gc", lockstepsim, 0)
addTests(derivconfigtests, defaultsim)
# addTests(bpredtests, defaultsim) # This is currently broken in regression due to something related to the new wsim script.
# testfloat tests
if (args.testfloat): # for testfloat alone, just run testfloat tests
@ -510,11 +516,8 @@ def main():
if args.ccov:
TIMEOUT_DUR = 20*60 # seconds
os.system('rm -f questa/cov/*.ucdb')
elif args.fcovrvvi:
TIMEOUT_DUR = 20*60
os.system('rm -f questa/fcovrvvi_ucdb/* questa/fcovrvvi_logs/* questa/fcovrvvi/*')
elif args.fcov:
TIMEOUT_DUR = 2*60
TIMEOUT_DUR = 8*60
os.system('rm -f questa/fcov_ucdb/* questa/fcov_logs/* questa/fcov/*')
elif args.buildroot:
TIMEOUT_DUR = 60*1440 # 1 day
@ -541,16 +544,15 @@ def main():
try:
num_fail+=result.get(timeout=TIMEOUT_DUR)
except TimeoutError:
pool.terminate()
num_fail+=1
print(f"{bcolors.FAIL}%s_%s: Timeout - runtime exceeded %d seconds{bcolors.ENDC}" % (config.variant, config.name, TIMEOUT_DUR))
print(f"{bcolors.FAIL}%s: Timeout - runtime exceeded %d seconds{bcolors.ENDC}" % (config.cmd, TIMEOUT_DUR))
# Coverage report
if args.ccov:
os.system('make QuestaCodeCoverage')
if args.fcov:
os.system('make -f '+WALLY+'/addins/cvw-arch-verif/Makefile merge')
if args.fcovrvvi:
os.system('make QuestaFunctCoverageRvvi')
# Count the number of failures
if num_fail:
print(f"{bcolors.FAIL}Regression failed with %s failed configurations{bcolors.ENDC}" % num_fail)


@ -3,12 +3,12 @@ lief>=0.14.1
Markdown>=3.6
matplotlib>=3.9.0
PyYAML>=5.2
riscv-isac @ git+https://github.com/riscv-non-isa/riscv-arch-test/#subdirectory=riscv-isac
riscof @ git+https://github.com/riscv/riscof.git
riscv-config>=3.18.3
riscv-isac>=0.18.0
riscv-isac @ git+https://github.com/riscv-non-isa/riscv-arch-test/#subdirectory=riscv-isac
scikit-learn>=1.5.0
scipy>=1.13.0
Sphinx>=7.3.7
setuptools
Sphinx~=7.3.7 # QEMU fails to build with Sphinx 8
sphinx-rtd-theme>=2.0.0
testresources>=2.0.1


@ -123,5 +123,14 @@ else
eval "$UPDATE_COMMAND"
# Install packages listed above using appropriate package manager
sudo $PACKAGE_MANAGER install -y "${GENERAL_PACKAGES[@]}" "${GNU_PACKAGES[@]}" "${QEMU_PACKAGES[@]}" "${SPIKE_PACKAGES[@]}" "${VERILATOR_PACKAGES[@]}" "${BUILDROOT_PACKAGES[@]}" "${OTHER_PACKAGES[@]}" "${VIVADO_PACKAGES[@]}"
# Post install steps
# Vivado looks for ncurses5 libraries, but Ubuntu 24.04 only has ncurses6
# Create symbolic links to the ncurses6 libraries to fool Vivado
if (( UBUNTU_VERSION >= 24 )); then
sudo ln -vsf /lib/x86_64-linux-gnu/libncurses.so.6 /lib/x86_64-linux-gnu/libncurses.so.5
sudo ln -vsf /lib/x86_64-linux-gnu/libtinfo.so.6 /lib/x86_64-linux-gnu/libtinfo.so.5
fi
echo -e "${SUCCESS_COLOR}Packages successfully installed.${ENDC}"
fi


@ -2,7 +2,7 @@
###########################################
## Tool chain install script.
##
## Written: Rose Thompson ross1728@gmail.com
## Written: Rose Thompson rose@rosethompson.net
## Created: 18 January 2023
## Modified: 22 January 2023
## Modified: 23 March 2023
@ -48,28 +48,64 @@ ENDC='\033[0m' # Reset to default color
error() {
echo -e "${FAIL_COLOR}Error: $STATUS installation failed"
echo -e "Error on line ${BASH_LINENO[0]} with command $BASH_COMMAND${ENDC}"
if [ -e "$RISCV/logs/$STATUS.log" ]; then
echo -e "Please check the log in $RISCV/logs/$STATUS.log for more information."
fi
exit 1
}
# Check if a git repository exists, is up to date, and has been installed
# Clones the repository if it doesn't exist
# clones the repository if it doesn't exist
# $1: repo name
# $2: repo url to clone from
# $3: file to check if already installed
# $4: upstream branch, optional, default is master
git_check() {
local repo=$1
local url=$2
local check=$3
local branch="${4:-master}"
if [[ ((! -e $repo) && ($(git clone "$url") || true)) || ($(cd "$repo"; git fetch; git rev-parse HEAD) != $(cd "$repo"; git rev-parse origin/"$branch")) || (! -e $check) ]]; then
return 0
# Clone repo if it doesn't exist
if [[ ! -e $repo ]]; then
for ((i=1; i<=5; i++)); do
git clone "$url" && break
echo -e "${WARNING_COLOR}Failed to clone $repo. Retrying.${ENDC}"
rm -rf "$repo"
sleep $i
done
if [[ ! -e $repo ]]; then
echo -e "${ERROR_COLOR}Failed to clone $repo after 5 attempts. Exiting.${ENDC}"
exit 1
fi
fi
# Get the current HEAD commit hash and the remote branch commit hash
cd "$repo"
git fetch
local local_head=$(git rev-parse HEAD)
local remote_head=$(git rev-parse origin/"$branch")
# Check if the git repository is not up to date or the specified file does not exist
if [[ "$local_head" != "$remote_head" ]]; then
echo "$repo is not up to date. Updating now."
true
elif [[ ! -e $check ]]; then
true
else
return 1
false
fi
}
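# Example usage (see the riscv-gnu-toolchain section below): rebuild only when the repo is missing,
# out of date, or the stamp/check file is absent, e.g.
#   if git_check "riscv-gnu-toolchain" "https://github.com/riscv/riscv-gnu-toolchain" \
#       "$RISCV/riscv-gnu-toolchain/stamps/build-gcc-newlib-stage2"; then
#       ... update and rebuild ...
#   fi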
# Log output to a file and only print lines with keywords
logger() {
local log="$RISCV/logs/$1.log"
cat < /dev/stdin | tee -a "$log" | (grep -iE --color=never "(\bwarning|\berror|\bfail|\bsuccess|\bstamp|\bdoesn't work)" || true) | (grep -viE --color=never "(_warning|warning_|_error|error_|-warning|warning-|-error|error-|Werror|error\.o|warning flags)" || true)
local log_file="$RISCV/logs/$1.log"
local keyword_pattern="(\bwarning|\berror|\bfail|\bsuccess|\bstamp|\bdoesn't work)"
local exclude_pattern="(_warning|warning_|_error|error_|-warning|warning-|-error|error-|Werror|error\.o|warning flags)"
cat < /dev/stdin | tee -a "$log_file" | \
(grep -iE --color=never "$keyword_pattern" || true) | \
(grep -viE --color=never "$exclude_pattern" || true)
}
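# Example usage (as in the build steps below): pipe a build command through logger, then check the
# build's own exit status via PIPESTATUS, e.g.
#   make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]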
set -e # break on error
@ -77,11 +113,17 @@ trap error ERR # run error handler on error
STATUS="setup" # keep track of what part of the installation is running for error messages
# Check for clean flag
if [ "$1" == "--clean" ]; then
if [ "$1" == "--clean" ] || [ "$2" == "--clean" ]; then
clean=true
shift
fi
# Check for no-buildroot flag
if [ "$1" == "--no-buildroot" ] || [ "$2" == "--no-buildroot" ]; then
no_buildroot=true
shift
fi
# Determine script directory to locate related scripts
dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@ -105,6 +147,10 @@ fi
export PATH=$PATH:$RISCV/bin:/usr/bin
export PKG_CONFIG_PATH=$RISCV/lib64/pkgconfig:$RISCV/lib/pkgconfig:$RISCV/share/pkgconfig:$RISCV/lib/x86_64-linux-gnu/pkgconfig:$PKG_CONFIG_PATH
if (( RHEL_VERSION != 8 )); then
retry_on_host_error="--retry-on-host-error"
fi
# Check for incompatible PATH environment variable before proceeding with installation
if [[ ":$PATH:" == *::* || ":$PATH:" == *:.:* ]]; then
echo -e "${FAIL_COLOR}Error: You seem to have the current working directory in your \$PATH environment variable."
@ -112,11 +158,27 @@ if [[ ":$PATH:" == *::* || ":$PATH:" == *:.:* ]]; then
exit 1
fi
# Create installation directory
mkdir -p "$RISCV"/logs
# Check available memory
total_mem=$(grep MemTotal < /proc/meminfo | awk '{print $2}')
total_mem_gb=$((total_mem / 1024 / 1024))
# Print system information
echo "Running as root: $ROOT"
echo "Installation path: $RISCV"
echo "Number of cores: $(nproc)"
echo "Total memory: $total_mem_gb GB"
# Reduce number of threads for systems with less than 8 GB of memory
if ((total_mem < 8400000 )) ; then
NUM_THREADS=1
echo -e "${WARNING_COLOR}Detected less than or equal to 8 GB of memory. Using a single thread for compiling tools. This may take a while.${ENDC}"
fi
# Print number of threads
echo "Using $NUM_THREADS thread(s) for compilation"
# Create installation directory
mkdir -p "$RISCV"/logs
# Install/update system packages if root. Otherwise, check that packages are already installed.
STATUS="system packages"
@ -155,12 +217,11 @@ source "$RISCV"/riscv-python/bin/activate # activate python virtual environment
# Install python packages, including RISCOF (https://github.com/riscv-software-src/riscof.git)
# RISCOF is a RISC-V compliance test framework that is used to run the RISC-V Arch Tests.
STATUS="python packages"
pip install --upgrade pip && pip install -r "$dir"/requirements.txt
pip install --upgrade pip && pip install --upgrade -r "$dir"/requirements.txt
source "$RISCV"/riscv-python/bin/activate # reload python virtual environment
echo -e "${SUCCESS_COLOR}Python environment successfully configured!${ENDC}"
# Extra dependencies needed for older distros that don't have new enough versions available from the package manager
if (( RHEL_VERSION == 8 )) || (( UBUNTU_VERSION == 20 )); then
# Newer version of glib required for QEMU.
@ -170,11 +231,13 @@ if (( RHEL_VERSION == 8 )) || (( UBUNTU_VERSION == 20 )); then
section_header "Installing glib"
pip install -U meson # Meson is needed to build glib
cd "$RISCV"
curl --location https://download.gnome.org/sources/glib/2.70/glib-2.70.5.tar.xz | tar xJ
wget -nv --retry-connrefused $retry_on_host_error https://download.gnome.org/sources/glib/2.70/glib-2.70.5.tar.xz
tar -xJf glib-2.70.5.tar.xz
rm -f glib-2.70.5.tar.xz
cd glib-2.70.5
meson setup _build --prefix="$RISCV"
meson compile -C _build
meson install -C _build
meson compile -C _build -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
meson install -C _build 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
cd "$RISCV"
rm -rf glib-2.70.5
echo -e "${SUCCESS_COLOR}glib successfully installed!${ENDC}"
@ -187,11 +250,13 @@ if (( RHEL_VERSION == 8 )); then
if [ ! -e "$RISCV"/include/gmp.h ]; then
section_header "Installing gmp"
cd "$RISCV"
curl --location https://ftp.gnu.org/gnu/gmp/gmp-6.3.0.tar.xz | tar xJ
wget -nv --retry-connrefused $retry_on_host_error https://ftp.gnu.org/gnu/gmp/gmp-6.3.0.tar.xz
tar -xJf gmp-6.3.0.tar.xz
rm -f gmp-6.3.0.tar.xz
cd gmp-6.3.0
./configure --prefix="$RISCV"
make -j "${NUM_THREADS}"
make install
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
cd "$RISCV"
rm -rf gmp-6.3.0
echo -e "${SUCCESS_COLOR}gmp successfully installed!${ENDC}"
@ -209,9 +274,9 @@ section_header "Installing/Updating RISC-V GNU Toolchain"
STATUS="riscv-gnu-toolchain"
cd "$RISCV"
# Temporarily pin riscv-gnu-toolchain to use GCC 13.2.0. GCC 14 does not work with the Q extension.
if git_check "riscv-gnu-toolchain" "https://github.com/riscv/riscv-gnu-toolchain" "$RISCV/riscv-gnu-toolchain/stamps/build-gcc-newlib-stage2" "b488ddb"; then
cd riscv-gnu-toolchain
git reset --hard && git clean -f && git checkout b488ddb #&& git pull
if git_check "riscv-gnu-toolchain" "https://github.com/riscv/riscv-gnu-toolchain" "$RISCV/riscv-gnu-toolchain/stamps/build-gcc-newlib-stage2"; then
cd "$RISCV"/riscv-gnu-toolchain
git reset --hard && git clean -f && git checkout master && git pull
./configure --prefix="${RISCV}" --with-multilib-generator="rv32e-ilp32e--;rv32i-ilp32--;rv32im-ilp32--;rv32iac-ilp32--;rv32imac-ilp32--;rv32imafc-ilp32f--;rv32imafdc-ilp32d--;rv64i-lp64--;rv64ic-lp64--;rv64iac-lp64--;rv64imac-lp64--;rv64imafdc-lp64d--;rv64im-lp64--;"
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
if [ "$clean" ]; then
@ -236,7 +301,7 @@ STATUS="elf2hex"
cd "$RISCV"
export PATH=$RISCV/bin:$PATH
if git_check "elf2hex" "https://github.com/sifive/elf2hex.git" "$RISCV/bin/riscv64-unknown-elf-elf2bin"; then
cd elf2hex
cd "$RISCV"/elf2hex
git reset --hard && git clean -f && git checkout master && git pull
autoreconf -i
./configure --target=riscv64-unknown-elf --prefix="$RISCV"
@ -258,7 +323,7 @@ section_header "Installing/Updating QEMU"
STATUS="qemu"
cd "$RISCV"
if git_check "qemu" "https://github.com/qemu/qemu" "$RISCV/include/qemu-plugin.h"; then
cd qemu
cd "$RISCV"/qemu
git reset --hard && git clean -f && git checkout master && git pull --recurse-submodules -j "${NUM_THREADS}"
git submodule update --init --recursive
./configure --target-list=riscv64-softmmu --prefix="$RISCV"
@ -280,7 +345,7 @@ section_header "Installing/Updating SPIKE"
STATUS="spike"
cd "$RISCV"
if git_check "riscv-isa-sim" "https://github.com/riscv-software-src/riscv-isa-sim" "$RISCV/lib/pkgconfig/riscv-riscv.pc"; then
cd riscv-isa-sim
cd "$RISCV"/riscv-isa-sim
git reset --hard && git clean -f && git checkout master && git pull
mkdir -p build
cd build
@ -306,7 +371,7 @@ STATUS="verilator"
cd "$RISCV"
if git_check "verilator" "https://github.com/verilator/verilator" "$RISCV/share/pkgconfig/verilator.pc"; then
unset VERILATOR_ROOT
cd verilator
cd "$RISCV"/verilator
git reset --hard && git clean -f && git checkout master && git pull
autoconf
./configure --prefix="$RISCV"
@ -331,7 +396,9 @@ section_header "Installing/Updating Sail Compiler"
STATUS="Sail Compiler"
if [ ! -e "$RISCV"/bin/sail ]; then
cd "$RISCV"
curl --location https://github.com/rems-project/sail/releases/latest/download/sail.tar.gz | tar xvz --directory="$RISCV" --strip-components=1
wget -nv --retry-connrefused $retry_on_host_error --output-document=sail.tar.gz https://github.com/rems-project/sail/releases/latest/download/sail.tar.gz
tar xz --directory="$RISCV" --strip-components=1 -f sail.tar.gz
rm -f sail.tar.gz
echo -e "${SUCCESS_COLOR}Sail Compiler successfully installed/updated!${ENDC}"
else
echo -e "${SUCCESS_COLOR}Sail Compiler already installed.${ENDC}"
@ -342,7 +409,7 @@ fi
section_header "Installing/Updating RISC-V Sail Model"
STATUS="riscv-sail-model"
if git_check "sail-riscv" "https://github.com/riscv/sail-riscv.git" "$RISCV/bin/riscv_sim_RV32"; then
cd sail-riscv
cd "$RISCV"/sail-riscv
git reset --hard && git clean -f && git checkout master && git pull
ARCH=RV64 make -j "${NUM_THREADS}" c_emulator/riscv_sim_RV64 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
ARCH=RV32 make -j "${NUM_THREADS}" c_emulator/riscv_sim_RV32 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
@ -365,7 +432,7 @@ STATUS="OSU Skywater 130 cell library"
mkdir -p "$RISCV"/cad/lib
cd "$RISCV"/cad/lib
if git_check "sky130_osu_sc_t12" "https://foss-eda-tools.googlesource.com/skywater-pdk/libs/sky130_osu_sc_t12" "$RISCV/cad/lib/sky130_osu_sc_t12" "main"; then
cd sky130_osu_sc_t12
cd "$RISCV"/sky130_osu_sc_t12
git reset --hard && git clean -f && git checkout main && git pull
echo -e "${SUCCESS_COLOR}OSU Skywater library successfully installed!${ENDC}"
else
@ -376,23 +443,27 @@ fi
# Buildroot and Linux testvectors
# Buildroot is used to boot a minimal version of Linux on Wally.
# Testvectors are generated using QEMU.
section_header "Installing Buildroot and Creating Linux testvectors"
STATUS="buildroot"
if [ -z "$LD_LIBRARY_PATH" ]; then
if [ ! "$no_buidroot" ]; then
section_header "Installing Buildroot and Creating Linux testvectors"
STATUS="buildroot"
if [ -z "$LD_LIBRARY_PATH" ]; then
export LD_LIBRARY_PATH=$RISCV/lib:$RISCV/lib64:$RISCV/riscv64-unknown-elf/lib:$RISCV/lib/x86_64-linux-gnu/
else
else
export LD_LIBRARY_PATH=$RISCV/lib:$RISCV/lib64:$LD_LIBRARY_PATH:$RISCV/riscv64-unknown-elf/lib:$RISCV/lib/x86_64-linux-gnu/
fi
cd "$dir"/../linux
if [ ! -e "$RISCV"/buildroot ]; then
fi
cd "$dir"/../linux
if [ ! -e "$RISCV"/buildroot ]; then
make 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
echo -e "${SUCCESS_COLOR}Buildroot successfully installed and Linux testvectors created!${ENDC}"
elif [ ! -e "$RISCV"/linux-testvectors ]; then
elif [ ! -e "$RISCV"/linux-testvectors ]; then
echo -e "${OK_COLOR}Buildroot already exists, but Linux testvectors are missing. Generating them now.${ENDC}"
make dumptvs 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
echo -e "${SUCCESS_COLOR}Linux testvectors successfully generated!${ENDC}"
else
else
echo -e "${OK_COLOR}Buildroot and Linux testvectors already exist.${ENDC}"
fi
else
echo -e "${OK_COLOR}Skipping Buildroot and Linux testvectors.${ENDC}"
fi
@ -403,8 +474,8 @@ section_header "Downloading Site Setup Script"
STATUS="site-setup scripts"
cd "$RISCV"
if [ ! -e "${RISCV}"/site-setup.sh ]; then
wget https://raw.githubusercontent.com/openhwgroup/cvw/main/site-setup.sh
wget https://raw.githubusercontent.com/openhwgroup/cvw/main/site-setup.csh
wget -nv --retry-connrefused $retry_on_host_error https://raw.githubusercontent.com/openhwgroup/cvw/main/site-setup.sh
wget -nv --retry-connrefused $retry_on_host_error https://raw.githubusercontent.com/openhwgroup/cvw/main/site-setup.csh
echo -e "${SUCCESS_COLOR}Site setup script successfully downloaded!${ENDC}"
echo -e "${WARNING_COLOR}Make sure to edit the environment variables in $RISCV/site-setup.sh (or .csh) to point to your installation of EDA tools and licensce files.${ENDC}"
else

View File

@ -29,7 +29,6 @@ parser.add_argument("--gui", "-g", help="Simulate with GUI", action="store_true"
parser.add_argument("--ccov", "-c", help="Code Coverage", action="store_true")
parser.add_argument("--fcovimp", "-f2", help="Functional Coverage with Imperas licensed riscvISACOV, implies lockstep", action="store_true")
parser.add_argument("--fcov", "-f", help="Functional Coverage with cvw-arch-verif, implies lockstep", action="store_true")
parser.add_argument("--fcovrvvi", "-fr", help="Functional Coverage RVVI", action="store_true")
parser.add_argument("--args", "-a", help="Optional arguments passed to simulator via $value$plusargs", default="")
parser.add_argument("--params", "-p", help="Optional top-level parameter overrides of the form param=value", default="")
parser.add_argument("--vcd", "-v", help="Generate testbench.vcd", action="store_true")
@ -71,7 +70,7 @@ if(args.lockstep and not args.testsuite.endswith('.elf')):
exit(1)
# Validate arguments
if (args.gui or args.ccov or args.fcov or args.fcovimp or args.fcovrvvi or args.lockstep or args.lockstepverbose):
if (args.gui or args.ccov or args.fcov or args.fcovimp or args.lockstep or args.lockstepverbose):
if args.sim not in ["questa", "vcs"]:
print("Option only supported for Questa and VCS")
exit(1)
@ -92,7 +91,8 @@ prefix = ""
if (args.lockstep or args.lockstepverbose or args.fcov or args.fcovimp):
if (args.sim == "questa" or args.sim == "vcs"):
prefix = "IMPERAS_TOOLS=" + WALLY + "/config/"+args.config+"/imperas.ic"
if (args.sim == "questa"):
# Force Questa to use 64-bit mode, sometimes it defaults to 32-bit even on 64-bit machines
if (args.sim == "questa"):
prefix = "MTI_VCO_MODE=64 " + prefix
if (args.lockstep or args.lockstepverbose):
@ -129,12 +129,10 @@ if (args.fcov):
flags += " --fcov"
if (args.fcovimp):
flags += " --fcovimp"
if (args.fcovrvvi):
flags += "--fcovrvvi"
# create the output sub-directories.
regressionDir = WALLY + '/sim/'
for d in ["logs", "wkdir", "cov", "ucdb", "fcov", "fcov_ucdb", "fcovrvvi", "fcovrvvi_ucdb"]:
for d in ["logs", "wkdir", "cov", "ucdb", "fcov", "fcov_ucdb"]:
try:
os.mkdir(regressionDir+args.sim+"/"+d)
except:

View File

@ -5,9 +5,20 @@
// This file is needed in the config subdirectory for each config supporting coverage.
// It defines which extensions are enabled for that config.
// Unprivileged extensions
`include "RV32I_coverage.svh"
`include "RV32M_coverage.svh"
`include "RV32F_coverage.svh"
`include "RV32D_coverage.svh"
`include "RV32ZfhD_coverage.svh"
`include "RV32Zfh_coverage.svh"
`include "RV32Zicond_coverage.svh"
`include "RV32Zca_coverage.svh"
`include "RV32Zcb_coverage.svh"
`include "RV32ZcbM_coverage.svh"
`include "RV32ZcbZbb_coverage.svh"
`include "RV32Zcf_coverage.svh"
`include "RV32Zcd_coverage.svh"
// Privileged extensions
`include "ZicsrM_coverage.svh"

View File

@ -59,6 +59,28 @@
#--override cpu/instret_undefined=T
#--override cpu/hpmcounter_undefined=T
## context registers not implemented
#--override cpu/scontext_undefined=True
#--override cpu/mcontext_undefined=True
# Disable all features that might want mseccfg or CSRs 7a0-7af
--override cpu/Smepmp_version=none
--override cpu/Smmpm=none
#--override cpu/Zicfilp=F
--override cpu/trigger_num=0 # disable CSRs 7a0-7a8
--override no_pseudo_inst=T # For code coverage, don't produce pseudoinstructions
# mcause and scause only have 4 lsbs of code and 1 msb of interrupt flag
#--override cpu/ecode_mask=0x8000000F # for RV32
--override cpu/ecode_mask=0x800000000000000F # for RV64
# Debug mode not yet supported
--override cpu/debug_mode=none
--override cpu/reset_address=0x80000000
--override cpu/unaligned=F # Zicclsm (should be true)

View File

@ -5,14 +5,25 @@
// This file is needed in the config subdirectory for each config supporting coverage.
// It defines which extensions are enabled for that config.
// Unprivileged extensions
`include "RV64I_coverage.svh"
`include "RV64M_coverage.svh"
`include "RV64F_coverage.svh"
`include "RV64D_coverage.svh"
`include "RV64ZfhD_coverage.svh"
`include "RV64Zfh_coverage.svh"
// `include "RV64VM_coverage.svh"
`include "RV64Zicond_coverage.svh"
`include "RV64Zca_coverage.svh"
`include "RV64Zcb_coverage.svh"
`include "RV64ZcbM_coverage.svh"
`include "RV64ZcbZbb_coverage.svh"
`include "RV64ZcbZba_coverage.svh"
`include "RV64Zcd_coverage.svh"
// Privileged extensions
`include "RV64VM_coverage.svh"
`include "ZicsrM_coverage.svh"
// `include "RV64VM_PMP_coverage.svh"
// `include "RV64CBO_VM_coverage.svh"
// `include "RV64CBO_PMP_coverage.svh"
// `include "RV64Zicbom_coverage.svh"
`include "RV64Zicond_coverage.svh"
`include "RV64Zca_coverage.svh"

View File

@ -57,6 +57,34 @@
#--override cpu/instret_undefined=T
#--override cpu/hpmcounter_undefined=T
# context registers not implemented
#--override cpu/scontext_undefined=True
#--override cpu/mcontext_undefined=True
# Disable all features that might want mseccfg or CSRs 7a0-7af
--override cpu/Smepmp_version=none
--override cpu/Smmpm=none
#--override cpu/Zicfilp=F
--override cpu/trigger_num=0 # disable CSRs 7a0-7a8
# For code coverage, don't produce pseudoinstructions
--override no_pseudo_inst=T
# non-ratified mnoise register not implemented
--override cpu/mnoise_undefined=T
# mcause and scause only have 4 lsbs of code and 1 msb of interrupt flag
#--override cpu/ecode_mask=0x8000000F # for RV32
--override cpu/ecode_mask=0x800000000000000F # for RV64
# Debug mode not yet supported
--override cpu/debug_mode=none
# Zkr entropy source and seed register not supported.
--override cpu/Zkr=F
--override cpu/reset_address=0x80000000
--override cpu/unaligned=T # Zicclsm (should be true)

View File

@ -1,41 +0,0 @@
### Cross-Compile Buildroot Linux
Building Linux is only necessary for exploring the boot process in Chapter 17. Building and generating a trace is a time-consuming operation that could be skipped for now; you can return to this section later if you are interested in the Linux details.
Buildroot depends on configuration files in riscv-wally, so the cad user must install Wally first according to the instructions in Section 2.2.2. However, don't source ~/riscv-wally/setup.sh because it will set LD_LIBRARY_PATH in a way that causes make to fail on buildroot.
To configure and build Buildroot:
$ cd $RISCV
$ export WALLY=~/riscv-wally # make sure you havent sourced ~/riscv-wally/setup.sh by now
$ git clone https://github.com/buildroot/buildroot.git
$ cd buildroot
$ git checkout 2021.05 # last tested working version
$ cp -r $WALLY/linux/buildroot-config-src/wally ./board
$ cp ./board/wally/main.config .config
$ make --jobs
To generate disassembly files and the device tree, run another make script. Note that you can expect some warnings about phandle references while running dtc on wally-virt.dtb.
Depending on your system configuration, this makefile may need a bit of tweaking. It places the output buildroot images in $RISCV/linux-testvectors and the buildroot object dumps in $RISCV/buildroot/output/images/disassembly. If these directories are owned by root then the makefile will likely fail. You can either change the makefile's target directories or temporarily change the owner of the two directories.
$ source ~/riscv-wally/setup.sh
$ cd $WALLY/linux/buildroot-scripts
$ make all
Note: When the make tasks complete, you'll find source code in $RISCV/buildroot/output/build and the executables in $RISCV/buildroot/output/images.
### Generate load images for linux boot
The Questa linux boot uses preloaded bootram and ram memory. We use QEMU to generate these preloaded memory files. The files are output to $RISCV/linux-testvectors.
cd cvw/linux/testvector-generation
./genInitMem.sh
This may require changing file permissions to the linux-testvectors directory.
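If permissions are the problem, temporarily taking ownership of the directory is usually enough (a minimal sketch, assuming the default $RISCV/linux-testvectors location; adjust the path for your installation):
$ sudo chown -R $USER $RISCV/linux-testvectors
$ ./genInitMem.sh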
### Generate QEMU linux trace
The linux testbench can compare Wally's committed instructions against QEMU instruction by instruction. To do this, QEMU outputs a log file consisting of all instructions executed. Interrupts are handled by forcing the testbench to generate an interrupt at the same cycle as in QEMU. Generating this trace will take more than 24 hours.
cd cvw/linux/testvector-generation
./genTrace.sh

View File

@ -1,46 +1,71 @@
The FPGA currently only targets the VCU118 board.
Wally supports the following boards:
* Build Process
1. ArtyA7
2. vcu108
3. vcu118 (Not recommended.)
# Quick Start
## Build FPGA
```bash
cd generator
make
make <board name>
```
* Description
Example:
```bash
make vcu108
```
The generator makefile creates 4 IP blocks: proc_sys_reset, ddr4,
axi_clock_converter, and ahblite_axi_bridge. Then it reads in the 4 IP blocks
and builds Wally. fpga/src/fpgaTop.v is the top level, which instantiates
wallypipelinedsoc.sv and the 4 IP blocks. The FPGA includes an ILA (Integrated Logic
Analyzer) which provides the current instruction PCM, instrM, etc. along with
a large number of debugging signals.
## Make flash card image
Run `ls /dev/sd*` or `ls /dev/mmc*` to see which flash card devices you have.
Insert the flash card into the reader and `ls /dev/sd*` or `/dev/mmc*` again. The new device is the one you want to use. Make sure you select the root device (i.e. `/dev/sdb`) not the partition (i.e. `/dev/sdb1`).
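Comparing the block-device list before and after inserting the card is a quick way to spot it (a sketch; the device names it prints depend on your machine):
```bash
lsblk -d -o NAME,SIZE,MODEL   # note the devices currently present
# insert the SD card, then run it again
lsblk -d -o NAME,SIZE,MODEL   # the new entry (e.g. sdb) is the card
```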
* Programming the flash card
You'll need to write the linux image to the flash card. Use the convert2bin.py
script in linux-testgen/linux-testvectors/ [*** moved?] to convert the ram.txt
file from QEMU's preload into a binary. Then copy it with
sudo dd if=ram.bin of=<path to flash card>.
```bash
cd $WALLY/linux/sd-card
```
* Loading the FPGA
The following script requires root.
After the build process is complete (about 2 hrs on an i9-7900x), launch Vivado's
GUI and open the WallyFPGA.xpr project file. Open the hardware manager under
Program and Debug. Open the target and then program with the bit file.
```bash
./flash-sd.sh -b <path to buildroot> -d <path to compiled device tree file> <flash card device>
```
* Test Run
Example with vcu108, buildroot installed to `/opt/riscv/buildroot`, and the flash card as device `/dev/sdc`:
Once the FPGA is programmed, the 3 MSB LEDs in the upper right corner provide
status of the reset and DDR4 calibration. LED 7 should always be lit.
LED 6 will light if the DDR4 is not calibrated. LED 6 will be lit once
Wally begins running.
```bash
./flash-sd.sh -b /opt/riscv/buildroot -d /opt/riscv/buildroot/output/images/wally-vcu108.dtb /dev/sdc
```
Next the bootloader program will copy the flash card into the DDR4 memory.
When this is done, the lower 5 LEDs will blink 5 times and then it will try to boot
the program loaded in the DDR4 memory at physical address 0x8000_0000.
Wait until the script completes, then remove the card.
* Connecting UART
You'll need to connect both USB cables. The first connects the FPGA programmer
while the second connects UART. UART is configured to use 57600 baud with
no parity, 8 data bits, and 1 stop bit. sudo screen /dev/ttyUSB1 57600 should
let you view the com port.
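For example, a minimal sketch that spells out the serial settings explicitly (screen accepts stty-style flags after the baud rate; the ttyUSB number varies by machine, and 8N1 is already screen's default when only the baud rate is given):
```bash
sudo screen /dev/ttyUSB1 57600,cs8,-parenb,-ixon
```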
## FPGA setup
For the Arty A7, insert the PMOD daughter board into the rightmost slot and insert the SD card.
For the VCU108 and VCU118 boards, insert the PMOD daughter board into the only PMOD slot on the right side of the boards.
Power on the boards. For the Arty A7, just plug in the USB connector. For the VCU boards, make sure the power supply is connected and the two USB cables are connected. Flip on the switch.
The VCU118's on-board UART converter does not work. Use a SparkFun FTDI USB-to-UART adapter and plug it into the main PMOD on the right side of the board. Also, the level shifters on the
VCU118 do not work correctly with the Digilent SD PMOD board. We have a custom board which works instead.
```bash
cd $WALLY/fpga/generator
vivado &
```
Open the design in the current directory `WallyFPGA.xpr`.
Then click "Open Target" under "PROGRAM AND DEBUG". Then Program the device.
## Connect to UART
In another terminal, run `ls /dev/ttyUSB*`. One of these devices will be the UART connected to Wally. You may have to experiment by running the following command multiple times.
```bash
screen /dev/ttyUSB1 115200
```
Swap out `USB1` for `USB0` or another ttyUSB number as needed.
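If several ttyUSB devices exist, the kernel log can help narrow down which one belongs to the board (a sketch; the exact driver name shown depends on the USB-UART adapter, and dmesg may require sudo on some systems):
```bash
ls /dev/ttyUSB*
sudo dmesg | grep -i ttyUSB | tail -n 5   # the most recently attached adapter is usually the one you want
```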

View File

@ -47,7 +47,7 @@ if {$board=="ArtyA7"} {
# read in all other rtl
add_files [glob -type f ../src/CopiedFiles_do_not_add_to_repo/*/*.sv ../src/CopiedFiles_do_not_add_to_repo/*/*/*.sv]
set_property include_dirs {../src/CopiedFiles_do_not_add_to_repo/config ../../config/shared ../../addins/ahbsdc/sdc} [current_fileset]
set_property include_dirs {../src/CopiedFiles_do_not_add_to_repo/config ../../config/shared} [current_fileset]
# define top level

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// rvvi daemon
//
// Written: Rose Thomposn ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 31 May 2024
// Modified: 31 May 2024
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// fpgaTop.sv
//
// Written: ross1728@gmail.com November 17, 2021
// Written: rose@rosethompson.net November 17, 2021
// Modified:
//
// Purpose: This is a top level for the fpga's implementation of wally.

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// fpgaTop.sv
//
// Written: ross1728@gmail.com November 17, 2021
// Written: rose@rosethompson.net November 17, 2021
// Modified:
//
// Purpose: This is a top level for the fpga's implementation of wally.

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// wallypipelinedsocwrapper.sv
//
// Written: Rose Thompson ross1728@gmail.com 16 June 2023
// Written: Rose Thompson rose@rosethompson.net 16 June 2023
// Modified:
//
// Purpose: A wrapper to set parameters. Vivado cannot set the top level parameters because it only supports verilog,

102
fpga/zsbl/bios.S Normal file
View File

@ -0,0 +1,102 @@
#include "system.h"
PERIOD = (SYSTEMCLOCK / 2)
.section .init
.global _start
.type _start, @function
_start:
# Initialize global pointer
.option push
.option norelax
1:auipc gp, %pcrel_hi(__global_pointer$)
addi gp, gp, %pcrel_lo(1b)
.option pop
li x1, 0
li x2, 0
li x4, 0
li x5, 0
li x6, 0
li x7, 0
li x8, 0
li x9, 0
li x10, 0
li x11, 0
li x12, 0
li x13, 0
li x14, 0
li x15, 0
li x16, 0
li x17, 0
li x18, 0
li x19, 0
li x20, 0
li x21, 0
li x22, 0
li x23, 0
li x24, 0
li x25, 0
li x26, 0
li x27, 0
li x28, 0
li x29, 0
li x30, 0
li x31, 0
# set the stack pointer to the top of memory - 8 bytes (pointer size)
li sp, (EXT_MEM_END - 8)
li a0, 0x00000000
li a1, EXT_MEM_BASE
#li a2, 128*1024*1024/512 # copy 128MB
li a2, 127*1024*1024/512 # copy 127MB; the upper 1MB contains the return address (ra)
#li a2, 800 # copy 400KB
jal ra, copyFlash
fence.i
# now toggle led so we know the copy completed.
# write to gpio
li t2, 0xFF
la t3, 0x1006000C
li t4, 5
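# t2 = pattern driven onto the GPIO outputs, t3 = GPIO output register (assumed to be GPIO base 0x1006_0000 + 0x0C offset), t4 = number of LED blink iterations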
loop:
# delay
li t0, PERIOD/2
delay1:
addi t0, t0, -1
bge t0, x0, delay1
sw t2, 0x0(t3)
li t0, PERIOD/2
delay2:
addi t0, t0, -1
bge t0, x0, delay2
sw x0, 0x0(t3)
addi t4, t4, -1
bgt t4, x0, loop
# now that the card is copied and the led toggled we
# jump to the copied contents of the sd card.
jumpToLinux:
csrrs a0, 0xF14, x0 # copy hart ID to a0
li a1, FDT_ADDRESS # This is the device tree address
la a2, end_of_bios
li t0, EXT_MEM_BASE # start of code
jalr x0, t0, 0
end_of_bios:

View File

@ -27,9 +27,16 @@ BINARIES := fw_jump.elf vmlinux busybox
OBJDUMPS := $(foreach name, $(BINARIES), $(basename $(name) .elf))
OBJDUMPS := $(foreach name, $(OBJDUMPS), $(DIS)/$(name).objdump)
.PHONY: all generate disassemble install clean cleanDTB cleanDriver check_write_permissions
.PHONY: all generate disassemble install clean cleanDTB check_write_permissions check_environment
all: check_write_permissions clean download Image disassemble install dumptvs
all: check_environment check_write_permissions clean download Image disassemble install dumptvs
check_environment: $(RISCV)
ifeq ($(findstring :$(RISCV)/lib:,:$(LD_LIBRARY_PATH):),)
@(echo "ERROR: Your environment variables are not set correctly." >&2 \
&& echo "Make sure to source setup.sh or install buildroot using the wally-tool-chain-install.sh script." >&2 \
&& exit 1)
endif
check_write_permissions:
ifeq ($(SUDO), sudo)
@ -41,17 +48,17 @@ endif
&& exit 1)
@$(SUDO) rm -r $(RISCV)/.test
Image:
Image: check_environment
bash -c "unset LD_LIBRARY_PATH; $(MAKE) -C $(BUILDROOT)"
$(MAKE) generate
@echo "Buildroot Image successfully generated."
install: check_write_permissions
install: check_write_permissions check_environment
$(SUDO) rm -rf $(RISCV)/$(BUILDROOT)
$(SUDO) mv $(BUILDROOT) $(RISCV)/$(BUILDROOT)
@echo "Buildroot successfully installed."
dumptvs: check_write_permissions
dumptvs: check_write_permissions check_environment
$(SUDO) mkdir -p $(RISCV)/linux-testvectors
cd testvector-generation; ./genInitMem.sh
@echo "Testvectors successfully generated."
@ -70,7 +77,7 @@ $(RISCV):
@ echo "and sourced setup.sh"
# Disassembly rules ---------------------------------------------------
disassemble:
disassemble: check_environment
rm -rf $(BUILDROOT)/output/images/disassembly
find $(BUILDROOT)/output/build/linux-* -maxdepth 1 -name "vmlinux" | xargs cp -t $(BUILDROOT)/output/images/
mkdir -p $(DIS)
@ -114,9 +121,6 @@ $(BUILDROOT):
# ---------------------------------------------------------------------
cleanDriver:
rm -f $(DRIVER)
cleanDTB:
rm -f $(IMAGES)/*.dtb

View File

@ -12,23 +12,31 @@
In order to generate the Linux and boot stage binaries compatible with Wally, Buildroot is used for cross-compilation.
To set up a Buildroot directory, configuration files for Buildroot, Linux, and Busybox must be copied into the correct locations inside the main Buildroot directory. Buildroot and device tree binaries must be generated as well. This can all be done automatically using the Makefile inside Wally's Linux subdirectory (this one). To install a new buildroot directory, build the Buildroot binaries, generate the device tree binaries, generate test-vectors for simulation, and install the buildroot package needed to build the SD card driver for Linux, run:
To set up a Buildroot directory, configuration files for Buildroot, Linux, and Busybox must be copied into the correct locations inside the main Buildroot directory. Buildroot and device tree binaries must be generated as well.
$ make
This can all be done automatically using the Makefile inside Wally's Linux subdirectory (this one). The main Wally installation script (`bin/wally-tool-chain-install.sh`) runs this by default, so buildroot is likely already set up. Otherwise, to install a new buildroot directory, build the Buildroot binaries, generate the device tree binaries, and generate testvectors for simulation, run:
```bash
$ make
```
This installs to the `$RISCV` directory. Buildroot itself is installed to `$RISCV/buildroot` and the test-vectors are installed to `$RISCV/linux-testvectors`.
Optionally, you can override the `BUILDROOT` variable to install a different buildroot source directory.
$ make install BUILDROOT=path/to/buildroot
```bash
$ make install BUILDROOT=<path/to/buildroot>
```
## Generating Device Tree Binaries <a name="devicetree"></a>
The device tree files for the various FPGA's Wally supports, as well as QEMU's device tree for the virt machine, are located in the `./devicetree` subdirectory. These device tree files are necessary for the boot process.
The device tree files for the various FPGAs Wally supports, as well as QEMU's device tree for the virt machine, are located in the `./devicetree` subdirectory. These device tree files are necessary for the boot process.
They are built automatically using the main `make` command. To build the device tree binaries (.dtb) from the device tree sources (.dts) separately, we can build all of them at once using:
$ make generate #optionally override BUILDROOT
```bash
$ make generate # optionally override BUILDROOT
```
The resulting .dtb files will end up in the `<BUILDROOT>/output/images` folder of your chosen buildroot directory.
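To confirm the device tree binaries were produced, a quick check (a sketch, assuming the default buildroot location under $RISCV):
```bash
ls $RISCV/buildroot/output/images/*.dtb
```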
@ -38,23 +46,30 @@ By using the `riscv64-unknown-elf-objdump` utility, we can disassemble the binar
The disassembled binaries are built automatically using the main `make` command. To create the disassembled binaries separately, run:
$ make disassemble #optionally override BUILDROOT
```bash
$ make disassemble # optionally override BUILDROOT
```
You'll find the resulting disassembled files in `<BUILDROOT>/output/images/disassembly`.
## Generate Memory Files for Linux Boot <a name="testvectors"></a>
Running a linux boot simulation uses a preloaded bootrom and ram memory. We use QEMU to generate these preloaded memory files. The files are output to $RISCV/linux-testvectors. The memory files are generated automatically when using the main `make` command. Alternatively, they can be generated by running
Running a linux boot simulation uses a preloaded bootrom and ram memory. We use QEMU to generate these preloaded memory files. The files are output to `$RISCV/linux-testvectors`. The memory files are generated automatically when using the main `make` command. Alternatively, they can be generated by running
make dumptvs
```bash
$ make dumptvs
```
## Creating a Bootable SD Card <a name="sdcard"></a>
To flash a bootable sd card for Wally's bootloader, use the `flash-sd.sh` script located in `<WALLY>/linux/sdcard`. The script allows you to specify which buildroot directory you would like to use and to specify the device tree. By default it is set up for the default location of buildroot in `$RISCV` and uses the vcu108 device tree. To use the script with your own buildroot directory and device tree, type:
$ cd sdcard
$ ./flash-sd.sh -b <path/to/buildroot> -d <device tree name> <DEVICE>
```bash
$ cd sdcard
$ ./flash-sd.sh -b <path/to/buildroot> -d <device tree name> <DEVICE>
```
for example
$ ./flash-sd.sh -b ~/repos/buildroot -d wally-vcu118.dtb /dev/sdb
```bash
$ ./flash-sd.sh -b ~/repos/buildroot -d wally-vcu118.dtb /dev/sdb
```

View File

@ -27,7 +27,7 @@ deriv:
.PHONY: QuestaCodeCoverage QuestaFunctCoverageRvvi collect_functcov combine_functcov remove_functcov_artifacts riscvdv riscvdv_functcov
.PHONY: QuestaCodeCoverage collect_functcov combine_functcov remove_functcov_artifacts riscvdv riscvdv_functcov
QuestaCodeCoverage: questa/ucdb/rv64gc_arch64i.ucdb
vcover merge -out questa/ucdb/cov.ucdb questa/ucdb/rv64gc_arch64i.ucdb questa/ucdb/rv64gc*.ucdb -logfile questa/cov/log
@ -51,14 +51,6 @@ QuestaCodeCoverage: questa/ucdb/rv64gc_arch64i.ucdb
# vcover report -recursive questa/ucdb/cov.ucdb > questa/cov/rv64gc_recursive.rpt
vcover report -details -threshH 100 -html questa/ucdb/cov.ucdb
QuestaFunctCoverageRvvi: ${WALLY}/addins/cvw-arch-verif/work/rv64gc_arch64i.ucdb
vcover merge -out ${SIM}/questa/fcovrvvi_ucdb/fcovrvvi.ucdb ${WALLY}/addins/cvw-arch-verif/work/rv64gc_arch64i.ucdb ${WALLY}/addins/cvw-arch-verif/work/rv64gc_*.ucdb -logfile ${SIM}/questa/fcovrvvi/log
vcover report -details -html ${SIM}/questa/fcovrvvi_ucdb/fcovrvvi.ucdb
vcover report ${SIM}/questa/fcovrvvi_ucdb/fcovrvvi.ucdb -details -cvg > ${SIM}/questa/fcovrvvi/fcovrvvi.log
vcover report ${SIM}/questa/fcovrvvi_ucdb/fcovrvvi.ucdb -testdetails -cvg > ${SIM}/questa/fcovrvvi/fcovrvvi.testdetails.log
vcover report ${SIM}/questa/fcovrvvi_ucdb/fcovrvvi.ucdb -details -cvg | egrep "Coverpoint|Covergroup|Cross|TYPE" > ${SIM}/questa/fcovrvvi/fcovrvvi.summary.log
grep "TOTAL COVERGROUP COVERAGE" ${SIM}/questa/fcovrvvi/fcovrvvi.log
collect_functcov: remove_functcov_artifacts riscvdv_functcov combine_functcov
riscvdv_functcov:

View File

@ -51,8 +51,6 @@ if [file exists ${WKDIR}] {
vdel -lib ${WKDIR} -all
}
vlib ${WKDIR}
# Create directory for functional coverage data
mkdir -p ${FCRVVI}
set PlusArgs ""
set ParamArgs ""
@ -62,9 +60,6 @@ set ccov 0
set CoverageVoptArg ""
set CoverageVsimArg ""
set FuncCovRVVI 0
set FCdefineRVVI_COVERAGE ""
set FunctCoverage 0
set FCvlog ""
set FCvopt ""
@ -108,12 +103,6 @@ if {[lcheck lst "--ccov"]} {
set CoverageVsimArg "-coverage"
}
# if --fcovrvvi found set flag and remove from list
if {[lcheck lst "--fcovrvvi"]} {
set FuncCovRVVI 1
set FCdefineRVVI_COVERAGE "+define+RVVI_COVERAGE"
}
# if --fcovimp found set flag and remove from list
if {[lcheck lst "--fcovimp"]} {
set FunctCoverage 1
@ -143,6 +132,7 @@ if {[lcheck lst "--fcov"]} {
+define+COVER_BASE_RV32I \
+incdir+$env(WALLY)/addins/riscvISACOV/source \
"
set FCvopt "+TRACE2COV_ENABLE=1 +IDV_TRACE2COV=1"
}
@ -181,7 +171,6 @@ if {$DEBUG > 0} {
echo "GUI = $GUI"
echo "ccov = $ccov"
echo "lockstep = $lockstep"
echo "FuncCovRVVI = $FuncCovRVVI"
echo "FunctCoverage = $FunctCoverage"
echo "remaining list = $lst"
echo "Extra +args = $PlusArgs"
@ -192,9 +181,9 @@ if {$DEBUG > 0} {
# suppress spurious warnings about
# "Extra checking for conflicts with always_comb done at vopt time"
# because vsim will run vopt
set INC_DIRS "+incdir+${CONFIG}/${CFG} +incdir+${CONFIG}/deriv/${CFG} +incdir+${CONFIG}/shared +incdir+${FCRVVI} +incdir+${FCRVVI}/rv32 +incdir+${FCRVVI}/rv64 +incdir+${FCRVVI}/rv64_priv +incdir+${FCRVVI}/common +incdir+${FCRVVI}"
set INC_DIRS "+incdir+${CONFIG}/${CFG} +incdir+${CONFIG}/deriv/${CFG} +incdir+${CONFIG}/shared +incdir+${FCRVVI} +incdir+${FCRVVI}/rv32 +incdir+${FCRVVI}/rv64 +incdir+${FCRVVI}/rv64_priv +incdir+${FCRVVI}/priv +incdir+${FCRVVI}/common +incdir+${FCRVVI}"
set SOURCES "${SRC}/cvw.sv ${TB}/${TESTBENCH}.sv ${TB}/common/*.sv ${SRC}/*/*.sv ${SRC}/*/*/*.sv ${WALLY}/addins/verilog-ethernet/*/*.sv ${WALLY}/addins/verilog-ethernet/*/*/*/*.sv"
vlog -permissive -lint -work ${WKDIR} {*}${INC_DIRS} {*}${FCvlog} {*}${FCdefineCOVER_EXTS} {*}${lockstepvlog} ${FCdefineRVVI_COVERAGE} {*}${SOURCES} -suppress 2282,2583,7053,7063,2596,13286
vlog -permissive -lint -work ${WKDIR} {*}${INC_DIRS} {*}${FCvlog} {*}${FCdefineCOVER_EXTS} {*}${lockstepvlog} {*}${SOURCES} -suppress 2282,2583,7053,7063,2596,13286
# start and run simulation
# remove +acc flag for faster sim during regressions if there is no need to access internal signals
@ -220,11 +209,6 @@ if {$FunctCoverage} {
coverage save -onexit ${UCDB}
}
if {$FuncCovRVVI} {
set UCDB ${WALLY}/addins/cvw-arch-verif/work/${CFG}_${TESTSUITE}.ucdb
coverage save -onexit ${UCDB}
}
run -all
if {$ccov} {

2
src/cache/cache.sv vendored
View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// cache.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 7 July 2021
// Modified: 20 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// cacheLRU.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 20 July 2021
// Modified: 20 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// cachefsm.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 25 August 2021
// Modified: 20 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// cacheway
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 7 July 2021
// Modified: 20 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// subcachelineread.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 4 February 2022
// Modified: 20 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// ahbcacheinterface.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: August 29, 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// ahbinterface.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: August 29, 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// busfsm.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: December 29, 2021
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// busfsm.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: December 29, 2021
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// controllerinput.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: August 31, 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// abhmulticontroller
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: August 29, 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// ebufsmarb.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 23 January 2023
// Modified: 23 January 2023
//

View File

@ -76,7 +76,7 @@ module fround import cvw::*; #(parameter cvw_t P) (
assign Eeqm1 = ($signed(E) == -1);
// Logic for nonnegative mask and rounding bits
assign IMask = {1'b1, {P.NF{1'b0}}} >>> E; /// if E > Nf, this produces all 0s instead of all 1s. Hence exact handling is needed below.
assign IMask = $signed({1'b1, {P.NF{1'b0}}}) >>> E; /// if E > Nf, this produces all 0s instead of all 1s. Hence exact handling is needed below.
assign Tmasknonneg = ~IMask >>> 1'b1;
assign HotE = IMask & ~(IMask << 1'b1);
assign HotEP1 = HotE >> 1'b1;

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// arrs.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Modified: November 12, 2021
//
// Purpose: resets are typically asynchronous but need to be synchronized to

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// binencoder.sv
//
// Written: ross1728@gmail.com November 14, 2022
// Written: rose@rosethompson.net November 14, 2022
//
// Purpose: one-hot to binary encoding.
//

View File

@ -2,7 +2,7 @@
// ram1p1r2be.sv
// 1 port sram with byte enables
//
// Written: ross1728@gmail.com
// Written: rose@rosethompson.net
// Created: 3 May 2021
// Modified: 20 January 2023
//

View File

@ -2,7 +2,7 @@
// ram1p1rwe.sv
// 1 port sram.
//
// Written: avercruysse@hmc.edu (Modified from ram1p1rwbe, by ross1728@gmail.com)
// Written: avercruysse@hmc.edu (Modified from ram1p1rwbe, by rose@rosethompson.net)
// Created: 04 April 2023
//
// Purpose: ram1p1wre, but without byte-enable. Used for icache data.

View File

@ -2,7 +2,7 @@
// ram2p1r1wbe.sv
// 2 port sram.
//
// Written: ross1728@gmail.com May 3, 2021
// Written: rose@rosethompson.net May 3, 2021
// Two port SRAM 1 read port and 1 write port.
// When clk rises Addr and LineWriteData are sampled.
// Following the clk edge read data is output from the sampled Addr.

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// onehotdecoder.sv
//
// Written: ross1728@gmail.com July 09, 2021
// Written: rose@rosethompson.net July 09, 2021
// Modified:
//
// Purpose: Bin to one hot decoder. Power of 2 only.

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// RASPredictor.sv
//
// Written: Rose Thomposn ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 15 February 2021
// Modified: 25 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// bpred.sv
//
// Written: Rose Thomposn ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 12 February 2021
// Modified: 19 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// btb.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: February 15, 2021
// Modified: 24 January 2023
//

View File

@ -2,7 +2,7 @@
// gshare.sv
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: 16 March 2021
// Adapted from ssanghai@hmc.edu (Shreya Sanghai)
// Modified: 20 February 2023

View File

@ -2,7 +2,7 @@
// gsharebasic.sv
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: 16 March 2021
// Adapted from ssanghai@hmc.edu (Shreya Sanghai) global history predictor implementation.
// Modified: 20 February 2023

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// icpred.sv
//
// Written: Rose Thomposn ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: February 26, 2023
// Modified: February 26, 2023
//

View File

@ -2,7 +2,7 @@
// localaheadbp
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: 16 March 2021
//
// Purpose: local history branch predictor with ahead pipelining and SRAM memories.

View File

@ -2,7 +2,7 @@
// localbpbasic
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: 16 March 2021
//
// Purpose: Local history branch predictor. Basic implementation without any repair and flop memories.

View File

@ -2,7 +2,7 @@
// localrepairbp
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: 15 April 2023
//
// Purpose: Local history branch predictor with speculation and repair using CBH.

View File

@ -2,7 +2,7 @@
// satCounter2.sv
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: February 13, 2021
// Modified:
//

View File

@ -2,7 +2,7 @@
// twoBitPredictor.sv
//
// Written: Rose Thompson
// Email: ross1728@gmail.com
// Email: rose@rosethompson.net
// Created: February 14, 2021
// Modified:
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// irom.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 30 January 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// spill.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 28 January 2022
// Modified: 19 January 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// spill.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 26 October 2023
// Modified: 26 October 2023
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// atomic.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 31 January 2022
// Modified: 18 January 2023
//
@ -39,7 +39,7 @@ module atomic import cvw::*; #(parameter cvw_t P) (
input logic [2:0] LSUFunct3M, // IEU or HPTW memory operation size
input logic [1:0] LSUAtomicM, // 10: AMO operation, select AMOResultM as the writedata output, 01: LR/SC operation
input logic [1:0] PreLSURWM, // IEU or HPTW Read/Write signal
input logic IgnoreRequest, // On FlushM or TLB miss ignore memory operation
input logic LSUFlushW, // On FlushM or TLB miss ignore memory operation
output logic [P.XLEN-1:0] IMAWriteDataM, // IEU, HPTW, or AMO write data
output logic SquashSCW, // Store conditional failed disable write to GPR
output logic [1:0] LSURWM // IEU or HPTW Read/Write signal gated by LR/SC
@ -57,7 +57,7 @@ module atomic import cvw::*; #(parameter cvw_t P) (
// LRSC unit
if (P.ZALRSC_SUPPORTED) begin
assign MemReadM = PreLSURWM[1] & ~IgnoreRequest;
assign MemReadM = PreLSURWM[1] & ~LSUFlushW;
lrsc #(P) lrsc(.clk, .reset, .StallW, .MemReadM, .PreLSURWM, .LSUAtomicM, .PAdrM, .SquashSCW, .LSURWM);
end else begin
assign SquashSCW = 0;

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// dtim.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 30 January 2022
// Modified: 18 January 2023
//

View File

@ -1,7 +1,7 @@
/////////////////////////////////////////////////////////////////////////////////////////////////////////
// lsu.sv
//
// Written: David_Harris@hmc.edu, ross1728@gmail.com
// Written: David_Harris@hmc.edu, rose@rosethompson.net
// Created: 9 January 2021
// Modified: 11 January 2023
//
@ -110,8 +110,7 @@ module lsu import cvw::*; #(parameter cvw_t P) (
logic GatedStallW; // Hazard unit StallW gated when SelHPTW = 1
logic BusStall; // Bus interface busy with multicycle operation
logic LSUBusStallM; // Bus interface busy with multicycle operation masked by IgnoreRequestTLB
logic LSUBusStallM; // Bus interface busy with multicycle operation masked by HPTWFlushW
logic HPTWStall; // HPTW busy with multicycle operation
logic DCacheBusStallM; // Cache or bus stall
logic CacheBusHPWTStall; // Cache, bus, or hptw is requesting a stall
@ -146,8 +145,8 @@ module lsu import cvw::*; #(parameter cvw_t P) (
logic DTLBWriteM; // Writes PTE and PageType to DTLB
logic LSULoadAccessFaultM; // Load acces fault
logic LSUStoreAmoAccessFaultM; // Store access fault
logic IgnoreRequestTLB; // On either ITLB or DTLB miss, ignore miss so HPTW can handle
logic IgnoreRequest; // On FlushM or TLB miss ignore memory operation
logic HPTWFlushW; // HPTW needs to flush operation
logic LSUFlushW; // HPTW or hazard unit flushes operation
logic SelDTIM; // Select DTIM rather than bus or D$
logic [P.XLEN-1:0] WriteDataZM;
logic LSULoadPageFaultM, LSUStoreAmoPageFaultM;
@ -200,7 +199,7 @@ module lsu import cvw::*; #(parameter cvw_t P) (
.WriteDataM(WriteDataZM), .Funct3M, .LSUFunct3M, .Funct7M, .LSUFunct7M,
.IEUAdrExtM, .PTE, .IHWriteDataM, .PageType, .PreLSURWM, .LSUAtomicM,
.IHAdrM, .HPTWStall, .SelHPTW,
.IgnoreRequestTLB, .LSULoadAccessFaultM, .LSUStoreAmoAccessFaultM,
.HPTWFlushW, .LSULoadAccessFaultM, .LSUStoreAmoAccessFaultM,
.LoadAccessFaultM, .StoreAmoAccessFaultM, .HPTWInstrAccessFaultF,
.LoadPageFaultM, .StoreAmoPageFaultM, .LSULoadPageFaultM, .LSUStoreAmoPageFaultM, .HPTWInstrPageFaultF
);
@ -215,7 +214,7 @@ module lsu import cvw::*; #(parameter cvw_t P) (
assign StoreAmoAccessFaultM = LSUStoreAmoAccessFaultM;
assign LoadPageFaultM = LSULoadPageFaultM;
assign StoreAmoPageFaultM = LSUStoreAmoPageFaultM;
assign {HPTWStall, SelHPTW, PTE, PageType, DTLBWriteM, ITLBWriteF, IgnoreRequestTLB} = '0;
assign {HPTWStall, SelHPTW, PTE, PageType, DTLBWriteM, ITLBWriteF, HPTWFlushW} = '0;
assign {HPTWInstrAccessFaultF, HPTWInstrPageFaultF} = '0;
end
@ -274,7 +273,7 @@ module lsu import cvw::*; #(parameter cvw_t P) (
// Pause IEU memory request if TLB miss. After TLB fill, replay request.
// Discard memory request on pipeline flush
assign IgnoreRequest = IgnoreRequestTLB | FlushW;
assign LSUFlushW = HPTWFlushW | FlushW;
if (P.DTIM_SUPPORTED) begin : dtim
logic [P.PA_BITS-1:0] DTIMAdr;
@ -285,7 +284,7 @@ module lsu import cvw::*; #(parameter cvw_t P) (
assign DTIMMemRWM = SelDTIM ? LSURWM : 0;
dtim #(P) dtim(.clk, .reset, .ce(~GatedStallW),
.MemRWM(DTIMMemRWM),
.DTIMAdr, .FlushW(IgnoreRequest), .WriteDataM(LSUWriteDataM),
.DTIMAdr, .FlushW(LSUFlushW), .WriteDataM(LSUWriteDataM),
.ReadDataWordM(DTIMReadDataWordM[P.LLEN-1:0]), .ByteMaskM(ByteMaskM));
end else
assign DTIMReadDataWordM = '0;
@ -309,8 +308,6 @@ module lsu import cvw::*; #(parameter cvw_t P) (
logic CacheableOrFlushCacheM; // Memory address is cacheable or operation is a cache flush
logic [1:0] CacheRWM; // Cache read (10), write (01), AMO (11)
logic FlushDCache; // Suppress d cache flush if there is an ITLB miss.
logic CacheStall;
logic [1:0] CacheBusRWTemp;
logic BusCMOZero;
logic [3:0] CacheCMOpM;
logic BusAtomic;
@ -331,29 +328,26 @@ module lsu import cvw::*; #(parameter cvw_t P) (
cache #(.P(P), .PA_BITS(P.PA_BITS), .XLEN(P.XLEN), .LINELEN(P.DCACHE_LINELENINBITS), .NUMSETS(P.DCACHE_WAYSIZEINBYTES*8/LINELEN),
.NUMWAYS(P.DCACHE_NUMWAYS), .LOGBWPL(LLENLOGBWPL), .WORDLEN(CACHEWORDLEN), .MUXINTERVAL(P.LLEN), .READ_ONLY_CACHE(0)) dcache(
.clk, .reset, .Stall(GatedStallW & ~SelSpillE), .SelBusBeat, .FlushStage(IgnoreRequest),
.clk, .reset, .Stall(GatedStallW & ~SelSpillE), .SelBusBeat, .FlushStage(LSUFlushW),
.CacheRW(CacheRWM),
.FlushCache(FlushDCache), .NextSet(IEUAdrExtE[11:0]), .PAdr(PAdrM),
.ByteMask(ByteMaskSpillM), .BeatCount(BeatCount[AHBWLOGBWPL-1:AHBWLOGBWPL-LLENLOGBWPL]),
.WriteData(LSUWriteDataSpillM), .SelHPTW,
.CacheStall, .CacheMiss(DCacheMiss), .CacheAccess(DCacheAccess),
.CacheStall(DCacheStallM), .CacheMiss(DCacheMiss), .CacheAccess(DCacheAccess),
.CacheCommitted(DCacheCommittedM),
.CacheBusAdr(DCacheBusAdr), .ReadDataWord(DCacheReadDataWordM),
.FetchBuffer, .CacheBusRW(CacheBusRWTemp),
.FetchBuffer, .CacheBusRW(CacheBusRW),
.CacheBusAck(DCacheBusAck), .InvalidateCache(1'b0), .CMOpM(CacheCMOpM));
assign DCacheStallM = CacheStall & ~IgnoreRequestTLB;
assign CacheBusRW = CacheBusRWTemp;
ahbcacheinterface #(.P(P), .BEATSPERLINE(BEATSPERLINE), .AHBWLOGBWPL(AHBWLOGBWPL), .LINELEN(LINELEN), .LLENPOVERAHBW(LLENPOVERAHBW), .READ_ONLY_CACHE(0)) ahbcacheinterface(
.HCLK(clk), .HRESETn(~reset), .Flush(IgnoreRequest),
.HCLK(clk), .HRESETn(~reset), .Flush(LSUFlushW),
.HRDATA, .HWDATA(LSUHWDATA), .HWSTRB(LSUHWSTRB),
.HSIZE(LSUHSIZE), .HBURST(LSUHBURST), .HTRANS(LSUHTRANS), .HWRITE(LSUHWRITE), .HREADY(LSUHREADY),
.BeatCount, .SelBusBeat, .CacheReadDataWordM(DCacheReadDataWordM[P.LLEN-1:0]), .WriteDataM(LSUWriteDataM),
.Funct3(LSUFunct3M), .HADDR(LSUHADDR), .CacheBusAdr(DCacheBusAdr), .CacheBusRW, .BusAtomic, .BusCMOZero, .CacheableOrFlushCacheM,
.CacheBusAck(DCacheBusAck), .FetchBuffer, .PAdr(PAdrM),
.Cacheable(CacheableOrFlushCacheM), .BusRW, .Stall(GatedStallW),
.BusStall, .BusCommitted(BusCommittedM));
.BusStall(LSUBusStallM), .BusCommitted(BusCommittedM));
mux3 #(P.LLEN) UnCachedDataMux(.d0(DCacheReadDataWordSpillM), .d1({LLENPOVERAHBW{FetchBuffer[P.XLEN-1:0]}}),
.d2({{P.LLEN-P.XLEN{1'b0}}, DTIMReadDataWordM[P.XLEN-1:0]}),
@ -366,10 +360,10 @@ module lsu import cvw::*; #(parameter cvw_t P) (
assign LSUHADDR = PAdrM;
assign LSUHSIZE = LSUFunct3M;
ahbinterface #(P.XLEN, 1'b1) ahbinterface(.HCLK(clk), .HRESETn(~reset), .Flush(IgnoreRequest), .HREADY(LSUHREADY),
ahbinterface #(P.XLEN, 1'b1) ahbinterface(.HCLK(clk), .HRESETn(~reset), .Flush(LSUFlushW), .HREADY(LSUHREADY),
.HRDATA(HRDATA), .HTRANS(LSUHTRANS), .HWRITE(LSUHWRITE), .HWDATA(LSUHWDATA),
.HWSTRB(LSUHWSTRB), .BusRW, .BusAtomic(AtomicM[1]), .ByteMask(ByteMaskM[P.XLEN/8-1:0]), .WriteData(LSUWriteDataM[P.XLEN-1:0]),
.Stall(GatedStallW), .BusStall, .BusCommitted(BusCommittedM), .FetchBuffer(FetchBuffer));
.Stall(GatedStallW), .BusStall(LSUBusStallM), .BusCommitted(BusCommittedM), .FetchBuffer(FetchBuffer));
// Mux between the 2 sources of read data, 0: Bus, 1: DTIM
if(P.DTIM_SUPPORTED) mux2 #(P.XLEN) ReadDataMux2(FetchBuffer, DTIMReadDataWordM[P.XLEN-1:0], SelDTIM, ReadDataWordMuxM[P.XLEN-1:0]);
@ -381,20 +375,18 @@ module lsu import cvw::*; #(parameter cvw_t P) (
assign {LSUHWDATA, LSUHADDR, LSUHWRITE, LSUHSIZE, LSUHBURST, LSUHTRANS, LSUHWSTRB} = '0;
assign DCacheReadDataWordM = '0;
assign ReadDataWordMuxM = DTIMReadDataWordM;
assign {BusStall, BusCommittedM} = '0;
assign {LSUBusStallM, BusCommittedM} = '0;
assign {DCacheMiss, DCacheAccess} = '0;
assign {DCacheStallM, DCacheCommittedM} = '0;
end
assign LSUBusStallM = BusStall & ~IgnoreRequestTLB;
/////////////////////////////////////////////////////////////////////////////////////////////
// Atomic operations
/////////////////////////////////////////////////////////////////////////////////////////////
if (P.ZAAMO_SUPPORTED | P.ZALRSC_SUPPORTED) begin:atomic
atomic #(P) atomic(.clk, .reset, .StallW, .ReadDataM(ReadDataM[P.XLEN-1:0]), .IHWriteDataM, .PAdrM,
.LSUFunct7M, .LSUFunct3M, .LSUAtomicM, .PreLSURWM, .IgnoreRequest,
.LSUFunct7M, .LSUFunct3M, .LSUAtomicM, .PreLSURWM, .LSUFlushW,
.IMAWriteDataM, .SquashSCW, .LSURWM);
end else begin:lrsc
assign SquashSCW = 1'b0;

View File

@ -58,7 +58,7 @@ module hptw import cvw::*; #(parameter cvw_t P) (
output logic [1:0] LSUAtomicM,
output logic [2:0] LSUFunct3M,
output logic [6:0] LSUFunct7M,
output logic IgnoreRequestTLB,
output logic HPTWFlushW,
output logic SelHPTW,
output logic HPTWStall,
input logic LSULoadAccessFaultM, LSUStoreAmoAccessFaultM,
@ -105,6 +105,7 @@ module hptw import cvw::*; #(parameter cvw_t P) (
logic TakeHPTWFault;
logic PBMTFaultM;
logic HPTWFaultM;
logic ResetPTE;
// map hptw access faults onto either the original LSU load/store fault or instruction access fault
assign LSUAccessFaultM = LSULoadAccessFaultM | LSUStoreAmoAccessFaultM;
@ -143,7 +144,7 @@ module hptw import cvw::*; #(parameter cvw_t P) (
// State flops
flopenr #(1) TLBMissMReg(clk, reset, StartWalk, DTLBMissOrUpdateDAM, DTLBWalk); // when walk begins, record whether it was for DTLB (or record 0 for ITLB)
assign PRegEn = HPTWRW[1] & ~DCacheBusStallM | UpdatePTE;
flopenr #(P.XLEN) PTEReg(clk, reset, PRegEn, NextPTE, PTE); // Capture page table entry from data cache
flopenr #(P.XLEN) PTEReg(clk, ResetPTE, PRegEn, NextPTE, PTE); // Capture page table entry from data cache
// Assign PTE descriptors common across all XLEN values
// For non-leaf PTEs, D, A, U bits are reserved and ignored. They do not cause faults while walking the page table
@ -274,23 +275,26 @@ module hptw import cvw::*; #(parameter cvw_t P) (
IDLE: if (TLBMissOrUpdateDA) NextWalkerState = InitialWalkerState;
else NextWalkerState = IDLE;
L3_ADR: NextWalkerState = L3_RD; // First access in SV48
L3_RD: if (DCacheBusStallM) NextWalkerState = L3_RD;
else if (HPTWFaultM) NextWalkerState = FAULT;
L3_RD: if (HPTWFaultM) NextWalkerState = FAULT;
else if (DCacheBusStallM) NextWalkerState = L3_RD;
else NextWalkerState = L2_ADR;
L2_ADR: if (InitialWalkerState == L2_ADR | ValidNonLeafPTE) NextWalkerState = L2_RD; // First access in SV39
L2_ADR: if (HPTWFaultM) NextWalkerState = FAULT;
else if (InitialWalkerState == L2_ADR | ValidNonLeafPTE) NextWalkerState = L2_RD; // First access in SV39
else NextWalkerState = LEAF;
L2_RD: if (DCacheBusStallM) NextWalkerState = L2_RD;
else if (HPTWFaultM) NextWalkerState = FAULT;
L2_RD: if (HPTWFaultM) NextWalkerState = FAULT;
else if (DCacheBusStallM) NextWalkerState = L2_RD;
else NextWalkerState = L1_ADR;
L1_ADR: if (InitialWalkerState == L1_ADR | ValidNonLeafPTE) NextWalkerState = L1_RD; // First access in SV32
L1_ADR: if (HPTWFaultM) NextWalkerState = FAULT;
else if (InitialWalkerState == L1_ADR | ValidNonLeafPTE) NextWalkerState = L1_RD; // First access in SV32
else NextWalkerState = LEAF;
L1_RD: if (DCacheBusStallM) NextWalkerState = L1_RD;
else if (HPTWFaultM) NextWalkerState = FAULT;
L1_RD: if (HPTWFaultM) NextWalkerState = FAULT;
else if (DCacheBusStallM) NextWalkerState = L1_RD;
else NextWalkerState = L0_ADR;
L0_ADR: if (ValidNonLeafPTE) NextWalkerState = L0_RD;
L0_ADR: if (HPTWFaultM) NextWalkerState = FAULT;
else if (ValidNonLeafPTE) NextWalkerState = L0_RD;
else NextWalkerState = LEAF;
L0_RD: if (DCacheBusStallM) NextWalkerState = L0_RD;
else if (HPTWFaultM) NextWalkerState = FAULT;
L0_RD: if (HPTWFaultM) NextWalkerState = FAULT;
else if (DCacheBusStallM) NextWalkerState = L0_RD;
else NextWalkerState = LEAF;
LEAF: if (P.SVADU_SUPPORTED & HPTWUpdateDA) NextWalkerState = UPDATE_PTE;
else NextWalkerState = IDLE;
@ -300,7 +304,9 @@ module hptw import cvw::*; #(parameter cvw_t P) (
default: NextWalkerState = IDLE; // Should never be reached
endcase // case (WalkerState)
assign IgnoreRequestTLB = (WalkerState == IDLE & TLBMissOrUpdateDA) | (HPTWFaultM); // If hptw request has pmp/a fault suppress bus access.
assign HPTWFlushW = (WalkerState == IDLE & TLBMissOrUpdateDA) | (WalkerState != IDLE & HPTWFaultM);
assign ResetPTE = reset | (NextWalkerState == IDLE);
assign SelHPTW = WalkerState != IDLE;
assign HPTWStall = (WalkerState != IDLE & WalkerState != FAULT) | (WalkerState == IDLE & TLBMissOrUpdateDA);

View File

@ -8,7 +8,6 @@
// See RISC-V Privileged Mode Specification 20190608 3.1.10-11
//
// Documentation: RISC-V System on Chip Design
// MHPMEVENT is not supported
//
// A component of the CORE-V-WALLY configurable RISC-V project.
// https://github.com/openhwgroup/cvw
@ -66,7 +65,8 @@ module csrc import cvw::*; #(parameter cvw_t P) (
localparam MTIME = 12'hB01; // this is a memory-mapped register; no such CSR exists, and access should fault
localparam MHPMCOUNTERHBASE = 12'hB80;
localparam MTIMEH = 12'hB81; // this is a memory-mapped register; no such CSR exists, and access should fault
localparam MHPMEVENTBASE = 12'h320;
localparam MHPMEVENTBASE = 12'h323;
localparam MHPMEVENTLAST = 12'h33F;
localparam HPMCOUNTERBASE = 12'hC00;
localparam HPMCOUNTERHBASE = 12'hC80;
localparam TIME = 12'hC01;
@ -156,6 +156,9 @@ module csrc import cvw::*; #(parameter cvw_t P) (
if (PrivilegeModeW == P.M_MODE |
MCOUNTEREN_REGW[CounterNumM] & (!P.S_SUPPORTED | PrivilegeModeW == P.S_MODE | SCOUNTEREN_REGW[CounterNumM])) begin
IllegalCSRCAccessM = 1'b0;
if (CSRAdrM >= MHPMEVENTBASE & CSRAdrM <= MHPMEVENTLAST) begin
CSRCReadValM = '0; // mhpmevent[3:31] tied to read-only zero
end else begin
if (P.XLEN==64) begin // 64-bit counter reads
// Veri lator doesn't realize this only occurs for XLEN=64
/* verilator lint_off WIDTH */
@ -188,6 +191,7 @@ module csrc import cvw::*; #(parameter cvw_t P) (
IllegalCSRCAccessM = 1'b1; // requested CSR doesn't exist
end
end
end
end else begin
CSRCReadValM = '0;
IllegalCSRCAccessM = 1'b1; // no privileges for this csr

View File

@ -171,7 +171,7 @@ module csrm import cvw::*; #(parameter cvw_t P) (
flopenr #(P.XLEN) MEPCreg(clk, reset, WriteMEPCM, NextEPCM, MEPC_REGW);
flopenr #(P.XLEN) MCAUSEreg(clk, reset, WriteMCAUSEM, {NextCauseM[4], {(P.XLEN-5){1'b0}}, NextCauseM[3:0]}, MCAUSE_REGW);
flopenr #(P.XLEN) MTVALreg(clk, reset, WriteMTVALM, NextMtvalM, MTVAL_REGW);
flopenr #(32) MCOUNTINHIBITreg(clk, reset, WriteMCOUNTINHIBITM, CSRWriteValM[31:0], MCOUNTINHIBIT_REGW);
flopenr #(32) MCOUNTINHIBITreg(clk, reset, WriteMCOUNTINHIBITM, {CSRWriteValM[31:2], 1'b0, CSRWriteValM[0]}, MCOUNTINHIBIT_REGW);
if (P.U_SUPPORTED) begin: mcounteren // MCOUNTEREN only exists when user mode is supported
flopenr #(32) MCOUNTERENreg(clk, reset, WriteMCOUNTERENM, CSRWriteValM[31:0], MCOUNTEREN_REGW);
end else assign MCOUNTEREN_REGW = '0;
@ -180,7 +180,9 @@ module csrm import cvw::*; #(parameter cvw_t P) (
if (P.U_SUPPORTED) begin // menvcfg only exists if there is a lower privilege to control
logic WriteMENVCFGM;
logic [63:0] MENVCFG_PreWriteValM, MENVCFG_WriteValM;
logic [1:0] LegalizedCBIE;
assign WriteMENVCFGM = CSRMWriteM & (CSRAdrM == MENVCFG);
assign LegalizedCBIE = MENVCFG_PreWriteValM[5:4] == 2'b10 ? MENVCFG_REGW[5:4] : MENVCFG_PreWriteValM[5:4]; // Assume WARL for reserved CBIE = 10, keeps old value
// MENVCFG is always 64 bits even for RV32
assign MENVCFG_WriteValM = {
MENVCFG_PreWriteValM[63] & P.SSTC_SUPPORTED,
@ -188,7 +190,8 @@ module csrm import cvw::*; #(parameter cvw_t P) (
MENVCFG_PreWriteValM[61] & P.SVADU_SUPPORTED,
53'b0,
MENVCFG_PreWriteValM[7] & P.ZICBOZ_SUPPORTED,
MENVCFG_PreWriteValM[6:4] & {3{P.ZICBOM_SUPPORTED}},
MENVCFG_PreWriteValM[6] & P.ZICBOM_SUPPORTED,
LegalizedCBIE & {2{P.ZICBOM_SUPPORTED}},
3'b0,
MENVCFG_PreWriteValM[0] & P.S_SUPPORTED & P.VIRTMEM_SUPPORTED
};

View File

@ -128,10 +128,13 @@ module csrs import cvw::*; #(parameter cvw_t P) (
else
assign STimerInt = 1'b0;
logic [1:0] LegalizedCBIE;
assign LegalizedCBIE = CSRWriteValM[5:4] == 2'b10 ? SENVCFG_REGW[5:4] : CSRWriteValM[5:4]; // Assume WARL for reserved CBIE = 10, keeps old value
assign SENVCFG_WriteValM = {
{(P.XLEN-8){1'b0}},
CSRWriteValM[7] & P.ZICBOZ_SUPPORTED,
CSRWriteValM[6:4] & {3{P.ZICBOM_SUPPORTED}},
CSRWriteValM[6] & P.ZICBOM_SUPPORTED,
LegalizedCBIE & {2{P.ZICBOM_SUPPORTED}},
3'b0,
CSRWriteValM[0] & P.VIRTMEM_SUPPORTED
};

View File

@ -106,6 +106,7 @@ module csrsr import cvw::*; #(parameter cvw_t P) (
always_comb
if (CSRWriteValM[12:11] == P.U_MODE & P.U_SUPPORTED) STATUS_MPP_NEXT = P.U_MODE;
else if (CSRWriteValM[12:11] == P.S_MODE & P.S_SUPPORTED) STATUS_MPP_NEXT = P.S_MODE;
else if (CSRWriteValM[12:11] == 2'b10) STATUS_MPP_NEXT = STATUS_MPP; // do not change MPP when trying to write reserved 10
else STATUS_MPP_NEXT = P.M_MODE;
///////////////////////////////////////////

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// csrindextoaddr.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 24 January 2024
// Modified: 24 January 2024
//

View File

@ -1,7 +1,7 @@
///////////////////////////////////////////
// packetizer.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 21 May 2024
// Modified: 21 May 2024
//

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// priorityaomux.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 24 January 2024
// Modified: 24 January 2024
//

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// regchangedetect.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 24 January 2024
// Modified: 24 January 2024
//

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// rvvisynth.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: 23 January 2024
// Modified: 23 January 2024
//

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// triggergen.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Created: June 26, 2024
// Modified: June 26, 2024
//

View File

@@ -221,8 +221,8 @@ module spi_apb import cvw::*; #(parameter cvw_t P) (
SPI_DELAY0: Dout <= {8'b0, Delay0[15:8], 8'b0, Delay0[7:0]};
SPI_DELAY1: Dout <= {8'b0, Delay1[15:8], 8'b0, Delay1[7:0]};
SPI_FMT: Dout <= {12'b0, Format[4:1], 13'b0, Format[0], 2'b0};
SPI_TXDATA: Dout <= {23'b0, TransmitFIFOWriteFull, 8'b0};
SPI_RXDATA: Dout <= {23'b0, ReceiveFIFOReadEmpty, ReceiveData[7:0]};
SPI_TXDATA: Dout <= {TransmitFIFOWriteFull, 23'b0, 8'b0};
SPI_RXDATA: Dout <= {ReceiveFIFOReadEmpty, 23'b0, ReceiveData[7:0]};
SPI_TXMARK: Dout <= {29'b0, TransmitWatermark};
SPI_RXMARK: Dout <= {29'b0, ReceiveWatermark};
SPI_IE: Dout <= {30'b0, InterruptEnable};
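The txdata/rxdata fix above relocates the FIFO status flags to bit 31 of the read value, with the received byte remaining in bits 7:0. A driver-side sketch of the corrected layout (Python; bit positions taken directly from the new concatenations):

```python
def decode_rxdata(rxdata: int) -> tuple[bool, int]:
    """Split a 32-bit rxdata read into (ReceiveFIFOReadEmpty, ReceiveData[7:0])."""
    empty = bool((rxdata >> 31) & 1)  # empty flag now lives in bit 31
    data = rxdata & 0xFF              # data byte stays in bits 7:0
    return empty, data

def txdata_full(txdata: int) -> bool:
    """TransmitFIFOWriteFull is likewise reported in bit 31 of txdata."""
    return bool((txdata >> 31) & 1)

assert decode_rxdata(0x8000_00A5) == (True, 0xA5)
assert txdata_full(0x0000_0000) is False
```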
@@ -234,9 +234,9 @@ module spi_apb import cvw::*; #(parameter cvw_t P) (
// SPI enable generation, where SCLK = PCLK/(2*(SckDiv + 1))
// Asserts SCLKenable at the rising and falling edge of SCLK by counting from 0 to SckDiv
// Active at 2x SCLK frequency to account for implicit half cycle delays and actions on both clock edges depending on phase
// When SckDiv is 0, count doesn't work and SCLKenable is simply PCLK
// When SckDiv is 0, count doesn't work and SCLKenable is simply PCLK *** dh 10/26/24: this logic is seriously broken. SCLK is not scaled to PCLK/(2*(SckDiv + 1)). SCLKenableEarly doesn't work right for SckDiv=0
assign ZeroDiv = ~|(SckDiv[10:0]);
assign SCLKenable = ZeroDiv ? PCLK : (DivCounter == SckDiv);
assign SCLKenable = ZeroDiv ? 1 : (DivCounter == SckDiv);
assign SCLKenableEarly = ((DivCounter + 12'b1) == SckDiv);
always_ff @(posedge PCLK)
if (~PRESETn) DivCounter <= '0;
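The comment above states the intended relationship SCLK = PCLK / (2 * (SckDiv + 1)), and the appended note flags that the enable generation does not actually achieve it for small SckDiv. A quick numeric check of the intended formula (illustrative clock values only, not taken from the design):

```python
def intended_sclk_hz(pclk_hz: float, sckdiv: int) -> float:
    """Target SPI clock frequency from the divider formula in the comment."""
    return pclk_hz / (2 * (sckdiv + 1))

pclk = 100e6  # hypothetical 100 MHz APB clock
assert intended_sclk_hz(pclk, 0) == 50e6    # SckDiv = 0 should give PCLK/2
assert intended_sclk_hz(pclk, 3) == 12.5e6  # SckDiv = 3 gives PCLK/8
```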

View File

@@ -0,0 +1,348 @@
///////////////////////////////////////////
// spi_controller.sv
//
// Written: jacobpease@protonmail.com
// Created: October 28th, 2024
// Modified:
//
// Purpose: Controller logic for SPI
//
// Documentation: RISC-V System on Chip Design
//
// A component of the CORE-V-WALLY configurable RISC-V project.
// https://github.com/openhwgroup/cvw
//
// Copyright (C) 2021-23 Harvey Mudd College & Oklahoma State University
//
// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
//
// Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
// except in compliance with the License, or, at your option, the Apache License version 2.0. You
// may obtain a copy of the License at
//
// https://solderpad.org/licenses/SHL-2.1/
//
// Unless required by applicable law or agreed to in writing, any work distributed under the
// License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
////////////////////////////////////////////////////////////////////////////////////////////////
module spi_controller (
input logic PCLK,
input logic PRESETn,
input logic TransmitStart,
input logic [11:0] SckDiv,
input logic [1:0] SckMode,
input logic [1:0] CSMode,
input logic [15:0] Delay0,
input logic [15:0] Delay1,
input logic [7:0] txFIFORead,
input logic txFIFOReadEmpty,
output logic SPICLK,
output logic SPIOUT,
output logic CS
);
// CSMode Stuff
localparam HOLDMODE = 2'b10;
localparam AUTOMODE = 2'b00;
localparam OFFMODE = 2'b11;
typedef enum logic [2:0] {INACTIVE, CSSCK, TRANSMIT, SCKCS, HOLD, INTERCS, INTERXFR} statetype;
statetype CurrState, NextState;
// SCLKenable stuff
logic [11:0] DivCounter;
logic SCLKenable;
logic SCLKenableEarly;
logic SCLKenableLate;
logic EdgeTiming;
logic ZeroDiv;
logic Clock0;
logic Clock1;
logic SCK; // SUPER IMPORTANT, THIS CAN'T BE THE SAME AS SPICLK!
// Shift and Sample Edges
logic PreShiftEdge;
logic PreSampleEdge;
logic ShiftEdge;
logic SampleEdge;
// Frame stuff
logic [2:0] BitNum;
logic LastBit;
logic EndOfFrame;
logic EndOfFrameDelay;
logic PhaseOneOffset;
// Transmit Stuff
logic ContinueTransmit;
// SPIOUT Stuff
logic TransmitLoad;
logic [7:0] TransmitReg;
logic Transmitting;
logic EndTransmission;
logic HoldMode;
// Delay Stuff
logic [7:0] cssck;
logic [7:0] sckcs;
logic [7:0] intercs;
logic [7:0] interxfr;
logic HasCSSCK;
logic HasSCKCS;
logic HasINTERCS;
logic HasINTERXFR;
logic EndOfCSSCK;
logic EndOfSCKCS;
logic EndOfINTERCS;
logic EndOfINTERXFR;
logic [7:0] CSSCKCounter;
logic [7:0] SCKCSCounter;
logic [7:0] INTERCSCounter;
logic [7:0] INTERXFRCounter;
logic DelayIsNext;
// Convenient Delay Reg Names
assign cssck = Delay0[7:0];
assign sckcs = Delay0[15:8];
assign intercs = Delay1[7:0];
assign interxfr = Delay1[15:8];
// Do we have delay for anything?
assign HasCSSCK = cssck > 8'b0;
assign HasSCKCS = sckcs > 8'b0;
assign HasINTERCS = intercs > 8'b0;
assign HasINTERXFR = interxfr > 8'b0;
// Have we hit full delay for any of the delays?
assign EndOfCSSCK = CSSCKCounter == cssck;
assign EndOfSCKCS = SCKCSCounter == sckcs;
assign EndOfINTERCS = INTERCSCounter == intercs;
assign EndOfINTERXFR = INTERXFRCounter == interxfr;
// Clock Signal Stuff -----------------------------------------------
// I'm going to handle all clock stuff here, including ShiftEdge and
// SampleEdge. This makes sure that SPICLK is an output of a register
// and it properly synchronizes signals.
assign SCLKenableLate = DivCounter > SckDiv;
assign SCLKenable = DivCounter == SckDiv;
assign SCLKenableEarly = (DivCounter + 1'b1) == SckDiv;
assign LastBit = BitNum == 3'd7;
assign EdgeTiming = SckDiv > 12'b0 ? SCLKenableEarly : SCLKenable;
//assign SPICLK = Clock0;
assign ContinueTransmit = ~txFIFOReadEmpty & EndOfFrame;
assign EndTransmission = txFIFOReadEmpty & EndOfFrameDelay;
always_ff @(posedge PCLK) begin
if (~PRESETn) begin
DivCounter <= 12'b0;
SPICLK <= SckMode[1];
SCK <= 0;
BitNum <= 3'h0;
PreShiftEdge <= 0;
PreSampleEdge <= 0;
EndOfFrame <= 0;
end else begin
// TODO: Consolidate into one delay counter since none of the
// delays happen at the same time?
if (TransmitStart) begin
SCK <= 0;
end else if (SCLKenable) begin
SCK <= ~SCK;
end
if ((CurrState == CSSCK) & SCK) begin
CSSCKCounter <= CSSCKCounter + 8'd1;
end else begin
CSSCKCounter <= 8'd0;
end
if ((CurrState == SCKCS) & SCK) begin
SCKCSCounter <= SCKCSCounter + 8'd1;
end else begin
SCKCSCounter <= 8'd0;
end
if ((CurrState == INTERCS) & SCK) begin
INTERCSCounter <= INTERCSCounter + 8'd1;
end else begin
INTERCSCounter <= 8'd0;
end
if ((CurrState == INTERXFR) & SCK) begin
INTERXFRCounter <= INTERXFRCounter + 8'd1;
end else begin
INTERXFRCounter <= 8'd0;
end
// SPICLK Logic
if (TransmitStart) begin
SPICLK <= SckMode[1];
end else if (SCLKenable & Transmitting) begin
SPICLK <= (~EndTransmission & ~DelayIsNext) ? ~SPICLK : SckMode[1];
end
// Reset divider
if (SCLKenable | TransmitStart) begin
DivCounter <= 12'b0;
end else begin
DivCounter <= DivCounter + 12'd1;
end
// EndOfFrame controller
// if (SckDiv > 0 ? SCLKenableEarly & LastBit & SPICLK : LastBit & ~SPICLK) begin
// EndOfFrame <= 1'b1;
// end else begin
// EndOfFrame <= 1'b0;
// end
if (~TransmitStart) begin
EndOfFrame <= (SckMode[1] ^ SckMode[0] ^ SPICLK) & SCLKenable & LastBit & Transmitting;
end
// Increment BitNum
if (ShiftEdge & Transmitting) begin
BitNum <= BitNum + 3'd1;
end else if (EndOfFrameDelay) begin
BitNum <= 3'b0;
end
end
end
// Delay ShiftEdge and SampleEdge by a half PCLK period
// Aligned EXACTLY IN THE MIDDLE of the leading and trailing edges.
// Sweeeeeeeeeet...
always_ff @(posedge ~PCLK) begin
if (~PRESETn | TransmitStart) begin
ShiftEdge <= 0;
PhaseOneOffset <= 0;
SampleEdge <= 0;
EndOfFrameDelay <= 0;
end else begin
ShiftEdge <= ((SckMode[1] ^ SckMode[0] ^ SPICLK) & SCLKenable & ~LastBit & Transmitting) & PhaseOneOffset;
PhaseOneOffset <= PhaseOneOffset == 0 ? Transmitting & SCLKenable : PhaseOneOffset;
SampleEdge <= (SckMode[1] ^ SckMode[0] ^ ~SPICLK) & SCLKenable & Transmitting;
EndOfFrameDelay <= (SckMode[1] ^ SckMode[0] ^ SPICLK) & SCLKenable & LastBit & Transmitting;
end
end
// typedef enum logic [2:0] {INACTIVE, CSSCK, TRANSMIT, SCKCS, HOLD, INTERCS, INTERXFR} statetype;
// statetype CurrState, NextState;
assign HoldMode = CSMode == 2'b10;
assign TransmitLoad = TransmitStart | (EndOfFrameDelay & ~txFIFOReadEmpty);
always_ff @(posedge PCLK) begin
if (~PRESETn) begin
CurrState <= INACTIVE;
end else if (SCLKenable) begin
CurrState <= NextState;
end
end
always_comb begin
case (CurrState)
INACTIVE: begin // INACTIVE case --------------------------------
if (TransmitStart) begin
if (~HasCSSCK) begin
NextState = TRANSMIT;
end else begin
NextState = CSSCK;
end
end else begin
NextState = INACTIVE;
end
end
CSSCK: begin // DELAY0 case -------------------------------------
if (EndOfCSSCK) begin
NextState = TRANSMIT;
end
end
TRANSMIT: begin // TRANSMIT case --------------------------------
case(CSMode)
AUTOMODE: begin
if (EndTransmission) begin
NextState = INACTIVE;
end else if (ContinueTransmit) begin
NextState = SCKCS;
end
end
HOLDMODE: begin
if (EndTransmission) begin
NextState = HOLD;
end else if (ContinueTransmit) begin
if (HasINTERXFR) NextState = INTERXFR;
end
end
OFFMODE: begin
end
endcase
end
SCKCS: begin // SCKCS case --------------------------------------
if (EndOfSCKCS) begin
if (EndTransmission) begin
if (CSMode == AUTOMODE) NextState = INACTIVE;
else if (CSMode == HOLDMODE) NextState = HOLD;
end else if (ContinueTransmit) begin
if (HasINTERCS) NextState = INTERCS;
else NextState = TRANSMIT;
end
end
end
HOLD: begin // HOLD mode case -----------------------------------
if (CSMode == AUTOMODE) begin
NextState = INACTIVE;
end else if (TransmitStart) begin // If FIFO is written to, start again.
NextState = TRANSMIT;
end
end
INTERCS: begin // INTERCS case ----------------------------------
if (EndOfINTERCS) begin
if (HasCSSCK) NextState = CSSCK;
else NextState = TRANSMIT;
end
end
INTERXFR: begin // INTERXFR case --------------------------------
if (EndOfINTERXFR) begin
NextState = TRANSMIT;
end
end
default: begin
NextState = INACTIVE;
end
endcase
end
assign Transmitting = CurrState == TRANSMIT;
assign DelayIsNext = (NextState == CSSCK | NextState == SCKCS | NextState == INTERCS | NextState == INTERXFR);
//
always_ff @(posedge PCLK) begin
if (~PRESETn) begin
TransmitReg <= 8'b0;
end else if (TransmitLoad) begin
TransmitReg <= txFIFORead;
end else if (ShiftEdge) begin
TransmitReg <= {TransmitReg[6:0], TransmitReg[0]};
end
end
assign SPIOUT = TransmitReg[7];
assign CS = CurrState == INACTIVE | CurrState == INTERCS;
endmodule
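spi_controller carves its four programmable delays out of the two 16-bit delay registers: cssck and sckcs come from Delay0, intercs and interxfr from Delay1. A Python sketch mirroring the assigns near the top of the module (field meanings are paraphrased from the signal names):

```python
def unpack_delays(delay0: int, delay1: int) -> dict[str, int]:
    """Split the two 16-bit delay registers into their four 8-bit fields."""
    return {
        "cssck":    delay0 & 0xFF,         # Delay0[7:0]:  CS assertion to first SCK edge
        "sckcs":    (delay0 >> 8) & 0xFF,  # Delay0[15:8]: last SCK edge to CS deassertion
        "intercs":  delay1 & 0xFF,         # Delay1[7:0]:  minimum time between CS assertions
        "interxfr": (delay1 >> 8) & 0xFF,  # Delay1[15:8]: delay between back-to-back frames
    }

assert unpack_delays(0x0201, 0x0403) == {
    "cssck": 0x01, "sckcs": 0x02, "intercs": 0x03, "interxfr": 0x04}
```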

View File

@@ -1,42 +1,39 @@
Synthesis for RISC-V Microprocessor System-on-Chip Design
# Synthesis for RISC-V Microprocessor System-on-Chip Design
This subdirectory contains synthesis scripts for use with Synopsys
(snps) Design Compiler (DC). Synthesis commands are found in
scripts/synth.tcl.
`scripts/synth.tcl`.
Example Usage
## Example Usage
```bash
make synth DESIGN=wallypipelinedcore FREQ=500 CONFIG=rv32e
```
environment variables
## Environment Variables
DESIGN
Design provides the name of the output log. Default is synth.
- `DESIGN`
- Provides the name of the output log. Default is `synth`.
- `FREQ`
- Frequency in MHz. Default is 500
- `CONFIG`
- The Wally configuration file. The default is rv32e.
- Examples: rv32e, rv64gc, rv32gc
- `TECH`
- The target standard cell library. The default is sky130.
- Options:
- sky90: skywater 90nm TT 25C
- sky130: skywater 130nm TT 25C
- `SAIFPOWER`
- Controls whether power analysis is driven by a switching factor or by an RTL ModelSim simulation. When enabled, a SAIF file named power.saif is required. The default is 0.
- Options:
- 0: switching factor power analysis
- 1: RTL simulation driven power analysis.
FREQ
Frequency in MHz. Default is 500
CONFIG
The Wally configuration file. The default is rv32e.
Examples: rv32e, rv64gc, rv32gc
TECH
The target standard cell library. The default is sky130.
sky90: skywater 90nm TT 25C
sky130: skywater 130nm TT 25C
SAIFPOWER
Controls if power analysis is driven by switching factor or
RTL modelsim simulation. When enabled requires a saif file
named power.saif. The default is 0.
0: switching factor power analysis
1: RTL simulation driven power analysis.
-----
Extra Tool (PPA)
## Extra Tool (PPA)
To run PPA analysis that hones in on a target frequency, run:
python3 ppa/ppaSynth.py from the synthDC directory. This runs a sweep
across all modules listed at the bottom of the ppaSynth.py file.
`python3 ppa/ppaSynth.py` from the synthDC directory. This runs a sweep
across all modules listed at the bottom of the `ppaSynth.py` file.
There are two options for running the sweep. The first option runs all modules for
all techs around a given frequency (i.e., freqs). The second option
@@ -44,19 +41,21 @@ will run all designs for the specific module based on bestSynths.csv
values. Because the second option is defined later in the script, it takes priority. If the
second set of values is commented out, it will run all widths.
WARNING: The first option may runs lots of runs that could expend all
the licenses available for a license. Therefore, care must be taken
to be sure that enough licenses are available for this first option.
**WARNING:** The first option may launch many synthesis runs, which could exhaust all available licenses. Therefore, take care to ensure that enough licenses are available before using this option.
##### Run specific syntheses
widths = [8, 16, 32, 64, 128]
modules = ['mul', 'adder', 'shifter', 'flop', 'comparator', 'binencoder', 'csa', 'mux2', 'mux4', 'mux8']
techs = ['sky90', 'sky130', 'tsmc28', 'tsmc28psyn']
freqs = [5000]
synthsToRun = allCombos(widths, modules, techs, freqs)
### Run specific syntheses
```python
widths = [8, 16, 32, 64, 128]
modules = ['mul', 'adder', 'shifter', 'flop', 'comparator', 'binencoder', 'csa', 'mux2', 'mux4', 'mux8']
techs = ['sky90', 'sky130', 'tsmc28', 'tsmc28psyn']
freqs = [5000]
synthsToRun = allCombos(widths, modules, techs, freqs)
```
##### Run a sweep based on best delay found in existing syntheses
module = 'adder'
width = 32
tech = 'tsmc28psyn'
synthsToRun = freqSweep(module, width, tech)
### Run a sweep based on best delay found in existing syntheses
```python
module = 'adder'
width = 32
tech = 'tsmc28psyn'
synthsToRun = freqSweep(module, width, tech)
```

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// DCacheFlushFSM.sv
//
// Written: David Harris David_Harris@hmc.edu and Rose Thompson ross1728@gmail.com
// Written: David Harris David_Harris@hmc.edu and Rose Thompson rose@rosethompson.net
// Modified: 14 June 2023
//
// Purpose: The L1 data cache and any feature L2 or high cache will not necessary writeback all dirty

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// functionName.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
//
// Purpose: decode name of function
//

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// loggers.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Modified: 14 June 2023
//
// Purpose: Log branch instructions, log instruction fetches,

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// loggers.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Modified: 24 July 2024
//
// Purpose: Wraps all the synthesizable rvvi hardware into a single module for the testbench.

View File

@@ -1,7 +1,7 @@
///////////////////////////////////////////
// watchdog.sv
//
// Written: Rose Thompson ross1728@gmail.com
// Written: Rose Thompson rose@rosethompson.net
// Modified: 14 June 2023
//
// Purpose: Detects if the processor is stuck and halts the simulation

View File

@@ -540,7 +540,6 @@ module testbench;
always @(posedge clk) begin
if (LoadMem) begin
$readmemh(memfilename, dut.core.lsu.dtim.dtim.ram.ram.RAM);
$display("Read memfile %s", memfilename);
end
if (CopyRAM) begin
LogXLEN = (1 + P.XLEN/32); // 2 for rv32 and 3 for rv64

View File

@@ -60,6 +60,7 @@ string coverage64gc[] = '{
"tlbTP",
"tlbMisaligned",
"hptwAccessFault",
"nonleafpbmtfault",
"amoAccessFault",
"floatmisc",
"ifuCamlineWrite",

View File

@@ -1,35 +1,43 @@
CEXT := c
CPPEXT := cpp
# Disable builtin rules because they are a shorter (but incorrect) path that Make will use by default
MAKEFLAGS += --no-builtin-rules
SRCDIR := .
SRCEXT := S
AEXT := s
SEXT := S
SRCEXT := \([$(CEXT)$(AEXT)$(SEXT)]\|$(CPPEXT)\)
#SRCS = $(wildcard *.S)
#PROGS = $(patsubst %.S,%,$(SRCS))
SRCDIR = .
SRCEXT = S
OBJEXT := o
EXEEXT := elf
SOURCES ?= $(shell find $(SRCDIR) -type f -regex ".*\.$(SRCEXT)" | sort)
OBJEXT = elf
OBJECTS := $(SOURCES:.$(SEXT)=.$(OBJEXT))
ELFS := $(SOURCES:.$(SRCEXT)=.$(EXEEXT))
OBJDUMPS := $(addsuffix .objdump, $(ELFS))
MEMFILES := $(addsuffix .memfile, $(ELFS))
all: $(OBJECTS)
all: $(OBJDUMPS) $(MEMFILES)
# Create disassembly
%.elf.objdump: %.elf
riscv64-unknown-elf-objdump -S -D $< > $@
extractFunctionRadix.sh $@
# Change many things if bit width isn't 64
%.elf: $(SRCDIR)/%.$(SEXT) WALLY-init-lib.h Makefile
riscv64-unknown-elf-gcc -g -o $@ -march=rv64gqc_zfa_zba_zbb_zbc_zbs_zfh_zicboz_zicbop_zicbom_zbkb_zbkx_zknd_zkne_zknh -mabi=lp64 -mcmodel=medany \
-nostartfiles -T../../examples/link/link.ld $<
riscv64-unknown-elf-objdump -S -D $@ > $@.objdump
riscv64-unknown-elf-elf2hex --bit-width 64 --input $@ --output $@.memfile
extractFunctionRadix.sh $@.objdump
# Create memfile
%.elf.memfile: %.elf
riscv64-unknown-elf-elf2hex --bit-width 64 --input $< --output $@
sim: %.elf
# Link object file to create executable
.PRECIOUS: %.$(EXEEXT)
%.$(EXEEXT): %.$(OBJEXT)
riscv64-unknown-elf-gcc -g -o $@ -mcmodel=medany -nostartfiles -T../../examples/link/link.ld $*.o
# Assemble into object files
%.$(OBJEXT): %.$(AEXT)
riscv64-unknown-elf-as -g -o $@ -march=rv64gqc_zfa_zba_zbb_zbc_zbs_zfh_zicboz_zicbop_zicbom_zbkb_zbkx_zknd_zkne_zknh -mabi=lp64 $<
# Preprocess assembly files
%.$(AEXT): %.$(SRCEXT) WALLY-init-lib.h
riscv64-unknown-elf-gcc -E -g -o $@ $<
sim: %.$(EXEEXT)
spike +signature=%.signature.output +signature-granularity=8 %.elf
diff --ignore-case %.signature.output %.reference_output || exit
echo "Signature matches! Success!"
clean:
rm -f *.elf *.objdump *.signature.output *.addr *.lab *.memfile
rm -f *.elf *.objdump *.signature.output *.addr *.lab *.memfile *.o *.s

View File

@@ -28,6 +28,12 @@
// The PMP tests are sensitive to the exact addresses in this code, so unfortunately
// modifying anything breaks those tests.
// Provides simple firmware services through ecall. Place argument in a0 and issue ecall:
// 0: change to user mode
// 1: change to supervisor mode
// 3: change to machine mode
// 4: terminate program
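// Example usage (as in the tests later in this diff): load the service number into a0, then trap:
//     li a0, 1        # request supervisor mode
//     ecall           # trap to this firmware, which performs the switch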
.section .text.init
.global rvtest_entry_point
@@ -128,6 +134,18 @@ write_tohost:
self_loop:
j self_loop # wait
// utility routines
# put a 1 in msb of a0 (position XLEN-1); works for both RV32 and RV64
setmsb:
li a0, 0x80000000 # 1 in bit 31
slli a1, a0, 1 # check if register is wider than 31 bits
beqz a1, setmsbdone # yes, a0 has 1 in bit 31
slli a0, a0, 16 # no: shift a0 to have 1 in bit 63
slli a0, a0, 16 # use two shifts of 16 bits each to be compatible with compiling for either RV32 or RV64
setmsbdone:
ret # return to caller
.section .tohost
tohost: # write to HTIF
.dword 0
@@ -139,6 +157,9 @@ begin_signature:
.fill 6*(XLEN/32),4,0xdeadbeef #
end_signature:
scratch:
.fill 4,4,0x0
# Initialize stack with room for 512 bytes
.bss
.space 512

View File

@@ -275,4 +275,3 @@ one_0:
bnez t2, loop_0
ret

View File

@@ -50,6 +50,31 @@ main:
add t0, t0, t2
sw t1, 0(t0)
j jumppoint
jumppoint:
.align 6 # aligns to cache line size
sw t1, 0(t0)
sw t1, 4(t0)
sw t1, 8(t0)
sw t1, 12(t0)
sw t1, 16(t0)
sw t1, 20(t0)
sw t1, 24(t0)
sw t1, 28(t0)
sw t1, 32(t0)
sw t1, 36(t0)
sw t1, 40(t0)
sw t1, 44(t0)
sw t1, 48(t0)
sw t1, 52(t0) # this one causes a concurrent I$ miss with HPTW access exception (store access exception)
sw t1, 56(t0)
lw t3, 0(t0)
lw t3, 4(t0)
lw t3, 8(t0)
lw t3, 12(t0)
lw t3, 16(t0)
fence.I
finished:

View File

@@ -105,4 +105,3 @@ main:
j done

View File

@@ -0,0 +1,143 @@
///////////////////////////////////////////
// hptwAccessFault.S
//
// Written: Rose Thompson rose@rosethompson.net
//
// Purpose: Force the HPTW to walk a page table with non-leaf non-zero PBMT bits. This will generate
// a load or store/amo page fault based on the original access type.
//
// A component of the CORE-V-WALLY configurable RISC-V project.
// https://github.com/openhwgroup/cvw
//
// Copyright (C) 2021-24 Harvey Mudd College & Oklahoma State University
//
// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
//
// Licensed under the Solderpad Hardware License v 2.1 (the License); you may not use this file
// except in compliance with the License, or, at your option, the Apache License version 2.0. You
// may obtain a copy of the License at
//
// https://solderpad.org/licenses/SHL-2.1/
//
// Unless required by applicable law or agreed to in writing, any work distributed under the
// License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
////////////////////////////////////////////////////////////////////////////////////////////////
// load code to initialize stack, handle interrupts, terminate
#include "WALLY-init-lib.h"
# run-elf.bash find this in project description
main:
# Page table root address at 0x80010000
li t5, 0x9000000000080010
csrw satp, t5
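# 0x9000000000080010: satp.MODE (bits 63:60) = 9 selects Sv48; satp.PPN = 0x80010,
# i.e. the 0x80010000 root page table noted above (decoded per the RV64 satp layout)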
# sfence.vma x0, x0
# switch to supervisor mode
li a0, 1
ecall
li t5, 0
li t2, 0x1000
li t0, 0x8000001000
lw t1, 0(t0) # valid virtual address, valid physical address, but invalid PBMT in middle of page table.
li t1, 0x00008067
add t0, t0, t2
sw t1, 0(t0) # valid virtual address, valid physical address, but invalid PBMT in middle of page table.
fence.I
finished:
j done
.data
.align 16
# Page table situated at 0x80010000
pagetable:
.8byte 0x200044C1
.8byte 0x200044C1
.align 12
.8byte 0x40000040200048C1
.8byte 0x00000000200048C1
.8byte 0x00000000200048C1
.align 12
.8byte 0x0000000020004CC1
.align 12
#80000000
.8byte 0x200000CF
.8byte 0x200004CF
.8byte 0x200008CF
.8byte 0x20000CCF
.8byte 0x200010CF
.8byte 0x200014CF
.8byte 0x200018CF
.8byte 0x20001CCF
.8byte 0x200020CF
.8byte 0x200024CF
.8byte 0x200028CF
.8byte 0x20002CCF
.8byte 0x200030CF
.8byte 0x200034CF
.8byte 0x200038CF
.8byte 0x20003CCF
.8byte 0x200040CF
.8byte 0x200044CF
.8byte 0x200048CF
.8byte 0x20004CCF
.8byte 0x200050CF
.8byte 0x200054CF
.8byte 0x200058CF
.8byte 0x20005CCF
.8byte 0x200060CF
.8byte 0x200064CF
.8byte 0x200068CF
.8byte 0x20006CCF
.8byte 0x200070CF
.8byte 0x200074CF
.8byte 0x200078CF
.8byte 0x20007CCF
.8byte 0x200080CF
.8byte 0x200084CF
.8byte 0x200088CF
.8byte 0x20008CCF
.8byte 0x200090CF
.8byte 0x200094CF
.8byte 0x200098CF
.8byte 0x20009CCF
.8byte 0x2000A0CF
.8byte 0x2000A4CF
.8byte 0x2000A8CF
.8byte 0x2000ACCF
.8byte 0x2000B0CF
.8byte 0x2000B4CF
.8byte 0x2000B8CF
.8byte 0x2000BCCF
.8byte 0x2000C0CF
.8byte 0x2000C4CF
.8byte 0x2000C8CF
.8byte 0x2000CCCF
.8byte 0x2000D0CF
.8byte 0x2000D4CF

View File

@@ -156,6 +156,3 @@ main:
j done

View File

@@ -324,6 +324,3 @@ sretdone:
finished: j done

View File

@@ -129,5 +129,3 @@ pagetable:
.8byte 0x200084CF
.8byte 0x200088CF
.8byte 0x20008CCF

Some files were not shown because too many files have changed in this diff.