Merge branch 'master' into ssqosid
Signed-off-by: Ved Shanbhogue <91900059+ved-rivos@users.noreply.github.com>
ved-rivos authored Jan 7, 2024
2 parents b8c31af + f896f78 commit e0d15dd
Showing 6 changed files with 123 additions and 41 deletions.
11 changes: 9 additions & 2 deletions CHANGELOG.md
@@ -2,10 +2,17 @@

This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).


## [3.14.4] - 2023-12-25
## [3.16.1] - 2023-12-25
- Add support for Ssqosid extension

## [3.16.0] - 2024-01-03
- use the "hartX" naming for the merged dict
- improve logging statements
- update the "fields" node of each csr in the normalized custom yaml

## [3.15.0] - 2024-01-01
- Added function that returns the march and mabi for gcc from a given ISA

## [3.14.3] - 2023-12-01
- Add support for Zimop extension

2 changes: 1 addition & 1 deletion riscv_config/__init__.py
@@ -1,4 +1,4 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__version__ = '3.14.4'
__version__ = '3.16.1'

73 changes: 37 additions & 36 deletions riscv_config/checker.py
Expand Up @@ -1505,6 +1505,7 @@ def check_warl_legality(spec, logging = False):
warlnodes = {}
xlen = 64 if 64 in spec['supported_xlen'] else 32
for csrname, csrnode in spec.items():
logger.debug(f'Checking legality of warl strings for csr: {csrname}')
# don't perform any warl legality checks for uarch signal definitions.
if csrname == 'uarch_signals':
continue
@@ -1723,9 +1724,6 @@ def check_debug_specs(debug_spec, isa_spec,
and the second being the absolute path to the platform spec file.
'''

if logging:
logger.info('Input-Debug file')

foo1 = isa_spec
foo = debug_spec
schema = constants.debug_schema
@@ -1735,24 +1733,24 @@
"""
# Load input YAML file
if logging:
logger.info('Loading input file: ' + str(foo))
logger.info('DebugCheck: Loading input Debug file: ' + str(foo))
master_inp_debug_yaml = utils.load_yaml(foo, no_anchors)

# Load input YAML file
if logging:
logger.info('Loading input isa file: ' + str(foo1))
logger.info('DebugCheck: Loading input isa file for debug: ' + str(foo1))
master_inp_yaml = utils.load_yaml(foo1, no_anchors)
isa_string = master_inp_yaml['hart0']['ISA']

# instantiate validator
if logging:
logger.info('Load Schema ' + str(schema))
logger.info('DebugCheck: Load Debug Schema ' + str(schema))
master_schema_yaml = utils.load_yaml(schema, no_anchors)

outyaml = copy.deepcopy(master_inp_debug_yaml)
for x in master_inp_debug_yaml['hart_ids']:
if logging:
logger.info('Processing Hart: hart'+str(x))
logger.info(f'DebugCheck: Processing Hart:{x}')
inp_debug_yaml = master_inp_debug_yaml['hart'+str(x)]
schema_yaml = add_debug_setters(master_schema_yaml['hart_schema']['schema'])
#Extract xlen
@@ -1765,21 +1763,21 @@

# Perform Validation
if logging:
logger.info('Initiating Validation')
logger.info(f'DebugCheck: Initiating Validation for hart:{x}')
valid = validator.validate(normalized)

# Print out errors
if valid:
if logging:
logger.info('No errors for Hart: '+str(x) + ' :)')
logger.info(f'DebugCheck: No errors for Hart:{x}')
else:
error_list = validator.errors
raise ValidationError("Error in " + foo + ".", error_list)

normalized['ISA'] = isa_string

if logging:
logger.info(f' Updating fields node for each CSR')
logger.info(f'DebugCheck: Updating fields node for each CSR in Hart:{x}')
normalized = update_fields(normalized, logging)

outyaml['hart'+str(x)] = trim(normalized)
@@ -1790,7 +1788,7 @@ def check_debug_specs(debug_spec, isa_spec,
dfile = output_filename
outfile = open(output_filename, 'w')
if logging:
logger.info('Dumping out Normalized Checked YAML: ' + output_filename)
logger.info('DebugCheck: Dumping out Normalized Checked YAML: ' + output_filename)
utils.dump_yaml(outyaml, outfile, no_anchors )
return dfile

@@ -1830,18 +1828,18 @@ def check_isa_specs(isa_spec,
"""
# Load input YAML file
if logging:
logger.info('Loading input file: ' + str(foo))
logger.info('ISACheck: Loading input file: ' + str(foo))
master_inp_yaml = utils.load_yaml(foo, no_anchors)

# instantiate validator
if logging:
logger.info('Load Schema ' + str(schema))
logger.info('ISACheck: Load Schema ' + str(schema))
master_schema_yaml = utils.load_yaml(schema, no_anchors)

outyaml = copy.deepcopy(master_inp_yaml)
for x in master_inp_yaml['hart_ids']:
if logging:
logger.info('Processing Hart: hart'+str(x))
logger.info(f'ISACheck: Processing Hart:{x}')
inp_yaml = master_inp_yaml['hart'+str(x)]
schema_yaml = add_def_setters(master_schema_yaml['hart_schema']['schema'])
schema_yaml = add_reset_setters(master_schema_yaml['hart_schema']['schema'])
@@ -1855,18 +1853,18 @@

# Perform Validation
if logging:
logger.info('Initiating Validation')
logger.info(f'ISACheck: Initiating Validation for Hart:{x}')
valid = validator.validate(normalized)

# Print out errors
if valid:
if logging:
logger.info('No errors for Hart: '+str(x) + ' :)')
logger.info(f'ISACheck: No errors for Hart:{x}')
else:
error_list = validator.errors
raise ValidationError("Error in " + foo + ".", error_list)
raise ValidationError(f"ISACheck: Error in " + foo + ".", error_list)
if logging:
logger.info(f' Updating fields node for each CSR')
logger.info(f'ISACheck: Updating fields node for each CSR in Hart:{x}')
normalized = update_fields(normalized, logging)

outyaml['hart'+str(x)] = trim(normalized)
@@ -1877,7 +1875,7 @@
ifile = output_filename
outfile = open(output_filename, 'w')
if logging:
logger.info('Dumping out Normalized Checked YAML: ' + output_filename)
logger.info('ISACheck: Dumping out Normalized Checked YAML: ' + output_filename)
utils.dump_yaml(outyaml, outfile, no_anchors )
return ifile

@@ -1911,26 +1909,29 @@ def check_custom_specs(custom_spec,

# Load input YAML file
if logging:
logger.info('Loading input file: ' + str(foo))
logger.info('CustomCheck: Loading input file: ' + str(foo))
master_custom_yaml = utils.load_yaml(foo, no_anchors)
schema_yaml = utils.load_yaml(constants.custom_schema, no_anchors)
validator = schemaValidator(schema_yaml, xlen=[])
validator.allow_unknown = True

outyaml = copy.deepcopy(master_custom_yaml)
normalized = {}
for x in master_custom_yaml['hart_ids']:
if logging:
logger.info('Processing Hart: hart'+str(x))
logger.info(f'CustomCheck: Processing Hart:{x}')
inp_yaml = master_custom_yaml['hart'+str(x)]
valid = validator.validate(inp_yaml)
if valid:
if logging:
logger.info('No errors for Hart: '+str(x) + ' :)')
logger.info(f'CustomCheck: No errors for Hart:{x}')
else:
error_list = validator.errors
raise ValidationError("Error in " + foo + ".", error_list)
normalized[f'hart{x}'] = validator.normalized(inp_yaml, schema_yaml)
raise ValidationError("CustomCheck: Error in " + foo + ".", error_list)
normalized = validator.normalized(inp_yaml, schema_yaml)
if logging:
logger.info(f'CustomCheck: Updating fields node for each CSR Hart:{x}')
normalized = update_fields(normalized, logging)
outyaml['hart'+str(x)] = trim(normalized)
errors = check_fields(inp_yaml)
if errors:
raise ValidationError("Error in " + foo + ".", errors)
@@ -1942,7 +1943,7 @@
cfile = output_filename
outfile = open(output_filename, 'w')
if logging:
logger.info('Dumping out Normalized Checked YAML: ' + output_filename)
logger.info('CustomCheck: Dumping out Normalized Checked YAML: ' + output_filename)
utils.dump_yaml(outyaml, outfile, no_anchors )
return cfile

@@ -2060,23 +2061,23 @@ def check_csr_specs(ispec=None, customspec=None, dspec=None, pspec=None, work_di
hart_ids = []
for entry in ispec_dict['hart_ids']:
hart_ids.append(entry)
merged[entry] = {}
merged[entry].update(ispec_dict['hart'+str(entry)])
merged[f'hart{entry}'] = {}
merged[f'hart{entry}'].update(ispec_dict['hart'+str(entry)])
if custom_file is not None:
merged[entry].update(customspec_dict['hart'+str(entry)])
merged[f'hart{entry}'].update(customspec_dict['hart'+str(entry)])
if debug_file is not None:
merged[entry].update(dspec_dict['hart'+str(entry)])
merged[f'hart{entry}'].update(dspec_dict['hart'+str(entry)])

try:
uarch_signals = merged[entry]['uarch_signals']
uarch_signals = merged[f'hart{entry}']['uarch_signals']
except KeyError as e:
logger.info("No uarch signals found for hart"+str(entry))
logger.info(f"No uarch signals found for hart:{entry}")
uarch_signals = {}

for entry in hart_ids:
csr_db = merged[entry]
csr_db = merged[f'hart{entry}']
if logging:
logger.info("Initiating WARL legality checks.")
logger.info(f"Initiating WARL legality checks for hart:{entry}.")
errors = check_warl_legality(csr_db, logging)
if errors:
raise ValidationError("Error in csr definitions", errors)
@@ -2086,7 +2087,7 @@
raise ValidationError("Error in csr definitions", errors)

if logging:
logger.info("Initiating post processing and reset value checks.")
logger.info(f"Initiating post processing and reset value checks for hart{entry}.")
errors = check_reset(csr_db, logging)
if errors:
raise ValidationError("Error in csr definitions", errors)
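
For reference, a minimal sketch of the per-hart dict shape this hunk builds with the new "hartX" keys; the hart id and spec contents below are assumed placeholders, not taken from this commit:

# Assumed illustration of the "hartX" keying used when merging the isa,
# custom and debug specs in check_csr_specs (contents are placeholders).
ispec_dict = {'hart_ids': [0], 'hart0': {'ISA': 'RV64IMAC_Zicsr'}}

merged = {}
for entry in ispec_dict['hart_ids']:
    merged[f'hart{entry}'] = {}
    merged[f'hart{entry}'].update(ispec_dict['hart' + str(entry)])

# The per-hart CSR database is then fetched with the same key.
csr_db = merged['hart0']
assert 'ISA' in csr_db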
75 changes: 75 additions & 0 deletions riscv_config/isa_validator.py
@@ -161,4 +161,79 @@ def get_extension_list(isa):

return (extension_list, err, err_list)

def get_march_mabi (isa : str):
'''
This function returns the corresponding march and mabi argument values
for RISC-V GCC
arguments:
isa: (string) this is the isa string in canonical order
returns:
march: (string) this is the string to be passed to -march to gcc for a given isa
mabi: (string) this is the string to be passed to -mabi for given isa
march_list: (list) gives march as a list of all extensions as elements
None: if ISA validation throws error
'''

# march generation

march = 'rv32' if '32' in isa else 'rv64'
march_list = []
march_list.append(march)

# get extension list
(ext_list, err, err_list) = get_extension_list(isa)

# if isa validation throws errors, return None
if err:
return None

# extensions to be nullified
null_ext = [
# privilege modes
'U',
'S',

# rnmi
'Smrnmi',

# debug mode
'Sdext',

# performance counter
'Zicntr',
'Zihpm',

# unratified Zb* extensions
'Zbe',
'Zbf',
'Zbm',
'Zbr',
]

# add Zbp and Zbt to null_ext if Zbpbo is present
if 'Zbpbo' in ext_list:
null_ext += ['Zbp', 'Zbt']
# construct march
for ext in ext_list:
if ext not in null_ext:
march_list.append(ext.lower())
# suffix multicharacter extensions with '_'
if len(ext) == 1:
march += ext.lower()
else:
# suffix multicharacter extensions with '_'
march = march + '_' + ext.lower()

# mabi generation
mabi = 'ilp32'
if 'F' in ext_list and 'D' in ext_list:
mabi += 'd'
elif 'F' in ext_list:
mabi += 'f'

if 'rv64' in march:
mabi = mabi.replace('ilp32', 'lp64')

return (march, mabi, march_list)
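
A minimal usage sketch of the new helper; the ISA string and the printed values are assumed examples, not taken from this commit:

# Illustrative use of the new get_march_mabi() helper from
# riscv_config/isa_validator.py. The ISA string below is an assumed example.
from riscv_config.isa_validator import get_march_mabi

result = get_march_mabi('RV64IMAFDC_Zicsr_Zifencei')
if result is None:
    # get_march_mabi returns None when ISA validation reports errors.
    print('invalid ISA string')
else:
    march, mabi, march_list = result
    print(march)       # e.g. 'rv64imafdc_zicsr_zifencei'
    print(mabi)        # e.g. 'lp64d' (F and D present, 64-bit march)
    print(march_list)  # e.g. ['rv64', 'i', 'm', 'a', 'f', 'd', 'c', 'zicsr', 'zifencei']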
1 change: 0 additions & 1 deletion riscv_config/warl.py
@@ -63,7 +63,6 @@ def __init__(self, node, csrname, f_msb, f_lsb, spec=None):
else:
self.uarch_signals = {}
except KeyError:
logger.info(f'No uarch_signals found in spec.')
self.uarch_signals = {}
self.csrname = csrname
if spec is not None:
2 changes: 1 addition & 1 deletion setup.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 3.14.4
current_version = 3.16.1
commit = True
tag = True
