diff --git a/bids-validator/src/issues/list.ts b/bids-validator/src/issues/list.ts
index 3e040e06b..6310d51aa 100644
--- a/bids-validator/src/issues/list.ts
+++ b/bids-validator/src/issues/list.ts
@@ -1,4 +1,4 @@
-import { IssueDefinitionRecord } from '../types/issues.ts'
+import { IssueDefinitionRecord } from "../types/issues.ts";
 
 export const filenameIssues: IssueDefinitionRecord = {
   MISSING_DATASET_DESCRIPTION: {
@@ -7,93 +7,103 @@ export const filenameIssues: IssueDefinitionRecord = {
       'A dataset_description.json file is required in the root of the dataset',
   },
   INVALID_ENTITY_LABEL: {
-    severity: 'error',
+    severity: "error",
     reason:
       "entity label doesn't match format found for files with this suffix",
   },
   ENTITY_WITH_NO_LABEL: {
-    severity: 'error',
-    reason: 'Found an entity with no label.',
+    severity: "error",
+    reason: "Found an entity with no label.",
   },
   MISSING_REQUIRED_ENTITY: {
-    severity: 'error',
-    reason: 'Missing required entity for files with this suffix.',
+    severity: "error",
+    reason: "Missing required entity for files with this suffix.",
   },
   ENTITY_NOT_IN_RULE: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'Entity not listed as required or optional for files with this suffix',
+      "Entity not listed as required or optional for files with this suffix",
   },
   DATATYPE_MISMATCH: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'The datatype directory does not match datatype of found suffix and extension',
+      "The datatype directory does not match datatype of found suffix and extension",
   },
   ALL_FILENAME_RULES_HAVE_ISSUES: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'Multiple filename rules were found as potential matches. All of them had at least one issue during filename validation.',
+      "Multiple filename rules were found as potential matches. All of them had at least one issue during filename validation.",
   },
   EXTENSION_MISMATCH: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'Extension used by file does not match allowed extensions for its suffix',
+      "Extension used by file does not match allowed extensions for its suffix",
   },
   JSON_KEY_REQUIRED: {
-    severity: 'error',
+    severity: "error",
     reason: "A data file's JSON sidecar is missing a key listed as required.",
   },
   JSON_KEY_RECOMMENDED: {
-    severity: 'warning',
-    reason: 'A data files JSON sidecar is missing a key listed as recommended.',
+    severity: "warning",
+    reason: "A data file's JSON sidecar is missing a key listed as recommended.",
   },
   TSV_ERROR: {
-    severity: 'error',
-    reason: 'generic place holder for errors from tsv files',
+    severity: "error",
+    reason: "generic placeholder for errors from TSV files",
   },
   TSV_COLUMN_MISSING: {
-    severity: 'error',
-    reason: 'A required column is missing',
+    severity: "error",
+    reason: "A required column is missing",
   },
   TSV_COLUMN_ORDER_INCORRECT: {
-    severity: 'error',
-    reason: 'Some TSV columns are in the incorrect order',
+    severity: "error",
+    reason: "Some TSV columns are in the incorrect order",
   },
   TSV_ADDITIONAL_COLUMNS_NOT_ALLOWED: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'A TSV file has extra columns which are not allowed for its file type',
+      "A TSV file has extra columns which are not allowed for its file type",
   },
   TSV_INDEX_VALUE_NOT_UNIQUE: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'An index column(s) was specified for the tsv file and not all of the values for it are unique.',
+      "An index column(s) was specified for the TSV file, and not all of its values are unique.",
   },
   TSV_VALUE_INCORRECT_TYPE: {
-    severity: 'error',
+    severity: "error",
+    reason:
+      "A value in a column did not match the acceptable type for that column header's specified format.",
+  },
+  TSV_VALUE_INCORRECT_TYPE_NONREQUIRED: {
+    severity: "warning",
     reason:
-      'A value in a column did match the acceptable type for that column headers specified format.',
+      "A value in a column did not match the acceptable type for that column header's specified format.",
+  },
+  TSV_COLUMN_TYPE_REDEFINED: {
+    severity: "warning",
+    reason:
+      "A column required in a TSV file has been redefined in a sidecar file. This redefinition is being ignored.",
   },
   CHECK_ERROR: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'generic place holder for errors from failed `checks` evaluated from schema.',
+      "generic placeholder for errors from failed `checks` evaluated from schema.",
   },
   NOT_INCLUDED: {
-    severity: 'error',
+    severity: "error",
     reason:
-      'Files with such naming scheme are not part of BIDS specification. This error is most commonly ' +
-      'caused by typos in file names that make them not BIDS compatible. Please consult the specification and ' +
-      'make sure your files are named correctly. If this is not a file naming issue (for example when including ' +
+      "Files with such a naming scheme are not part of the BIDS specification. This error is most commonly " +
+      "caused by typos in file names that make them not BIDS compatible. Please consult the specification and " +
+      "make sure your files are named correctly. If this is not a file naming issue (for example when including " +
       'files not yet covered by the BIDS specification) you should include a ".bidsignore" file in your dataset (see' +
-      ' https://github.com/bids-standard/bids-validator#bidsignore for details). Please ' +
-      'note that derived (processed) data should be placed in /derivatives folder and source data (such as DICOMS ' +
-      'or behavioural logs in proprietary formats) should be placed in the /sourcedata folder.',
+      " https://github.com/bids-standard/bids-validator#bidsignore for details). Please " +
+      "note that derived (processed) data should be placed in the /derivatives folder and source data (such as DICOMs " +
+      "or behavioural logs in proprietary formats) should be placed in the /sourcedata folder.",
   },
   EMPTY_FILE: {
-    severity: 'error',
-    reason: 'Empty files not allowed.',
+    severity: "error",
+    reason: "Empty files not allowed.",
   },
-}
+};
 
-export const nonSchemaIssues = { ...filenameIssues }
+export const nonSchemaIssues = { ...filenameIssues };
diff --git a/bids-validator/src/schema/applyRules.ts b/bids-validator/src/schema/applyRules.ts
index 39d3b69e9..e4ce09e7f 100644
--- a/bids-validator/src/schema/applyRules.ts
+++ b/bids-validator/src/schema/applyRules.ts
@@ -3,12 +3,12 @@ import {
   GenericSchema,
   SchemaFields,
   SchemaTypeLike,
-} from '../types/schema.ts'
-import { Severity } from '../types/issues.ts'
-import { BIDSContext } from './context.ts'
-import { expressionFunctions } from './expressionLanguage.ts'
-import { logger } from '../utils/logger.ts'
-import { memoize } from '../utils/memoize.ts'
+} from "../types/schema.ts";
+import { Severity } from "../types/issues.ts";
+import { BIDSContext } from "./context.ts";
+import { expressionFunctions } from "./expressionLanguage.ts";
+import { logger } from "../utils/logger.ts";
+import { memoize } from "../utils/memoize.ts";
 
 /**
  * Given a schema and context, evaluate which rules match and test them.
@@ -28,53 +28,53 @@ export function applyRules(
   schemaPath?: string,
 ) {
   if (!rootSchema) {
-    rootSchema = schema
+    rootSchema = schema;
   }
   if (!schemaPath) {
-    schemaPath = 'schema.rules'
+    schemaPath = "schema.rules";
   }
-  Object.assign(context, expressionFunctions)
+  Object.assign(context, expressionFunctions);
   // @ts-expect-error
-  context.exists.bind(context)
+  context.exists.bind(context);
   for (const key in schema) {
     if (!(schema[key].constructor === Object)) {
-      continue
+      continue;
     }
-    if ('selectors' in schema[key]) {
+    if ("selectors" in schema[key]) {
       evalRule(
         schema[key] as GenericRule,
         context,
         rootSchema,
         `${schemaPath}.${key}`,
-      )
+      );
     } else if (schema[key].constructor === Object) {
       applyRules(
         schema[key] as GenericSchema,
         context,
         rootSchema,
         `${schemaPath}.${key}`,
-      )
+      );
     }
   }
-  return Promise.resolve()
+  return Promise.resolve();
 }
 
 const evalConstructor = (src: string): Function =>
-  new Function('context', `with (context) { return ${src} }`)
-const safeHas = () => true
+  new Function("context", `with (context) { return ${src} }`);
+const safeHas = () => true;
 const safeGet = (target: any, prop: any) =>
-  prop === Symbol.unscopables ? undefined : target[prop]
+  prop === Symbol.unscopables ? undefined : target[prop];
 
-const memoizedEvalConstructor = memoize(evalConstructor)
+const memoizedEvalConstructor = memoize(evalConstructor);
 
 export function evalCheck(src: string, context: BIDSContext) {
-  const test = memoizedEvalConstructor(src)
-  const safeContext = new Proxy(context, { has: safeHas, get: safeGet })
+  const test = memoizedEvalConstructor(src);
+  const safeContext = new Proxy(context, { has: safeHas, get: safeGet });
   try {
-    return test(safeContext)
+    return test(safeContext);
   } catch (error) {
-    logger.debug(error)
-    return null
+    logger.debug(error);
+    return null;
   }
 }
@@ -99,7 +99,7 @@ const evalMap: Record<
   initial_columns: evalInitialColumns,
   index_columns: evalIndexColumns,
   fields: evalJsonCheck,
-}
+};
 
 /**
  * Entrypoint for evaluating a individual rule.
@@ -114,18 +114,18 @@ function evalRule(
   schemaPath: string,
 ) {
   if (rule.selectors && !mapEvalCheck(rule.selectors, context)) {
-    return
+    return;
   }
 
   Object.keys(rule)
     .filter((key) => key in evalMap)
    .map((key) => {
       // @ts-expect-error
-      evalMap[key](rule, context, schema, schemaPath)
-    })
+      evalMap[key](rule, context, schema, schemaPath);
+    });
 }
 
 function mapEvalCheck(statements: string[], context: BIDSContext): boolean {
-  return statements.every((x) => evalCheck(x, context))
+  return statements.every((x) => evalCheck(x, context));
 }
 
@@ -145,14 +145,14 @@ function evalRuleChecks(
         reason: rule.issue.message,
         files: [{ ...context.file, evidence: schemaPath }],
         severity: rule.issue.level as Severity,
-      })
+      });
     } else {
-      context.issues.addNonSchemaIssue('CHECK_ERROR', [
+      context.issues.addNonSchemaIssue("CHECK_ERROR", [
         { ...context.file, evidence: schemaPath },
-      ])
+      ]);
     }
   }
-  return true
+  return true;
 }
 
 /**
@@ -168,27 +168,53 @@ function schemaObjectTypeCheck(
   schema: GenericSchema,
 ): boolean {
   // always allow n/a?
-  if (value === 'n/a') {
-    return true
+  if (value === "n/a") {
+    return true;
   }
-  if ('anyOf' in schemaObject) {
+  if ("anyOf" in schemaObject) {
     return schemaObject.anyOf.some((x) =>
-      schemaObjectTypeCheck(x, value, schema),
-    )
+      schemaObjectTypeCheck(x, value, schema)
+    );
   }
-  if ('enum' in schemaObject && schemaObject.enum) {
-    return schemaObject.enum.some((x) => x === value)
+  if ("enum" in schemaObject && schemaObject.enum) {
+    return schemaObject.enum.some((x) => x === value);
   }
   // @ts-expect-error
-  const format = schema.objects.formats[schemaObject.type]
-  const re = new RegExp(`^${format.pattern}$`)
-  return re.test(value)
+  const format = schema.objects.formats[schemaObject.type];
+  const re = new RegExp(`^${format.pattern}$`);
+  return re.test(value);
+}
+
+/**
+ * Checks user-supplied type information from a sidecar against a TSV column value.
+ */
+function sidecarDefinedTypeCheck(
+  rule: object,
+  value: string,
+  schema: GenericSchema,
+): boolean {
+  if (
+    "Levels" in rule && rule["Levels"] && typeof (rule["Levels"]) == "object"
+  ) {
+    return value == "n/a" || value in rule["Levels"];
+  } else if ("Units" in rule) {
+    return schemaObjectTypeCheck({ "type": "number" }, value, schema);
+  } else {
+    return true;
+  }
 }
 
 /**
  * Columns in schema rules are assertions about the requirement level of what
  * headers should be present in a tsv file. Examples in specification:
  * schema/rules/tabular_data/*
+ *
+ * For each column in a rule.tabular_data check we generate an error if the
+ * column is missing from the tsv and listed as required by the schema, a
+ * warning if the schema rule is clobbered by the sidecar but shouldn't be. If
+ * the column is not in the tsv we bail out and move to the next column,
+ * otherwise we type check each value in the column according to the type
+ * specified in the schema rule (or sidecar type information if applicable).
  */
 function evalColumns(
   rule: GenericRule,
@@ -196,33 +222,69 @@ function evalColumns(
   schema: GenericSchema,
   schemaPath: string,
 ): void {
-  if (!rule.columns || context.extension !== '.tsv') return
-  const headers = [...Object.keys(context.columns)]
+  if (!rule.columns || context.extension !== ".tsv") return;
+  const headers = [...Object.keys(context.columns)];
   for (const [ruleHeader, requirement] of Object.entries(rule.columns)) {
     // @ts-expect-error
-    const columnObject = schema.objects.columns[ruleHeader]
-    const name = columnObject.name
-    if (!headers.includes(name) && requirement === 'required') {
-      context.issues.addNonSchemaIssue('TSV_COLUMN_MISSING', [
+    const columnObject: GenericRule = schema.objects.columns[ruleHeader];
+    if (!("name" in columnObject) || !columnObject["name"]) {
+      return;
+    }
+    const name = columnObject.name;
+    let typeCheck = (value: string) =>
+      schemaObjectTypeCheck(
+        columnObject as unknown as SchemaTypeLike,
+        value,
+        schema,
+      );
+    const error_code = (requirement != "required")
+      ? "TSV_VALUE_INCORRECT_TYPE_NONREQUIRED"
+      : "TSV_VALUE_INCORRECT_TYPE";
+    let errorObject = columnObject;
+
+    if (!headers.includes(name) && requirement === "required") {
+      context.issues.addNonSchemaIssue("TSV_COLUMN_MISSING", [
         {
           ...context.file,
-          evidence: `Column with header ${name} listed as required. ${schemaPath}`,
+          evidence:
+            `Column with header ${name} listed as required. ${schemaPath}`,
         },
-      ])
+      ]);
     }
-    if (headers.includes(name)) {
-      for (const value of context.columns[name] as string[]) {
-        if (
-          !schemaObjectTypeCheck(columnObject as SchemaTypeLike, value, schema)
-        ) {
-          context.issues.addNonSchemaIssue('TSV_VALUE_INCORRECT_TYPE', [
-            {
-              ...context.file,
-              evidence: `'${value}' ${Deno.inspect(columnObject)}`,
-            },
-          ])
-          break
-        }
+
+    if (
+      name in context.sidecar && context.sidecar[name] &&
+      typeof (context.sidecar[name]) === "object"
+    ) {
+      if ("definition" in columnObject) {
+        typeCheck = (value) =>
+          sidecarDefinedTypeCheck(context.sidecar[name], value, schema);
+        errorObject = context.sidecar[name];
+      } else {
+        context.issues.addNonSchemaIssue("TSV_COLUMN_TYPE_REDEFINED", [{
+          ...context.file,
+          evidence: `'${name}' redefined with sidecar ${
+            Deno.inspect(context.sidecar[name])
+          }`,
+        }]);
+      }
+    }
+
+    if (!headers.includes(name)) {
+      continue;
+    }
+
+    for (const value of context.columns[name] as string[]) {
+      if (
+        !typeCheck(value)
+      ) {
+        context.issues.addNonSchemaIssue(error_code, [
+          {
+            ...context.file,
+            evidence: `'${value}' ${Deno.inspect(errorObject)}`,
+          },
+        ]);
+        break;
       }
     }
   }
@@ -238,25 +300,30 @@ function evalInitialColumns(
   schema: GenericSchema,
   schemaPath: string,
 ): void {
-  if (!rule?.columns || !rule?.initial_columns || context.extension !== '.tsv')
-    return
-  const headers = [...Object.keys(context.columns)]
+  if (
+    !rule?.columns || !rule?.initial_columns || context.extension !== ".tsv"
+  ) {
+    return;
+  }
+  const headers = [...Object.keys(context.columns)];
   rule.initial_columns.map((ruleHeader: string, ruleIndex: number) => {
     // @ts-expect-error
-    const ruleHeaderName = schema.objects.columns[ruleHeader].name
-    const contextIndex = headers.findIndex((x) => x === ruleHeaderName)
+    const ruleHeaderName = schema.objects.columns[ruleHeader].name;
+    const contextIndex = headers.findIndex((x) => x === ruleHeaderName);
     if (contextIndex === -1) {
-      const evidence = `Column with header ${ruleHeaderName} not found, indexed from 0 it should appear in column ${ruleIndex}. ${schemaPath}`
-      context.issues.addNonSchemaIssue('TSV_COLUMN_MISSING', [
+      const evidence =
+        `Column with header ${ruleHeaderName} not found, indexed from 0 it should appear in column ${ruleIndex}. ${schemaPath}`;
+      context.issues.addNonSchemaIssue("TSV_COLUMN_MISSING", [
         { ...context.file, evidence: evidence },
-      ])
+      ]);
     } else if (ruleIndex !== contextIndex) {
-      const evidence = `Column with header ${ruleHeaderName} found at index ${contextIndex} while rule specifies, indexed from 0, it should be in column ${ruleIndex}. ${schemaPath}`
-      context.issues.addNonSchemaIssue('TSV_COLUMN_ORDER_INCORRECT', [
+      const evidence =
+        `Column with header ${ruleHeaderName} found at index ${contextIndex} while rule specifies, indexed from 0, it should be in column ${ruleIndex}. ${schemaPath}`;
+      context.issues.addNonSchemaIssue("TSV_COLUMN_ORDER_INCORRECT", [
         { ...context.file, evidence: evidence },
-      ])
+      ]);
     }
-  })
+  });
 }
 
 function evalAdditionalColumns(
@@ -265,24 +332,24 @@ function evalAdditionalColumns(
   schema: GenericSchema,
   schemaPath: string,
 ): void {
-  if (context.extension !== '.tsv') return
-  const headers = Object.keys(context?.columns)
+  if (context.extension !== ".tsv") return;
+  const headers = Object.keys(context?.columns);
   // hard coding allowed here feels bad
-  if (!(rule.additional_columns === 'allowed') && rule.columns) {
+  if (!(rule.additional_columns === "allowed") && rule.columns) {
     const ruleHeadersNames = Object.keys(rule.columns).map(
       // @ts-expect-error
       (x) => schema.objects.columns[x].name,
-    )
+    );
     let extraCols = headers.filter(
       (header) => !ruleHeadersNames.includes(header),
-    )
-    if (rule.additional_columns === 'allowed_if_defined') {
-      extraCols = extraCols.filter((header) => !(header in context.sidecar))
+    );
+    if (rule.additional_columns === "allowed_if_defined") {
+      extraCols = extraCols.filter((header) => !(header in context.sidecar));
    }
     if (extraCols.length) {
-      context.issues.addNonSchemaIssue('TSV_ADDITIONAL_COLUMNS_NOT_ALLOWED', [
+      context.issues.addNonSchemaIssue("TSV_ADDITIONAL_COLUMNS_NOT_ALLOWED", [
         { ...context.file, evidence: `Disallowed columns found ${extraCols}` },
-      ])
+      ]);
     }
   }
 }
@@ -297,39 +364,41 @@ function evalIndexColumns(
     !rule?.columns ||
     !rule?.index_columns ||
     !rule?.index_columns.length ||
-    context.extension !== '.tsv'
-  )
-    return
-  const headers = Object.keys(context?.columns)
-  const uniqueIndexValues = new Set()
+    context.extension !== ".tsv"
+  ) {
+    return;
+  }
+  const headers = Object.keys(context?.columns);
+  const uniqueIndexValues = new Set();
   const index_columns = rule.index_columns.map((col: string) => {
     // @ts-expect-error
-    return schema.objects.columns[col].name
-  })
-  const missing = index_columns.filter((col: string) => !headers.includes(col))
+    return schema.objects.columns[col].name;
+  });
+  const missing = index_columns.filter((col: string) => !headers.includes(col));
   if (missing.length) {
-    context.issues.addNonSchemaIssue('TSV_COLUMN_MISSING', [
+    context.issues.addNonSchemaIssue("TSV_COLUMN_MISSING", [
       {
         ...context.file,
-        evidence: `Columns cited as index columns not in file: ${missing}. ${schemaPath}`,
+        evidence:
+          `Columns cited as index columns not in file: ${missing}. ${schemaPath}`,
       },
-    ])
-    return
+    ]);
+    return;
   }
-  const rowCount = (context.columns[index_columns[0]] as string[])?.length || 0
+  const rowCount = (context.columns[index_columns[0]] as string[])?.length || 0;
   for (let i = 0; i < rowCount; i++) {
-    let indexValue = ''
+    let indexValue = "";
     index_columns.map((col: string) => {
       indexValue = indexValue.concat(
-        (context.columns[col] as string[])?.[i] || '',
-      )
-    })
+        (context.columns[col] as string[])?.[i] || "",
+      );
+    });
     if (uniqueIndexValues.has(indexValue)) {
-      context.issues.addNonSchemaIssue('TSV_INDEX_VALUE_NOT_UNIQUE', [
+      context.issues.addNonSchemaIssue("TSV_INDEX_VALUE_NOT_UNIQUE", [
         { ...context.file, evidence: `Row: ${i + 2}, Value: ${indexValue}` },
-      ])
+      ]);
     } else {
-      uniqueIndexValues.add(indexValue)
+      uniqueIndexValues.add(indexValue);
     }
   }
 }
@@ -339,7 +408,6 @@ function evalIndexColumns(
  * sidecar for a file. Will need to implement an additional check/error for
  * `prohibitied` fields. Examples in specification:
  * schema/rules/sidecars/*
- *
  */
 function evalJsonCheck(
   rule: GenericRule,
@@ -348,24 +416,24 @@ function evalJsonCheck(
   schemaPath: string,
 ): void {
   for (const [key, requirement] of Object.entries(rule.fields)) {
-    const severity = getFieldSeverity(requirement, context)
+    const severity = getFieldSeverity(requirement, context);
     // @ts-expect-error
-    const keyName = schema.objects.metadata[key].name
-    if (severity && severity !== 'ignore' && !(keyName in context.sidecar)) {
+    const keyName = schema.objects.metadata[key].name;
+    if (severity && severity !== "ignore" && !(keyName in context.sidecar)) {
       if (requirement.issue?.code && requirement.issue?.message) {
         context.issues.add({
           key: requirement.issue.code,
           reason: requirement.issue.message,
           severity,
           files: [{ ...context.file }],
-        })
+        });
       } else {
-        context.issues.addNonSchemaIssue('JSON_KEY_REQUIRED', [
+        context.issues.addNonSchemaIssue("JSON_KEY_REQUIRED", [
           {
             ...context.file,
            evidence: `missing ${keyName} as per ${schemaPath}`,
           },
-        ])
+        ]);
       }
     }
   }
@@ -382,28 +450,27 @@ function getFieldSeverity(
 ): Severity {
   // Does this conversion hold for other parts of the schema or just json checks?
   const levelToSeverity: Record<string, Severity> = {
-    recommended: 'ignore',
-    required: 'error',
-    optional: 'ignore',
-    prohibited: 'ignore',
-  }
-  let severity: Severity = 'ignore'
+    recommended: "ignore",
+    required: "error",
+    optional: "ignore",
+    prohibited: "ignore",
+  };
+  let severity: Severity = "ignore";
 
-  if (typeof requirement === 'string' && requirement in levelToSeverity) {
-    severity = levelToSeverity[requirement]
-  } else if (typeof requirement === 'object' && requirement.level) {
-    severity = levelToSeverity[requirement.level]
-    const addendumRegex = /(required|recommended) if \`(\w+)\` is \`(\w+)\`/
+  if (typeof requirement === "string" && requirement in levelToSeverity) {
+    severity = levelToSeverity[requirement];
+  } else if (typeof requirement === "object" && requirement.level) {
+    severity = levelToSeverity[requirement.level];
+    const addendumRegex = /(required|recommended) if \`(\w+)\` is \`(\w+)\`/;
     if (requirement.level_addendum) {
-      const match = addendumRegex.exec(requirement.level_addendum)
+      const match = addendumRegex.exec(requirement.level_addendum);
       if (match && match.length === 4) {
-        const [_, addendumLevel, key, value] = match
-        // @ts-expect-error
+        const [_, addendumLevel, key, value] = match;
         if (key in context.sidecar && context.sidecar[key] === value) {
-          severity = levelToSeverity[addendumLevel]
+          severity = levelToSeverity[addendumLevel];
        }
       }
     }
   }
-  return severity
+  return severity;
 }
diff --git a/bids-validator/src/schema/context.test.ts b/bids-validator/src/schema/context.test.ts
index f08cc3c3a..de6306f77 100644
--- a/bids-validator/src/schema/context.test.ts
+++ b/bids-validator/src/schema/context.test.ts
@@ -93,13 +93,11 @@ let context = new BIDSContext(anatFileTree, dataFile, issues)
 Deno.test('test context LoadSidecar', async (t) => {
   await context.loadSidecar(rootFileTree)
   await t.step('sidecar overwrites correct fields', () => {
-    // @ts-expect-error
     const { rootOverwrite, subOverwrite } = context.sidecar
     assert(rootOverwrite, 'anat')
     assert(subOverwrite, 'anat')
   })
   await t.step('sidecar adds new fields at each level', () => {
-    // @ts-expect-error
     const { rootValue, subValue, anatValue } = context.sidecar
     assert(rootValue, 'root')
     assert(subValue, 'subject')
diff --git a/bids-validator/src/schema/context.ts b/bids-validator/src/schema/context.ts
index 41e10144e..094e3e277 100644
--- a/bids-validator/src/schema/context.ts
+++ b/bids-validator/src/schema/context.ts
@@ -77,7 +77,7 @@ export class BIDSContext implements Context {
   subject: ContextSubject
   datatype: string
   modality: string
-  sidecar: object
+  sidecar: Record<string, any>
   json: object
   columns: ColumnsMap
   associations: ContextAssociations
diff --git a/bids-validator/src/types/context.ts b/bids-validator/src/types/context.ts
index aa9059a92..4b47ef0cf 100644
--- a/bids-validator/src/types/context.ts
+++ b/bids-validator/src/types/context.ts
@@ -96,7 +96,7 @@ export interface Context {
   suffix: string
   extension: string
   modality: string
-  sidecar: object
+  sidecar: Record<string, any>
   associations: ContextAssociations
   columns: object
   json: object
diff --git a/bids-validator/src/validators/bids.ts b/bids-validator/src/validators/bids.ts
index 05a18d89e..395784e5e 100644
--- a/bids-validator/src/validators/bids.ts
+++ b/bids-validator/src/validators/bids.ts
@@ -24,7 +24,7 @@ const CHECKS: CheckFunction[] = [
   filenameIdentify,
   filenameValidate,
   applyRules,
-]
+];
 
 /**
  * Full BIDS schema validation entrypoint
@@ -33,77 +33,79 @@ export async function validate(
   fileTree: FileTree,
   options: ValidatorOptions,
 ): Promise<ValidationResult> {
-  const issues = new DatasetIssues()
-  const summary = new Summary()
-  const schema = await loadSchema(options.schema)
-  summary.schemaVersion = schema.schema_version
+  const issues = new DatasetIssues();
+  const summary = new Summary();
+  const schema = await loadSchema(options.schema);
+  summary.schemaVersion = schema.schema_version;
 
   /* There should be a dataset_description in root, this will tell us if we
    * are dealing with a derivative dataset
    */
   const ddFile = fileTree.files.find(
-    (file: BIDSFile) => file.name === 'dataset_description.json',
-  )
+    (file: BIDSFile) => file.name === "dataset_description.json",
+  );
 
-  let dsContext
+  let dsContext;
   if (ddFile) {
-    const description = await ddFile.text().then((text) => JSON.parse(text))
-    summary.dataProcessed = description.DatasetType === 'derivative'
-    dsContext = new BIDSContextDataset(options, description)
+    const description = await ddFile.text().then((text) => JSON.parse(text));
+    summary.dataProcessed = description.DatasetType === "derivative";
+    dsContext = new BIDSContextDataset(options, description);
   } else {
-    dsContext = new BIDSContextDataset(options)
-    issues.addNonSchemaIssue('MISSING_DATASET_DESCRIPTION', [] as IssueFile[])
+    dsContext = new BIDSContextDataset(options);
+    issues.addNonSchemaIssue('MISSING_DATASET_DESCRIPTION', [] as IssueFile[]);
   }
 
-  let derivatives: FileTree[] = []
+  let derivatives: FileTree[] = [];
   fileTree.directories = fileTree.directories.filter((dir) => {
-    if (dir.name === 'derivatives') {
+    if (dir.name === "derivatives") {
       dir.directories.map((deriv) => {
         if (
           deriv.files.some(
-            (file: BIDSFile) => file.name === 'dataset_description.json',
+            (file: BIDSFile) => file.name === "dataset_description.json",
           )
         ) {
-          derivatives.push(deriv)
+          derivatives.push(deriv);
         }
-      })
-      return true
+      });
+      return true;
     }
-    return true
-  })
+    return true;
+  });
 
   for await (const context of walkFileTree(fileTree, issues, dsContext)) {
     // TODO - Skip ignored files for now (some tests may reference ignored files)
     if (context.file.ignored) {
-      continue
+      continue;
    }
-    if (dsContext.dataset_description.DatasetType == 'raw' && context.file.path.includes('derivatives')) {
-      continue
+    if (
+      dsContext.dataset_description.DatasetType == "raw" &&
+      context.file.path.includes("derivatives")
+    ) {
+      continue;
     }
-    await context.asyncLoads()
+    await context.asyncLoads();
     // Run majority of checks
     for (const check of CHECKS) {
-      // TODO - Resolve this double casting?
-      await check(schema as unknown as GenericSchema, context)
+      await check(schema as unknown as GenericSchema, context);
    }
-    await summary.update(context)
+    await summary.update(context);
   }
 
-  let derivativesSummary: Record<string, ValidationResult> = {}
+  let derivativesSummary: Record<string, ValidationResult> = {};
   await Promise.allSettled(
     derivatives.map(async (deriv) => {
-      derivativesSummary[deriv.name] = await validate(deriv, options)
-      return derivativesSummary[deriv.name]
+      derivativesSummary[deriv.name] = await validate(deriv, options);
+      return derivativesSummary[deriv.name];
     }),
-  )
+  );
 
   let output: ValidationResult = {
     issues,
     summary: summary.formatOutput(),
-  }
+  };
 
   if (Object.keys(derivativesSummary).length) {
-    output['derivativesSummary'] = derivativesSummary
+    output["derivativesSummary"] = derivativesSummary;
   }
 
-  return output
+  return output;
 }
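
For anyone tracing the new sidecar-driven column checks, here is a minimal sketch of what sidecarDefinedTypeCheck accepts. The column names are hypothetical, and Number() stands in for the schema's number-format regex (the real Units path delegates to schemaObjectTypeCheck, which also admits "n/a" upstream):

// Hypothetical sidecar entries; "response" and "reaction_time" are
// illustrative column names, not taken from the patch.
const sidecar: Record<string, any> = {
  response: { Levels: { left: "Left button", right: "Right button" } },
  reaction_time: { Units: "ms" },
};

// Mirrors sidecarDefinedTypeCheck: Levels => membership test (or "n/a"),
// Units => numeric test, anything else => accept.
function checkAgainstSidecar(rule: Record<string, any>, value: string): boolean {
  if (rule.Levels && typeof rule.Levels === "object") {
    return value === "n/a" || value in rule.Levels;
  }
  if ("Units" in rule) {
    return !Number.isNaN(Number(value)); // stand-in for the schema number format
  }
  return true;
}

checkAgainstSidecar(sidecar.response, "left"); // true
checkAgainstSidecar(sidecar.response, "up"); // false -> TSV_VALUE_INCORRECT_TYPE
checkAgainstSidecar(sidecar.reaction_time, "350"); // true

Note that in evalColumns this check only replaces the schema-derived one when the schema column object carries a definition; a sidecar entry for any other schema-defined column leaves the schema check in place and emits the new TSV_COLUMN_TYPE_REDEFINED warning instead.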
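
The level_addendum handling in getFieldSeverity can be traced the same way. The requirement object below is hypothetical, but the regex is the one in the patch:

// Hypothetical requirement; the field name and values are illustrative only.
const requirement = {
  level: "recommended",
  level_addendum: "required if `RecordingType` is `continuous`",
};

const addendumRegex = /(required|recommended) if \`(\w+)\` is \`(\w+)\`/;
const match = addendumRegex.exec(requirement.level_addendum);
// match => [full text, "required", "RecordingType", "continuous"], length 4.
// When context.sidecar.RecordingType === "continuous", severity is upgraded to
// levelToSeverity["required"], i.e. "error"; otherwise it stays "ignore",
// since "recommended" maps to "ignore" in levelToSeverity.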