diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index 22f52e70..00000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,99 +0,0 @@
-version: 2
-
-jobs:
-  test:
-    docker:
-      - image: node:18-alpine
-    steps:
-      - checkout
-      - restore_cache:
-          keys:
-            - npm-deps-{{ checksum "package.json" }}-{{ checksum "./bids-validator/package.json" }}-{{ checksum "./bids-validator-web/package.json" }}-v1
-      - run: apk --no-cache add git
-      - run: npm install -g npm@^7
-      - run: npm install
-      - run:
-          name: Eslint
-          command: npm run lint
-      - run:
-          name: Get bids-examples data
-          command: git submodule update --init
-      - run:
-          # must include separating --, see https://stackoverflow.com/a/14404223/5201771
-          name: Jest tests
-          command: npm run coverage -- --maxWorkers=2 --testTimeout=10000
-      - run:
-          name: Upload to codecov
-          command: npm run codecov
-      - run:
-          name: Smoke tests
-          command: |
-            bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders
-            bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json
-  test_docker:
-    environment:
-      IMAGE_NAME: bids/validator
-    machine:
-      # Ubuntu 20.04, Docker v20.10.11, Docker Compose v1.29.2
-      # see: https://circleci.com/docs/2.0/configuration-reference/#available-machine-images
-      image: ubuntu-2004:202111-02
-    steps:
-      - checkout
-      - run: docker build -t $IMAGE_NAME:latest .
-      - run:
-          name: Smoke tests
-          command: |
-            docker run --rm -it -v $PWD/bids-validator/tests/data/valid_headers:/data bids/validator:latest /data --ignoreNiftiHeaders
-            docker run --rm -it -v $PWD/bids-validator/tests/data/valid_headers:/data bids/validator:latest /data --ignoreNiftiHeaders --json
-      - run:
-          name: Archive Docker image
-          command: docker save -o image.tar $IMAGE_NAME
-      - persist_to_workspace:
-          root: .
-          paths:
-            - ./image.tar
-  githubPagesTest:
-    docker:
-      - image: node:18-alpine
-    steps:
-      - run: npm install --global npm
-      - run: apk --no-cache add ca-certificates git openssh-client rsync
-      - checkout
-      - restore_cache:
-          keys:
-            - npm-deps-{{ checksum "package.json" }}-{{ checksum "./bids-validator/package.json" }}-{{ checksum "./bids-validator-web/package.json" }}-v1
-      - run:
-          name: Install bids-validator and bids-validator-web dependencies
-          command: npm install
-      - run:
-          name: Build and export web-validator
-          command: npm run web-export && rsync -av ./bids-validator-web/out/ ~/web_version
-      - save_cache:
-          key: npm-deps-{{ checksum "package.json" }}-{{ checksum "./bids-validator/package.json" }}-{{ checksum "./bids-validator-web/package.json" }}-v1
-          paths:
-            - ./node_modules
-            - ./.next/cache
-      - store_artifacts:
-          path: ~/web_version
-workflows:
-  version: 2
-  build-deploy:
-    jobs:
-      - test:
-          filters:
-            branches:
-              only: /.*/
-            tags:
-              only: /.*/
-      - test_docker:
-          filters:
-            tags:
-              only: /.*/
-      - githubPagesTest:
-          requires:
-            - test
-          filters:
-            branches:
-              only: /.*/
-            tags:
-              only: /.*/
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 028855ae..e3ab0cc4 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -15,12 +15,3 @@ updates:
     directory: '/'
     schedule:
       interval: 'monthly'
-# - package-ecosystem: 'npm'
-#   directory: '/'
-#   schedule:
-#     interval: 'weekly'
-#
-# - package-ecosystem: 'npm'
-#   directory: '/bids-validator-web'
-#   schedule:
-#     interval: 'weekly'
diff --git a/.github/workflows/docker-build-push.yml b/.github/workflows/docker-build-push.yml
deleted file mode 100644
index 911dbddc..00000000
--- a/.github/workflows/docker-build-push.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-name: Build & publish containers
-
-on:
-  push:
-    tags:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
-
-jobs:
-  push_docker_to_registry:
-    name: Push Docker image to Docker Hub
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v4
-
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: bids/validator
-
-      - name: Build and push Docker image
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-
-  push_singularity_to_registry:
-    runs-on: ubuntu-latest
-    needs:
-      - push_docker_to_registry
-    steps:
-      - name: Set up Go 1.13
-        uses: actions/setup-go@v5
-        with:
-          go-version: 1.13
-        id: go
-      - name: Install Dependencies
-        run: |
-          sudo apt-get update && sudo apt-get install -y \
-            build-essential \
-            libssl-dev \
-            uuid-dev \
-            libgpgme11-dev \
-            squashfs-tools \
-            libseccomp-dev \
-            pkg-config
-      - name: Install Singularity
-        env:
-          SINGULARITY_VERSION: 3.8.0
-          GOPATH: /tmp/go
-        run: |
-          mkdir -p $GOPATH
-          sudo mkdir -p /usr/local/var/singularity/mnt && \
-          mkdir -p $GOPATH/src/github.com/sylabs && \
-          cd $GOPATH/src/github.com/sylabs && \
-          wget -qO- https://github.com/sylabs/singularity/releases/download/v${SINGULARITY_VERSION}/singularity-ce-${SINGULARITY_VERSION}.tar.gz | \
-          tar xzv && \
-          cd singularity-ce-${SINGULARITY_VERSION} && \
-          ./mconfig -p /usr/local && \
-          make -C builddir && \
-          sudo make -C builddir install
-      - name: Check out code for the container build
-        uses: actions/checkout@v4
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: bids/validator
-      - name: Build Container
-        run: |
-          tags=${{ steps.meta.outputs.tags }}
-          echo $tags
-          singularity pull container.sif docker://${tags%,*}
-      - name: Login and Deploy Container
-        if: (github.event_name != 'pull_request')
-        run: |
-          tags=${{ steps.meta.outputs.tags }}
-          echo ${{ secrets.GITHUB_TOKEN }} | singularity remote login -u ${{ github.actor }} --password-stdin oras://ghcr.io
-          for tag in $(echo $tags | sed "s/,/ /g" | sed "s/bids\/validator/bids-standard\/bids-validator/g");
-            do echo $tag; singularity push container.sif oras://ghcr.io/$tag; done
diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml
deleted file mode 100644
index 422a9dcd..00000000
--- a/.github/workflows/node_tests.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Node Tests
-
-on:
-  push:
-    branches: [master]
-  pull_request:
-    branches: [master]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    strategy:
-      fail-fast: false
-      matrix:
-        platform: [ubuntu-latest, macos-latest, windows-latest]
-        node: [18]
-
-    runs-on: ${{ matrix.platform }}
-
-    steps:
-      - name: Set git to use LF
-        run: |
-          git config --global core.autocrlf false
-          git config --global core.eol lf
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version: '18'
-          cache: 'npm'
-      - name: Set git name/email
-        run: |
-          git config --global user.email "bids.maintenance@gmail.com"
-          git config --global user.name "bids-maintenance"
-      - name: Set up Node
-        run: |
-          npm install -g npm@^7
-          npm install
-      - name: Eslint
-        run: npm run lint
-      - name: Get bids-examples data
-        run: git submodule update --init
-      - name: Jest tests
-        run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 --colors
-      - name: Upload to codecov
-        uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-      - name: Smoke tests
-        run: |
-          bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders
-          bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json
diff --git a/.github/workflows/schema_web_deploy.yml b/.github/workflows/schema_web_deploy.yml
index 2778bf4e..5d9037c2 100644
--- a/.github/workflows/schema_web_deploy.yml
+++ b/.github/workflows/schema_web_deploy.yml
@@ -14,10 +14,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      # Use Node 18 for legacy build
-      - uses: actions/setup-node@v4
-        with:
-          node-version: 18
       - uses: denoland/setup-deno@v1
         with:
           deno-version: v1.x
@@ -25,10 +21,6 @@ jobs:
         working-directory: ./web
       - name: Install NPM deps
         run: npm install
-      - name: Build legacy validator website
-        run: npm run web-export
-      - name: Move legacy validator build into deno website
-        run: mv bids-validator-web/out web/dist/legacy
       - name: Upload GitHub Pages artifact
         uses: actions/upload-pages-artifact@v3
         with:
diff --git a/.github/workflows/test-bids-examples.yml b/.github/workflows/test-bids-examples.yml
deleted file mode 100644
index 15b922cc..00000000
--- a/.github/workflows/test-bids-examples.yml
+++ /dev/null
@@ -1,72 +0,0 @@
-name: test-bids-examples
-
-on:
-  push:
-    branches: ['*']
-  pull_request:
-    branches: ['*']
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    strategy:
-      fail-fast: false
-      matrix:
-        platform: [ubuntu-latest]
-        bids-examples-branch: [master]
-
-    runs-on: ${{ matrix.platform }}
-
-    env:
-      TZ: Europe/Berlin
-      FORCE_COLOR: 1
-
-    steps:
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 18
-      - uses: actions/checkout@v4
-        with:
-          submodules: recursive
-
-      - name: Upgrade npm
-        run: npm install --global npm
-
-      - name: Install bids-validator
-        run: |
-          npm install
-          npm link ./bids-validator
-          echo "./node_modules/.bin" >> $GITHUB_PATH
-
-      - name: Get bids-examples data
-        run: |
-          git clone --depth 1 https://github.com/bids-standard/bids-examples
-
-      - name: Display versions and environment information
-        run: |
-          echo $TZ
-          export PATH="../node_modules/.bin:$PATH"
-          date
-          echo "npm"; npm --version
-          echo "node"; node --version
-          echo "bids-validator"; bids-validator --version
-          which bids-validator
-          ls -a $(which bids-validator)
-
-      - name: Ignore examples not supported by legacy validator
-        run: |
-          for DS in bids-examples/mrs_* bids-examples/dwi_deriv; do
-            touch $DS/.SKIP_VALIDATION
-          done
-
-      - name: Validate all BIDS datasets using bids-validator
-        run: |
-          export PATH="../node_modules/.bin:$PATH"
-          cat ./run_tests.sh
-          bash ./run_tests.sh
-        shell: bash
-        working-directory: ./bids-examples
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 9a51c992..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM node:18-alpine as build
-RUN npm install -g npm
-
-COPY . /src
-WORKDIR /src
-
-RUN npm install
-RUN npm -w bids-validator run build
-RUN npm -w bids-validator pack
-
-FROM node:18-alpine
-
-COPY --from=build /src/bids-validator-*.tgz /tmp
-RUN npm install -g /tmp/bids-validator-*.tgz
-
-ENTRYPOINT ["/usr/local/bin/bids-validator"]
diff --git a/babel.config.js b/babel.config.js
deleted file mode 100644
index 7a59ca8d..00000000
--- a/babel.config.js
+++ /dev/null
@@ -1,18 +0,0 @@
-module.exports = {
-  presets: [
-    [
-      '@babel/preset-env',
-      {
-        targets: {
-          node: 'current',
-        },
-      },
-      'jest',
-    ],
-  ],
-  env: {
-    test: {
-      plugins: ['@babel/plugin-transform-modules-commonjs'],
-    },
-  },
-}
diff --git a/bids-validator-web/.eslintrc b/bids-validator-web/.eslintrc
deleted file mode 100644
index 55903ff5..00000000
--- a/bids-validator-web/.eslintrc
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "extends": ["eslint:recommended", "prettier", "next"],
-}
diff --git a/bids-validator-web/components/App.jsx b/bids-validator-web/components/App.jsx
deleted file mode 100644
index 34ee002f..00000000
--- a/bids-validator-web/components/App.jsx
+++ /dev/null
@@ -1,116 +0,0 @@
-import React from 'react'
-import Issues from '../components/Issues'
-import Validate from '../components/Validate'
-import validate from '../../bids-validator'
-import validatorPackageJson from 'bids-validator/package.json' assert { type: 'json' }
-const version = validatorPackageJson.version
-
-// component setup -----------------------------------------------------------
-const initState = () => ({
-  dirName: '',
-  list: {},
-  nameError: null,
-  projectId: '',
-  refs: {},
-  errors: [],
-  warnings: [],
-  summary: null,
-  status: '',
-  uploadStatus: '',
-  options: {
-    ignoreWarnings: false,
-    ignoreNiftiHeaders: false,
-    ignoreSubjectConsistency: false,
-  },
-})
-
-export default class App extends React.Component {
-  constructor() {
-    super()
-    this.state = initState()
-    this.validate = this._validate.bind(this)
-    this.reset = this._reset.bind(this)
-  }
-
-  _validate(selectedFiles) {
-    const dirName = selectedFiles.list[0].webkitRelativePath.split('/')[0]
-    const defaultConfig = `${dirName}/.bids-validator-config.json`
-    this.setState({
-      status: 'validating',
-      showIssues: true,
-      activeKey: 3,
-      dirName,
-    })
-    return validate.BIDS(
-      selectedFiles.list,
-      {
-        verbose: true,
-        ...this.state.options,
-        config: defaultConfig,
-      },
-      (issues, summary) => {
-        if (issues === 'Invalid') {
-          return this.setState({
-            errors: 'Invalid',
-            summary,
-            status: 'validated',
-          })
-        } else {
-          return this.setState({
-            errors: issues.errors ? issues.errors : [],
-            warnings: issues.warnings ? issues.warnings : [],
-            summary,
-            status: 'validated',
-          })
-        }
-      },
-    )
-  }
-
-  _reset() {
-    this.setState(initState())
-  }
-
-  handleOptionToggle = (e) => {
-    const { name } = e.target
-    this.setState((prevState) => ({
-      ...prevState,
-      options: {
-        ...prevState.options,
-        [name]: !prevState.options[name],
-      },
-    }))
-  }
-
-  render() {
-    return (
-
-
-
-
-
-
-        {this.state.status === 'validated' ? (
-
-        ) : null}
-
-
-    )
-  }
-}
diff --git a/bids-validator-web/components/ErrorLink.jsx b/bids-validator-web/components/ErrorLink.jsx
deleted file mode 100644
index f63f6811..00000000
--- a/bids-validator-web/components/ErrorLink.jsx
+++ /dev/null
@@ -1,80 +0,0 @@
-import React from 'react'
-
-// component setup --------------------------------------------------------
-
-export default class ErrorLink extends React.Component {
-  // life cycle events ------------------------------------------------------
-
-  render() {
-    let dataURL = this._generateErrorLog(this.props.errors, this.props.warnings)
-    return (
-
-        Download error log for {this.props.dirName}
-
-    )
-  }
-
-  // custom methods ---------------------------------------------------------
-
-  /**
-   * Generate Error Log
-   *
-   * Takes an array of errors and an array of
-   * warnings and returns text error log encoded
-   * as a url.
-   */
-  _generateErrorLog(errors, warnings) {
-    let issueString = this._generateIssueLog(errors, 'Error')
-    issueString += this._generateIssueLog(warnings, 'Warning')
-    let errorURL =
-      'data:application/octet-stream;charset=utf-8,' +
-      encodeURIComponent(issueString)
-    return errorURL
-  }
-
-  /**
-   * Generate Issue Log
-   *
-   * Takes an array of issues and a string of the
-   * issue type and return a human readable log
-   * of the issues as a string.
-   */
-  _generateIssueLog(issues, type) {
-    let issueString = ''
-    let endLine = '======================================================'
-    for (var i = 0; i < issues.length; i++) {
-      issueString += 'File Path: ' + issues[i].reason + '\n\n'
-      for (var j = 0; j < issues[i].files.length; j++) {
-        var file = issues[i].files[j]
-        issueString += '\tType:\t\t' + type + '\n'
-        if (!file || !file.file) {
-          continue
-        }
-        if (file.file.name) {
-          issueString += '\tFile:\t\t' + file.file.name + '\n'
-        }
-        if (file.file.webkitRelativePath) {
-          issueString += '\tLocation:\t\t' + file.file.webkitRelativePath + '\n'
-        }
-        if (file.reason) {
-          issueString += '\tReason:\t\t' + file.reason + '\n'
-        }
-        if (file.line) {
-          issueString += '\tLine:\t\t' + file.line + '\n'
-        }
-        if (file.character) {
-          issueString += '\tCharacter:\t' + file.character + '\n'
-        }
-        if (file.evidence) {
-          issueString += '\tEvidence:\t' + file.evidence + '\n\n'
-        }
-      }
-      issueString += '\n' + endLine + '\n\n\n'
-    }
-    return issueString
-  }
-}
diff --git a/bids-validator-web/components/Issues.jsx b/bids-validator-web/components/Issues.jsx
deleted file mode 100644
index 42c41e5e..00000000
--- a/bids-validator-web/components/Issues.jsx
+++ /dev/null
@@ -1,137 +0,0 @@
-// dependencies -------------------------------------------------------
-
-import React from 'react'
-import PropTypes from 'prop-types'
-import pluralize from 'pluralize'
-import Results from './results/Results'
-import Spinner from './Spinner.jsx'
-import ErrorLink from './ErrorLink.jsx'
-import Summary from '../components/Summary'
-
-class Issues extends React.Component {
-  constructor(props) {
-    super(props)
-    this._reset = this.props.reset.bind(this)
-  }
-  // life cycle events --------------------------------------------------
-
-  render() {
-    // short references
-    let errors = this.props.errors
-    let warnings = this.props.warnings
-    let dirName = this.props.dirName
-
-    // counts
-    // let totalErrors = 0
-    let totalWarnings = 0
-    let warningCount = 0
-    // let errorCount = 0
-    if (errors !== 'Invalid') {
-      // totalErrors = errors.length
-      totalWarnings = warnings.length
-      warningCount = totalWarnings + ' ' + pluralize('Warning', totalWarnings)
-      // errorCount = totalErrors + ' ' + pluralize('Error', totalErrors)
-    }
-    // messages
-    let specLink = (
-
-        Click to view details on{' '}
-
-          BIDS specification
-
-
-    )
-    let notBIDSMessage = (
-
-        This directory failed an initial Quick Test. This means the basic names
-        and structure of the files and directories do not comply with BIDS
-        specification. Select a new folder{' '}
-        and try again.
-
-    )
-    let warningsMessage = We found {warningCount} in your dataset.
-    let errorMessage = Your dataset is not a valid BIDS dataset.
-    let noErrorMessage = This is a valid BIDS dataset!
-    let neurostarsLink = (
-
-        If you have any questions please post on{' '}
-
-          Neurostars
-
-
-    )
-    let sourcecode = (
-
-        The source code for the validator can be found{' '}
-
-          here
-
-
-    )
-
-    // determine message
-    let message
-    if (errors === 'Invalid') {
-      message = notBIDSMessage
-    } else if (errors.length > 0) {
-      message = errorMessage
-    } else if (warnings.length > 0) {
-      message = warningsMessage
-    } else {
-      message = noErrorMessage
-    }
-
-    // loading animation
-    let loading = (
-
-    )
-
-    // results
-    let results = (
-
-
-
-        {message}
-        {errors !== 'Invalid' ? (
-
-        ) : null}
-        {(errors.length > 0 && errors !== 'Invalid') || warnings.length > 0 ? (
-
-        ) : null}
-        {specLink}
-        {neurostarsLink}
-        {sourcecode}
-
-    )
-
-    return {this.props.status === 'validating' ? loading : results}
-  }
-}
-
-Issues.propTypes = {
-  reset: PropTypes.func,
-  errors: PropTypes.array,
-  warnings: PropTypes.array,
-  dirName: PropTypes.string,
-  status: PropTypes.string,
-}
-
-Issues.defaultProps = {
-  reset: () => {},
-  errors: [],
-  warnings: [],
-  dirName: '',
-  status: '',
-}
-
-export default Issues
diff --git a/bids-validator-web/components/List.jsx b/bids-validator-web/components/List.jsx
deleted file mode 100644
index adc3332c..00000000
--- a/bids-validator-web/components/List.jsx
+++ /dev/null
@@ -1,19 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-
-export default class List extends React.Component {
-  render() {
-    if (this.props.items) {
-      return this.props.items.map((item, index) => {
-        return {item}
-      })
-    }
-  }
-}
-
-List.propTypes = {
-  items: PropTypes.array,
-}
-List.defaultProps = {
-  items: [],
-}
diff --git a/bids-validator-web/components/Options.jsx b/bids-validator-web/components/Options.jsx
deleted file mode 100644
index 42521627..00000000
--- a/bids-validator-web/components/Options.jsx
+++ /dev/null
@@ -1,37 +0,0 @@
-import React from 'react'
-
-const Options = ({ setOption, options }) => (
-  <>
-
-
-
-
-
-
-
-
-
-
-  )
-
-export default Options
diff --git a/bids-validator-web/components/Spinner.jsx b/bids-validator-web/components/Spinner.jsx
deleted file mode 100644
index 1108768c..00000000
--- a/bids-validator-web/components/Spinner.jsx
+++ /dev/null
@@ -1,30 +0,0 @@
-import React from 'react'
-import PropTypes from 'prop-types'
-
-// component setup ---------------------------------------------------------------------------
-
-export default class Spinner extends React.Component {
-  // life cycle events -------------------------------------------------------------------------
-
-  render() {
-    let spinner = (
-
-
-
-
-        {this.props.text}
-
-    )
-    return this.props.active ? spinner : null
-  }
-}
-
-Spinner.propTypes = {
-  text: PropTypes.string,
-  active: PropTypes.bool,
-}
-
-Spinner.defaultProps = {
-  text: 'Loading',
-  active: false,
-}
diff --git a/bids-validator-web/components/Summary.jsx b/bids-validator-web/components/Summary.jsx
deleted file mode 100644
index ecb65320..00000000
--- a/bids-validator-web/components/Summary.jsx
+++ /dev/null
@@ -1,59 +0,0 @@
-// dependencies -------------------------------------------------------
-
-import React from 'react'
-import pluralize from 'pluralize'
-import bytes from 'bytes'
-import List from './List'
-
-class Summary extends React.Component {
-  // life cycle events --------------------------------------------------
-
-  render() {
-    let summary = this.props.summary
-    if (summary) {
-      var numSessions =
-        summary.sessions.length > 0 ? summary.sessions.length : 1
-      return (
-
-          {this.props.dirName}
-
-
-
-
-          Summary
-
-
-            {summary.totalFiles} {pluralize('File', summary.totalFiles)}
-            , {bytes(summary.size)}
-
-
-            {summary.subjects.length}
-            {' '}
-            {pluralize('Subject', summary.subjects.length)}
-
-
-            {numSessions}
-            {pluralize('Session', numSessions)}
-
-
-
-
-          Available Tasks
-
-
-
-
-
-          Available Modalities
-
-
-
-
-
-
-      )
-    } else {
-      return null
-    }
-  }
-}
-
-export default Summary
diff --git a/bids-validator-web/components/Upload.jsx b/bids-validator-web/components/Upload.jsx
deleted file mode 100644
index 6a2eaf7c..00000000
--- a/bids-validator-web/components/Upload.jsx
+++ /dev/null
@@ -1,31 +0,0 @@
-// dependencies -------------------------------------------------------
-
-import React from 'react'
-
-class Upload extends React.Component {
-  // life cycle events --------------------------------------------------
-  render() {
-    return (
-
-    )
-  }
-
-  // custom methods -----------------------------------------------------
-  _onFileSelect(e) {
-    if (e.target && e.target.files.length > 0) {
-      let files = e.target.files
-      let results = { list: files }
-      this.props.onChange(results)
-    }
-  }
-}
-
-export default Upload
diff --git a/bids-validator-web/components/Validate.jsx b/bids-validator-web/components/Validate.jsx
deleted file mode 100644
index d400fa24..00000000
--- a/bids-validator-web/components/Validate.jsx
+++ /dev/null
@@ -1,51 +0,0 @@
-import React from 'react'
-import Upload from './Upload.jsx'
-import Spinner from './Spinner.jsx'
-import Options from './Options.jsx'
-
-class Validate extends React.Component {
-  constructor(props) {
-    super(props)
-    this.state = {
-      loading: false,
-    }
-
-    this._clearInput = this.props.reset
-    this._onChange = this.props.onChange
-    this._setRefs = this.props.setRefs
-  }
-
-  // life cycle events --------------------------------------------------
-
-  render() {
-    let { options, handleOptionToggle } = this.props
-    let loading =
-    let select = (
-
-
-          Select a{' '}
-
-            BIDS dataset
-          {' '}
-          to validate
-
-
-
-
-
-          Note: Selecting a dataset only performs validation. Files are never
-          uploaded.
-
-
-    )
-
-    return {this.props.loading ? loading : select}
-  }
-}
-
-export default Validate
diff --git a/bids-validator-web/components/results/Issue.jsx b/bids-validator-web/components/results/Issue.jsx
deleted file mode 100644
index b02de023..00000000
--- a/bids-validator-web/components/results/Issue.jsx
+++ /dev/null
@@ -1,55 +0,0 @@
-// dependencies -------------------------------------------------------
-
-import React from 'react'
-import PropTypes from 'prop-types'
-
-// component setup ----------------------------------------------------
-
-export default class Issue extends React.Component {
-  // life cycle events --------------------------------------------------
-
-  render() {
-    let error = this.props.error
-
-    // build error location string
-    let errLocation = ''
-    if (error.line) {
-      errLocation += 'Line: ' + error.line + ' '
-    }
-    if (error.character) {
-      errLocation += 'Character: ' + error.character + ''
-    }
-    if (errLocation === '' && error.evidence) {
-      errLocation = 'Evidence: '
-    }
-
-    return (
-
-
-          {error.file.name}
-
-            {error.file.size / 1000} KB | {error.file.type}
-
-
-
-          {error.file.webkitRelativePath}
-
-
-          {error.reason}
-
-
-
-          {error.evidence}
-
-
-    )
-  }
-
-  // custom methods -----------------------------------------------------
-}
-
-Issue.propTypes = {
-  file: PropTypes.object,
-  error: PropTypes.object,
-  type: PropTypes.string.isRequired,
-}
diff --git a/bids-validator-web/components/results/Issues.jsx b/bids-validator-web/components/results/Issues.jsx
deleted file mode 100644
index 7d2c5cec..00000000
--- a/bids-validator-web/components/results/Issues.jsx
+++ /dev/null
@@ -1,147 +0,0 @@
-// dependencies -----------------------------------------------------------
-
-import React from 'react'
-import PropTypes from 'prop-types'
-import { Card, Collapse } from 'react-bootstrap'
-import pluralize from 'pluralize'
-import Issue from './Issue'
-
-// component setup --------------------------------------------------------
-
-class Issues extends React.Component {
-  constructor() {
-    super()
-    this.state = {
-      showMore: [],
-      errorOpen: false,
-    }
-  }
-
-  // life cycle events ------------------------------------------------------
-
-  render() {
-    let self = this
-    let issueFiles = this.props.issues
-    let issues = issueFiles.map((issue, index) => {
-      let files = issue.files
-      if (this.state.showMore.indexOf(index) === -1) {
-        files = issue.files.slice(0, 10)
-      }
-
-      // issue sub-errors
-      let hasErrorFiles = false
-      let subErrors = files.map(function (error, index2) {
-        if (error && error.file) {
-          hasErrorFiles = true
-          return (
-
-          )
-        }
-      })
-
-      // issue card
-      if (hasErrorFiles) {
-        return (
-
-
-              this.setState({ errorOpen: !this.state.errorOpen })
-            }>
-            {this._header(issue, index, this.props.issueType, hasErrorFiles)}
-
-
-
-              {subErrors}
-              {this._viewMore(issue.files, index)}
-
-
-
-        )
-      } else {
-        return (
-
-
-            {this._header(issue, index, this.props.issueType, hasErrorFiles)}
-
-
-        )
-      }
-    })
-    return {issues}
-  }
-
-  // template methods -------------------------------------------------------
-
-  _header(issue, index, type, hasErrorFiles) {
-    let issueCount = pluralize('files', issue.files.length)
-    let fileCount
-    if (hasErrorFiles) {
-      fileCount = (
-
-          {issue.files.length} {issueCount}
-
-      )
-    }
-    return (
-
-
-          {type} {index + 1}: [Code {issue.code}] {issue.key}
-
-
-        {this._issueLink(issue)}
-        {issue.reason}
-        {fileCount}
-
-    )
-  }
-
-  _issueLink(issue) {
-    if (issue && issue.helpUrl) {
-      return (
-
-
-            Click here for more information about this issue
-
-
-      )
-    } else {
-      return null
-    }
-  }
-
-  _viewMore(files, index) {
-    if (this.state.showMore.indexOf(index) === -1 && files.length > 10) {
-      return (
-
-
-      )
-    }
-  }
-
-  // custom methods ---------------------------------------------------------
-
-  _showMore(index) {
-    let showMore = this.state.showMore
-    showMore.push(index)
-    this.setState({ showMore })
-  }
-}
-
-Issues.propTypes = {
-  issues: PropTypes.array.isRequired,
-  issueType: PropTypes.string.isRequired,
-}
-
-export default Issues
diff --git a/bids-validator-web/components/results/Results.jsx b/bids-validator-web/components/results/Results.jsx
deleted file mode 100644
index 57067a97..00000000
--- a/bids-validator-web/components/results/Results.jsx
+++ /dev/null
@@ -1,129 +0,0 @@
-// dependencies -----------------------------------------------------------
-
-import React from 'react'
-import { Card, Collapse, Alert } from 'react-bootstrap'
-import PropTypes from 'prop-types'
-import pluralize from 'pluralize'
-import Issues from './Issues'
-
-// component setup --------------------------------------------------------
-
-export default class ValidationResults extends React.Component {
-  constructor() {
-    super()
-    this.state = {
-      warningsOpen: false,
-      errorsOpen: false,
-    }
-  }
-  // life cycle events ------------------------------------------------------
-
-  render() {
-    let errors = this.props.errors
-    let warnings = this.props.warnings
-
-    // errors
-    let errorsWrap
-    if (errors.length > 0) {
-      let fileCount = this._countFiles(errors)
-      let errorHeader = (
-
-          view {errors.length} {pluralize('error', errors.length)}{' '}
-          {this._countText(fileCount)}
-
-      )
-      errorsWrap = (
-
-
-              this.setState({ errorsOpen: !this.state.errorsOpen })
-            }
-            aria-controls="errors-issues"
-            aria-expanded={this.state.errorsOpen}
-            variant="danger"
-            className="mb-0">
-            {errorHeader}
-
-
-
-
-
-
-      )
-    }
-
-    //warnings
-    let warningWrap
-    if (warnings.length > 0) {
-      let fileCount = this._countFiles(warnings)
-      let warningHeader = (
-
-          view {warnings.length} {pluralize('warning', warnings.length)}{' '}
-          {this._countText(fileCount)}
-
-      )
-      warningWrap = (
-
-
-              this.setState({ warningsOpen: !this.state.warningsOpen })
-            }
-            aria-controls="warning-issues"
-            aria-expanded={this.state.warningsOpen}
-            variant="warning"
-            className="mb-0">
-            {warningHeader}
-
-
-
-
-
-
-      )
-    }
-
-    // validations errors and warning wraps
-    return (
-      //
-        {errorsWrap}
-        {warningWrap}
-
-      //
-    )
-  }
-
-  // custom methods ---------------------------------------------------------
-
-  _countFiles(issues) {
-    let numFiles = 0
-    for (let issue of issues) {
-      if (issue.files.length > 1 || !!issue.files[0].file) {
-        numFiles += issue.files.length
-      }
-    }
-    return numFiles
-  }
-
-  _countText(count) {
-    if (count > 0) {
-      return (
-
-          in {count} {pluralize('files', count)}
-
-      )
-    }
-  }
-}
-
-ValidationResults.propTypes = {
-  errors: PropTypes.array,
-  warnings: PropTypes.array,
-}
-
-ValidationResults.Props = {
-  errors: [],
-  warnings: [],
-}
diff --git a/bids-validator-web/index.scss b/bids-validator-web/index.scss
deleted file mode 100644
index af3f4f35..00000000
--- a/bids-validator-web/index.scss
+++ /dev/null
@@ -1,156 +0,0 @@
-body {
-  font-size: 14px;
-}
-
-h4 {
-  font-size: 18px;
-}
-
-label {
-  font-weight: bold;
-}
-
-h5 {
-  font-size: 14px;
-}
-
-.page-wrapper {
-  padding-top: 80px;
-}
-
-form.options label {
-  margin: 0;
-  padding-right: 2rem;
-  font-weight: normal;
-}
-form.options input {
-  margin-right: 0.4rem;
-}
-
-.validation-error {
-  position: relative;
-}
-.validation-error > .card-header > .file-header > .pull-right {
-  position: absolute;
-  top: 15px;
-  right: 15px;
-}
-
-.validation-error > .card-body > .file-header,
-.pull-right {
-  float: right;
-  top: 15px;
-  right: 15px;
-}
-
-.issues {
-  .validation-messages {
-    padding-top: 20px;
-  }
-  button > span {
-    float: right;
-  }
-  .issue {
-    border-bottom: 1px solid #ccc;
-    .pull-right {
-      font-size: 14px;
-    }
-  }
-  .issue:last-child {
-    border-bottom: none;
-  }
-  .issues-view-more {
-    padding-top: 15px;
-    button {
-      border: none;
-      background: none;
-      color: #337ab7;
-      font-size: 18px;
-    }
-    button:hover {
-      text-decoration: underline;
-      color: #23527c;
-    }
-  }
-}
-
-.upload-panel > .alert,
-.error-header {
-  cursor: pointer;
-}
-
-.summary {
-  font-size: 14px;
-}
-
-.error-log {
-  margin-bottom: 10px;
-}
-
-.loading-wrap {
-  padding: 20px 0 30px;
-  text-align: center;
-  position: relative;
-  span {
-    text-transform: uppercase;
-    margin-top: 5px;
-    display: block;
-    font-size: 12px;
-    color: #337ab7;
-  }
-  .spinner,
-  .spinnerinner {
-    position: relative;
-    border: 4px solid #337ab7;
-    border-color: transparent #337ab7;
-    margin: auto;
-    -webkit-box-sizing: border-box;
-    -moz-box-sizing: border-box;
-    box-sizing: border-box;
-    -webkit-border-radius: 50%;
-    -moz-border-radius: 50%;
-    border-radius: 50%;
-    width: 45px;
-    height: 45px;
-    padding: 0;
-    margin: 0;
-  }
-
-  .spinner {
-    animation: spin 4s linear 0s infinite normal;
-    margin: 0 auto;
-  }
-
-  .spinnerinner {
-    border-width: 2px;
-    margin: 21%;
-    padding: 0;
-    border-radius: 50%;
-    position: absolute;
-    top: 0;
-    bottom: 0;
-    left: 0;
-    right: 0;
-    display: block;
-    width: 60%;
-    height: 60%;
-    animation: innerspin 1s linear 0s infinite normal;
-  }
-}
-
-@keyframes spin {
-  from {
-    -webkit-transform: rotate(0);
-  }
-  to {
-    -webkit-transform: rotate(360deg);
-  }
-}
-@keyframes innerspin {
-  from {
-    -webkit-transform: rotate(0);
-  }
-  to {
-    -webkit-transform: rotate(360deg);
-  }
-}
diff --git a/bids-validator-web/next.config.js b/bids-validator-web/next.config.js
deleted file mode 100644
index 13965153..00000000
--- a/bids-validator-web/next.config.js
+++ /dev/null
@@ -1,11 +0,0 @@
-// next.config.js
-
-module.exports = {
-  output: 'export',
-  transpilePackages: ['bids-validator'],
-  assetPrefix: './',
-  webpack: (config, {}) => {
-    config.watchOptions.ignored = '**/node_modules'
-    return config
-  },
-}
diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json
b/bids-validator-web/package.json deleted file mode 100644 index 9abd4978..00000000 --- a/bids-validator-web/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "bids-validator-web", - "version": "1.14.15-dev.0", - "description": "web client for bids-validator", - "main": "index.js", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.22.10", - "bootstrap": "^5.3.3", - "eslint-config-next": "^14.2.13", - "next": "^14.0.0", - "pluralize": "^8.0.0", - "react": "^18.3.1", - "react-bootstrap": "^2.10.5", - "react-dom": "^18.3.1", - "sass": "^1.79.4" - }, - "scripts": { - "dev": "next", - "build": "next build", - "start": "next start" - }, - "devDependencies": { - "@next/eslint-plugin-next": "^14.2.13" - } -} diff --git a/bids-validator-web/pages/_app.js b/bids-validator-web/pages/_app.js deleted file mode 100644 index e766d5e6..00000000 --- a/bids-validator-web/pages/_app.js +++ /dev/null @@ -1,8 +0,0 @@ -import '../../node_modules/bootstrap/dist/css/bootstrap.min.css' -import '../index.scss' - -function MyApp({ Component, pageProps }) { - return -} - -export default MyApp diff --git a/bids-validator-web/pages/index.js b/bids-validator-web/pages/index.js deleted file mode 100644 index 35dc4231..00000000 --- a/bids-validator-web/pages/index.js +++ /dev/null @@ -1,6 +0,0 @@ -import React from 'react' -import App from '../components/App' - -const AppComponent = () => - -export default AppComponent diff --git a/bids-validator/bids_validator/rules/associated_data_rules.json b/bids-validator/bids_validator/rules/associated_data_rules.json deleted file mode 100644 index 96de6a67..00000000 --- a/bids-validator/bids_validator/rules/associated_data_rules.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "associated_data": { - "regexp": "^[\\/\\\\](?:@@@_associated_data_type_@@@)[\\/\\\\](?:.*)$", - "tokens": { - "@@@_associated_data_type_@@@": [ - "code", - "derivatives", - "sourcedata", - "stimuli" - ] - } - } -} diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json deleted file mode 100644 index 291c4c6c..00000000 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ /dev/null @@ -1,658 +0,0 @@ -{ - "anat_nonparametric": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:_chunk-[0-9]+)?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": [ - "T1w", - "T2w", - "PDw", - "T2starw", - "FLAIR", - "inplaneT1", - "inplaneT2", - "PDT2", - "angio", - "T2star", - "FLASH", - "PD" - ], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_parametric": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": [ - "T1map", - "T2map", - "T2starmap", - "R1map", - "R2map", - "R2starmap", - "PDmap", - "MTRmap", - "MTsat", - "UNIT1", - "T1rho", - "MWFmap", - "MTVmap", - "PDT2map", - "Chimap", - "S0map", - "M0map" - ], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_defacemask": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_mod-(?:@@@_anat_suffixes_@@@))?_defacemask\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": [ - "T1w", - "T2w", - "PDw", - "T2starw", - "FLAIR", - "inplaneT1", - "inplaneT2", - "PDT2", - "angio", - "T1map", - "T2map", - "T2starmap", - "R1map", - "R2map", - "R2starmap", - "PDmap", - "MTRmap", - "MTsat", - "UNIT1", - "T1rho", - "MWFmap", - "MTVmap", - "PDT2map", - "Chimap", - "TB1map", - "RB1map", - "S0map", - "M0map", - "MESE", - "MEGRE", - "VFA", - "IRT1", - "MP2RAGE", - "MPM", - "MTS", - "MTR", - "T2star", - "FLASH", - "PD" - ], - "@@@_anat_ext_@@@": ["nii.gz", "nii"] - } - }, - - "anat_multiecho": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_echo-[0-9]+?(_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["MESE", "MEGRE"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_multiflip": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_flip-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["VFA"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_multiinv": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["IRT1"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_mp2rage": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_flip-[0-9]+)?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["MP2RAGE"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_vfa_mt": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?_flip-[0-9]+?_mt-(on|off)?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["MPM", "MTS"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "anat_mtr": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_mt-(on|off)?(?:_part-(imag|mag|phase|real))?_(?:@@@_anat_suffixes_@@@)\\.(@@@_anat_ext_@@@)$", - "tokens": { - "@@@_anat_suffixes_@@@": ["MTR"], - "@@@_anat_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - "anat_cont": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?anat[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:_recording-[a-zA-Z0-9]+)?(?:_chunk-[0-9]+)?(?:@@@_cont_ext_@@@)$", - "tokens": { - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "behavioral": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?beh[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?((?:@@@_behavioral_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_behavioral_ext_@@@": [ - "_beh\\.json", - "_beh\\.tsv", - "_events\\.json", - "_events\\.tsv" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "dwi": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?dwi[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_part-(imag|mag|phase|real))?((?:@@@_dwi_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_dwi_ext_@@@": [ - "_dwi\\.nii\\.gz", - "_dwi\\.nii", - "_dwi\\.json", - "_dwi\\.bvec", - "_dwi\\.bval", - "_sbref\\.nii\\.gz", - "_sbref\\.nii", - "_sbref\\.json" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "fmap_gre": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": [ - "phasediff", - "phase1", - "phase2", - "magnitude1", - "magnitude2", - "magnitude", - "fieldmap" - ], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_pepolar_asl": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?_dir-[a-zA-Z0-9]+(?:_run-[0-9]+)?(?:_part-(mag|phase|real|imag))?(?:_chunk-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["m0scan", "epi"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_TB1DAM": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_flip-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["TB1DAM"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_TB1EPI": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_echo-[0-9]+?_flip-[0-9]+?(?:_inv-[0-9]+)?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["TB1EPI"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_rf": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_flip-[0-9]+)?(?:_inv-[0-9]+)?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["TB1AFI", "TB1TFL", "TB1RFM", "RB1COR"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_TB1SRGE": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?_flip-[0-9]+?_inv-[0-9]+?(?:_part-(imag|mag|phase|real))?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["TB1SRGE"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "fmap_parametric": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?fmap[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(?:@@@_field_map_type_@@@)\\.(@@@_field_map_ext_@@@)$", - "tokens": { - "@@@_field_map_type_@@@": ["TB1map", "RB1map"], - "@@@_field_map_ext_@@@": ["nii\\.gz", "nii", "json"] - } - }, - - "func": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:@@@_func_ext_@@@)$", - "tokens": { - "@@@_func_ext_@@@": [ - "_bold\\.nii\\.gz", - "_bold\\.nii", - "_bold\\.json", - "_cbv\\.nii\\.gz", - "_cbv\\.nii", - "_cbv\\.json", - "_sbref\\.nii\\.gz", - "_sbref\\.nii", - "_sbref\\.json" - ] - } - }, - - "func_phase_deprecated": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:@@@_func_ext_@@@)$", - "tokens": { - "@@@_func_ext_@@@": ["_phase\\.nii\\.gz", "_phase\\.nii", "_phase\\.json"] - } - }, - - "func_events": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_func_ext_@@@)$", - "tokens": { - "@@@_func_ext_@@@": ["_events\\.tsv", "_events\\.json"] - } - }, - - "func_timeseries": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@)$", - "tokens": { - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "func_bold": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?func[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(?:_part-(imag|mag|phase|real))?(?:@@@_func_bold_ext_@@@)$", - "tokens": { - "@@@_func_bold_ext_@@@": [ - "_bold\\.nii\\.gz", - "_bold\\.nii", - "_sbref\\.nii\\.gz", - "_sbref\\.nii" - ] - } - }, - - "asl": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?perf[\\/\\\\]\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:(?:@@@_asl_type_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_asl_type_@@@": [ - "_asl\\.nii\\.gz", - "_asl\\.nii", - "_asl\\.json", - "_m0scan\\.nii\\.gz", - "_m0scan\\.nii", - "_m0scan\\.json", - "_aslcontext\\.tsv", - "_asllabeling\\.jpg" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "eeg": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?eeg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(?:_space-(@@@_eeg_space_@@@))?((_eeg\\.(@@@_eeg_type_@@@)|(@@@_eeg_ext_@@@))|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_eeg_space_@@@": [ - "Other", - "CapTrak", - "EEGLAB", - "EEGLAB-HJ", - "CTF", - "ElektaNeuromag", - "4DBti", - "KitYokogawa", - "ChietiItab", - "ICBM452AirSpace", - "ICBM452Warp5Space", - "IXI549Space", - "fsaverage", - "fsaverageSym", - "fsLR", - "MNIColin27", - "MNI152Lin", - "MNI152NLin2009aSym", - "MNI152NLin2009bSym", - "MNI152NLin2009cSym", - "MNI152NLin2009aAsym", - "MNI152NLin2009bAsym", - "MNI152NLin2009cAsym", - "MNI152NLin6Sym", - "MNI152NLin6ASym", - "MNI305", - "NIHPD", - "OASIS30AntsOASISAnts", - "OASIS30Atropos", - "Talairach", - "UNCInfant", - "fsaverage3", - "fsaverage4", - "fsaverage5", - "fsaverage6", - "fsaveragesym", - "UNCInfant0V21", - "UNCInfant1V21", - "UNCInfant2V21", - "UNCInfant0V22", - "UNCInfant1V22", - "UNCInfant2V22", - "UNCInfant0V23", - "UNCInfant1V23", - "UNCInfant2V23" - ], - "@@@_eeg_type_@@@": ["vhdr", "vmrk", "eeg", "edf", "bdf", "set", "fdt"], - "@@@_eeg_ext_@@@": [ - "_events\\.json", - "_events\\.tsv", - "_electrodes\\.json", - "_electrodes\\.tsv", - "_channels\\.json", - "_channels\\.tsv", - "_eeg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - "ieeg": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?ieeg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(?:_space-(@@@_ieeg_space_@@@))?((_ieeg\\.(@@@_ieeg_type_@@@)|(@@@_ieeg_ext_@@@))|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_ieeg_space_@@@": [ - "Other", - "Pixels", - "ACPC", - "ScanRAS", - "ICBM452AirSpace", - "ICBM452Warp5Space", - "IXI549Space", - "fsaverage", - "fsaverageSym", - "fsLR", - "MNIColin27", - "MNI152Lin", - "MNI152NLin2009aSym", - "MNI152NLin2009bSym", - "MNI152NLin2009cSym", - "MNI152NLin2009aAsym", - "MNI152NLin2009bAsym", - "MNI152NLin2009cAsym", - "MNI152NLin6Sym", - "MNI152NLin6ASym", - "MNI305", - "NIHPD", - "OASIS30AntsOASISAnts", - "OASIS30Atropos", - "Talairach", - "UNCInfant", - "fsaverage3", - "fsaverage4", - "fsaverage5", - "fsaverage6", - "fsaveragesym", - "UNCInfant0V21", - "UNCInfant1V21", - "UNCInfant2V21", - "UNCInfant0V22", - "UNCInfant1V22", - "UNCInfant2V22", - "UNCInfant0V23", - "UNCInfant1V23", - "UNCInfant2V23" - ], - "@@@_ieeg_type_@@@": [ - "edf", - "vhdr", - "vmrk", - "eeg", - "set", - "fdt", - "nwb", - "mefd[\\/\\\\].*" - ], - "@@@_ieeg_ext_@@@": [ - "_events\\.json", - "_events\\.tsv", - "_electrodes\\.json", - 
"_electrodes\\.tsv", - "_channels\\.json", - "_channels\\.tsv", - "_ieeg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "meg": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/\\\\]\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_split-[0-9]+)?(_digitizer\\.txt|_meg(@@@_meg_type_@@@[\\/\\\\](.(?!\\.(sqd|con|fif|raw|raw\\.mhd|trg|kdf|chn)$))*|[\\/\\\\](.(?!\\.(sqd|con|fif|raw|raw\\.mhd|trg|kdf|chn)$))*)|(@@@_meg_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_meg_type_@@@": [ - "\\.ds[\\/\\\\].*", - "\\.(?:chn|kdf|trg)", - "\\.(?:raw|raw\\.mhd)", - "\\.fif", - "\\.(?:con|sqd)", - "\\.(?:kdf|chn|trg)" - ], - "@@@_meg_ext_@@@": [ - "_events\\.json", - "_events\\.tsv", - "_channels\\.json", - "_channels\\.tsv", - "_electrodes\\.json", - "_electrodes\\.tsv", - "_meg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_headshape\\.pos", - "_markers\\.(?:mrk|sqd)" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "meg_calbibration": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/]\\1(_\\2)?_acq-calibration_meg\\.dat$" - }, - - "meg_crosstalk": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?meg[\\/]\\1(_\\2)?_acq-crosstalk_meg\\.fif$" - }, - - "stimuli": { - "regexp": "^[\\/\\\\](?:stimuli)[\\/\\\\](?:.*)$" - }, - - "nirs": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?nirs[\\/\\\\]\\1(_\\2)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|((?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_part-[0-9]+)?(_nirs\\.(@@@_nirs_type_@@@)|(@@@_nirs_ext_@@@))))$", - "tokens": { - "@@@_nirs_type_@@@": ["snirf"], - "@@@_nirs_ext_@@@": [ - "_events\\.json", - "_events\\.tsv", - "_channels\\.json", - "_channels\\.tsv", - "_nirs\\.json", - "_photo\\.jpg" - ], - "@@@_nirs_optodes_@@@": [ - "_optodes\\.tsv", - "_optodes\\.json", - "_coordsystem\\.json" - ] - } - }, - - "pet": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?pet[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:(?:@@@_pet_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_pet_ext_@@@": [ - "_pet\\.nii\\.gz", - "_pet\\.nii", - "_pet\\.json", - "_events\\.json", - "_events\\.tsv" - ], - "@@@_cont_ext_@@@": [ - "_physio\\.tsv\\.gz", - "_stim\\.tsv\\.gz", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "pet_blood": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?pet[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_recording-[a-zA-Z0-9]+)_(@@@_pet_ext_@@@)$", - "tokens": { - "@@@_pet_ext_@@@": ["blood\\.tsv\\.gz", "blood\\.tsv", "blood\\.json"] - } - }, - "motion": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?motion[\\/\\\\]\\1(_\\2)?_task-[a-zA-Z0-9]+(_tracksys-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(@@@_motion_ext_@@@))|((?:_tracksys-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?_(@@@_cont_ext_@@@))$", - "tokens": { - "@@@_motion_ext_@@@": [ - "channels\\.json", - "channels\\.tsv", - "motion\\.json", - "motion\\.tsv" - ], - "@@@_cont_ext_@@@": ["events\\.json", "events\\.tsv"] - } - }, - "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", - "tokens": { - "@@@_microscopy_type_@@@": [ - "_TEM", - "_SEM", - "_uCT", - "_BF", - "_DF", - "_PC", - "_DIC", - "_FLUO", - "_CONF", - "_PLI", - "_CARS", - "_2PE", - "_MPE", - "_SR", - "_NLO", - "_OCT", - "_SPIM", - "_XPCT" - ], - "@@@_microscopy_ext_@@@": [ - "\\.ome\\.tif", - "\\.ome\\.btf", - "\\.ome\\.zarr[\\/\\\\].*", - "\\.tif", - "\\.png" - ] - } - }, - "microscopy_photo": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(@@@_photo_ext_@@@)$", - "tokens": { - "@@@_photo_ext_@@@": [ - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_photo\\.json" - ] - } - }, - "microscopy_json": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", - "tokens": { - "@@@_microscopy_type_@@@": [ - "_TEM", - "_SEM", - "_uCT", - "_BF", - "_DF", - "_PC", - "_DIC", - "_FLUO", - "_CONF", - "_PLI", - "_CARS", - "_2PE", - "_MPE", - "_SR", - "_NLO", - "_OCT", - "_SPIM", - "_XPCT" - ] - } - } -} diff --git a/bids-validator/bids_validator/rules/phenotypic_rules.json b/bids-validator/bids_validator/rules/phenotypic_rules.json deleted file mode 100644 index b981c01a..00000000 --- a/bids-validator/bids_validator/rules/phenotypic_rules.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "phenotypic_data": { - "regexp": "^[\\/\\\\](?:phenotype)[\\/\\\\](?:.*\\.tsv|.*\\.json)$" - } -} diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json deleted file mode 100644 index 90ba9982..00000000 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ /dev/null @@ -1,267 +0,0 @@ -{ - "scans": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(@@@_scat_ext_@@@)$", - "tokens": { - "@@@_scat_ext_@@@": ["_scans\\.tsv", "_scans\\.json"] - } - }, - - "func_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?_task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?(@@@_func_ses_ext_@@@)$", - "tokens": { - "@@@_func_ses_ext_@@@": [ - "_bold\\.json", - "_sbref\\.json", - "_events\\.json", - "_events\\.tsv", - "_physio\\.json", - "_stim\\.json" - ] - } - }, - - "asl_ses": { - "regexp": "^\\/(sub-[a-zA-Z0-9]+)\\/(?:(ses-[a-zA-Z0-9]+)\\/)?\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_asl_ses_ext_@@@)$", - "tokens": { - "@@@_asl_ses_ext_@@@": [ - "_asl\\.json", - 
"_aslcontext\\.tsv", - "_m0scan\\.json", - "_asllabeling\\.jpg" - ] - } - }, - - "pet_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_trc-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_pet_ses_type_@@@)$", - "tokens": { - "@@@_pet_ses_type_@@@": ["_pet\\.json", "_events\\.json", "_events\\.tsv"] - } - }, - - "anat_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_anat_ses_type_@@@)\\.json$", - "tokens": { - "@@@_anat_ses_type_@@@": [ - "T1w", - "T2w", - "T1map", - "T2map", - "T1rho", - "FLAIR", - "PD", - "PDT2", - "inplaneT1", - "inplaneT2", - "angio", - "defacemask", - "SWImagandphase" - ] - } - }, - - "dwi_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_)?dwi\\.(?:@@@_dwi_ses_ext_@@@)$", - "tokens": { - "@@@_dwi_ses_ext_@@@": ["json", "bval", "bvec"] - } - }, - - "meg_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(@@@_meg_ses_type_@@@)$", - "tokens": { - "@@@_meg_ses_type_@@@": [ - "_events\\.tsv", - "_channels\\.tsv", - "_channels\\.json", - "_meg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_headshape\\.pos" - ] - } - }, - - "eeg_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_space-(@@@_eeg_space_@@@))?(@@@_eeg_ses_type_@@@)$", - "tokens": { - "@@@_eeg_ses_type_@@@": [ - "_events\\.tsv", - "_channels\\.tsv", - "_channels\\.json", - "_electrodes\\.tsv", - "_electrodes\\.json", - "_eeg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" - ], - "@@@_eeg_space_@@@": [ - "Other", - "CapTrak", - "EEGLAB", - "EEGLAB-HJ", - "CTF", - "ElektaNeuromag", - "4DBti", - "KitYokogawa", - "ChietiItab", - "ICBM452AirSpace", - "ICBM452Warp5Space", - "IXI549Space", - "fsaverage", - "fsaverageSym", - "fsLR", - "MNIColin27", - "MNI152Lin", - "MNI152NLin2009aSym", - "MNI152NLin2009bSym", - "MNI152NLin2009cSym", - "MNI152NLin2009aAsym", - "MNI152NLin2009bAsym", - "MNI152NLin2009cAsym", - "MNI152NLin6Sym", - "MNI152NLin6ASym", - "MNI305", - "NIHPD", - "OASIS30AntsOASISAnts", - "OASIS30Atropos", - "Talairach", - "UNCInfant", - "fsaverage3", - "fsaverage4", - "fsaverage5", - "fsaverage6", - "fsaveragesym", - "UNCInfant0V21", - "UNCInfant1V21", - "UNCInfant2V21", - "UNCInfant0V22", - "UNCInfant1V22", - "UNCInfant2V22", - "UNCInfant0V23", - "UNCInfant1V23", - "UNCInfant2V23" - ] - } - }, - - "ieeg_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:_space-(@@@_ieeg_space_@@@))?(@@@_ieeg_ses_type_@@@)$", - "tokens": { - "@@@_ieeg_ses_type_@@@": [ - "_events\\.tsv", - "_channels\\.tsv", - "_channels\\.json", - "_electrodes\\.tsv", - "_electrodes\\.json", - "_ieeg\\.json", - "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" - ], - "@@@_ieeg_space_@@@": [ - "Other", - "Pixels", - "ACPC", - "ScanRAS", - "ICBM452AirSpace", - "ICBM452Warp5Space", - "IXI549Space", - "fsaverage", - "fsaverageSym", - "fsLR", - 
"MNIColin27", - "MNI152Lin", - "MNI152NLin2009aSym", - "MNI152NLin2009bSym", - "MNI152NLin2009cSym", - "MNI152NLin2009aAsym", - "MNI152NLin2009bAsym", - "MNI152NLin2009cAsym", - "MNI152NLin6Sym", - "MNI152NLin6ASym", - "MNI305", - "NIHPD", - "OASIS30AntsOASISAnts", - "OASIS30Atropos", - "Talairach", - "UNCInfant", - "fsaverage3", - "fsaverage4", - "fsaverage5", - "fsaverage6", - "fsaveragesym", - "UNCInfant0V21", - "UNCInfant1V21", - "UNCInfant2V21", - "UNCInfant0V22", - "UNCInfant1V22", - "UNCInfant2V22", - "UNCInfant0V23", - "UNCInfant1V23", - "UNCInfant2V23" - ] - } - }, - - "motion_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(@@@_motion_ses_type_@@@)$", - "tokens": { - "@@@_motion_ses_type_@@@": [ - "_events.tsv", - "_events.json", - "_channels.tsv", - "_channels.json", - "_motion.json", - "_coordsystem.json" - ] - } - }, - "microscopy_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[0-9]+)?(@@@_microscopy_ses_type_@@@)$", - "tokens": { - "@@@_microscopy_ses_type_@@@": [ - "_TEM\\.json", - "_SEM\\.json", - "_uCT\\.json", - "_BF\\.json", - "_DF\\.json", - "_PC\\.json", - "_DIC\\.json", - "_FLUO\\.json", - "_CONF\\.json", - "_PLI\\.json", - "_CARS\\.json", - "_2PE\\.json", - "_MPE\\.json", - "_SR\\.json", - "_NLO\\.json", - "_OCT\\.json", - "_SPIM\\.json", - "_XPCT\\.json" - ] - } - }, - - "nirs_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|((?:_task-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(@@@_nirs_ses_type_@@@)))$", - "tokens": { - "@@@_nirs_ses_type_@@@": [ - "_events\\.tsv", - "_channels\\.tsv", - "_nirs\\.json", - "_photo\\.jpg" - ], - "@@@_nirs_optodes_@@@": [ - "_optodes\\.tsv", - "_optodes\\.json", - "_coordsystem\\.json" - ] - } - } -} diff --git a/bids-validator/bids_validator/rules/subject_level_rules.json b/bids-validator/bids_validator/rules/subject_level_rules.json deleted file mode 100644 index d2e80d92..00000000 --- a/bids-validator/bids_validator/rules/subject_level_rules.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "subject_level": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\]\\1(@@@_subject_level_ext_@@@)$", - "tokens": { - "@@@_subject_level_ext_@@@": ["_sessions\\.tsv", "_sessions\\.json"] - } - } -} diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json deleted file mode 100644 index 33e2a39d..00000000 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ /dev/null @@ -1,279 +0,0 @@ -{ - "root_top": { - "regexp": "^[\\/\\\\]?(@@@_root_files_@@@)$", - "tokens": { - "@@@_root_files_@@@": [ - "README", - "README\\.md", - "README\\.rst", - "README\\.txt", - "CHANGES", - "CITATION\\.cff", - "LICENSE", - "dataset_description\\.json", - "genetic_info\\.json", - "participants\\.tsv", - "participants\\.json", - "phasediff.json", - "phase1\\.json", - "phase2\\.json", - "fieldmap\\.json", - "TB1DAM\\.json", - "TB1EPI\\.json", - "TB1AFI\\.json", - "TB1TFL\\.json", - "TB1RFM\\.json", - "TB1SRGE\\.json", - "RB1COR\\.json", - "events\\.json", - "scans\\.json", - "samples\\.json", - "samples\\.tsv" - ] - } - }, - - "func_top": { - "regexp": 
"^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_ce-[a-zA-Z0-9]+)?(?:_dir-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_echo-[0-9]+)?((?:@@@_func_top_ext_@@@)|(?:_recording-[a-zA-Z0-9]+)?(?:@@@_cont_ext_@@@))$", - "tokens": { - "@@@_func_top_ext_@@@": [ - "_bold\\.json", - "_sbref\\.json", - "_events\\.json", - "_events\\.tsv", - "_beh\\.json" - ], - "@@@_cont_ext_@@@": ["_physio\\.json", "_stim\\.json"] - } - }, - - "asl_top": { - "regexp": "^\\/(?:ses-[a-zA-Z0-9]+_)?(?:_acq-[a-zA-Z0-9]+)?(?:_rec-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_asl_top_ext_@@@)$", - "tokens": { - "@@@_asl_top_ext_@@@": [ - "_asl\\.json", - "_m0scan\\.json", - "_aslcontext\\.tsv", - "_labeling.jpg" - ] - } - }, - - "pet_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:trc-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(@@@_pet_suffixes_@@@)\\.json$", - "tokens": { - "@@@_pet_suffixes_@@@": ["pet"] - } - }, - - "anat_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(@@@_anat_suffixes_@@@)\\.json$", - "tokens": { - "@@@_anat_suffixes_@@@": [ - "T1w", - "T2w", - "T1map", - "T2map", - "T1rho", - "FLAIR", - "PD", - "PDT2", - "inplaneT1", - "inplaneT2", - "angio", - "SWImagandphase", - "T2star", - "FLASH", - "PDmap", - "photo" - ] - } - }, - - "VFA_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mese_megre_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mese_megre_suffixes_@@@": ["VFA"] - } - }, - - "megre_mese_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mese_megre_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mese_megre_suffixes_@@@": ["MEGRE", "MESE"] - } - }, - - "irt1_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:inv-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_irt1_suffixes_@@@)\\.json$", - "tokens": { - "@@@_irt1_suffixes_@@@": ["IRT1"] - } - }, - - "mpm_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:mt-(on|off)_)(?:part-(mag|phase|real|imag)_)?(@@@_mpm_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mpm_suffixes_@@@": ["MPM"] - } - }, - - "mts_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_mt-(on|off)_)?(?:part-(mag|phase|real|imag)_)?(@@@_mts_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mts_suffixes_@@@": ["MTS"] - } - }, - - "mtr_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:mt-(on|off)_)?(?:part-(mag|phase|real|imag)_)?(@@@_mtr_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mtr_suffixes_@@@": ["MTR"] - } - }, - - "mp2rage_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:echo-[0-9]+_)?(?:flip-[0-9]+_)?(?:inv-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(@@@_mp2rage_suffixes_@@@)\\.json$", - "tokens": { - "@@@_mp2rage_suffixes_@@@": ["MP2RAGE"] - } - }, - - "unit1_top": { - 
"regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:chunk-[0-9]+_)?(@@@_unit1_suffixes_@@@)\\.json$", - "tokens": { - "@@@_unit1_suffixes_@@@": ["UNIT1"] - } - }, - - "dwi_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:dir-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(?:chunk-[0-9]+_)?(dwi\\.(?:@@@_dwi_top_ext_@@@)|sbref\\.json)$", - "tokens": { - "@@@_dwi_top_ext_@@@": ["json", "bval", "bvec"] - } - }, - "eeg_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_eeg_top_ext_@@@)$", - "tokens": { - "@@@_eeg_top_ext_@@@": [ - "_eeg\\.json", - "_channels\\.tsv", - "_channels\\.json", - "_electrodes\\.tsv", - "_electrodes\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_coordsystem\\.json" - ] - } - }, - "ieeg_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_ieeg_top_ext_@@@)$", - "tokens": { - "@@@_ieeg_top_ext_@@@": [ - "_ieeg\\.json", - "_channels\\.tsv", - "_channels\\.json", - "_electrodes\\.tsv", - "_electrodes\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_coordsystem\\.json" - ] - } - }, - "meg_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_meg_top_ext_@@@)$", - "tokens": { - "@@@_meg_top_ext_@@@": [ - "_meg\\.json", - "_channels\\.tsv", - "_channels\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", - "_coordsystem\\.json" - ] - } - }, - "motion_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?task-[a-zA-Z0-9]+(?:@@@_motion_top_ext_@@@)$", - "tokens": { - "@@@_motion_top_ext_@@@": [ - "_motion\\.json", - "_channels\\.tsv", - "_channels\\.json", - "_coordsystem\\.json" - ] - } - }, - "nirs_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(((?:_acq-[a-zA-Z0-9]+)?(@@@_nirs_optodes_@@@))|(task-[a-zA-Z0-9]+(?:_acq-[a-zA-Z0-9]+)?(?:_proc-[a-zA-Z0-9]+)?(?:@@@_nirs_top_ext_@@@)))$", - "tokens": { - "@@@_nirs_top_ext_@@@": [ - "_nirs\\.json", - "_channels\\.tsv", - "_photo\\.jpg" - ], - "@@@_nirs_optodes_@@@": [ - "_optodes\\.tsv", - "_optodes\\.json", - "_coordsystem\\.json" - ] - } - }, - "fmap_epi_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:ce-[a-zA-Z0-9]+_)?(?:dir-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:part-(mag|phase|real|imag)_)?(?:chunk-[0-9]+_)?(?:@@@_field_map_type_@@@)\\.json$", - "tokens": { - "@@@_field_map_type_@@@": ["m0scan", "epi"] - } - }, - "fmap_gre_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:chunk-[0-9]+_)?(@@@_fmap_gre_suffixes_@@@)\\.json$", - "tokens": { - "@@@_fmap_gre_suffixes_@@@": [ - "magnitude1", - "magnitude2", - "phasediff", - "phase1", - "phase2", - "magnitude", - "fieldmap" - ] - } - }, - "other_top_files": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:acq-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(?:recording-[a-zA-Z0-9]+_)?(@@@_other_top_files_ext_@@@)$", - "tokens": { - "@@@_other_top_files_ext_@@@": ["physio\\.json", "stim\\.json"] - } - }, - - "microscopy_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", - "tokens": { - "@@@_microscopy_top_ext_@@@": [ - 
"_TEM\\.json", - "_SEM\\.json", - "_uCT\\.json", - "_BF\\.json", - "_DF\\.json", - "_PC\\.json", - "_DIC\\.json", - "_FLUO\\.json", - "_CONF\\.json", - "_PLI\\.json", - "_CARS\\.json", - "_2PE\\.json", - "_MPE\\.json", - "_SR\\.json", - "_NLO\\.json", - "_OCT\\.json", - "_SPIM\\.json", - "_XPCT\\.json" - ] - } - } -} diff --git a/bids-validator/bids_validator/tsv/non_custom_columns.json b/bids-validator/bids_validator/tsv/non_custom_columns.json deleted file mode 100644 index 75dd3b61..00000000 --- a/bids-validator/bids_validator/tsv/non_custom_columns.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "channels": [ - "description", - "high_cutoff", - "low_cutoff", - "name", - "notch", - "source", - "detector", - "wavelength_actual", - "wavelength_nominal", - "wavelength_emission_actual", - "component", - "short_channel", - "sampling_frequency", - "software_filters", - "status", - "status_description", - "type", - "units", - "reference", - "group", - "tracking_system", - "tracked_point", - "component", - "placement", - "HED" - ], - "electrodes": [ - "name", - "x", - "y", - "z", - "size", - "material", - "manufacturer", - "group", - "hemisphere", - "type", - "impedance", - "dimension", - "HED" - ], - "optodes": [ - "name", - "type", - "x", - "y", - "z", - "template_x", - "template_y", - "template_z", - "source_type", - "detector_type", - "HED" - ], - "events": [ - "duration", - "HED", - "onset", - "trial_type", - "response_time", - "stim_file", - "HED" - ], - "misc": [], - "participants": ["participant_id", "HED"], - "phenotype": ["participant_id", "HED"], - "scans": ["acq_time", "filename", "HED"], - "samples": [ - "sample_id", - "participant_id", - "sample_type", - "pathology", - "derived_from", - "HED" - ], - "sessions": ["acq_time", "session_id", "HED"], - "aslcontext": ["volume_type", "HED"], - "blood": [ - "time", - "plasma_radioactivity", - "whole_blood_radioactivity", - "metabolite_parent_fraction", - "hplc_recovery_fractions", - "HED" - ], - "nirs": [ - "name", - "type", - "source", - "detector", - "wavelength_nominal", - "units", - "sampling_frequency", - "component", - "wavelength_actual", - "description", - "wavelength_emission_actual", - "short_channel", - "status", - "status_description", - "HED" - ] -} diff --git a/bids-validator/bin/bids-validator b/bids-validator/bin/bids-validator deleted file mode 100755 index c3490f04..00000000 --- a/bids-validator/bin/bids-validator +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env node -function entry(cli) { - cli(process.argv.slice(2)).catch(code => { - process.exit(code) - }) -} - -try { - // Test if there's a development tree to run - require.resolve('../cli.js') - process.env.ESBUILD_MAX_BUFFER = 64 * 1024 * 1024 - // For dev, use esbuild-runner - require('esbuild-runner/register') - const { default: cli } = require('../cli.js') - entry(cli) -} catch (err) { - if (err.code === 'MODULE_NOT_FOUND') { - const { default: cli } = require('bids-validator/cli') - entry(cli) - } else { - console.log(err) - process.exitCode = 1 - } -} diff --git a/bids-validator/bin/test-submodule-exists b/bids-validator/bin/test-submodule-exists deleted file mode 100755 index f111f3c0..00000000 --- a/bids-validator/bin/test-submodule-exists +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env node -/* eslint-disable no-console */ -const fs = require('fs') - -/** - * checks that submodule directory exists and is not empty - * @param {string} path - * @returns {boolean} - */ -const checkTestData = path => { - const submoduleDirExists = fs.existsSync(path) && 
fs.readdirSync(path).length - return submoduleDirExists -} - -/** - * if submodule at current test version does not exist, - * prompt user to update submodule - */ -const main = () => { - let testDataExists - try { - testDataExists = checkTestData('bids-validator/tests/data/bids-examples') - } catch (err) { - testDataExists = false - console.error(err) - } - if (!testDataExists) { - console.error( - "\nError: It looks like you're missing the bids-examples test files. Please run `git submodule update --init --depth 1`.\n See project README for more information.\n", - ) - process.exit(1) - } -} - -main() diff --git a/bids-validator/cli.js b/bids-validator/cli.js deleted file mode 100644 index 65be0ed1..00000000 --- a/bids-validator/cli.js +++ /dev/null @@ -1,90 +0,0 @@ -/*eslint no-console: ["error", {allow: ["log"]}] */ - -import { parseOptions } from './validators/options' -import validate from './index.js' - -const format = validate.consoleFormat -import colors from 'colors/safe' -import fs from 'fs' -import { filenamesOnly } from './utils/filenamesOnly.js' - -const errorToString = (err) => { - if (err instanceof Error) return err.stack - else if (typeof err === 'object') return JSON.stringify(err) - else return err -} - -/** - * Write a large string or buffer to stdout and wait for the drain event - * - * This is needed to avoid truncating buffered output when piped - * @param {string} data - * @param {function} cb - */ -const writeStdout = (data, cb) => { - if (!process.stdout.write(data)) { - process.stdout.once('drain', cb) - } else { - process.nextTick(cb) - } -} - -export function cli(argumentOverride) { - return new Promise((resolve, reject) => { - // Setup CLI state when called via Node.js - if (process.env['NO_COLOR'] !== undefined) { - colors.disable() - } - process.title = 'bids-validator' - const argv = parseOptions(argumentOverride) - const dir = argv._[0] - const options = argv - process.on('unhandledRejection', (err) => { - console.log( - format.unexpectedError( - // eslint-disable-next-line - `Unhandled rejection (\n reason: ${errorToString(err)}\n).\n`, - ), - ) - reject(3) - }) - - if (options.filenames) { - return filenamesOnly() - } - - try { - // Test if we can access the dataset directory at all - fs.opendirSync(dir) - } catch (err) { - console.log(colors.red(dir + ' does not exist or is inaccessible')) - reject(2) - } - - validate.BIDS(dir, options, function (issues, summary) { - function resolveOrReject() { - if ( - issues === 'Invalid' || - (issues.errors && issues.errors.length >= 1) || - (issues.config && issues.config.length >= 1) - ) { - reject(1) - } else { - resolve(0) - } - } - if (options.json) { - writeStdout(JSON.stringify({ issues, summary }), resolveOrReject) - } else { - writeStdout( - format.issues(issues, options) + - '\n' + - format.summary(summary, options), - resolveOrReject, - ) - } - }) - }) -} - -export default cli diff --git a/bids-validator/index.js b/bids-validator/index.js deleted file mode 100644 index 593103f0..00000000 --- a/bids-validator/index.js +++ /dev/null @@ -1,5 +0,0 @@ -// import validations -import validate from './validators' - -// export validations for use in other applications -export default validate diff --git a/bids-validator/package.json b/bids-validator/package.json deleted file mode 100644 index 3b330701..00000000 --- a/bids-validator/package.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "bids-validator", - "version": "1.14.15-dev.0", - "description": "", - "main": "./dist/commonjs/index.js", - "exports": { - ".": { - 
"import": "./dist/esm/index.js", - "require": "./dist/commonjs/index.js" - }, - "./cli": { - "require": "./dist/commonjs/cli.js" - }, - "./utils/consoleFormat": { - "require": "./dist/commonjs/consoleFormat.js" - }, - "./package.json": "./package.json" - }, - "scripts": { - "build": "node ./esbuild.mjs", - "prepublishOnly": "npm run build" - }, - "repository": { - "type": "git", - "url": "https://github.com/bids-standard/bids-validator.git" - }, - "engines": { - "node": ">=18.0.0" - }, - "bin": { - "bids-validator": "bin/bids-validator" - }, - "author": "Squishymedia", - "license": "MIT", - "bugs": { - "url": "https://github.com/bids-standard/bids-validator/issues" - }, - "homepage": "https://github.com/bids-standard/bids-validator", - "dependencies": { - "@aws-sdk/client-s3": "^3.637.0", - "ajv": "^6.5.2", - "bytes": "^3.1.2", - "colors": "^1.4.0", - "cross-fetch": "^4.0.0", - "date-fns": "^4.1.0", - "events": "^3.3.0", - "exifreader": "^4.23.7", - "hed-validator": "^3.15.4", - "ignore": "^6.0.2", - "is-utf8": "^0.2.1", - "jest": "^29.7.0", - "jshint": "^2.13.6", - "lerna": "^8.1.8", - "lodash": "^4.17.21", - "minimatch": "3.0.5", - "next": "14.2.13", - "nifti-js": "^1.0.1", - "p-limit": "^2.1.0", - "pako": "^1.0.6", - "path": "^0.12.7", - "pluralize": "^8.0.0", - "semver": "^7.6.3", - "stream-browserify": "^3.0.0", - "table": "^6.8.2", - "util": "^0.12.5", - "xml2js": "^0.6.2", - "yaml": "^2.3.1", - "yargs": "^17.7.2" - }, - "devDependencies": { - "adm-zip": "", - "chai": "", - "esbuild": "^0.24.0", - "esbuild-plugin-globals": "^0.2.0", - "esbuild-runner": "^2.2.2", - "eslint": "^8.57.0", - "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.1", - "husky": "^9.1.6", - "lockfile": "^1.0.4", - "sync-request": "6.1.0" - }, - "browser": { - "fs": false, - "child_process": false, - "stream": "stream-browserify" - }, - "files": [ - "dist/*", - "schema/*" - ] -} diff --git a/bids-validator/tests/bids-web.spec.js b/bids-validator/tests/bids-web.spec.js deleted file mode 100644 index a92deb1d..00000000 --- a/bids-validator/tests/bids-web.spec.js +++ /dev/null @@ -1,6 +0,0 @@ -/** - * eslint no-console: ["error", { allow: ["log"] }] - * @jest-environment jsdom - */ - -import './bids.spec.js' diff --git a/bids-validator/tests/bids.spec.js b/bids-validator/tests/bids.spec.js deleted file mode 100644 index 22c70020..00000000 --- a/bids-validator/tests/bids.spec.js +++ /dev/null @@ -1,357 +0,0 @@ -/** - * eslint no-console: ["error", { allow: ["log"] }] - */ -import { assert } from 'chai' - -import validate from '../index.js' -import fs from 'fs' -import path from 'path' -import { createFileList } from './env/FileList.js' -import isNode from '../utils/isNode.js' - -function getDirectories(srcpath) { - return fs.readdirSync(srcpath).filter(function (file) { - return ( - file !== '.git' && fs.statSync(path.join(srcpath, file)).isDirectory() - ) - }) -} - -const missing_session_files = [ - '7t_trt', - 'ds004332', - 'ds006', - 'ds007', - 'ds008', - 'ds051', - 'ds052', - 'ds105', - 'ds109', - 'ds113b', - 'ds000117', - 'ds000247', - 'ieeg_motorMiller2007', - 'ieeg_visual', - 'eeg_ds003654s_hed_inheritance', - 'eeg_ds003645s_hed_demo', - 'motion_dualtask', -] - -const dataDirectory = path.join('bids-validator', 'tests', 'data') - -// Generate validate.BIDS input for included minimal tests -function createDatasetFileList(inputPath) { - const testDatasetPath = path.join(dataDirectory, inputPath) - if (!isNode) { - return createFileList(testDatasetPath) - } else { - return testDatasetPath - } 
-} - -// Generate validate.BIDS input for bids-examples -function createExampleFileList(inputPath) { - return createDatasetFileList(path.join('bids-examples', inputPath)) -} - -function assertErrorCode(errors, expected_error_code) { - const matchingErrors = errors.filter(function (error) { - return error.code === expected_error_code - }) - assert(matchingErrors.length > 0) -} - -describe('BIDS example datasets ', function () { - // Default validate.BIDS options - const options = { ignoreNiftiHeaders: true, json: true } - const enableNiftiHeaders = { json: true } - - describe('basic example dataset tests', () => { - const bidsExamplePath = path.join(dataDirectory, 'bids-examples') - getDirectories(bidsExamplePath).forEach(function testDataset(inputPath) { - it(inputPath, (isdone) => { - validate.BIDS( - createExampleFileList(inputPath), - options, - function (issues) { - let warnings = issues.warnings - let session_flag = false - for (const warning in warnings) { - if (warnings[warning]['code'] === 38) { - session_flag = true - break - } - } - if (missing_session_files.indexOf(inputPath) === -1) { - assert.deepEqual(session_flag, false) - } else { - assert.deepEqual(session_flag, true) - } - isdone() - }, - ) - }) - }) - }) - - // we need to have at least one non-dynamic test - it('validates path without trailing backslash', function (isdone) { - validate.BIDS( - createExampleFileList('ds001'), - options, - function (issues, summary) { - const errors = issues.errors - const warnings = issues.warnings - assert(summary.sessions.length === 0) - assert(summary.subjects.length === 16) - assert.deepEqual(summary.tasks, ['balloon analog risk task']) - expect(summary.modalities).toEqual(['MRI']) - assert(summary.totalFiles === 135) - assert.deepEqual(errors.length, 1) - assert(warnings.length === 3) - assert( - warnings.findIndex((warning) => warning.code === 13) > -1, - 'warnings do not contain a code 13', - ) - isdone() - }, - ) - }) - - // we need to have at least one non-dynamic test - it('validates dataset with valid nifti headers', function (isdone) { - const options = { ignoreNiftiHeaders: false } - validate.BIDS( - createDatasetFileList('valid_headers'), - options, - function (issues, summary) { - const errors = issues.errors - const warnings = issues.warnings - assert(summary.sessions.length === 0) - assert(summary.subjects.length === 1) - assert.deepEqual(summary.tasks, ['rhyme judgment']) - assert.isFalse(summary.dataProcessed) - expect(summary.modalities).toEqual(['MRI']) - expect(summary.totalFiles).toEqual(8) - assert( - errors.findIndex((error) => error.code === 60) > -1, - 'errors do not contain a code 60', - ) - assert.deepEqual(warnings.length, 4) - assert( - warnings.findIndex((warning) => warning.code === 13) > -1, - 'warnings do not contain a code 13', - ) - assert.deepEqual(summary.subjectMetadata[0], { - age: 25, - participantId: '01', - sex: 'M', - }) - isdone() - }, - ) - }) - - // test for duplicate files present with both .nii and .nii.gz extension - it('validates dataset for duplicate files present with both .nii and .nii.gz extension', function (isdone) { - validate.BIDS( - createDatasetFileList('valid_filenames'), - enableNiftiHeaders, - function (issues) { - assertErrorCode(issues.errors, 74) - isdone() - }, - ) - }) - - it('includes issue 53 NO_T1W for dataset without T1w files', function (isdone) { - validate.BIDS(createDatasetFileList('no_t1w'), options, function (issues) { - assertErrorCode(issues.ignored, 53) - isdone() - }) - }) - - // test for illegal 
characters used in acq and task name - it('validates dataset with illegal characters in task name', function (isdone) { - validate.BIDS( - createDatasetFileList('valid_filenames'), - enableNiftiHeaders, - function (issues) { - assertErrorCode(issues.errors, 58) - isdone() - }, - ) - }) - - // test for illegal characters used in sub name - it('validates dataset with illegal characters in sub name', function (isdone) { - validate.BIDS( - createDatasetFileList('valid_filenames'), - enableNiftiHeaders, - function (issues) { - assertErrorCode(issues.errors, 64) - isdone() - }, - ) - }) - - it('checks for subjects with no valid data', function (isdone) { - validate.BIDS( - createDatasetFileList('no_valid_data'), - options, - function (issues) { - assertErrorCode(issues.errors, 67) - isdone() - }, - ) - }) - - it('validates MRI modalities', function (isdone) { - validate.BIDS( - createExampleFileList('ds001'), - options, - function (issues, summary) { - var errors = issues.errors - var warnings = issues.warnings - assert(summary.sessions.length === 0) - assert(summary.subjects.length === 16) - assert.deepEqual(summary.tasks, ['balloon analog risk task']) - assert(summary.modalities.includes('MRI')) - assert(summary.totalFiles === 135) - assert.deepEqual(errors.length, 1) - assert(warnings.length === 3) - assert( - warnings.findIndex((warning) => warning.code === 13) > -1, - 'warnings do not contain a code 13', - ) - isdone() - }, - ) - }) - - it('blacklists modalities specified', function (isdone) { - const _options = { ...options, blacklistModalities: ['MRI'] } - validate.BIDS( - createExampleFileList('ds001'), - _options, - function (issues, summary) { - var errors = issues.errors - var warnings = issues.warnings - assert(summary.sessions.length === 0) - assert(summary.subjects.length === 16) - assert.deepEqual(summary.tasks, ['balloon analog risk task']) - assert(summary.modalities.includes('MRI')) - assert(summary.totalFiles === 135) - assert.deepEqual(errors.length, 2) - assert(warnings.length === 3) - assert( - warnings.findIndex((warning) => warning.code === 13) > -1, - 'warnings do not contain a code 13', - ) - assert( - errors.findIndex((error) => error.code === 139) > -1, - 'errors do contain a code 139', - ) - - isdone() - }, - ) - }) - - it('checks for data dictionaries without corresponding data files', function (isdone) { - validate.BIDS( - createDatasetFileList('unused_data_dict'), - options, - function (issues) { - assert.notEqual( - issues.errors.findIndex((issue) => issue.code === 90), - -1, - ) - isdone() - }, - ) - }) - - it('checks for fieldmaps with no _magnitude file', function (isdone) { - validate.BIDS( - createDatasetFileList('fieldmap_without_magnitude'), - options, - function (issues) { - assert.notEqual( - issues.errors.findIndex((issue) => issue.code === 91), - -1, - ) - isdone() - }, - ) - }) - - it('should not throw a warning if all _phasediff.nii are associated with _magnitude1.nii', function (isdone) { - validate.BIDS( - createExampleFileList('hcp_example_bids'), - options, - function (issues) { - assert.deepEqual(issues.errors, []) - isdone() - }, - ) - }) - - it('should throw a warning if there are _phasediff.nii without an associated _magnitude1.nii', function (isdone) { - validate.BIDS( - createDatasetFileList('phasediff_without_magnitude1'), - options, - function (issues) { - assert.notEqual(issues.warnings.findIndex((issue) => issue.code === 92), -1) - isdone() - }, - ) - }) - - it('should not throw an error if it encounters no non-utf-8 files', function 
(isdone) { - validate.BIDS( - createDatasetFileList('valid_dataset'), - options, - function (issues) { - assert.equal( - issues.errors.findIndex((issue) => issue.code === 123), - -1, - ) - isdone() - }, - ) - }) - - it('should validate pet data', function (isdone) { - validate.BIDS( - createDatasetFileList('broken_pet_example_2-pet_mri'), - options, - function (issues) { - assertErrorCode(issues.errors, 55) - isdone() - }, - ) - }) - - it('should validate pet blood data', function (isdone) { - validate.BIDS( - createDatasetFileList('broken_pet_example_3-pet_blood'), - options, - function (issues) { - assertErrorCode(issues.errors, 55) - isdone() - }, - ) - }) - - it('should catch missing tsv columns', function (isdone) { - validate.BIDS( - createDatasetFileList('pet_blood_missing_tsv_column'), - options, - function (issues) { - assertErrorCode(issues.errors, 211) - isdone() - }, - ) - }) -}) diff --git a/bids-validator/tests/bval.spec.js b/bids-validator/tests/bval.spec.js deleted file mode 100644 index 584c6455..00000000 --- a/bids-validator/tests/bval.spec.js +++ /dev/null @@ -1,46 +0,0 @@ -import assert from 'assert' -import bval from '../validators/bval/bval' - -describe('bval', function () { - it('should allow proper bval contents', function () { - const val = '4 6 2 5 3 23 5' - bval({}, val, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not allow more than one row', function () { - const val = '0 4 3 6 1 6 2 4 1\n 4 3 5 2 4 2 4 5' - bval({}, val, function (issues) { - assert(issues.length == 1 && issues[0].code == 30) - }) - }) - - it('should catch doublespace separators', function () { - const val = '4  6  2  5  3  23  5' - bval({}, val, function (issues) { - assert(issues.length == 1 && issues[0].code == 47) - }) - }) - - it('should not allow undefined bvals', function () { - const val = undefined - bval({}, val, function (issues) { - assert(issues.length == 1 && issues[0].code == 89) - }) - }) - - it('should not allow bvals of types other than string', function () { - const val = [0, 1, 2, 3] - bval({}, val, function (issues) { - assert(issues.length == 1 && issues[0].code == 89) - }) - }) - - it('should not allow bvecs to be submitted in place of bval', function () { - const val = '4 6 7\n 2 3 4\n 4 5 6' - bval({}, val, function (issues) { - assert(issues.length == 1 && issues[0].code == 30) - }) - }) -}) diff --git a/bids-validator/tests/bvec.spec.js b/bids-validator/tests/bvec.spec.js deleted file mode 100644 index 9c815589..00000000 --- a/bids-validator/tests/bvec.spec.js +++ /dev/null @@ -1,59 +0,0 @@ -import assert from 'assert' -import bvec from '../validators/bvec/bvec' - -describe('bvec', function () { - it('should allow valid bvec contents', function () { - const vec = '4 6 2 5\n3 2 3 5\n6 4 3 5' - bvec({}, vec, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not allow more or less than 3 rows', function () { - let vec = '0 4 3 6 1 6 2 4\n 4 3 5 2 4 2 4 5' - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 31) - }) - - vec = - '0 4 3 6 1 6 2 4\n 4 3 5 2 4 2 4 5\n 4 3 5 2 4 2 4 5\n 4 3 5 2 4 2 4 5' - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 31) - }) - }) - - it('should not allow rows of inconsistent length', function () { - const vec = '0 4 3 6 1 6 4\n 4 3 4 2 4 5\n 4 3 5 2 4 2 4 5' - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 46) - }) - }) - - it('should catch doublespace separators', function () { - const 
vec = '4  6  2  5\n3  2  3  5\n6  4  3  5' - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 47) - }) - }) - - it('should not allow undefined bvecs', function () { - const vec = undefined - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 88) - }) - }) - - it('should not allow bvecs of types other than string', function () { - const vec = [0, 1, 2, 3] - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 88) - }) - }) - - it('should not allow bvals to be submitted in place of bvec', function () { - const vec = '4 6 7' - bvec({}, vec, function (issues) { - assert(issues.length == 1 && issues[0].code == 31) - }) - }) -}) diff --git a/bids-validator/tests/cli.spec.js b/bids-validator/tests/cli.spec.js deleted file mode 100644 index ec0243e1..00000000 --- a/bids-validator/tests/cli.spec.js +++ /dev/null @@ -1,99 +0,0 @@ -import cli from '../cli' -import path from 'path' - -const dir = process.cwd() -const data_dir = path.join(dir, 'bids-validator', 'tests', 'data') -const data_with_errors = path.join(data_dir, 'empty_files') -const data_without_errors = path.join(data_dir, 'valid_dataset') - -const colorRegEx = new RegExp( - // eslint-disable-next-line no-control-regex - '[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]', -) - -expect.extend({ - toBeJSON: function (received) { - try { - JSON.parse(received) - return { - pass: true, - } - } catch (err) { - return { - pass: false, - } - } - }, -}) - -let mockStdout -let mockStderr -let mockExit -let mockConsoleError - -describe('CLI', () => { - beforeEach(() => { - // bids-validator uses these - mockStdout = jest - .spyOn(process.stdout, 'write') - .mockImplementation(() => true) - mockStderr = jest - .spyOn(process.stderr, 'write') - .mockImplementation(() => true) - // Yargs uses these - mockExit = jest.spyOn(process, 'exit').mockImplementation(() => true) - mockConsoleError = jest - .spyOn(console, 'error') - .mockImplementation(() => true) - }) - afterEach(() => { - mockStdout.mockRestore() - mockStderr.mockRestore() - mockExit.mockRestore() - mockConsoleError.mockRestore() - }) - it('should display usage hints when no arguments / options are provided', async () => { - try { - await cli(' ') - } catch (code) { - expect(code).toEqual(2) - // 'jest' is the process name here but usually it is 'bids-validator' - expect(mockConsoleError.mock.calls[0][0]).toEqual( - expect.stringContaining(' [options]'), - ) - } - }) - - it('should accept a directory as the first argument without error', async () => { - await expect(cli(data_without_errors)).resolves.toEqual(0) - }) - - it('without errors should exit with code 0', async () => { - await expect(cli(`${data_without_errors} --json`)).resolves.toEqual(0) - }) - - it('with errors should not exit with code 0', async () => { - await expect(cli(`${data_with_errors}`)).rejects.toEqual(1) - }) - - it('with errors should not exit with code 0 with --json argument', async () => { - await expect(cli(`${data_with_errors} --json`)).rejects.toEqual(1) - }) - - it('should print valid json when the --json argument is provided', async () => { - await expect(cli(`${data_without_errors} --json`)).resolves.toEqual(0) - expect(mockStdout).toBeCalledWith(expect.toBeJSON()) - }) - - it('should print with colors by default', async () => { - await cli(`${data_without_errors}`) - expect(mockStdout.mock.calls[0][0]).toMatch(colorRegEx) - }) - - it('should print without colors when NO_COLOR env set', async () => { 
- process.env.NO_COLOR = 'any value' - await cli(`${data_without_errors}`) - expect(mockStdout.mock.calls[0][0]).not.toMatch(colorRegEx) - delete process.env.NO_COLOR - }) -}) diff --git a/bids-validator/tests/consoleFormat.spec.js b/bids-validator/tests/consoleFormat.spec.js deleted file mode 100644 index e443eb1c..00000000 --- a/bids-validator/tests/consoleFormat.spec.js +++ /dev/null @@ -1,101 +0,0 @@ -import assert from 'assert' -import Issue from '../utils/issues' -import consoleFormat from '../utils/consoleFormat' - -describe('console format', () => { - let issues - beforeEach(() => { - issues = { - errors: [ - { - key: 'TEST_ERROR', - severity: 'error', - reason: 'testing consoleFormat', - files: [ - new Issue({ - key: 'TEST_ERROR', - file: '/nonexistent/test/file.wut', - code: 0, - evidence: 'none', - line: -1, - character: -1, - severity: 'error', - reason: 'testing consoleFormat', - }), - ], - additionalFileCount: 0, - }, - ], - warnings: [ - { - key: 'TEST_WARNING', - severity: 'warning', - reason: 'testing consoleFormat', - files: [ - new Issue({ - key: 'TEST_WARNING', - file: '/nonexistent/test/file.wut', - code: 2, - evidence: 'none', - line: -1, - character: -1, - severity: 'warning', - reason: 'testing consoleFormat', - }), - ], - additionalFileCount: 0, - }, - ], - summary: { - sessions: [], - subjects: [], - tasks: [], - modalities: [], - totalFiles: 0, - size: 0, - }, - } - }) - - describe('logIssues', () => { - it('takes an array of errors and returns them formatted as an array', () => { - const output = consoleFormat.logIssues(issues.errors, 'red', { - verbose: true, - }) - assert(Array.isArray(output)) - assert.deepEqual(output, [ - '\t\u001b[31m1: [ERR] testing consoleFormat (code: undefined - TEST_ERROR)\u001b[39m', - '\t\ttesting consoleFormat', - '\t\t@ line: -1 character: -1', - '\t\tEvidence: none', - '', - ]) - }) - it('takes an array of warnings and returns them formatted as an array', () => { - const output = consoleFormat.logIssues(issues.warnings, 'yellow', { - verbose: true, - }) - assert.deepEqual(output, [ - '\t\u001b[33m1: [WARN] testing consoleFormat (code: undefined - TEST_WARNING)\u001b[39m', - '\t\ttesting consoleFormat', - '\t\t@ line: -1 character: -1', - '\t\tEvidence: none', - '', - ]) - }) - }) - - describe('issues', () => { - it('formats issues as a string a given issues object', () => { - const output = consoleFormat.issues(issues, {}) - assert.equal(typeof output, 'string') - }) - }) - - describe('summary', () => { - it('formats summary as a string a given issues object', () => { - const output = consoleFormat.summary(issues.summary, {}) - assert.equal(typeof output, 'string') - }) - }) -}) diff --git a/bids-validator/tests/env/FileList.js b/bids-validator/tests/env/FileList.js deleted file mode 100644 index 40232a74..00000000 --- a/bids-validator/tests/env/FileList.js +++ /dev/null @@ -1,85 +0,0 @@ -/** - * for use in test suites using File & FileList browser APIs in jsdom environment - */ - -import fs from 'fs' - -import path from 'path' -import mime from 'mime-types' - -function createFileList(dir) { - const str = dir.substr(dir.lastIndexOf(path.sep) + 1) + '$' - const rootpath = dir.replace(new RegExp(str), '') - const paths = getFilepaths(dir, [], rootpath) - return paths.map((path) => { - return createFile(path, path.replace(rootpath, '')) - }) -} - -function getFilepaths(dir, files_) { - files_ = files_ || [] - const files = fs.readdirSync(dir) - files - .map((file) => path.join(dir, file)) - .map((path) => - isDirectory(path) ? 
getFilepaths(path, files_) : files_.push(path), - ) - return files_ -} - -function isDirectory(path) { - const pathStat = fs.lstatSync(path) - let isDir = pathStat.isDirectory() - if (pathStat.isSymbolicLink()) { - try { - var targetPath = fs.realpathSync(path) - isDir = fs.lstatSync(targetPath).isDirectory() - } catch (err) { - isDir = false - } - } - return isDir -} - -function addFileList(input, file_paths) { - if (typeof file_paths === 'string') file_paths = [file_paths] - else if (!Array.isArray(file_paths)) { - throw new Error( - 'file_paths needs to be a file path string or an Array of file path strings', - ) - } - - const file_list = file_paths.map((fp) => createFile(fp)) - file_list.__proto__ = Object.create(FileList.prototype) - - Object.defineProperty(input, 'files', { - value: file_list, - writable: false, - }) - - return input -} - -function createFile(file_path, relativePath) { - const file = fs.statSync(file_path) - - const browserFile = new File( - [fs.readFileSync(file_path)], - path.basename(file_path), - { - type: mime.lookup(file_path) || '', - lastModified: file.mtimeMs, - }, - ) - browserFile.webkitRelativePath = relativePath || file_path - - return browserFile -} - -export { addFileList, createFile, createFileList } - -export default { - addFileList, - createFile, - createFileList, -} diff --git a/bids-validator/tests/events.spec.js b/bids-validator/tests/events.spec.js deleted file mode 100644 index 19ac4b0f..00000000 --- a/bids-validator/tests/events.spec.js +++ /dev/null @@ -1,93 +0,0 @@ -import assert from 'assert' -import validateEvents from '../validators/events/validate' - -describe('Events', function () { - const headers = [ - [ - { - path: '/sub01/sub01_task-test_bold.nii.gz', - relativePath: '/sub01/sub01_task-test_bold.nii.gz', - }, - { dim: [4, 0, 0, 0, 10] }, - ], - ] - - it('all files in the /stimuli folder should be included in an _events.tsv file', () => { - // stimuli.events will have all of the - // files included in the stim_file column of every _events.tsv file. - // stimuli.directory will have all of the - // files included in the /stimuli directory. 
- const stimuli = { - events: ['/stimuli/images/red-square.jpg'], - directory: [{ relativePath: '/stimuli/images/blue-square.jpg' }], - } - const issues = validateEvents([], stimuli, [], {}) - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 77) - }) - - it('should not throw issues if all files in the /stimuli folder are included in an _events.tsv file', () => { - const stimuli = { - events: ['/stimuli/images/red-square.jpg'], - directory: [{ relativePath: '/stimuli/images/red-square.jpg' }], - } - const issues = validateEvents([], stimuli, [], {}) - assert.deepStrictEqual(issues, []) - }) - - it('should throw an issue if the onset of the last event in _events.tsv is more than TR * number of volumes in corresponding nifti header', () => { - const events = [ - { - file: { path: '/sub01/sub01_task-test_events.tsv' }, - path: '/sub01/sub01_task-test_events.tsv', - contents: '12\tsomething\tsomething\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_bold.json': { - RepetitionTime: 1, - }, - } - - const issues = validateEvents(events, [], headers, jsonDictionary) - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 85) - }) - - it('should throw an issue if the onset of the last event in _events.tsv is less than .5 * TR * number of volumes in corresponding nifti header', () => { - const events = [ - { - file: { path: '/sub01/sub01_task-test_events.tsv' }, - path: '/sub01/sub01_task-test_events.tsv', - contents: '2\tsomething\tsomething\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_bold.json': { - RepetitionTime: 1, - }, - } - - const issues = validateEvents(events, [], headers, jsonDictionary) - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 86) - }) - - it('should not throw any issues if the onset of the last event in _events.tsv is a reasonable value', () => { - const events = [ - { - file: { path: '/sub01/sub01_task-test_events.tsv' }, - path: '/sub01/sub01_task-test_events.tsv', - contents: '7\tsomething\tsomething\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_bold.json': { - RepetitionTime: 1, - }, - } - - const issues = validateEvents(events, [], headers, jsonDictionary) - assert.deepStrictEqual(issues, []) - }) -}) diff --git a/bids-validator/tests/headerField.spec.js b/bids-validator/tests/headerField.spec.js deleted file mode 100644 index 33b16d6b..00000000 --- a/bids-validator/tests/headerField.spec.js +++ /dev/null @@ -1,143 +0,0 @@ -import assert from 'assert' -import headerFields from '../validators/headerFields' - -describe('headerFields', () => { - it('should throw an error if _magnitude1 or _magnitude2 files do not have exactly 3 dimensions.', () => { - const headers = [ - // each of these headers has one too many dimensions on the 'dim' field. - [ - { - name: 'sub-01_magnitude1.nii', - relativePath: 'sub-01_magnitude1.nii', - }, - { - dim: [5, 1, 1, 1, 1], - pixdim: [5, 1, 1, 1, 1], - xyzt_units: [5, 1, 1, 1, 1], - }, - ], - [ - { - name: 'sub-01_magnitude2.nii', - relativePath: 'sub-01_magnitude2.nii', - }, - { - dim: [5, 1, 1, 1, 1], - pixdim: [5, 1, 1, 1, 1], - xyzt_units: [5, 1, 1, 1, 1], - }, - ], - // each of these headers has one too few dimensions on the 'dim' field. 
- [ - { - name: 'sub-02_magnitude1.nii', - relativePath: 'sub-02_magnitude1.nii', - }, - { - dim: [3, 1, 1], - pixdim: [4, 1, 1, 1], - xyzt_units: [4, 1, 1, 1], - }, - ], - [ - { - name: 'sub-02_magnitude2.nii', - relativePath: 'sub-02_magnitude2.nii', - }, - { - dim: [3, 1, 1], - pixdim: [4, 1, 1, 1], - xyzt_units: [4, 1, 1, 1], - }, - ], - ] - const issues = headerFields(headers) - assert( - issues.length == 4 && - issues[0].code == '94' && - issues[1].code == '94' && - issues[2].code == '94' && - issues[3].code == '94', - ) - }) - - it('_magnitude1 or _magnitude2 files should have 3 dimensions.', () => { - const headers = [ - [ - { - name: 'sub-01_magnitude1.nii', - relativePath: 'sub-01_magnitude1.nii', - }, - { - dim: [3, 1, 1, 1], - pixdim: [3, 1, 1, 1], - xyzt_units: [3, 1, 1, 1], - }, - ], - [ - { - name: 'sub-01_magnitude2.nii', - relativePath: 'sub-01_magnitude2.nii', - }, - { - dim: [3, 1, 1, 1], - pixdim: [3, 1, 1, 1], - xyzt_units: [3, 1, 1, 1], - }, - ], - ] - const issues = headerFields(headers) - assert.deepEqual(issues, []) - }) - - it('should throw an error if _T1w files has the wrong dimensions.', () => { - // each of these headers has one too many dimensions on the 'dim' field. - // the first entry is the total count, and the following three entries are spatial. - const headers = [ - [ - { - name: 'sub-01_T1w.nii', - relativePath: 'sub-01_T1w.nii', - }, - { - dim: [5, 1, 1, 1, 1], - pixdim: [5, 1, 1, 1, 1], - xyzt_units: [5, 1, 1, 1, 1], - }, - ], - [ - { - name: 'sub-02_T1w.nii', - relativePath: 'sub-02_T1w.nii', - }, - { - dim: [3, 1, 1], - pixdim: [4, 1, 1, 1], - xyzt_units: [4, 1, 1, 1], - }, - ], - ] - const issues = headerFields(headers) - assert( - issues.length == 2 && issues[0].code == '95' && issues[1].code == '95', - ) - }) - - it('_T1w files should have exactly 3 dimensions.', () => { - const headers = [ - [ - { - name: 'sub-01_T1w.nii', - relativePath: 'sub-01_T1w.nii', - }, - { - dim: [3, 1, 1, 1], - pixdim: [3, 1, 1, 1], - xyzt_units: [4, 1, 1, 1], - }, - ], - ] - const issues = headerFields(headers) - assert.deepEqual(issues, []) - }) -}) diff --git a/bids-validator/tests/hed.spec.js b/bids-validator/tests/hed.spec.js deleted file mode 100644 index 11ea1c5a..00000000 --- a/bids-validator/tests/hed.spec.js +++ /dev/null @@ -1,277 +0,0 @@ -import assert from 'assert' -import validateHed from '../validators/hed' - -describe('HED', function () { - const jsonFiles = [ - { - relativePath: '/sub01/sub01_task-test_events.json', - path: '/sub01/sub01_task-test_events.json', - }, - { - relativePath: '/dataset_description.json', - path: '/dataset_description.json', - }, - ] - - it('should not throw an issue if the HED data is valid', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\tsomething\tone\tSpeed/30 mph\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - myCodes: { - HED: { - one: 'Duration/5 s', - }, - }, - }, - '/dataset_description.json': { HEDVersion: '8.0.0' }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.deepStrictEqual(issues, []) - }) - }) - - it('should not throw an issue if a value column is annotated', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: 
'/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\t3.0\tone\tSpeed/30 mph\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - myCodes: { - test: { - HED: { - one: 'Label/#', - }, - }, - }, - }, - '/dataset_description.json': { HEDVersion: '8.0.0' }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.deepStrictEqual(issues, []) - }) - }) - - it('should not throw an issue if a library schema is included', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\t3.0\tone\tSpeed/30 mph\n', - }, - ] - - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - myCodes: { - test: { - HED: { - one: 'ts:Sensory-presentation, Label/#', - }, - }, - }, - }, - '/dataset_description.json': { - HEDVersion: ['8.0.0', 'ts:testlib_1.0.2'], - }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.deepStrictEqual(issues, []) - }) - }) - - it('should throw an issue if the HED data is invalid', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\tsomething\tone\tDuration/5 s\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - test: { - HED: { - one: 'Speed/5 ms', - }, - }, - }, - '/dataset_description.json': { HEDVersion: '8.0.0' }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 104) - }) - }) - - it('should not throw an issue if multiple library schemas are included', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\t3.0\tone\tSpeed/30 mph\n', - }, - ] - - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - myCodes: { - test: { - HED: { - one: 'ts:Sensory-presentation, Label/#, sc:Sleep-deprivation', - }, - }, - }, - }, - '/dataset_description.json': { - HEDVersion: ['8.0.0', 'ts:testlib_1.0.2', 'sc:score_1.0.0'], - }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.deepStrictEqual(issues, []) - }) - }) - - it('should properly issue warnings when appropriate', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\tsomething\tone\tHuman/Driver\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - test: { - HED: { - one: 'Train/Maglev', - }, - }, - }, - '/dataset_description.json': { HEDVersion: '8.0.0' }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.strictEqual(issues.length, 2) - assert.strictEqual(issues[0].code, 105) - assert.strictEqual(issues[1].code, 105) - }) - }) - - it('should properly issue errors if HED data is used in a sidecar without using HEDVersion', () => { - const events = [ - { - file: { - path: 
'/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: 'onset\tduration\ttest\n' + '7\tsomething\tone\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - test: { - HED: { - one: 'Train', - }, - }, - }, - '/dataset_description.json': {}, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 109) - }) - }) - - it('should properly issue errors if HED data is used in a TSV file without using HEDVersion', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: 'onset\tduration\tHED\n' + '7\tsomething\tHuman\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': {}, - '/dataset_description.json': {}, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 109) - }) - }) - - it('should throw an issue if HEDVersion is invalid', () => { - const events = [ - { - file: { - path: '/sub01/sub01_task-test_events.tsv', - relativePath: '/sub01/sub01_task-test_events.tsv', - }, - path: '/sub01/sub01_task-test_events.tsv', - contents: - 'onset\tduration\ttest\tHED\n' + '7\tsomething\tone\tSpeed/30 mph\n', - }, - ] - const jsonDictionary = { - '/sub01/sub01_task-test_events.json': { - myCodes: { - HED: { - one: 'Duration/5 s', - }, - }, - }, - '/dataset_description.json': { HEDVersion: 'one:two:8.0.0' }, - } - - return validateHed(events, jsonDictionary, jsonFiles, '').then((issues) => { - assert.strictEqual(issues.length, 1) - assert.strictEqual(issues[0].code, 104) - }) - }) -}) diff --git a/bids-validator/tests/json.spec.js b/bids-validator/tests/json.spec.js deleted file mode 100644 index bf7f54d5..00000000 --- a/bids-validator/tests/json.spec.js +++ /dev/null @@ -1,778 +0,0 @@ -import assert from 'assert' -import validate from '../index' - -describe('JSON', function () { - var file = { - name: 'task-rest_bold.json', - relativePath: '/task-rest_bold.json', - } - var jsonDict = {} - - it('sidecars should have key/value pair for "RepetitionTime" expressed in seconds', function () { - var jsonObj = { - RepetitionTime: 1.2, - echo_time: 0.005, - flip_angle: 90, - TaskName: 'Rest', - } - jsonDict[file.relativePath] = jsonObj - validate.JSON(file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - var jsonObjInval = { - RepetitionTime: 1200, - echo_time: 0.005, - flip_angle: 90, - TaskName: 'Rest', - } - jsonDict[file.relativePath] = jsonObjInval - validate.JSON(file, jsonDict, function (issues) { - assert(issues && issues.length === 1) - }) - }) - - it('should detect negative value for SliceTiming', function () { - var jsonObj = { - RepetitionTime: 1.2, - SliceTiming: [-1.0, 0.0, 1.0], - TaskName: 'Rest', - } - jsonDict[file.relativePath] = jsonObj - validate.JSON(file, jsonDict, function (issues) { - assert(issues.length === 1 && issues[0].code == 55) - }) - }) - - var meg_file = { - name: 'sub-01_run-01_meg.json', - relativePath: '/sub-01_run-01_meg.json', - } - - it('*_meg.json sidecars should have required key/value pairs', function () { - var jsonObj = { - TaskName: 'Audiovis', - SamplingFrequency: 1000, - PowerLineFrequency: 50, - DewarPosition: 'Upright', - SoftwareFilters: 'n/a', - 
DigitizedLandmarks: true, - DigitizedHeadPoints: false, - } - jsonDict[meg_file.relativePath] = jsonObj - validate.JSON(meg_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - - var jsonObjInval = jsonObj - jsonObjInval['SamplingFrequency'] = '' - jsonDict[meg_file.relativePath] = jsonObjInval - validate.JSON(meg_file, jsonDict, function (issues) { - assert(issues && issues.length === 1) - }) - }) - - var eeg_file = { - name: 'sub-01_run-01_eeg.json', - relativePath: '/sub-01_run-01_eeg.json', - } - - it('*.json sidecars with CogPOID or CogAtlasID fields should require a uri format', function () { - var jsonObj = { - TaskName: 'rest', - SamplingFrequency: 1000, - EEGReference: 'Cz', - SoftwareFilters: 'n/a', - PowerLineFrequency: 1000, - CogAtlasID: - 'we did a search on https://ww.idontexist.com for the word "atlas"', - } - jsonDict[eeg_file.relativePath] = jsonObj - validate.JSON(eeg_file, jsonDict, function (issues) { - assert(issues.length === 1) - assert(issues[0].evidence == '.CogAtlasID should match format "uri"') - }) - }) - - it('*_eeg.json sidecars should have required key/value pairs', function () { - var jsonObj = { - TaskName: 'rest', - SamplingFrequency: 1000, - EEGReference: 'Cz', - SoftwareFilters: { - HighPass: { HalfAmplitudeCutOffHz: 1, RollOff: '6dB/Octave' }, - }, - PowerLineFrequency: 'n/a', - CogPOID: 'https://www.idontexist.com', - } - jsonDict[eeg_file.relativePath] = jsonObj - validate.JSON(eeg_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - - var jsonObjInval = jsonObj - jsonObjInval['SamplingFrequency'] = '' - jsonDict[eeg_file.relativePath] = jsonObjInval - validate.JSON(eeg_file, jsonDict, function (issues) { - assert(issues && issues[0].code == 55) - }) - }) - - var ieeg_file = { - name: 'sub-01_run-01_ieeg.json', - relativePath: '/sub-01_run-01_ieeg.json', - } - - it('*_ieeg.json sidecars should have required key/value pairs', function () { - var jsonObj = { - TaskName: 'Audiovis', - SamplingFrequency: 10, - PowerLineFrequency: 50, - SoftwareFilters: { - HighPass: { HalfAmplitudeCutOffHz: 1, RollOff: '6dB/Octave' }, - }, - iEEGReference: 'chan1', - CogAtlasID: 'doi:thisisadoi', - } - jsonDict[ieeg_file.relativePath] = jsonObj - validate.JSON(ieeg_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - var jsonObjInval = jsonObj - jsonObjInval['Manufacturer'] = '' - jsonDict[ieeg_file.relativePath] = jsonObjInval - validate.JSON(ieeg_file, jsonDict, function (issues) { - assert(issues && issues.length === 1) - }) - }) - - var meg_coordsystem_file = { - name: 'sub-01/meg/sub-01_task-testing_coordsystem.json', - relativePath: '/sub-01/meg/sub-01_task-testing_coordsystem.json', - } - - it('MEG *_coordsystem.json files should have required key/value pairs', function () { - var jsonObj = { - FiducialsDescription: 'Fiducials were digitized using ... 
', - MEGCoordinateSystem: 'CTF', - MEGCoordinateUnits: 'mm', - MEGCoordinateSystemDescription: 'this is the usual ...', - EEGCoordinateSystem: 'CapTrak', - EEGCoordinateSystemDescription: 'RAS orientation ...', - HeadCoilCoordinateSystem: 'Other', - HeadCoilCoordinates: { - LPA: [-1, 0, 0], - RPA: [1, 0, 0], - NAS: [0, 1, 0], - }, - AnatomicalLandmarkCoordinates: { - LPA: [-1, 0, 0], - RPA: [1, 0, 0], - NAS: [0, 1, 0], - }, - AnatomicalLandmarkCoordinateSystem: 'Other', - AnatomicalLandmarkCoordinateUnits: 'mm', - } - jsonDict[meg_coordsystem_file.relativePath] = jsonObj - validate.JSON(meg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 4) - assert( - issues[0].evidence == - " should have required property 'HeadCoilCoordinateSystemDescription'", - ) - assert(issues[1].evidence == ' should match "then" schema') - assert( - issues[2].evidence == - " should have required property 'AnatomicalLandmarkCoordinateSystemDescription'", - ) - assert(issues[3].evidence == ' should match "then" schema') - }) - }) - - var eeg_coordsystem_file = { - name: 'sub-01/eeg/sub-01_task-testing_coordsystem.json', - relativePath: '/sub-01/eeg/sub-01_task-testing_coordsystem.json', - } - - it('EEG *_coordsystem.json files should have required key/value pairs', function () { - var jsonObj = { - IntendedFor: 'sub-01_task-testing_electrodes.tsv', - FiducialsDescription: 'Fiducials were digitized using ... ', - EEGCoordinateSystem: 'CapTrak', - EEGCoordinateUnits: 'mm', - EEGCoordinateSystemDescription: 'RAS orientation ...', - AnatomicalLandmarkCoordinates: { - LPA: [-1, 0, 0], - RPA: [1, 0, 0], - NAS: [0, 1, 0], - }, - AnatomicalLandmarkCoordinateSystem: 'Other', - AnatomicalLandmarkCoordinateUnits: 'mm', - AnatomicalLandmarkCoordinateSystemDescription: '...', - } - jsonDict[eeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(eeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('EEG *_coordsystem.json files should not contain unaccepted *CoordinateSystem keywords', function () { - var jsonObj = { - EEGCoordinateSystem: 'RAS', - EEGCoordinateUnits: 'mm', - EEGCoordinateSystemDescription: 'RAS orientation ...', - } - jsonDict[eeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(eeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 5) - assert( - issues[0].evidence == - '.EEGCoordinateSystem should be equal to one of the allowed values', - ) - assert( - issues[4].evidence == - '.EEGCoordinateSystem should match some schema in anyOf', - ) - }) - }) - - it('EEG *_coordsystem.json schema should require *Description if *Coordsystem is "Other"', function () { - var jsonObj = { - EEGCoordinateSystem: 'Other', - EEGCoordinateUnits: 'mm', - EEGCoordinateSystemDescription: 'we did ...', - FiducialsCoordinateSystem: 'Other', - AnatomicalLandmarkCoordinateSystem: 'Other', - AnatomicalLandmarkCoordinateSystemDescription: 'we did ...', - } - jsonDict[eeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(eeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert( - issues[0].evidence == - " should have required property 'FiducialsCoordinateSystemDescription'", - ) - assert(issues[1].evidence == ' should match "then" schema') - }) - }) - - it('EEG *_coordsystem.json schema general requirements should not be overridden by conditional requirements', function () { - var jsonObj = { - EEGCoordinateSystem: 'Other', - EEGCoordinateSystemDescription: 'We used a ...', - 
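- // EEGCoordinateUnits is deliberately omitted, and the landmark system below is 'Other' with no *Description, so the general and the conditional requirements should both fire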
AnatomicalLandmarkCoordinateSystem: 'Other', - } - jsonDict[eeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(eeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 3) - assert( - issues[0].evidence == - " should have required property 'EEGCoordinateUnits'", - ) - assert( - issues[1].evidence == - " should have required property 'AnatomicalLandmarkCoordinateSystemDescription'", - ) - assert(issues[2].evidence == ' should match "then" schema') - }) - }) - - var ieeg_coordsystem_file = { - name: 'sub-01/ieeg/sub-01_task-testing_coordsystem.json', - relativePath: '/sub-01/ieeg/sub-01_task-testing_coordsystem.json', - } - - it('iEEG *_coordsystem.json files should have required key/value pairs', function () { - var jsonObj = { - iEEGCoordinateSystem: 'Pixels', - iEEGCoordinateUnits: 'pixels', - } - jsonDict[ieeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(ieeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('If iEEG CoordinateSystem is "Pixels", then CoordinateUnits must be "pixels"', function () { - var jsonObj = { - iEEGCoordinateSystem: 'Pixels', - iEEGCoordinateUnits: 'mm', - } - jsonDict[ieeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(ieeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert( - issues[0].evidence == - '.iEEGCoordinateUnits should be equal to one of the allowed values', - ) - assert(issues[1].evidence == ' should match "then" schema') - }) - }) - - it('iEEG *_coordsystem.json schema should require *Description if *Coordsystem is "Other"', function () { - var jsonObj = { - iEEGCoordinateSystem: 'Other', - iEEGCoordinateUnits: 'pixels', - } - jsonDict[ieeg_coordsystem_file.relativePath] = jsonObj - validate.JSON(ieeg_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert( - issues[0].evidence == - " should have required property 'iEEGCoordinateSystemDescription'", - ) - assert(issues[1].evidence == ' should match "then" schema') - }) - }) - - it('should use inherited sidecars to find missing fields', function () { - const multiEntryJsonDict = {} - - // this json file is missing the SamplingFrequency field - const partialJsonObj = { - TaskName: 'Audiovis', - PowerLineFrequency: 50, - DewarPosition: 'Upright', - SoftwareFilters: 'n/a', - DigitizedLandmarks: true, - DigitizedHeadPoints: false, - } - multiEntryJsonDict[meg_file.relativePath] = partialJsonObj - - // this json file (sitting at the root directory level) - // provides the missing json field - const inheritedMegFile = { - name: 'meg.json', - relativePath: '/meg.json', - } - - const restOfJsonObj = { - SamplingFrequency: 2000, - } - multiEntryJsonDict[inheritedMegFile.relativePath] = restOfJsonObj - - // json validation will pass because (when merged) there are no - // missing data fields - validate.JSON(meg_file, multiEntryJsonDict, function (issues) { - assert(issues.length == 0) - }) - }) - - it('should favor the sidecar on the directory level closest to the file being validated', function () { - const multiEntryJsonDict = {} - const lowLevelFile = { - name: 'run-01_meg.json', - relativePath: '/sub-01/run-01_meg.json', - } - - // this json file has a good SamplingFrequency field - const partialJsonObj = { - TaskName: 'Audiovis', - SamplingFrequency: 1000, - PowerLineFrequency: 50, - DewarPosition: 'Upright', - SoftwareFilters: 'n/a', - DigitizedLandmarks: true, - DigitizedHeadPoints: false, - } - multiEntryJsonDict[lowLevelFile.relativePath] = 
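- // the nearer sidecar's valid SamplingFrequency is expected to win over the bad root-level value merged in below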
partialJsonObj - - // this json file (sitting at the root directory level) - // also has a SamplingFrequency field, but it is wrong. - const inheritedMegFile = { - name: 'meg.json', - relativePath: '/meg.json', - } - - const restOfJsonObj = { - SamplingFrequency: '', - } - multiEntryJsonDict[inheritedMegFile.relativePath] = restOfJsonObj - - // json validation will pass because merged dictionaries prefer - // field values of the json sidecar furthest from the root. - // /meg.json is closer to the root than /sub-01/run-01_meg.json - // and so the values of the latter should be preferred. - validate.JSON(lowLevelFile, multiEntryJsonDict, function (issues) { - assert(issues.length == 0) - }) - }) - - it('*_bold.json sidecars should not have EffectiveEchoSpacing > TotalReadoutTime', () => { - // this json dictionary generates a sidecar with EffectiveEchoSpacing > TotalReadoutTime, - // which is nonsensical - const fieldMapJsonDict = { - EffectiveEchoSpacing: 3, - TotalReadoutTime: 1, - } - jsonDict[file.relativePath] = fieldMapJsonDict - - // validation should return an error of code 93 - validate.JSON(file, jsonDict, (issues) => { - assert(issues.length == 1 && issues[0].code == '93') - }) - }) - - it('*_bold.json sidecars should have EffectiveEchoSpacing < TotalReadoutTime', () => { - // this json dictionary generates a sidecar with EffectiveEchoSpacing < TotalReadoutTime, - // which is reasonable - const fieldMapJsonDict = { - EffectiveEchoSpacing: 3, - TotalReadoutTime: 5, - } - jsonDict[file.relativePath] = fieldMapJsonDict - - // validation should pass with no errors. - validate.JSON(file, jsonDict, (issues) => { - assert.deepEqual(issues, []) - }) - }) - - var genetic_info_file = { - name: 'genetic_info.json', - relativePath: '/genetic_info.json', - } - - it('sample genetic_info.json should parse', function () { - var jsonObj = { - GeneticLevel: ['Genetic'], - AnalyticalApproach: ['SNP Genotypes'], - SampleOrigin: 'brain', - TissueOrigin: 'gray matter', - CellType: 'neuron', - BrainLocation: '[-30 -15 10]', - } - jsonDict[genetic_info_file.relativePath] = jsonObj - validate.JSON(genetic_info_file, jsonDict, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('genetic_info.json should use limited vocabulary for sample origin', function () { - var jsonObj = { - GeneticLevel: ['Genetic'], - AnalyticalApproach: ['SNP Genotypes'], - SampleOrigin: 'not_from_around_here', - TissueOrigin: 'gray matter', - CellType: 'neuron', - BrainLocation: '[-30 -15 10]', - } - jsonDict[genetic_info_file.relativePath] = jsonObj - validate.JSON(genetic_info_file, jsonDict, function (issues) { - assert(issues.length === 1 && issues[0].code == 55) - }) - }) - - var dataset_description_file = { - name: 'dataset_description.json', - relativePath: '/dataset_description.json', - } - - it('dataset_description.json should validate DatasetLinks', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - DatasetLinks: { - mylink: 'https://www.google.com', - deriv1: 'derivatives/derivative1', - phantoms: 'file:///data/phantoms', - ds000001: 'doi:10.18112/openneuro.ds000001.v1.0.0', - }, - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('dataset_description.json should raise on bad keys in DatasetLinks', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - DatasetLinks: { - mylink: 'https://www.google.com', - '': 
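- // the empty key (and 'mylink!' / 'my link' below) violates the '^[a-zA-Z0-9]*$' key pattern asserted on later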
'https://www.yahoo.com', - 'mylink!': ':/path', - 'my link': ':/another/path', - }, - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 6) - assert( - issues[0].evidence == - '.DatasetLinks should NOT be shorter than 1 characters', - ) - assert(issues[1].evidence == ".DatasetLinks property name '' is invalid") - assert( - issues[2].evidence == - '.DatasetLinks should match pattern "^[a-zA-Z0-9]*$"', - ) - assert( - issues[3].evidence == - ".DatasetLinks property name 'mylink!' is invalid", - ) - assert(issues[4].evidence == issues[2].evidence) - assert( - issues[5].evidence == - ".DatasetLinks property name 'my link' is invalid", - ) - }) - }) - - it('dataset_description.json should raise on non-object value in DatasetLinks', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - DatasetLinks: 'https://www.google.com', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 1) - assert(issues[0].evidence == '.DatasetLinks should be object') - }) - }) - - it('dataset_description.json should raise on invalid values in DatasetLinks', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - DatasetLinks: { - mylink1: 'https://www.google.com', - mylink2: 1, - '': 'https://www.yahoo.com', - }, - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 3) - assert( - issues[0].evidence == - '.DatasetLinks should NOT be shorter than 1 characters', - ) - assert(issues[1].evidence == ".DatasetLinks property name '' is invalid") - assert(issues[2].evidence == ".DatasetLinks['mylink2'] should be string") - }) - }) - - it('dataset_description.json should validate with enum of DatasetType', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author'], - DatasetType: 'raw', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('dataset_description.json should NOT validate with wrong enum of DatasetType', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author'], - DatasetType: 'badenum', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 1 && issues[0].code == 55) - }) - }) - - it('dataset_description.json should NOT validate with number in Authors', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author', 1], - DatasetType: 'raw', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 1 && issues[0].code == 55) - }) - }) - - it('dataset_description.json should validate with only required fields, no recommended', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('dataset_description.json should validate with DatasetType "derivative" 
and GeneratedBy defined', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author'], - DatasetType: 'derivative', - GeneratedBy: [{ Name: 'Manual' }], - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('dataset_description.json should NOT validate with DatasetType "derivative" and GeneratedBy empty', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author'], - DatasetType: 'derivative', - GeneratedBy: [], - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 1) - assert( - issues[0].code == 55 && - issues[0].evidence == - '.GeneratedBy should NOT have fewer than 1 items', - ) - }) - }) - - it('dataset_description.json should NOT validate with DatasetType "derivative" and GeneratedBy missing', function () { - var jsonObj = { - Name: 'Example Name', - BIDSVersion: '1.4.0', - Authors: ['example author'], - DatasetType: 'derivative', - } - jsonDict[dataset_description_file.relativePath] = jsonObj - validate.JSON(dataset_description_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert( - issues[0].code == 55 && - issues[0].evidence == " should have required property 'GeneratedBy'", - ) - }) - }) - - var beh_file = { - name: 'sub-01_run-01_beh.json', - relativePath: '/sub-01_run-01_beh.json', - } - - it('*beh.json sidecars with CogPOID or CogAtlasID fields should require a uri format', function () { - var jsonObj = { - TaskName: 'stroop', - CogAtlasID: - 'we did a search on https://ww.idontexist.com for the word "atlas"', - CogPOID: - 'we did a search on https://ww.idontexisteither.com for the word "paradigm"', - } - jsonDict[beh_file.relativePath] = jsonObj - validate.JSON(beh_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert(issues[0].evidence == '.CogAtlasID should match format "uri"') - assert(issues[1].evidence == '.CogPOID should match format "uri"') - }) - }) - - it('*beh.json with extra content throws no error', function () { - var jsonObj = { - TaskName: 'stroop', - trial: { - LongName: 'Trial name', - Description: 'Indicator of the type of trial', - Levels: { - congruent: 'Word and color font are congruent.', - incongruent: 'Word and color font are not congruent.', - }, - }, - } - jsonDict[beh_file.relativePath] = jsonObj - validate.JSON(beh_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - var nirs_file = { - name: 'sub-01_run-01_nirs.json', - relativePath: '/sub-01_run-01_nirs.json', - } - - it('*_nirs.json sidecars should have required key/value pairs', function () { - var jsonObj = { - TaskName: 'Audiovis', - SamplingFrequency: 7, - NIRSChannelCount: 7, - NIRSSourceOptodeCount: 7, - NIRSDetectorOptodeCount: 7, - CapManufacturer: 'EasyCap', - CapManufacturersModelName: 'actiCAP 64 Ch Standard-2', - } - jsonDict[nirs_file.relativePath] = jsonObj - validate.JSON(nirs_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - var jsonObjInval = jsonObj - jsonObjInval['BadKey'] = '' - jsonDict[nirs_file.relativePath] = jsonObjInval - validate.JSON(nirs_file, jsonDict, function (issues) { - assert(issues && issues.length === 1) - }) - }) - var nirs_coordsystem_file = { - name: 'sub-01/nirs/sub-01_task-testing_coordsystem.json', - relativePath: 
'/sub-01/nirs/sub-01_task-testing_coordsystem.json', - } - - it('NIRS *_coordsystem.json files should have required key/value pairs', function () { - var jsonObj = { - NIRSCoordinateSystem: 'fsaverage', - NIRSCoordinateUnits: 'mm', - } - jsonDict[nirs_coordsystem_file.relativePath] = jsonObj - validate.JSON(nirs_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - }) - - it('NIRS *_coordsystem.json schema should require *Description if *Coordsystem is "Other"', function () { - var jsonObj = { - NIRSCoordinateSystem: 'Other', - NIRSCoordinateUnits: 'mm', - } - jsonDict[nirs_coordsystem_file.relativePath] = jsonObj - validate.JSON(nirs_coordsystem_file, jsonDict, function (issues) { - assert(issues.length === 2) - assert( - issues[0].evidence == - " should have required property 'NIRSCoordinateSystemDescription'", - ) - assert(issues[1].evidence == ' should match "then" schema') - }) - }) - - var motion_file = { - name: 'sub-01_ses-VR_task-dance_tracksys-Unity_motion.json', - relativePath: '/sub-01_ses-VR_task-dance_tracksys-Unity_motion.json', - } - - it('*_motion.json sidecars should have required key/value pairs', function () { - var jsonObj = { - TaskName: 'Dance', - SamplingFrequency: 90, - MotionChannelCount: 7, - POSChannelCount: 3, - ORNTChannelCount: 4, - } - jsonDict[motion_file.relativePath] = jsonObj - validate.JSON(motion_file, jsonDict, function (issues) { - assert(issues.length === 0) - }) - var jsonObjInval = jsonObj - jsonObjInval['BadKey'] = '' - jsonDict[motion_file.relativePath] = jsonObjInval - validate.JSON(motion_file, jsonDict, function (issues) { - assert(issues && issues.length === 1) - }) - }) -}) diff --git a/bids-validator/tests/nii.spec.js b/bids-validator/tests/nii.spec.js deleted file mode 100644 index 712306cc..00000000 --- a/bids-validator/tests/nii.spec.js +++ /dev/null @@ -1,535 +0,0 @@ -import assert from 'assert' -import validate from '../index' - -describe('NIFTI', function () { - var file = { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - } - var jsonContentsDict = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - EchoTime: 1, - PhaseEncodingDirection: 3, - EffectiveEchoSpacing: 5, - SliceTiming: 3, - SliceEncodingDirection: 4, - RepetitionTime: 1, - TotalReadoutTime: 3, - TaskName: 'Mixed Event Related Probe', - }, - } - var events = [ - { - path: '/sub-15/func/sub-14_task-mixedeventrelatedprobe_run-01_events.tsv', - }, - { - path: '/sub-15/run-01_events.tsv', - }, - ] - - it('should warn the user about a missing events file', function () { - validate.NIFTI( - null, - file, - jsonContentsDict, - {}, - [], - events, - function (issues) { - assert(issues.length === 1 && issues[0].code == 25) - }, - ) - }) - - it('should ignore missing events files for rest scans', function () { - let header = { - dim: [4, 128, 128, 72, 71], - pixdim: [-1, 2, 2, 2, 1], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - jsonContentsDict[ - '/sub-15/func/sub-15_task-mixedeventrelatedproberest_run-01_bold.json' - ] = - jsonContentsDict[ - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json' - ] - file.relativePath = - '/sub-15/func/sub-15_task-mixedeventrelatedproberest_run-01_bold.nii.gz' - validate.NIFTI( - header, - file, - jsonContentsDict, - {}, - [], - events, - function (issues) { - assert.deepEqual(issues, []) - }, - ) - }) - - it('should catch mismatched numbers of volumes in dwi scan and .bval/.bvec files', function () { - var file = { - name: 'sub-09_ses-test_dwi.nii.gz', - path: '/ds114/sub-09/ses-test/dwi/sub-09_ses-test_dwi.nii.gz', - relativePath: '/sub-09/ses-test/dwi/sub-09_ses-test_dwi.nii.gz', - } - var header = { - dim: [4, 128, 128, 72, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - jsonContentsDict['/sub-09/ses-test/dwi/sub-09_ses-test_dwi.json'] = - jsonContentsDict[ - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json' - ] - var bContentsDict = { - '/dwi.bval': - '0 0 0 0 0 0 0 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000 1000\n', - '/dwi.bvec': - '0 0 0 0 0 0 0 -1 -0.002 0.026 -0.591 0.236 0.893 -0.796 -0.234 -0.936 -0.506 -0.346 -0.457 0.487 0.618 0.577 0.827 -0.894 -0.29 -0.116 0.8 -0.514 0.789 -0.949 -0.233 0.021 -0.217 -0.774 0.161 0.147 -0.888 0.562 0.381 0.306 0.332 0.963 0.959 -0.453 0.773 -0.709 0.693 -0.682 0.142 0.74 0.103 -0.584 0.088 0.552 -0.838 -0.363 0.184 0.721 -0.433 -0.502 0.171 -0.463 -0.385 0.713 -0.26 -0.001 -0.037 -0.57 0.282 -0.721 -0.267 \n0 0 0 0 0 0 0 0 1 0.649 -0.766 -0.524 -0.259 0.129 0.93 0.14 -0.845 -0.847 -0.631 -0.389 0.673 -0.105 -0.521 -0.04 -0.541 -0.963 0.403 0.84 0.153 -0.233 0.783 -0.188 -0.956 -0.604 0.356 0.731 0.417 0.232 0.143 -0.199 -0.13 -0.265 0.205 -0.889 0.628 0.408 0.024 0.529 -0.725 0.388 0.822 -0.596 -0.335 -0.792 -0.458 -0.561 0.392 -0.693 0.682 0.69 -0.509 0.423 -0.809 -0.247 0.885 0.077 -0.902 -0.303 0.145 0.608 0.96 \n0 0 0 0 0 0 0 0 0 0.76 0.252 0.818 0.368 0.591 0.284 0.324 -0.175 -0.402 -0.627 0.782 0.407 -0.81 0.213 -0.447 -0.789 -0.245 -0.444 0.174 -0.596 0.211 0.577 -0.982 0.199 0.19 0.921 -0.666 0.193 -0.794 0.914 -0.931 0.934 0.044 0.193 0.068 0.088 0.575 0.721 -0.506 0.674 0.549 0.56 0.551 0.938 0.259 -0.296 0.744 -0.901 0.009 -0.589 0.521 -0.844 0.779 0.444 0.656 -0.387 -0.997 0.43 -0.763 -0.948 0.332 -0.085 \n', - } - validate.NIFTI( - header, - file, - jsonContentsDict, - bContentsDict, - [], - [], - function (issues) { - assert(issues.length == 1 && issues[0].code == 29) - }, - ) - }) - - it('should catch missing .bval and .bvec files', function () { - var file = { - name: 'sub-09_ses-test_dwi.nii.gz', - path: '/ds114/sub-09/ses-test/dwi/sub-09_ses-test_dwi.nii.gz', - relativePath: '/sub-09/ses-test/dwi/sub-09_ses-test_dwi.nii.gz', - } - validate.NIFTI(null, file, jsonContentsDict, {}, [], [], function (issues) { - assert(issues.length == 2 && issues[0].code == 32 && issues[1].code == 33) - }) - }) - - it('should catch missing task name definitions on task scans', function () { - delete jsonContentsDict[ - '/sub-15/func/sub-15_task-mixedeventrelatedproberest_run-01_bold.json' - ].TaskName - validate.NIFTI( - null, - file, - jsonContentsDict, - {}, - [], - events, - function (issues) { - assert(issues.length === 1 && issues[0].code == 50) - }, - ) - }) - - it('should ignore missing task name definitions on sbref task scans', function () { - var file = { - name: 'sub-15_task-mixedeventrelatedprobe_acq-LR_sbref.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_acq-LR_sbref.nii.gz', - } - jsonContentsDict[file.relativePath.replace('.nii.gz', '.json')] = - jsonContentsDict[ - '/sub-15/func/sub-15_task-mixedeventrelatedproberest_run-01_bold.json' - ] -
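- // the sbref sidecar reuses the bold sidecar whose TaskName was deleted above; sbref scans are expected to be exempt from the task name requirement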
validate.NIFTI( - null, - file, - jsonContentsDict, - {}, - [], - events, - function (issues) { - assert.deepEqual(issues, []) - }, - ) - }) - - it('should generate warning if files listed in IntendedFor of fieldmap json are not of type .nii or .nii.gz', function () { - var file = { - name: 'sub-09_ses-test_run-01_fieldmap.nii.gz', - path: '/ds114/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.nii.gz', - relativePath: - '/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.nii.gz', - } - - var jsonContentsDict = { - '/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.json': { - TaskName: 'Mixed Event Related Probe', - IntendedFor: [ - 'func/sub-15_task-mixedeventrelatedprobe_run-05_bold.json', - 'func/sub-15_task-mixedeventrelatedprobe_run-02_bold.nii.gz', - ], - }, - } - var fileList = [] - fileList.push({ - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - path: 'sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - }) - validate.NIFTI(null, file, jsonContentsDict, {}, [], [], function (issues) { - assert( - issues.some( - (issue) => - issue.reason === - 'Invalid filetype: IntendedFor should point to the .nii[.gz] files.' && - issue.evidence === - 'func/sub-15_task-mixedeventrelatedprobe_run-05_bold.json', - ), - ) - }) - }) - - it('should generate warning if files listed in IntendedFor of fieldmap json do not exist', function () { - let header = { - dim: [4, 128, 128, 1, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - - var file = { - name: 'sub-09_ses-test_run-01_fieldmap.nii.gz', - path: '/ds114/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.nii.gz', - relativePath: - '/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.nii.gz', - } - - var jsonContentsDict = { - '/sub-09/ses-test/fmap/sub-09_ses-test_run-01_fieldmap.json': { - TaskName: 'Mixed Event Related Probe', - RepetitionTime: 2, - SliceTiming: [0.4], - IntendedFor: [ - 'func/sub-15_task-mixedeventrelatedprobe_run-05_bold.nii.gz', - 'func/sub-15_task-mixedeventrelatedprobe_run-02_bold.nii.gz', - ], - }, - } - var fileList = [] - fileList.push({ - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - path: 'sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - }) - validate.NIFTI( - header, - file, - jsonContentsDict, - {}, - [], - [], - function (issues) { - assert( - issues.length === 3 && issues[0].code == 17 && issues[1].code == 37, - ) - }, - ) - }) - - it('should not generate warning if files listed in IntendedFor of fieldmap json exist', function () { - var file = { - name: 'sub-15_ses-test_run-01_fieldmap.nii.gz', - path: '/ds114/sub-15/ses-test/dwi/sub-15_ses-test_run-01_fieldmap.nii.gz', - relativePath: - '/sub-15/ses-test/dwi/sub-15_ses-test_run-01_fieldmap.nii.gz', - } - - var jsonContentsDict = { - '/sub-15/ses-test/dwi/sub-15_ses-test_run-01_fieldmap.json': { - TaskName: 'Mixed Event Related Probe', - Units: 'rad/s', - IntendedFor: [ - 'func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - ], - }, - } - - var fileList = [ - { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - path: 'sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - }, - ] - validate.NIFTI( - null, - file, - jsonContentsDict, - {}, - fileList, - [], - 
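- // argument order, as used throughout these tests: header, file, jsonContentsDict, bContentsDict, fileList, events, callback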
function (issues) { - assert.deepEqual(issues, []) - }, - ) - }) - - it('SliceTiming should not be greater than RepetitionTime', function () { - let header = { - dim: [4, 128, 128, 7, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - var jsonContentsDict_new = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - RepetitionTime: 1.5, - TaskName: - 'AntiSaccade (AS) Rewarded & Neutral with varying dot position', - EchoTime: 0.025, - NumberofPhaseEncodingSteps: 64, - FlipAngle: 70, - PhaseEncodingDirection: 'j', - SliceTiming: [0.0, 1.3448, 1.6207, 1.3966, 0.6724, 1.4483, 1.7241], - }, - } - var file_new = { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - } - validate.NIFTI( - header, - file_new, - jsonContentsDict_new, - {}, - [], - events, - function (issues) { - assert(issues[3].code === 66 && issues.length === 4) - assert(issues[2].code === 12 && issues.length === 4) - }, - ) - }) - - it('SliceTiming should be the same length as the k dimension of the corresponding nifti header', function () { - var jsonContents = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - RepetitionTime: 16.5, - TaskName: - 'AntiSaccade (AS) Rewarded & Neutral with varying dot position', - EchoTime: 0.025, - EffectiveEchoSpacing: 0.05, - NumberofPhaseEncodingSteps: 64, - FlipAngle: 70, - PhaseEncodingDirection: 'j', - SliceTiming: [0.0, 1.3448, 1.6207, 1.3966, 0.6724, 1.4483, 1.7241], - }, - } - var testFile = { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - } - var header = { - dim: [4, 128, 128, 7, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - var events = [ - { - path: '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_events.tsv', - }, - { - path: '/sub-15/run-01_events.tsv', - }, - ] - validate.NIFTI( - header, - testFile, - jsonContents, - {}, - [], - events, - function (issues) { - assert.deepEqual(issues, []) - }, - ) - }) - - it('SliceTiming should not have a length different than the k dimension of the corresponding nifti header', function () { - var jsonContents = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - RepetitionTime: 16.5, - TaskName: - 'AntiSaccade (AS) Rewarded & Neutral with varying dot position', - EchoTime: 0.025, - EffectiveEchoSpacing: 0.05, - NumberofPhaseEncodingSteps: 64, - FlipAngle: 70, - PhaseEncodingDirection: 'j', - SliceTiming: [0.0, 1.3448, 1.6207, 1.3966, 0.6724, 1.4483, 1.7241], - }, - } - var testFile = { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - } - var header = { - dim: [4, 128, 128, 5, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - var events = [ - { - path: '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_events.tsv', - }, - { - path: '/sub-15/run-01_events.tsv', - }, - ] - - validate.NIFTI( - header, - testFile, - jsonContents, - {}, - [], - events, - function (issues) { - assert(issues.length === 1 && issues[0].code === 87) - }, - ) - }) - - it('should throw an error for _phasediff.nii files with associated (EchoTime2 - EchoTime1) less than 0.0001', function () { - var phaseDiffJson = { - '/sub-01/func/sub-01_ses-mri_phasediff.json': { - RepetitionTime: 0.4, - 
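- // EchoTime2 - EchoTime1 is 0.00519 - 0.00515 = 0.00004 s, below the 0.0001 s floor, so code 83 is expected below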
EchoTime1: 0.00515, - EchoTime2: 0.00519, - FlipAngle: 60, - }, - } - var phaseDiffFile = { - name: 'sub-01_ses-mri_phasediff.nii', - relativePath: '/sub-01/func/sub-01_ses-mri_phasediff.nii', - } - validate.NIFTI( - null, - phaseDiffFile, - phaseDiffJson, - {}, - [], - events, - function (issues) { - assert(issues[0].code === 83 && issues.length === 1) - }, - ) - }) - - it('should throw an error for _phasediff.nii files with associated (EchoTime2 - EchoTime1) greater than 0.01', function () { - var phaseDiffJson = { - '/sub-01/func/sub-01_ses-mri_phasediff.json': { - RepetitionTime: 0.4, - EchoTime1: 0.00515, - EchoTime2: 0.1019, - FlipAngle: 60, - }, - } - var phaseDiffFile = { - name: 'sub-01_ses-mri_phasediff.nii', - relativePath: '/sub-01/func/sub-01_ses-mri_phasediff.nii', - } - validate.NIFTI( - null, - phaseDiffFile, - phaseDiffJson, - {}, - [], - events, - function (issues) { - assert(issues[0].code === 83 && issues.length === 1) - }, - ) - }) - - it('should not give an error for _phasediff.nii files with reasonable values of associated (EchoTime2 - EchoTime1)', function () { - var phaseDiffJson = { - '/sub-01/func/sub-01_ses-mri_phasediff.json': { - RepetitionTime: 0.4, - EchoTime1: 0.00515, - EchoTime2: 0.00819, - FlipAngle: 60, - }, - } - var phaseDiffFile = { - name: 'sub-01_ses-mri_phasediff.nii', - relativePath: '/sub-01/func/sub-01_ses-mri_phasediff.nii', - } - validate.NIFTI( - null, - phaseDiffFile, - phaseDiffJson, - {}, - [], - events, - function (issues) { - assert(issues.length === 0) - }, - ) - }) - it('should give an error if VolumeTiming is missing an acquisition time', function () { - let header = { - dim: [4, 128, 128, 72, 71], - pixdim: [-1, 2, 2, 2, 16.5], - xyzt_units: ['mm', 'mm', 'mm', 's'], - } - let volumeJson = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - VolumeTiming: 1, - TaskName: 'mixedeventrelatedprobrest', - }, - } - - let fileList = [ - { - name: 'sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - path: 'sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - relativePath: - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - }, - ] - file.relativePath = - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz' - validate.NIFTI( - header, - file, - volumeJson, - {}, - fileList, - events, - function (issues) { - assert(issues.filter((x) => x.code === 171).length === 1) - }, - ) - }) - it('should not give an error if VolumeTiming has an acquisition time', function () { - let volumeJson = { - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.json': { - VolumeTiming: 1, - SliceTiming: 1, - TaskName: 'mixedeventrelatedprobe', - }, - } - - file.relativePath = - '/sub-15/func/sub-15_task-mixedeventrelatedprobe_run-01_bold.nii.gz' - validate.NIFTI(null, file, volumeJson, {}, [], events, function (issues) { - assert(issues.filter((x) => x.code === 171).length === 0) - }) - }) -}) diff --git a/bids-validator/tests/potentialLocations.spec.js b/bids-validator/tests/potentialLocations.spec.js deleted file mode 100644 index 67c737a3..00000000 --- a/bids-validator/tests/potentialLocations.spec.js +++ /dev/null @@ -1,18 +0,0 @@ -import assert from 'assert' -import potentialLocations from '../utils/files/potentialLocations' - -describe('potentialLocations', () => { - it('should not return duplicate paths', () => { - const path = 'data/BIDS-examples/ds001' - const pLs = potentialLocations(path) - assert.deepEqual(pLs.length, new Set(pLs).size) - }) - it('.bold files should only return potential locations that include tasknames', () => { - const path = 'dsTest/sub-01/func/sub-01_task-testing_run-01_bold.json' - const pLs = potentialLocations(path) - const anyNonTaskSpecific = pLs.some( - (location) => location.indexOf('task') < 0, - ) - assert.equal(anyNonTaskSpecific, false) - }) -}) diff --git a/bids-validator/tests/session.spec.js b/bids-validator/tests/session.spec.js deleted file mode 100644 index 989018b2..00000000 --- a/bids-validator/tests/session.spec.js +++ /dev/null @@ -1,179 +0,0 @@ -import assert from 'assert' -import utils from '../utils' -const Subject = utils.files.sessions.Subject -import { - session as missingSessionFiles, - getDataOrganization, - getFilename, - missingSessionWarnings, - getSubjectFiles, - missingFileWarnings, - checkFileInMissingSession, - checkMissingFile, -} from '../validators/session' -const dir = process.cwd() -const data_dir = dir + '/bids-validator/tests/data/' -const missing_session_data = data_dir + 'ds006_missing-session' - -describe('session', () => { - let filelist - - describe('missingSessionFiles', () => { - describe('handling missing sessions', () => { - beforeEach(async () => { - filelist = await utils.files.readDir(missing_session_data) - }) - - it('should produce a single MISSING_SESSION warning', () => { - const warnings = missingSessionFiles(filelist) - const targetWarning = warnings.find( - (warning) => warning.key === 'MISSING_SESSION', - ) - assert.ok(targetWarning) - }) - - it('should not produce INCONSISTENT_SUBJECTS warnings', () => { - const warnings = missingSessionFiles(filelist) - warnings.forEach((warning) => - assert.notEqual(warning.key, 'INCONSISTENT_SUBJECTS'), - ) - }) - }) - }) - - describe('getDataOrganization', () => { - it('should take a fileList of data with subjects and sessions and return them', async () => { - let filelist - await utils.files.readDir(missing_session_data).then((files) => { - filelist = files - }) - - const { subjects, sessions } = getDataOrganization(filelist) - assert.equal(typeof subjects, 'object') - - const subjKeys = Object.keys(subjects) - assert.ok(subjKeys.length >= 1) - assert.ok(subjKeys.every((key) => subjects[key] instanceof Subject)) - assert.ok(sessions.length >= 1) - }) - }) - - describe('getFilename', () => { - it('should be able to extract the filename from its path', () => { - const subjKey = 'sub-01' - const paths = [ - '/sub-01/ses-post/anat/sub-01_ses-post_inplaneT2.nii.gz', - '/sub-01/ses-post/anat/sub-01_ses-post_T1w.nii.gz', - '/sub-01/ses-post/func/sub-01_ses-post_task-livingnonlivingdecisionwithplainormirrorreversedtext_run-01_bold.nii.gz', - ] - const expecteds = [ - '/ses-post/anat/_ses-post_inplaneT2.nii.gz', - '/ses-post/anat/_ses-post_T1w.nii.gz', - '/ses-post/func/_ses-post_task-livingnonlivingdecisionwithplainormirrorreversedtext_run-01_bold.nii.gz', - ] - - for (let i = 0; i < paths.length; i++) { - const result = getFilename(paths[i], subjKey) - assert.equal(result, expecteds[i]) - } - }) - }) - - describe('missingSessionWarnings', () => { - it('should take a subjects object and a sessions list and return a list of issues', async () => { - let filelist - await utils.files.readDir(missing_session_data).then((files) => { - filelist = files - }) - const { subjects, sessions } = getDataOrganization(filelist) - - const sessionWarnings = missingSessionWarnings(subjects, sessions) - assert.ok(Array.isArray(sessionWarnings)) - assert.ok( - sessionWarnings.every( - (warning) => warning instanceof utils.issues.Issue, - ), - ) - }) - }) - -
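- // getSubjectFiles presumably flattens each Subject's file list into one deduplicated array of path strings; the next block asserts exactly that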
describe('getSubjectFiles', () => { - it('should take a list of subjects and return a deduplicated list of their files', async () => { - let filelist - await utils.files.readDir(missing_session_data).then((files) => { - filelist = files - }) - const { subjects } = getDataOrganization(filelist) - - const subjFiles = getSubjectFiles(subjects) - assert.ok(subjFiles.every((filename) => typeof filename === 'string')) - assert.equal(subjFiles.length, new Set(subjFiles).size) - - const allFiles = Object.keys(subjects).reduce( - (allFiles, subjKey) => allFiles.concat(subjects[subjKey].files), - [], - ) - assert.ok(allFiles.every((file) => subjFiles.includes(file))) - }) - }) - - describe('missingFileWarnings', () => { - it('generates an issue for each file missing from each subject and returns them as a list', () => { - const subjects = {} - const subjKey = 'sub-01' - const subject01 = new Subject() - const subjFiles = [ - '/ses-post/anat/_ses-post_inplaneT2.nii.gz', - '/ses-post/anat/_ses-post_T1w.nii.gz', - '/ses-post/func/_ses-post_task-livingnonlivingdecisionwithplainormirrorreversedtext_run-01_bold.nii.gz', - ] - subject01.files.push(subjFiles[0]) - subjects[subjKey] = subject01 - - const warnings = missingFileWarnings(subjects, subjFiles) - assert.ok(Array.isArray(warnings)) - assert.ok( - warnings.every( - (warning) => - warning instanceof utils.issues.Issue && warning.code === 38, - ), - ) - }) - }) - - describe('checkFileInMissingSession', () => { - it('returns true if filepath belongs to missing session', () => { - const filepath = '/sub-01/ses-post/anat/sub-01_ses-post_inplaneT2.nii.gz' - const subject = new Subject() - subject.missingSessions.push('ses-post') - - const inMissingSession = checkFileInMissingSession(filepath, subject) - assert.strictEqual(inMissingSession, true) - }) - it('returns false if filepath belongs to extant session', () => { - const filepath = '/sub-01/ses-post/anat/sub-01_ses-post_inplaneT2.nii.gz' - const subject = new Subject() - subject.sessions.push('ses-post') - - const inMissingSession = checkFileInMissingSession(filepath, subject) - assert.strictEqual(inMissingSession, false) - }) - }) - - describe('checkMissingFile', () => { - it('returns an issue if filename is missing from subject', () => { - const subject = new Subject() - const subjKey = 'sub-01' - const filenames = [ - '/ses-post/anat/_ses-post_inplaneT2.nii.gz', - '/ses-post/anat/_ses-post_T1w.nii.gz', - '/ses-post/func/_ses-post_task-livingnonlivingdecisionwithplainormirrorreversedtext_run-01_bold.nii.gz', - ] - - assert.equal(subject.files.length, 0) - filenames.forEach((filename) => { - const warning = checkMissingFile(subject, subjKey, filename) - assert.ok(warning instanceof utils.issues.Issue) - assert.equal(warning.code, 38) - }) - }) - }) -}) diff --git a/bids-validator/tests/tsv.spec.js b/bids-validator/tests/tsv.spec.js deleted file mode 100644 index 421ed7e9..00000000 --- a/bids-validator/tests/tsv.spec.js +++ /dev/null @@ -1,825 +0,0 @@ -import assert from 'assert' -import validate from '../index' - -describe('TSV', function () { - // general tsv checks ------------------------------------------------------------------ - - var file = { - name: 'sub-08_ses-test_task-nback_physio.tsv.gz', - relativePath: - '/sub-08/ses-test/func/sub-08_ses-test_task-linebisection_events.tsv', - } - - it('should not allow empty values saved as empty cells.', function () { - var tsv = - 'header-one\theader-two\theader-three\theader-four\theader-five\n' + - '1.0\t\t0.2\tresponse 1\t12.32' - validate.TSV.TSV(file, tsv, [], function
(issues) { - assert(issues.length === 1 && issues[0].code === 23) - }) - }) - - it('should not allow missing values that are specified by something other than "n/a".', function () { - var tsv = - 'header-one\theader-two\theader-three\theader-four\theader-five\n' + - 'n1.0\tNA\t0.2\tresponse 1\t12.32' - validate.TSV.TSV(file, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 24) - }) - }) - - it('should not allow different length rows', function () { - var tsv = - 'header-one\theader-two\theader-three\n' + - 'value-one\tvalue-two\n' + - 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(file, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 22) - }) - }) - - /* See utils.unit.validate for comment - it('should not allow non-SI units', function () { - var tsv = - 'header-one\tunits\theader-three\n' + - 'value-one\tµV\tvalue-three\n' + - 'value-one\tuV\tvalue-three' - - validate.TSV.TSV(file, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].key === 'INVALID_TSV_UNITS') - }) - }) - */ - - // events checks ----------------------------------------------------------------------- - - var eventsFile = { - name: 'sub-08_ses-test_task-linebisection_events.tsv', - relativePath: - '/sub-08/ses-test/func/sub-08_ses-test_task-linebisection_events.tsv', - } - - it('should require events files to have "onset" as first header', function () { - var tsv = - 'header-one\tduration\theader-three\n' + - 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(eventsFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 20) - }) - }) - - it('should require events files to have "duration" as second header', function () { - var tsv = - 'onset\theader-two\theader-three\n' + 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(eventsFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 21) - }) - }) - - it('should not throw issues for a valid events file', function () { - var tsv = - 'onset\tduration\theader-three\n' + 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(eventsFile, tsv, [], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not throw issues for a valid events file with only two columns', function () { - var tsv = 'onset\tduration\n' + 'value-one\tvalue-two' - validate.TSV.TSV(eventsFile, tsv, [], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should check for the presence of any stimulus files declared', function () { - var tsv = - 'onset\tduration\tstim_file\n' + - 'value-one\tvalue-two\timages/red-square.jpg' - var fileList = [{ relativePath: '/stimuli/images/blue-square.jpg' }] - validate.TSV.TSV(eventsFile, tsv, fileList, function (issues) { - assert(issues.length === 1 && issues[0].code === 52) - }) - - fileList.push({ relativePath: '/stimuli/images/red-square.jpg' }) - validate.TSV.TSV(eventsFile, tsv, fileList, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should return all values in the stim_file column as a list', function () { - var tsv = - 'onset\tduration\tstim_file\n' + - 'value-one\tvalue-two\timages/red-square.jpg' - var fileList = [{ relativePath: '/stimuli/images/red-square.jpg' }] - validate.TSV.TSV( - eventsFile, - tsv, - fileList, - function (issues, participants, stimFiles) { - assert( - stimFiles.length === 1 && - stimFiles[0] === '/stimuli/images/red-square.jpg', - ) - }, - ) - }) - - // participants checks 
----------------------------------------------------------------- - - var participantsFile = { - name: 'participants.tsv', - relativePath: '/participants.tsv', - } - - it('should not allow participants.tsv files without participant_id columns', function () { - var tsv = - 'subject_id\theader-two\theader-three\n' + - 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(participantsFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 48) - }) - }) - - it('should allow a valid participants.tsv file', function () { - var tsv = - 'participant_id\theader-two\theader-three\n' + - 'sub-01\tvalue-two\tvalue-three' - validate.TSV.TSV(participantsFile, tsv, [], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not allow participants with age 89 and above in participants.tsv file', function () { - var tsv = 'participant_id\theader-two\tage\n' + 'sub-01\tvalue-two\t89' - validate.TSV.TSV(participantsFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 56) - }) - }) - - it('should not allow participants written with incorrect pattern', function () { - var tsv = - 'participant_id\theader-two\theader-three\n' + - '01\tvalue-two\tvalue-three' - validate.TSV.TSV(participantsFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 212) - }) - }) - - // _scans checks ----------------------------------------------------------------- - - var scansFile = { - name: 'sub-08_ses-test_task-linebisection_scans.tsv', - relativePath: - '/sub-08/ses-test/sub-08_ses-test_task-linebisection_scans.tsv', - } - - var niftiFile = { - name: 'sub-08_ses-test_task-linebisection_run-01_bold.nii.gz', - relativePath: - '/sub-08/ses-test/func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz', - } - - var eegFile = { - name: 'sub-08_ses-test_task-linebisection_run-01_eeg.fif', - relativePath: - '/sub-08/ses-test/eeg/sub-08_ses-test_task-linebisection_run-01_eeg.fif', - } - var ieegFile = { - name: 'sub-08_ses-test_task-linebisection_run-01_ieeg.edf', - relativePath: - '/sub-08/ses-test/ieeg/sub-08_ses-test_task-linebisection_run-01_ieeg.edf', - } - - var btiFiles = [ - { - name: 'c,rf0.1Hz', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg/c,rf0.1Hz', - }, - { - name: 'config', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg/config', - }, - { - name: 'hs_file', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg/hs_file', - }, - ] - - var ctfFiles = [ - { - name: 'BadChannels', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/BadChannels', - }, - { - name: 'bad.segments', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/bad.segments', - }, - { - name: 'params.dsc', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/params.dsc', - }, - { - name: 'ClassFile.cls', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/ClassFile.cls', - }, - { - name: 'processing.cfg', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/processing.cfg', - }, - { - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.res4', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.res4', - }, - 
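- // further auxiliary files bundled inside the CTF .ds directory; the scans.tsv tests below are expected to reference only the .ds directory itself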
{ - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.hc', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.hc', - }, - { - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.infods', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.infods', - }, - { - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.acq', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.acq', - }, - { - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.newds', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.newds', - }, - { - name: 'sub-08_ses-test_task-linebisection_acq-01_run-01_meg.meg4', - relativePath: - '/sub-08/ses-test/meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds/sub-01_ses-01_task-testing_acq-01_run-01_meg.meg4', - }, - ] - - it('should not allow _scans.tsv files without filename column', function () { - var tsv = - 'header-one\theader-two\theader-three\n' + - 'value-one\tvalue-two\tvalue-three' - validate.TSV.TSV(scansFile, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 68) - }) - }) - - it('should allow _scans.tsv files with filename column', function () { - var tsv = - 'header-one\tfilename\theader-three\n' + - 'value-one\tfunc/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\tvalue-three' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not allow improperly formatted acq_time column entries', function () { - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t000001' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert(issues.length === 1 && issues[0].code === 84) - }) - }) - - it('should allow n/a as acq_time column entries', function () { - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\tn/a' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should allow properly formatted acq_time column entries', function () { - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should allow acq_time column entries with optional fractional seconds', function () { - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45.88288' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should allow acq_time column entries with optional UTC specifier: "Z"', function () { - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45.88288Z' - validate.TSV.TSV(scansFile, tsv, [niftiFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should allow session missing', function () { - var niftiNoSesFile = { - name: 'sub-08_task-linebisection_run-01_bold.nii.gz', - relativePath: 
'/sub-08/func/sub-08_task-linebisection_run-01_bold.nii.gz', - } - var scansNoSesFile = { - name: 'sub-08_task-linebisection_scans.tsv', - relativePath: '/sub-08/sub-08_task-linebisection_scans.tsv', - } - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45' - validate.TSV.TSV(scansNoSesFile, tsv, [niftiNoSesFile], function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should not allow mismatched filename entries', function () { - const fileList = [eegFile] - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45' - validate.TSV.TSV(scansFile, tsv, fileList, function (issues) { - assert(issues.length === 1 && issues[0].code === 129) - }) - }) - - it('should allow matching filename entries', function () { - const fileList = [niftiFile, eegFile, ieegFile] - const tsv = - 'filename\tacq_time\n' + - 'func/sub-08_ses-test_task-linebisection_run-01_bold.nii.gz\t2017-05-03T06:45:45\n' + - 'eeg/sub-08_ses-test_task-linebisection_run-01_eeg.fif\t2017-05-03T06:45:45\n' + - 'ieeg/sub-08_ses-test_task-linebisection_run-01_ieeg.edf\t2017-05-03T06:45:45' - validate.TSV.TSV(scansFile, tsv, fileList, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should allow matching filename entries for CTF and BTI data', function () { - const fileList = btiFiles.concat(ctfFiles) - const tsv = - 'filename\tacq_time\n' + - 'meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg\t2017-05-03T06:45:45\n' + - 'meg/sub-08_ses-test_task-linebisection_acq-01_run-01_meg.ds\t2017-05-03T06:45:45' - validate.TSV.TSV(scansFile, tsv, fileList, function (issues) { - assert.deepEqual(issues, []) - }) - }) - - it('should check participants listed in phenotype/*tsv and sub-ids ', function () { - var phenotypeParticipants = [ - { - list: ['10159', '10171', '10189'], - file: { - name: 'vmnm.tsv', - path: '/corral-repl/utexas/poldracklab/openfmri/shared2/ds000030/ds030_R1.0.5/ds000030_R1.0.5//phenotype/vmnm.tsv', - relativePath: '/phenotype/vmnm.tsv', - }, - }, - ] - var summary = { - sessions: [], - subjects: ['10159', '10171'], - tasks: [], - totalFiles: 43, - size: 11845, - } - var issues = [] - validate.TSV.checkPhenotype( - phenotypeParticipants, - summary, - issues, - function (issues) { - assert(issues.length === 1 && issues[0].code === 51) - }, - ) - }) - - // channels checks ----------------------------------------------------------------- - - var channelsFileMEG = { - name: 'sub-01_ses-meg_task-facerecognition_run-01_channels.tsv', - relativePath: - '/sub-01/ses-meg/meg/sub-01_ses-meg_task-facerecognition_run-01_channels.tsv', - } - - it('should not allow MEG channels.tsv files without name column', function () { - var tsv = 'header-one\ttype\tunits\n' + 'value-one\tEEG\tmV' - validate.TSV.TSV(channelsFileMEG, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 71) - }) - }) - - it('should not allow MEG channels.tsv files without type column', function () { - var tsv = 'name\theader-two\tunits\n' + 'value-one\tEEG\tmV' - validate.TSV.TSV(channelsFileMEG, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 71) - }) - }) - - it('should not allow MEG channels.tsv files without units column', function () { - var tsv = 'name\ttype\theader-three\n' + 'value-one\tEEG\tvalue-three' - validate.TSV.TSV(channelsFileMEG, tsv, [], function (issues) { - assert(issues.length === 1 && issues[0].code === 71) - }) - }) - - 
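- // happy path: name, type, and units lead the header in the required order, and extra trailing columns are permitted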
-  it('should allow MEG channels.tsv files with name, type and units columns', function () {
-    var tsv =
-      'name\ttype\tunits\theader-four\n' + 'value-one\tEEG\tmV\tvalue-four'
-    validate.TSV.TSV(channelsFileMEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  var channelsFileEEG = {
-    name: 'sub-01_ses-001_task-rest_run-01_channels.tsv',
-    relativePath:
-      '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_channels.tsv',
-  }
-
-  it('should not allow EEG channels.tsv files without name column', function () {
-    var tsv = 'header-one\ttype\tunits\n' + 'value-one\tEEG\tmV'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 71)
-    })
-  })
-
-  it('should not allow EEG channels.tsv files with name column in wrong place', function () {
-    var tsv =
-      'header-one\ttype\tunits\tname\n' + 'value-one\tEEG\tmV\tvalue-name'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 230)
-    })
-  })
-
-  it('should not allow EEG channels.tsv files without type column', function () {
-    var tsv = 'name\theader-two\tunits\n' + 'value-one\tEEG\tmV'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 71)
-    })
-  })
-
-  it('should not allow EEG channels.tsv files without units column', function () {
-    var tsv = 'name\ttype\theader-three\n' + 'value-one\tEEG\tvalue-three'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 71)
-    })
-  })
-
-  it('should allow EEG channels.tsv files with name, type and units columns', function () {
-    var tsv =
-      'name\ttype\tunits\theader-four\n' + 'value-one\tEEG\tmV\tvalue-four'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  var channelsFileIEEG = {
-    name: 'sub-01_ses-ieeg_task-facerecognition_run-01_channels.tsv',
-    relativePath:
-      '/sub-01/ses-ieeg/ieeg/sub-01_ses-ieeg_task-facerecognition_run-01_channels.tsv',
-  }
-
-  it('should not allow iEEG channels.tsv files without low_cutoff column', function () {
-    var tsv =
-      'name\ttype\tunits\textra-column\thigh_cutoff\n' +
-      'value-name\tECOG\tmV\tvalue-fake\tvalue-highcut'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 72)
-    })
-  })
-
-  it('should not allow iEEG channels.tsv files with low_cutoff column in wrong place', function () {
-    var tsv =
-      'name\ttype\tunits\thigh_cutoff\tlow_cutoff\n' +
-      'value-name\tECOG\tmV\tvalue-highcut\tvalue-lowcut'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(
-        issues.length === 2 && issues[0].code === 229 && issues[1].code === 229,
-      )
-    })
-  })
-
-  it('should not allow iEEG channels.tsv files without high_cutoff column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\textra-column\n' +
-      'value-name\tECOG\tmV\tvalue-lowcut\tvalue-fake'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 72)
-    })
-  })
-
-  it('should not allow iEEG channels.tsv files with value other than good/bad in status column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tECOG\tmV\tvalue-lowcut\tvalue-highcut\tnot-good'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 125)
-    })
-  })
-
-  it('correct columns should pass for iEEG channels.tsv file', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tECOG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  it('should not allow iEEG channels.tsv files with value other than accepted values in type column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tMEEG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 131)
-      expect(typeof issues[0].evidence).toBe('string')
-    })
-  })
-
-  it('should return a string value for evidence for issue 130', function () {
-    const tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\teeg\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 130)
-      expect(typeof issues[0].evidence).toBe('string')
-    })
-  })
-
-  it('should return a string value for evidence for issue 131', function () {
-    const tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tMEEG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 131)
-      expect(typeof issues[0].evidence).toBe('string')
-    })
-  })
-
-  it('should not allow EEG channels.tsv files with value other than accepted values in type column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tMEEG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 131)
-    })
-  })
-
-  it('should not allow MEG channels.tsv files with value other than accepted values in type column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tMEEG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileMEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 131)
-    })
-  })
-
-  it('should not allow channels.tsv files with lower-casing in type column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\teeg\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 130)
-    })
-  })
-
-  it('should allow iEEG channels.tsv files with accepted values in type column', function () {
-    var tsv =
-      'name\ttype\tunits\tlow_cutoff\thigh_cutoff\tstatus\n' +
-      'value-name\tECOG\tmV\tvalue-lowcut\tvalue-highcut\tgood'
-    validate.TSV.TSV(channelsFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  var channelsFileNIRS = {
-    name: 'sub-01_ses-001_task-rest_run-01_channels.tsv',
-    relativePath:
-      '/sub-01/ses-001/nirs/sub-01_ses-001_task-rest_run-01_channels.tsv',
-  }
-
-  it('NIRS channels.tsv with correct columns should throw no error', function () {
-    var tsv =
-      'name\ttype\tsource\tdetector\twavelength_nominal\tunits\n' +
-      'testch\tNIRSCWAMPLITUDE\tS1\tD1\t760.0\tV'
-    validate.TSV.TSV(channelsFileNIRS, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  it('should not allow NIRS channels.tsv files without name column', function () {
-    var tsv =
-      'type\tsource\tdetector\twavelength_nominal\tunits\n' +
-      'NIRSCWAMPLITUDE\tS1\tD1\t760.0\tV'
-    validate.TSV.TSV(channelsFileNIRS, tsv, [], function (issues) {
-      assert(issues[0].code === 234)
-    })
-  })
-
-  var channelsFileMOTION = {
-    name: 'sub-01_ses-walk_task-navigation_tracksys-IMU1_run-01_channels.tsv',
-    relativePath:
-      '/sub-01/ses-walk/motion/sub-01_ses-walk_task-navigation_tracksys-IMU1_run-01_channels.tsv',
-  }
-
-  it('MOTION channels.tsv with correct columns should throw no error', function () {
-    var tsv =
-      'name\tcomponent\ttype\ttracked_point\tunits\n' +
-      't1_acc_x\tx\tACCEL\tLeftFoot\tm/s^2'
-    validate.TSV.TSV(channelsFileMOTION, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  it('should not allow MOTION channels.tsv files without component column', function () {
-    var tsv =
-      'name\ttype\ttracked_point\tunits\n' + 't1_acc_x\tACCEL\tLeftFoot\tm/s^2'
-    validate.TSV.TSV(channelsFileMOTION, tsv, [], function (issues) {
-      assert(issues[0].code === 129)
-    })
-  })
-
-  // optodes checks ---------------------------------------------------------
-  var optodesFileNIRS = {
-    name: 'sub-01_ses-001_task-rest_run-01_optodes.tsv',
-    relativePath:
-      '/sub-01/ses-001/nirs/sub-01_ses-001_task-rest_run-01_optodes.tsv',
-  }
-
-  it('should allow NIRS optodes.tsv files with correct columns', function () {
-    var tsv = 'name\ttype\tx\ty\tz\n' + 'S1\tsource\t-0.04\t0.02\t0.5\n'
-    validate.TSV.TSV(optodesFileNIRS, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  it('should not allow NIRS optodes.tsv files without name column', function () {
-    var tsv = 'type\tx\ty\tz\n' + 'source\t-0.04\t0.02\t0.5\n'
-    validate.TSV.TSV(optodesFileNIRS, tsv, [], function (issues) {
-      assert(issues[0].code === 233)
-    })
-  })
-
-  // electrodes checks ---------------------------------------------------------
-  var electrodesFileEEG = {
-    name: 'sub-01_ses-001_task-rest_run-01_electrodes.tsv',
-    relativePath:
-      '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_electrodes.tsv',
-  }
-
-  it('should not allow EEG electrodes.tsv files without name column', function () {
-    var tsv =
-      'wrongcolumn\tx\ty\tz\ttype\tmaterial\timpedance\n' +
-      'valName\tvalX\tvalY\tvalZ\tvalType\tvalMaterial\tvalImpedance\n'
-    validate.TSV.TSV(electrodesFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 96)
-    })
-  })
-
-  it('should not allow EEG electrodes.tsv files without x column', function () {
-    var tsv =
-      'name\twrongcolumn\ty\tz\ttype\tmaterial\timpedance\n' +
-      'valName\tvalX\tvalY\tvalZ\tvalType\tvalMaterial\tvalImpedance\n'
-    validate.TSV.TSV(electrodesFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 96)
-    })
-  })
-
-  it('should not allow EEG electrodes.tsv files without y column', function () {
-    var tsv =
-      'name\tx\twrongcolumn\tz\ttype\tmaterial\timpedance\n' +
-      'valName\tvalX\tvalY\tvalZ\tvalType\tvalMaterial\tvalImpedance\n'
-    validate.TSV.TSV(electrodesFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 96)
-    })
-  })
-
-  it('should not allow EEG electrodes.tsv files without z column', function () {
-    var tsv =
-      'name\tx\ty\twrongcolumn\ttype\tmaterial\timpedance\n' +
-      'valName\tvalX\tvalY\tvalZ\tvalType\tvalMaterial\tvalImpedance\n'
-    validate.TSV.TSV(electrodesFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 96)
-    })
-  })
-
-  it('correct columns should pass for EEG electrodes file', function () {
-    var tsv =
-      'name\tx\ty\tz\ttype\tmaterial\timpedance\n' +
-      'valName\tvalX\tvalY\tvalZ\tvalType\tvalMaterial\tvalImpedance\n'
-    validate.TSV.TSV(electrodesFileEEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  var electrodesFileIEEG = {
-    name: 'sub-01_ses-ieeg_task-facerecognition_run-01_electrodes.tsv',
-    relativePath:
-      '/sub-01/ses-ieeg/ieeg/sub-01_ses-ieeg_task-facerecognition_run-01_electrodes.tsv',
-  }
-
-  it('should not allow iEEG electrodes.tsv files without name column', function () {
-    var tsv =
-      'blah\tx\ty\tz\tsize\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 73)
-    })
-  })
-
-  it('should not allow iEEG electrodes.tsv files without x column', function () {
-    var tsv =
-      'name\tblah\ty\tz\tsize\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 73)
-    })
-  })
-
-  it('should not allow iEEG electrodes.tsv files without y column', function () {
-    var tsv =
-      'name\tx\tblah\tz\tsize\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 73)
-    })
-  })
-
-  it('should not allow iEEG electrodes.tsv files without z column', function () {
-    var tsv =
-      'name\tx\ty\tblah\tsize\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 73)
-    })
-  })
-
-  it('should not allow iEEG electrodes.tsv files without size column', function () {
-    var tsv =
-      'name\tx\ty\tz\tblah\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 1 && issues[0].code === 73)
-    })
-  })
-
-  it('correct columns should pass for iEEG electrodes file', function () {
-    var tsv =
-      'name\tx\ty\tz\tsize\ttype\n' +
-      'value-one\tvalue-two\tvalue-three\tvalue-four\tvalue-five\tvalue-six\n'
-    validate.TSV.TSV(electrodesFileIEEG, tsv, [], function (issues) {
-      assert(issues.length === 0)
-    })
-  })
-
-  var physio_file = {
-    name: 'sub-20_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-    relativePath:
-      '/sub-20/ses-1/func/sub-20_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-  }
-
-  it('should not allow physio.tsv.gz files without an associated JSON', function () {
-    let issues = validate.TSV.validateContRec([physio_file], {})
-    assert(issues.length === 1 && issues[0].code === 170)
-  })
-
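The specs in this file only ever inspect a few fields of each returned issue. As a hedged sketch of the shape they rely on (field names taken from the assertions themselves, not from a published schema), an entry in `issues` looks roughly like this:

    // e.g. for the physio.tsv.gz case above (values hypothetical except `code`):
    // {
    //   code: 170,         // numeric issue code, asserted throughout this file
    //   file: physio_file, // the offending file object
    //   evidence: '...',   // string excerpt, asserted for codes 130/131
    //   reason: '...',     // human-readable message
    // }
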
-  // samples checks -----------------------------------------------------------
-
-  const samplesFile = {
-    name: 'samples.tsv',
-    relativePath: '/samples.tsv',
-  }
-
-  it('should return errors for each missing mandatory header in samples.tsv', () => {
-    const tsv = 'wrong_col\nsome_data\n'
-    validate.TSV.TSV(samplesFile, tsv, [], function (issues) {
-      expect(issues.length).toBe(3)
-      const codes = issues.map((x) => x.code)
-      expect(codes.includes(216)).toBe(true)
-      expect(codes.includes(217)).toBe(true)
-      expect(codes.includes(218)).toBe(true)
-    })
-  })
-
-  it('should return an error for an invalid sample_type in samples.tsv', () => {
-    const tsv = 'sample_type\nbad\n'
-    validate.TSV.TSV(samplesFile, tsv, [], function (issues) {
-      const codes = issues.map((x) => x.code)
-      expect(codes.includes(219)).toBe(true)
-    })
-  })
-})
diff --git a/bids-validator/tests/type.spec.js b/bids-validator/tests/type.spec.js
deleted file mode 100644
index 3770df0d..00000000
--- a/bids-validator/tests/type.spec.js
+++ /dev/null
@@ -1,639 +0,0 @@
-import { assert } from 'chai'
-import utils from '../utils'
-import BIDS from '../validators/bids'
-
-describe('utils.type.file.isAnat', function () {
-  const goodFilenames = [
-    '/sub-15/anat/sub-15_inplaneT2.nii.gz',
-    '/sub-15/ses-12/anat/sub-15_ses-12_inplaneT2.nii.gz',
-    '/sub-16/anat/sub-16_T1w.nii.gz',
-    '/sub-16/anat/sub-16_T1w.json',
-    '/sub-16/anat/sub-16_run-01_T1w.nii.gz',
-    '/sub-16/anat/sub-16_acq-highres_T1w.nii.gz',
-    '/sub-16/anat/sub-16_rec-mc_T1w.nii.gz',
-    '/sub-16/anat/sub-16_ce-contrastagent_T1w.nii.gz',
-    '/sub-16/anat/sub-16_part-mag_T1w.nii.gz',
-    '/sub-16/anat/sub-16_T1map.nii.gz',
-    '/sub-16/anat/sub-16_mod-T1w_defacemask.nii.gz',
-    '/sub-16/anat/sub-16_echo-1_MESE.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_VFA.nii.gz',
-    '/sub-16/anat/sub-16_inv-1_IRT1.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_inv-1_MP2RAGE.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_mt-on_MPM.nii.gz',
-    '/sub-16/anat/sub-16_mt-on_part-real_MTR.nii.gz',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isAnat('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isAnat(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-1/anat/sub-15_inplaneT2.nii.gz',
-    '/sub-15/ses-12/anat/sub-15_inplaneT2.nii.gz',
-    '/sub-16/anat/sub-16_T1.nii.gz',
-    'blaaa.nii.gz',
-    '/sub-16/anat/sub-16_run-second_T1w.nii.gz',
-    '/sub-16/anat/sub-16_run-01_rec-mc_T1w.nii.gz',
-    '/sub-16/anat/sub-16_part-magnitude_T1w.nii.gz',
-    '/sub-16/anat/sub-16_part-mag_T1map.nii.gz',
-    '/sub-16/anat/sub-16_mod-T1weighted_defacemask.nii.gz',
-    '/sub-16/anat/sub-16_MESE.nii.gz',
-    '/sub-16/anat/sub-16_VFA.nii.gz',
-    '/sub-16/anat/sub-16_IRT1.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_MP2RAGE.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_mt-fail_MPM.nii.gz',
-    '/sub-16/anat/sub-16_flip-1_mt-fail_part-real_MTR.nii.gz',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isAnat('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isAnat(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isFunc', function () {
-  var goodFilenames = [
-    '/sub-15/func/sub-15_task-0back_bold.nii.gz',
-    '/sub-15/ses-12/func/sub-15_ses-12_task-0back_bold.nii.gz',
-    '/sub-16/func/sub-16_task-0back_bold.json',
-    '/sub-16/func/sub-16_task-0back_run-01_bold.nii.gz',
-    '/sub-16/func/sub-16_task-0back_acq-highres_bold.nii.gz',
-    '/sub-16/func/sub-16_task-0back_rec-mc_bold.nii.gz',
-    '/sub-16/func/sub-16_task-0back_run-01_phase.nii.gz',
-    '/sub-16/func/sub-16_task-0back_echo-1_phase.nii.gz',
-    '/sub-15/func/sub-15_task-0back_part-phase_bold.nii.gz',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isFunc('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isFunc(path), true)
-      isdone()
-    })
-  })
-
-  var badFilenames = [
-    '/sub-1/func/sub-15_inplaneT2.nii.gz',
-    '/sub-15/ses-12/func/sub-15_inplaneT2.nii.gz',
-    '/sub-16/func/sub-16_T1.nii.gz',
-    'blaaa.nii.gz',
-    '/sub-16/func/sub-16_run-second_T1w.nii.gz',
-    '/sub-16/func/sub-16_task-0-back_rec-mc_bold.nii.gz',
-    '/sub-16/func/sub-16_run-01_rec-mc_T1w.nii.gz',
-    '/sub-16/func/sub-16_task-0back_part-magnitude_bold.nii.gz',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isFunc('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isFunc(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isTopLevel', function () {
-  const goodFilenames = [
-    '/README',
-    '/CHANGES',
-    '/LICENSE',
-    '/dataset_description.json',
-    '/ses-pre_task-rest_bold.json',
-    '/dwi.bval',
-    '/dwi.bvec',
-    '/T1w.json',
-    '/acq-test_dwi.json',
-    '/rec-test_physio.json',
-    '/task-testing_eeg.json',
-    '/task-testing_ieeg.json',
-    '/task-testing_meg.json',
-    '/events.json',
-    '/scans.json',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isTopLevel('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isTopLevel(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/readme.txt',
-    '/changelog',
-    '/license.txt',
-    '/dataset_description.yml',
-    '/ses.json',
-    '/_T1w.json',
-    '/_dwi.json',
-    '/_task-test_physio.json',
-    // cross-talk and fine-calibration files for Neuromag/Elekta/MEGIN data (.fif)
-    // must be defined at file level.
-    '/acq-calibration_meg.dat',
-    '/acq-crosstalk_meg.fif',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isTopLevel('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isTopLevel(path), false)
-      isdone()
    })
-  })
-})
-
-describe('utils.type.file.isSubjectLevel', () => {
-  const goodFilenames = [] // to be extended in the future...
-
-  goodFilenames.forEach((path) => {
-    it("isSubjectLevel('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isSubjectLevel(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    // cross-talk and fine-calibration files for Neuromag/Elekta/MEGIN data (.fif)
-    // must be placed at file level.
-    '/sub-12/sub-12_acq-calibration_meg.dat',
-    '/sub-12/sub-12_acq-crosstalk_meg.fif',
-    '/sub-12/acq-calibration_meg.dat',
-    '/sub-12/acq-crosstalk_meg.fif',
-    '/sub-12/acq-calibration.dat',
-    '/sub-12/acq-crosstalk.fif',
-  ]
-
-  badFilenames.forEach((path) => {
-    it("isSubjectLevel('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isSubjectLevel(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isSessionLevel', function () {
-  const goodFilenames = [
-    '/sub-12/sub-12_scans.tsv',
-    '/sub-12/sub-12_scans.json',
-    '/sub-12/ses-pre/sub-12_ses-pre_scans.tsv',
-    '/sub-12/ses-pre/sub-12_ses-pre_scans.json',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isSessionLevel('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isSessionLevel(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-12/sub-12.tsv',
-    '/sub-12/ses-pre/sub-12_ses-pre_scan.tsv',
-    // cross-talk and fine-calibration files for Neuromag/Elekta/MEGIN data (.fif)
-    // must be placed at file level.
-    '/sub-12/sub-12_acq-calibration_meg.dat',
-    '/sub-12/sub-12_acq-crosstalk_meg.fif',
-    '/sub-12/ses-pre/sub-12_ses-pre_acq-calibration_meg.dat',
-    '/sub-12/ses-pre/sub-12_ses-pre_acq-crosstalk_meg.fif',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isSessionLevel('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isSessionLevel(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isDWI', function () {
-  const goodFilenames = [
-    '/sub-12/dwi/sub-12_dwi.nii.gz',
-    '/sub-12/dwi/sub-12_dwi.json',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.nii.gz',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.bvec',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.bval',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.json',
-    '/sub-12/dwi/sub-12_sbref.nii.gz',
-    '/sub-12/dwi/sub-12_sbref.json',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_sbref.nii.gz',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_sbref.json',
-    '/sub-12/dwi/sub-12_part-mag_sbref.json',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isDWI('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isDWI(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-12/sub-12.tsv',
-    '/sub-12/ses-pre/sub-12_ses-pre_scan.tsv',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.bvecs',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_dwi.bvals',
-    '/sub-12/dwi/sub-12_sbref.bval',
-    '/sub-12/dwi/sub-12_sbref.bvec',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_sbref.bval',
-    '/sub-12/ses-pre/dwi/sub-12_ses-pre_sbref.bvec',
-    '/sub-12/dwi/sub-12_part-magnitude_sbref.json',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isDWI('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isDWI(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isMEG', function () {
-  const goodFilenames = [
-    // Metadata MEG files
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.json',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_channels.tsv',
-    // Parent directory files are fine for some file formats:
-    // Parent dir: CTF data with a .ds ... the contents within .ds are not checked
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.ds/catch-alp-good-f.meg4',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.ds/xyz',
-    // Parent dir: BTi/4D ... again: the contents within are not checked
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/config',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/hs_file',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/e,rfhp1.0Hz.COH',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/c,rfDC',
-    // NO parent dir: KRISS data
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_run-01_meg.chn',
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_run-01_meg.kdf',
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_run-01_meg.trg',
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_digitizer.txt',
-    // NO parent dir: KIT data
-    '/sub-01/ses-001/meg/sub-01_ses-001_markers.sqd',
-    '/sub-01/ses-001/meg/sub-01_ses-001_markers.mrk',
-    '/sub-01/ses-001/meg/sub-01_ses-001_meg.sqd',
-    '/sub-01/ses-001/meg/sub-01_ses-001_meg.con',
-    // NO parent dir: ITAB data
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_run-01_meg.raw',
-    '/sub-control01/ses-001/meg/sub-control01_ses-001_task-rest_run-01_meg.raw.mhd',
-    // NO parent dir: fif data
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_split-01_meg.fif',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_acq-TEST_run-01_split-01_meg.fif',
-    // cross-talk and fine-calibration files for Neuromag/Elekta/MEGIN data (.fif)
-    '/sub-01/meg/sub-01_acq-calibration_meg.dat',
-    '/sub-01/meg/sub-01_acq-crosstalk_meg.fif',
-    '/sub-01/ses-001/meg/sub-01_ses-001_acq-calibration_meg.dat',
-    '/sub-01/ses-001/meg/sub-01_ses-001_acq-crosstalk_meg.fif',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isMeg('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isMeg(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    // missing session directory
-    '/sub-01/meg/sub-01_ses-001_task-rest_run-01_meg.json',
-    // subject not matching
-    '/sub-01/ses-001/meg/sub-12_ses-001_task-rest_run-01_split-01_meg.fif',
-    // invalid file endings
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.tsv',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.bogus',
-    // wrong order of entities: https://github.com/bids-standard/bids-validator/issues/767
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_acq-TEST_split-01_meg.fif',
-    // only parent directory name matters for BTi and CTF systems
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meggg/config',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg.dd/xyz',
-    // KIT with a parent dir ... should not have a parent dir
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_markers.sqd',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_markers.con',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.sqd',
-    // FIF with a parent dir ... should not have a parent dir
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_meg.fif',
-    // ITAB with a parent dir ... should not have a parent dir
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.raw',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.raw.mhd',
-    // KRISS with a parent dir ... should not have a parent dir
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.kdf',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.trg',
-    '/sub-01/ses-001/meg/sub-01_ses-001_task-rest_run-01_meg/sub-01_ses-001_task-rest_run-01_meg.chn',
-    // cross-talk and fine-calibration files for Neuromag/Elekta/MEGIN data (.fif)
-    // .dat in MEG only allowed for "acq-calibration"
-    '/acq-notcalibration_meg.dat',
-    '/sub-01/ses-001/meg/sub-01_ses-001_acq-notcalibration_meg.dat',
-    '/sub-01/ses-001/meg/sub-01_ses-001_acq-crosstalk_meg.dat',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isMeg('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isMeg(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isEEG', function () {
-  const goodFilenames = [
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.json',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_events.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_split-01_eeg.edf',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.eeg',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.vmrk',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.vhdr',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.bdf',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.set',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.fdt',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_channels.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_electrodes.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_space-CapTrak_electrodes.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_coordsystem.json',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_space-CapTrak_coordsystem.json',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_photo.jpg',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isEEG('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isEEG(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-01/eeg/sub-01_ses-001_task-rest_run-01_eeg.json',
-    '/sub-01/ses-001/eeg/sub-12_ses-001_task-rest_run-01_split-01_eeg.edf',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_task-rest_run-01_eeg.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_space-BOGUS_electrodes.tsv',
-    '/sub-01/ses-001/eeg/sub-01_ses-001_space-BOGUS_coordsystem.json',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isEEG('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isEEG(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isIEEG', function () {
-  const goodFilenames = [
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_ieeg.json',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.edf',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.vhdr',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.vmrk',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.eeg',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.set',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.fdt',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.nwb',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.mefd/sub-01_ses-001_task-rest_run-01_ieeg.rdat',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.mefd/sub-01_ses-001_task-rest_run-01_ieeg.ridx',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.mefd/CH1.timd/CH1-000000.segd/sub-01_ses-001_task-rest_run-01_ieeg.tdat',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.mefd/CH1.timd/CH1-000000.segd/sub-01_ses-001_task-rest_run-01_ieeg.idx',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_split-01_ieeg.mefd/CH1.timd/CH1-000000.segd/sub-01_ses-001_task-rest_run-01_ieeg.tmet',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_channels.tsv',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_space-fsaverage_electrodes.tsv',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_space-fsaverage_coordsystem.json',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isIEEG('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isIEEG(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-01/ieeg/sub-01_ses-001_task-rest_run-01_ieeg.json',
-    '/sub-01/ses-001/ieeg/sub-12_ses-001_task-rest_run-01_split-01_ieeg.fif',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_task-rest_run-01_ieeg.tsv',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_space-fsavg_electrodes.tsv',
-    '/sub-01/ses-001/ieeg/sub-01_ses-001_space-fsavg_coordsystem.json',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isIEEG('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isIEEG(path), false)
-      isdone()
    })
-  })
-})
-
-describe('utils.type.file.isPhenotypic', function () {
-  it('should allow .tsv and .json files in the /phenotype directory', function () {
-    assert(utils.type.file.isPhenotypic('/phenotype/acds_adult.json'))
-    assert(utils.type.file.isPhenotypic('/phenotype/acds_adult.tsv'))
-  })
-
-  it('should not allow non .tsv and .json files in the /phenotype directory', function () {
-    assert(!utils.type.file.isPhenotypic('/phenotype/acds_adult.jpeg'))
-    assert(!utils.type.file.isPhenotypic('/phenotype/acds_adult.gif'))
-  })
-})
-
-describe('utils.type.file.isAssociatedData', function () {
-  it('should return false for unknown root directories', function () {
-    var badFilenames = ['/images/picture.jpeg', '/temporary/test.json']
-
-    badFilenames.forEach(function (path) {
-      assert.equal(utils.type.file.isAssociatedData(path), false)
-    })
-  })
-
-  it('should return true for associated data directories and any files within', function () {
-    var goodFilenames = [
-      '/code/test-script.py',
-      '/derivatives/sub-01_QA.pdf',
-      '/sourcedata/sub-01_ses-01_bold.dcm',
-      '/stimuli/text.pdf',
-    ]
-
-    goodFilenames.forEach(function (path) {
-      assert(utils.type.file.isAssociatedData(path))
-    })
-  })
-})
-
-describe('utils.type.file.isStimuliData', function () {
-  it('should return false for unknown root directories', function () {
-    var badFilenames = ['/images/picture.jpeg', '/temporary/test.json']
-
-    badFilenames.forEach(function (path) {
-      assert.equal(utils.type.file.isStimuliData(path), false)
-    })
-  })
-
-  it('should return true for stimuli data directories and any files within', function () {
-    var goodFilenames = ['/stimuli/sub-01/mov.avi', '/stimuli/text.pdf']
-
-    goodFilenames.forEach(function (path) {
-      assert(utils.type.file.isStimuliData(path))
-    })
-  })
-})
-
-describe('utils.type.getPathValues', function () {
-  it('should return the correct path values from a valid file path', function () {
-    assert.equal(
-      utils.type.getPathValues(
-        '/sub-22/ses-1/func/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-      ).sub,
-      22,
-    )
-    assert.equal(
-      utils.type.getPathValues(
-        '/sub-22/ses-1/func/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-      ).ses,
-      1,
-    )
-    assert.equal(
-      utils.type.getPathValues(
-        '/sub-22/func/sub-22_task-rest_acq-prefrontal_physio.tsv.gz',
-      ).sub,
-      22,
-    )
-    assert.equal(
-      utils.type.getPathValues(
-        '/sub-22/func/sub-22_task-rest_acq-prefrontal_physio.tsv.gz',
-      ).ses,
-      null,
-    )
-  })
-})
-
-describe('utils.type.file.isPET', function () {
-  const goodFilenames = [
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_pet.json',
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_pet.nii',
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_pet.nii.gz',
-    '/sub-03/ses-01/pet/sub-02_ses-40_task-30_pet.json',
-    '/sub-03/ses-01/pet/sub-02_ses-40_pet.nii',
-    '/sub-03/ses-01/pet/sub-02_ses-40_pet.nii.gz',
-    '/sub-03/pet/sub-02_pet.nii',
-    '/sub-03/pet/sub-02_pet.nii.gz',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isPET('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isPET(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_pet+json',
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_pet.json.gz',
-    '/sub-1/ses-1/pet/sub-1ses-1_task-1_trc-1_rec-1_run-1_pet.nii',
-    'sub-1/ses-1/pet/sub-1ses-1_task-1_trc-1_rec-1_run-1_pet.nii',
-    '/sub-1/ses-1/pet/sub-1/ses-1_task-1_trc-1_rec-1_run-q_pet.csv',
-    '/sub-1/ses-1/pet/sub-1/ses-1_task-1_trc-1_rec-1_run-q_recording-1_pet.nii',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isPET('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isPET(path), false)
-      isdone()
    })
-  })
-})
-
-describe('utils.type.file.isPETBlood', function () {
-  const goodFilenames = [
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_recording-1_blood.json',
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_recording-1_blood.tsv',
-    '/sub-03/ses-01/pet/sub-02_ses-40_task-30_recording-manual_blood.json',
-    '/sub-03/ses-01/pet/sub-02_ses-40_recording-manual_blood.tsv',
-    '/sub-03/pet/sub-02_recording-manual_blood.tsv',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isPETBlood('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isPETBlood(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-1/ses-1/pet/sub-1_ses-1_task-1_trc-1_rec-1_run-1_recording-1_blood+json',
-    '/sub-1/ses-1/pet/sub-1ses-1_task-1_trc-1_rec-1_run-1_recording-1_blood.tsv',
-    'sub-1/ses-1/pet/sub-1ses-1_task-1_trc-1_rec-1_run-1_recording-1_blood.tsv',
-    '/sub-1/ses-1/pet/sub-1/ses-1_task-1_trc-1_rec-1_run-q_recording-1_blood.csv',
-    '/sub-1/ses-1/pet/sub-1/ses-1_task-1_trc-1_rec-1_run-q_recording-1_pet.tsv',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isPETBlood('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isPETBlood(path), false)
-      isdone()
-    })
-  })
-})
-
-describe('utils.type.file.isMOTION', function () {
-  const goodFilenames = [
-    '/sub-01/motion/sub-01_task-rest_tracksys-unity_run-01_motion.tsv',
-    '/sub-01/ses-walk/motion/sub-01_ses-walk_task-visual_tracksys-unity_motion.tsv',
-    '/sub-01/ses-walk/motion/sub-01_ses-walk_task-visual_tracksys-unity_motion.json',
-    '/sub-01/ses-walk/motion/sub-01_ses-walk_task-visual_tracksys-unity_channels.tsv',
-    '/sub-01/ses-desktop/motion/sub-01_ses-desktop_task-rest_tracksys-unity_run-01_events.tsv',
-    '/sub-01/ses-desktop/motion/sub-01_ses-desktop_task-rest_events.tsv',
-  ]
-
-  goodFilenames.forEach(function (path) {
-    it("isMOTION('" + path + "') === true", function (isdone) {
-      assert.equal(utils.type.file.isMOTION(path), true)
-      isdone()
-    })
-  })
-
-  const badFilenames = [
-    '/sub-01/motion/sub-01_ses-001_tracksys-unity_task-rest_run-01_motion.json',
-    '/sub-01/ses-001/motion/sub-12_ses-001_task-rest_run-01_motion.tsv',
-    '/sub-01/ses-walk/motion/sub-01_ses-walk_task-visual_channels.tsv',
-    '/sub-01/ses-001/motion/sub-01_ses-001_run-01_motion.tsv',
-    '/sub-01/motion/sub-01_task-walk_run-01_motion.tsv',
-  ]
-
-  badFilenames.forEach(function (path) {
-    it("isMOTION('" + path + "') === false", function (isdone) {
-      assert.equal(utils.type.file.isMOTION(path), false)
-      isdone()
    })
-  })
-})
-
-describe('BIDS.subIDsesIDmismatchtest', function () {
-  it('should return issues if the sub and ses IDs do not match', function () {
-    const files = {
-      0: {
-        name: 'sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        path: 'tests/data/BIDS-examples-1.0.0-rc3u5/ds001/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        relativePath:
-          'ds001/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-      },
-      1: {
-        name: '/sub-22/ses-1/func/sub-23_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        path: 'tests/data/BIDS-examples-1.0.0-rc3u5/ds001/sub-22/ses-1/func/sub-23_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        relativePath:
-          'ds001/sub-22/ses-1/func/sub-23_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-      },
-      2: {
-        name: '/sub-22/ses-1/func/sub-22_ses-2_task-rest_acq-prefrontal_physio.tsv.gz',
-        path: 'tests/data/BIDS-examples-1.0.0-rc3u5/ds001/sub-22/ses-1/func/sub-22_ses-2_task-rest_acq-prefrontal_physio.tsv.gz',
-        relativePath:
-          '/sub-22/ses-1/func/sub-22_ses-2_task-rest_acq-prefrontal_physio.tsv.gz',
-      },
-      3: {
-        name: '/sub-25/ses-2/func/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        path: 'tests/data/BIDS-examples-1.0.0-rc3u5/ds001/sub-25/ses-2/func/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-        relativePath:
-          'ds001//sub-25/ses-2/func/sub-22_ses-1_task-rest_acq-prefrontal_physio.tsv.gz',
-      },
-    }
-    const issues = BIDS.subIDsesIDmismatchtest(files)
-    const code64_seen = issues.some((issue) => issue.code == '64')
-    const code65_seen = issues.some((issue) => issue.code == '65')
-    assert(code64_seen)
-    assert(code65_seen)
-  })
-})
diff --git a/bids-validator/tests/utils/config.spec.js b/bids-validator/tests/utils/config.spec.js
deleted file mode 100644
index 9cdcff70..00000000
--- a/bids-validator/tests/utils/config.spec.js
+++ /dev/null
@@ -1,83 +0,0 @@
-import utils from '../../utils'
-import assert from 'assert'
-
-describe('utils.config', function () {
-  var codes = [1, 3, 4, 7, 21, 33, 34]
-  var conf = {
-    ignore: [3],
-    warn: [
-      4,
-      {
-        and: [7, { or: [33, 21] }],
-      },
-    ],
-    error: [34, 33],
-    ignoredFiles: ['**/**/**/.DS_Store'],
-  }
-
-  describe('ignoredFile', function () {
-    it('should return true if the file is ignored', function () {
-      assert(utils.config.ignoredFile(conf, '/.DS_Store'))
-      assert(utils.config.ignoredFile(conf, 'ds001/.DS_Store'))
-      assert(utils.config.ignoredFile(conf, 'ds001/sub-01/.DS_Store'))
-    })
-
-    it('should return false if the file is not ignored', function () {
-      assert(!utils.config.ignoredFile(conf, '/participants.tsv'))
-      assert(!utils.config.ignoredFile(conf, 'ds001/README'))
-      assert(
-        !utils.config.ignoredFile(conf, 'ds001/sub-16/anat/sub-16_T1w.nii.gz'),
-      )
-    })
-  })
-
-  describe('interpret', function () {
-    it('should return the correct severity mappings', function () {
-      var severityMap = utils.config.interpret(codes, conf)
-      assert.deepEqual(severityMap, {
-        3: 'ignore',
-        4: 'warning',
-        7: 'warning',
-        21: 'warning',
-        33: 'error',
-        34: 'error',
-      })
-    })
-  })
-
-  describe('match', function () {
-    it('should return a list of triggered codes that match the config', function () {
-      assert.deepEqual([3], utils.config.match(codes, conf.ignore))
-      assert.deepEqual([4, 7, 33, 21], utils.config.match(codes, conf.warn))
-      assert.deepEqual([34, 33], utils.config.match(codes, conf.error))
-    })
-  })
-
-  describe('flatten', function () {
-    it('should return a flattened list of codes', function () {
-      assert.deepEqual([3], utils.config.flatten(conf.ignore))
-      assert.deepEqual([4, 7, 33, 21], utils.config.flatten(conf.warn))
-      assert.deepEqual([34, 33], utils.config.flatten(conf.error))
-    })
-  })
-
-  describe('andFulfilled', function () {
-    it("should return true if the 'and' array is fulfilled by the triggered codes", function () {
-      assert(utils.config.andFulfilled(codes, conf.warn[1].and))
-    })
-
-    it("should return false if the 'and' array is not fulfilled", function () {
-      assert(!utils.config.andFulfilled(codes, [1, 4, 7, 21, 22]))
-    })
-  })
-
-  describe('orFulfilled', function () {
-    it("should return true if the 'or' array is fulfilled by the triggered codes", function () {
-      assert(utils.config.orFulfilled(codes, conf.warn[1].and[1].or))
-    })
-
-    it("should return false if the 'or' array is not fulfilled", function () {
-      assert(!utils.config.orFulfilled(codes, [5, 6]))
-    })
-  })
-})
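The nested `and`/`or` entries in `conf.warn` above are what `interpret` resolves: code 7 is only downgraded to a warning when it fires together with 33 or 21, and a code also listed under `conf.error` (33 here) keeps the stronger severity. A rough restatement of that rule as exercised by these tests, not the library's actual implementation:

    const fired = [1, 3, 4, 7, 21, 33, 34]
    const rule = { and: [7, { or: [33, 21] }] }
    const orOk = rule.and[1].or.some((c) => fired.includes(c)) // 33 or 21 fired
    const andOk = fired.includes(rule.and[0]) && orOk // ...and so did 7
    // andOk === true, so 7 and 21 map to 'warning', while 33 stays 'error'
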
diff --git a/bids-validator/tests/utils/files-web.spec.js b/bids-validator/tests/utils/files-web.spec.js
deleted file mode 100644
index f5779f5c..00000000
--- a/bids-validator/tests/utils/files-web.spec.js
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * eslint no-console: ["error", { allow: ["log"] }]
- * @jest-environment jsdom
- */
-
-import assert from 'assert'
-
-import utils from '../../../bids-validator/utils'
-import groupFileTypes from '../../../bids-validator/validators/bids/groupFileTypes'
-import checkReadme from '../../../bids-validator/validators/bids/checkReadme.js'
-import checkDatasetDescription from '../../../bids-validator/validators/bids/checkDatasetDescription.js'
-import validateMisc from '../../../bids-validator/utils/files/validateMisc'
-import { createFileList } from '../env/FileList'
-
-describe('files in browser', () => {
-  describe('files utils in nodejs', () => {
-    describe('FileAPI', () => {
-      it('should not return a mock implementation', () => {
-        let File = utils.files.FileAPI()
-        assert(File.name !== 'NodeFile')
-      })
-    })
-  })
-
-  describe('files utils in browsers', () => {
-    describe('newFile', () => {
-      it('creates a new File API object', () => {
-        const test_file = utils.files.newFile('test-file')
-        assert.equal(test_file.name, 'test-file')
-        assert(File.prototype.isPrototypeOf(test_file))
-      })
-    })
-  })
-
-  describe('dataset_description.json', () => {
-    it('throws warning if it does not exist in proper location', () => {
-      const fileList = {}
-      const issues = checkDatasetDescription(fileList)
-      assert(issues[0].key === 'DATASET_DESCRIPTION_JSON_MISSING')
-    })
-  })
-
-  describe('README', () => {
-    it('throws warning if it does not exist in proper location', () => {
-      const fileList = {
-        1: {
-          name: 'README',
-          path: 'tests/data/bids-examples/ds001/not-root-dir/README',
-          relativePath: '/not-root-dir/README',
-        },
-      }
-      const issues = checkReadme(fileList)
-      assert(issues[0].key === 'README_FILE_MISSING')
-    })
-
-    it('throws warning if it is too small', () => {
-      const fileList = {
-        1: {
-          name: 'README',
-          path: 'tests/data/bids-examples/ds001/README',
-          relativePath: '/README',
-          size: 20,
-        },
-      }
-      const issues = checkReadme(fileList)
-      assert(issues[0].key === 'README_FILE_SMALL')
-    })
-  })
-
-  describe('validateMisc', () => {
-    let filelist = [],
-      dir
-
-    beforeAll(() => {
-      // contains stripped down CTF format dataset: Both, BadChannels and
-      // bad.segments files can be empty and still valid. Everything else must
-      // not be empty.
-      dir = `${process.cwd()}/bids-validator/tests/data/empty_files`
-    })
-
-    // generate an array of browser Files
-    beforeEach(() => {
-      filelist = createFileList(dir)
-    })
-
-    it('returns issues for empty files (0kb), accepting a limited set of exceptions', (done) => {
-      const files = groupFileTypes(filelist, {})
-
-      validateMisc(files.misc).then((issues) => {
-        // *.meg4 and BadChannels files are empty. But only *.meg4 is an issue
-        assert.ok(issues.length == 1)
-        assert.ok(issues.every((issue) => issue instanceof utils.issues.Issue))
-        assert.notStrictEqual(
-          issues.findIndex((issue) => issue.code === 99),
-          -1,
-        )
-        assert.ok(issues[0].file.name == 'sub-0001_task-AEF_run-01_meg.meg4')
-        done()
-      })
-    })
-  })
-})
diff --git a/bids-validator/tests/utils/files.spec.js b/bids-validator/tests/utils/files.spec.js
deleted file mode 100644
index 20031936..00000000
--- a/bids-validator/tests/utils/files.spec.js
+++ /dev/null
@@ -1,135 +0,0 @@
-import assert from 'assert'
-import utils from '../../utils'
-import groupFileTypes from '../../validators/bids/groupFileTypes'
-import checkReadme from '../../validators/bids/checkReadme.js'
-import checkDatasetDescription from '../../validators/bids/checkDatasetDescription.js'
-import validateMisc from '../../utils/files/validateMisc'
-
-const setupMocks = () => {
-  // Mock version of the File API for tests
-  global.File = function MockFile(data, fileName, options) {
-    assert(data.hasOwnProperty('length'))
-    assert.equal(typeof data[0], 'string')
-    this._data = data
-    this._options = options
-    this.name = fileName
-  }
-}
-const cleanupMocks = () => {
-  delete global.File
-}
-
-describe('files utils in nodejs', () => {
-  describe('FileAPI', () => {
-    it('should return a mock implementation', () => {
-      let File = utils.files.FileAPI()
-      assert(typeof File !== 'undefined')
-      assert(File.name === 'NodeFile')
-    })
-  })
-  describe('newFile', () => {
-    it('creates a new File API object', () => {
-      let file = utils.files.newFile('test-file')
-      assert.equal(file.name, 'test-file')
-    })
-  })
-})
-
-describe('files utils in browsers', () => {
-  beforeAll(setupMocks)
-  afterAll(cleanupMocks)
-  describe('newFile', () => {
-    it('creates a new File API object', () => {
-      const test_file = utils.files.newFile('test-file')
-      assert(File.prototype.isPrototypeOf(test_file))
-    })
-  })
-})
-
-describe('dataset_description.json', () => {
-  it('throws warning if it does not exist in proper location', () => {
-    const jsonFileContents = {}
-    const issues = checkDatasetDescription(jsonFileContents)
-    assert(issues[0].key === 'DATASET_DESCRIPTION_JSON_MISSING')
-  })
-  it('throws a warning if the Authors field of the dataset description has a single entry and less than two commas', () => {
-    const jsonFileContents = {
-      '/dataset_description.json': {
-        Authors: ['An, Author'],
-      },
-    }
-    const issues = checkDatasetDescription(jsonFileContents)
-    assert(issues[0].key === 'TOO_FEW_AUTHORS')
-  })
-  it('throws an error if the Authors field of the dataset description has a single field and multiple commas', () => {
-    const jsonFileContents = {
-      '/dataset_description.json': {
-        Authors: [
-          'Too many, Commas, Indicate, That the user, May not have, Separated authors, Into an array',
-        ],
-      },
-    }
-    const issues = checkDatasetDescription(jsonFileContents)
-    assert(issues[0].key === 'MULTIPLE_COMMAS_IN_AUTHOR_FIELD')
-  })
-})
-
-describe('README', () => {
-  it('throws warning if it does not exist in proper location', () => {
-    const fileList = {
-      1: {
-        name: 'README',
-        path: 'tests/data/bids-examples/ds001/not-root-dir/README',
-        relativePath: '/not-root-dir/README',
-      },
-    }
-    const issues = checkReadme(fileList)
-    assert(issues[0].key === 'README_FILE_MISSING')
-  })
-
-  it('throws warning if it is too small', () => {
-    const fileList = {
-      1: {
-        name: 'README',
-        path: 'tests/data/bids-examples/ds001/README',
-        relativePath: '/README',
-        stats: { size: 20 },
-      },
-    }
-    const issues = checkReadme(fileList)
-    assert(issues[0].key === 'README_FILE_SMALL')
-  })
-})
-
-describe('validateMisc', () => {
-  let filelist, dir
-
-  beforeAll(() => {
-    // contains stripped down CTF format dataset: Both, BadChannels and
-    // bad.segments files can be empty and still valid. Everything else must
-    // not be empty.
-    dir = `${process.cwd()}/bids-validator/tests/data/empty_files`
-  })
-
-  beforeEach(() => {
-    return utils.files.readDir(dir).then((files) => {
-      filelist = files
-    })
-  })
-  it('returns issues for empty files (0kb), accepting a limited set of exceptions', (done) => {
-    utils.collectSummary(filelist, {})
-    const files = groupFileTypes(filelist, {})
-
-    validateMisc(files.misc).then((issues) => {
-      // *.meg4 and BadChannels files are empty. But only *.meg4 is an issue
-      assert.ok(issues.length == 1)
-      assert.ok(issues.every((issue) => issue instanceof utils.issues.Issue))
-      assert.notStrictEqual(
-        issues.findIndex((issue) => issue.code === 99),
-        -1,
-      )
-      assert.ok(issues[0].file.name == 'sub-0001_task-AEF_run-01_meg.meg4')
-      done()
-    })
-  })
-})
diff --git a/bids-validator/tests/utils/issues.spec.js b/bids-validator/tests/utils/issues.spec.js
deleted file mode 100644
index 7b11c3f4..00000000
--- a/bids-validator/tests/utils/issues.spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import assert from 'assert'
-import utils from '../../utils'
-
-describe('issues', () => {
-  describe('exceptionHandler', () => {
-    let testErr, issueList, summary, options, formattedIssues
-
-    beforeAll(() => {
-      testErr = new Error('oh no')
-      issueList = []
-      summary = {
-        sessions: [],
-        subjects: [],
-        tasks: [],
-        modalities: [],
-        totalFiles: 0,
-        size: 0,
-      }
-      options = {
-        ignoreWarnings: false,
-        ignoreNiftiHeaders: false,
-        verbose: false,
-        config: {},
-      }
-      formattedIssues = utils.issues.exceptionHandler(
-        testErr,
-        issueList,
-        summary,
-        options,
-      )
-    })
-
-    it('adds INTERNAL ERROR to the issues.errors list', () => {
-      assert.equal(formattedIssues.errors[0].key, 'INTERNAL ERROR')
-    })
-
-    it("creates a properly formatted issue in the error's files property", () => {
-      const exceptionIssue = formattedIssues.errors[0].files[0]
-      assert.ok(utils.issues.isAnIssue(exceptionIssue))
-    })
-
-    it('gives a reason for the error', () => {
-      const exceptionIssue = formattedIssues.errors[0].files[0]
-      assert.equal(
-        exceptionIssue.reason,
-        `${testErr.message}; please help the BIDS team and community by opening an issue at (https://github.com/bids-standard/bids-validator/issues) with the evidence here.`,
-      )
-    })
-  })
-
-  describe('exception/issue redirect', () => {
-    let promise, innerPromise, validIssue, invalidIssue
-    beforeAll(() => {
-      promise = null
-      validIssue = new utils.issues.Issue({
-        code: 12,
-        file: 'goodstuff.json',
-        reason: 'a series of unfortunate events',
-      })
-      invalidIssue = new Error('oops')
-
-      promise = () => {
-        return new Promise((resolve, reject) => {
-          innerPromise().catch((err) =>
-            utils.issues.redirect(err, reject, () => {
-              resolve()
-            }),
-          )
-        })
-      }
-    })
-
-    it('resolves with valid issue', (done) => {
-      innerPromise = () =>
-        new Promise((_, reject) => {
-          reject(validIssue)
-        })
-
-      promise().then(() => done())
-    })
-
-    it('rejects exceptions', (done) => {
-      innerPromise = () =>
-        new Promise((_, reject) => {
-          reject(invalidIssue)
-        })
-
-      promise().catch(() => done())
-    })
-
-    it('passes the exception through the error', (done) => {
-      innerPromise = () =>
-        new Promise((_, reject) => {
-          reject(invalidIssue)
-        })
-
-      promise().catch((err) => {
-        assert.deepEqual(err, invalidIssue)
-        done()
-      })
-    })
-  })
-})
diff --git a/bids-validator/utils/__tests__/bids_files.spec.js b/bids-validator/utils/__tests__/bids_files.spec.js
deleted file mode 100644
index 91507be3..00000000
--- a/bids-validator/utils/__tests__/bids_files.spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { assert } from 'chai'
-import { checkSidecarForDatafiles } from '../bids_files.js'
-
-describe('bids_files', () => {
-  describe('checkSidecarForDatafiles()', () => {
-    it('matches .tsv datafile to sidecar', () => {
-      const file = {
-        relativePath:
-          'ds001/sub-02/func/sub-02_task-balloonanalogrisktask_run-01_events.json',
-      }
-      const fileList = {
-        1: {
-          name: 'sub-02_task-balloonanalogrisktask_run-01_events.tsv',
-          relativePath:
-            'ds001/sub-02/func/sub-02_task-balloonanalogrisktask_run-01_events.tsv',
-        },
-      }
-      const match = checkSidecarForDatafiles(file, fileList)
-      assert.isTrue(match)
-    })
-
-    it('does not match invalid datafile formats', () => {
-      const file = {
-        relativePath:
-          'ds001/sub-02/func/sub-02_task-balloonanalogrisktask_run-01_events.json',
-      }
-      const fileList = {
-        1: {
-          name: 'sub-02_task-balloonanalogrisktask_run-01_events.tsv',
-          relativePath:
-            'ds001/sub-02/func/sub-02_task-balloonanalogrisktask_run-01_events.tsn',
-        },
-      }
-      const match = checkSidecarForDatafiles(file, fileList)
-      assert.isFalse(match)
-    })
-  })
-})
diff --git a/bids-validator/utils/__tests__/collectSubjectMetadata.spec.js b/bids-validator/utils/__tests__/collectSubjectMetadata.spec.js
deleted file mode 100644
index a5863ce4..00000000
--- a/bids-validator/utils/__tests__/collectSubjectMetadata.spec.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import { assert } from 'chai'
-import collectSubjectMetadata from '../summary/collectSubjectMetadata'
-
-describe('collectSubjectMetadata', () => {
-  it('extracts tsv string to subjectMetadata object', () => {
-    const tsvFile = `participant_id age sex
-sub-01 34 F
-sub-02 38 M
-`
-    const subjectMetadata = collectSubjectMetadata(tsvFile)
-    assert.lengthOf(subjectMetadata, 2)
-    assert.deepEqual(subjectMetadata[0], {
-      participantId: '01',
-      age: 34,
-      sex: 'F',
-    })
-  })
-  it('extracts tsv string to subjectMetadata object', () => {
-    const tsvFile = ``
-    const subjectMetadata = collectSubjectMetadata(tsvFile)
-    assert.equal(subjectMetadata, undefined)
-  })
-})
non-BIDS input', async () => { - async function* badData() { - yield '0001' - yield 'nope' - yield 'not-bids' - yield 'data' - } - const res = await validateFilenames(badData()) - expect(res).toBe(false) - }) - it('passes validation with a simple dataset', async () => { - async function* goodData() { - yield '0001' - yield 'CHANGES' - yield 'dataset_description.json' - yield 'participants.tsv' - yield 'README' - yield 'sub-01/anat/sub-01_T1w.nii.gz' - yield 'T1w.json' - } - const res = await validateFilenames(goodData()) - expect(res).toBe(true) - }) - it('passes validation with .bidsignore', async () => { - async function* goodData() { - yield 'sub-02/*' - yield '0001' - yield 'CHANGES' - yield 'dataset_description.json' - yield 'participants.tsv' - yield 'README' - yield 'sub-01/anat/sub-01_T1w.nii.gz' - yield 'T1w.json' - yield 'sub-02/not-bids-file.txt' - } - const res = await validateFilenames(goodData()) - expect(res).toBe(true) - }) -}) diff --git a/bids-validator/utils/__tests__/gitTreeMode.spec.js b/bids-validator/utils/__tests__/gitTreeMode.spec.js deleted file mode 100644 index a16d2e70..00000000 --- a/bids-validator/utils/__tests__/gitTreeMode.spec.js +++ /dev/null @@ -1,128 +0,0 @@ -import { assert } from 'chai' -import { - readLsTreeLines, - readCatFileLines, - processFiles, -} from '../files/readDir' -import ignore from 'ignore' - -describe('gitTreeMode functions', () => { - describe('readLsTreeLines', () => { - it('will handle regular files', () => { - const lsTreeLines = [ - '100644 blob longkeystring 1000000\tfile/path', - '100644 blob anotherlongkeystring 1\tpath/to/file', - ] - - const output = readLsTreeLines(lsTreeLines) - assert.hasAllKeys(output, ['files', 'symlinkFilenames', 'symlinkObjects']) - assert.isEmpty(output.symlinkFilenames) - assert.isEmpty(output.symlinkObjects) - assert.equal(output.files[0].path, 'file/path') - assert.equal(output.files[0].size, 1000000) - }) - - it('will handle symlinked files', () => { - const lsTreeLines = [ - '120000 blob e886cd8566b5e97db1fc41bb9364fc22cbe81426 199\tsymlink/filepath', - '120000 blob e2cd091677489a0377d9062347c32d3efebf4322 199\they/jude/dont/be/afraid', - ] - const expected = { - files: [], - symlinkFilenames: ['symlink/filepath', 'hey/jude/dont/be/afraid'], - symlinkObjects: [ - 'e886cd8566b5e97db1fc41bb9364fc22cbe81426', - 'e2cd091677489a0377d9062347c32d3efebf4322', - ], - } - assert.deepEqual(readLsTreeLines(lsTreeLines), expected) - }) - }) - - describe('readCatFileLines', () => { - it('creates file objects from git cat-file output', () => { - const catFileOutput = [ - 'hash blob 140', - '.git/annex/objects/Mv/99/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json', - 'otherhash blob 140', - '.git/annex/objects/QV/mW/SHA256E-s99--bbef536348750373727d3b5856398d7377e5d7e23875eed026b83d12cee6f885.json/SHA256E-s99--bbef536348750373727d3b5856398d7377e5d7e23875eed026b83d12cee6f885.json', - ] - const symlinkFilenames = ['path/to/file/a', 'path/to/file/b'] - const output = readCatFileLines(catFileOutput, symlinkFilenames) - assert.equal(output[0].path, symlinkFilenames[0]) - assert.equal(output[0].size, 54) - }) - }) - - describe('processFiles', () => { - const ig = ignore().add('.*').add('/derivatives') - it('aggregates, filters, and augments the files given to it', () => { - const filesA = [ - { - path: '.DS_Store', - size: 1000000, - }, - { - path: 'path/to/a', - size: 100, - }, - { - path: 
'path/to/b', - size: 99, - }, - ] - const filesB = [ - { - path: 'path/to/c', - size: 98, - }, - { - path: 'path/to/d', - size: 1, - }, - { - path: 'derivatives/to/derivative_file', - size: 1, - }, - ] - /* Not currently in use. - const expected = [ - { - path: '/path/to/dataset/path/to/a', - size: 100, - relativePath: '/path/to/a', - name: 'a', - }, - { - path: '/path/to/dataset/path/to/b', - size: 99, - relativePath: '/path/to/b', - name: 'b', - }, - { - path: '/path/to/dataset/path/to/c', - size: 98, - relativePath: '/path/to/c', - name: 'c', - }, - { - path: '/path/to/dataset/path/to/d', - size: 1, - relativePath: '/path/to/d', - name: 'd', - }, - ] - */ - const output = processFiles('/path/to/dataset', ig, filesA, filesB) - const fileNames = output.map((file) => file.name) - assert(!fileNames.includes('.DS_Store'), 'filters out ignored files') - assert( - !fileNames.includes('derivative_file'), - 'filters out ignored directories', - ) - assert.deepEqual(fileNames, ['a', 'b', 'c', 'd'], 'aggregates files') - assert.isString(output[0].relativePath, 'adds relativePath to files') - assert.isString(output[1].name, 'adds name to files') - }) - }) -}) diff --git a/bids-validator/utils/__tests__/type.spec.js b/bids-validator/utils/__tests__/type.spec.js deleted file mode 100644 index 1de4402e..00000000 --- a/bids-validator/utils/__tests__/type.spec.js +++ /dev/null @@ -1,38 +0,0 @@ -import type from '../type.js' - -describe('type.js', () => { - describe('isBids()', () => { - it('does not throw an error for valid defacemask filenames', () => { - expect( - type.isBIDS( - '/sub-rid000043/anat/sub-rid000043_run-02_mod-T1w_defacemask.nii.gz', - ), - ).toBe(true) - }) - - it('does not throw an error for recording entity in physio data', () => { - expect( - type.isBIDS( - '/sub-05/eeg/sub-05_task-matchingpennies_recording-eyetracking_physio.tsv.gz', - ), - ).toBe(true) - }) - - it('does not throw an error for recording entity in physio data at root of the dataset', () => { - expect( - type.isBIDS('/task-matchingpennies_recording-eyetracking_physio.json'), - ).toBe(true) - }) - - const physio_task_modalities = ['eeg', 'ieeg', 'meg', 'func', 'beh'] - physio_task_modalities.map((mod) => { - it(`does not throw an error for recording entity in ${mod} physio data`, () => { - expect( - type.isBIDS( - `/sub-05/${mod}/sub-05_task-matchingpennies_recording-eyetracking_physio.tsv.gz`, - ), - ).toBe(true) - }) - }) - }) -}) diff --git a/bids-validator/utils/__tests__/unit.spec.js b/bids-validator/utils/__tests__/unit.spec.js deleted file mode 100644 index 10716769..00000000 --- a/bids-validator/utils/__tests__/unit.spec.js +++ /dev/null @@ -1,92 +0,0 @@ -import unit from '../unit' - -const { prefixes, roots } = unit -const validRoot = roots[0] - -describe('unit validator', () => { - it('handles simple units', () => { - roots.forEach((validRoot) => { - const goodOutput = unit.validate(validRoot) - expect(goodOutput.isValid).toBe(true) - }) - const invalidRoots = [ - 'definitielynotavalidroot', - `%/${validRoot}`, - `n/a*${validRoot}`, - ] - invalidRoots.forEach((invalidRoot) => { - const badOutput = unit.validate(invalidRoot) - expect(badOutput.isValid).toBe(false) - }) - }) - - it('handles simple units with prefixes', () => { - prefixes.forEach((validPrefix) => { - const goodOutput = unit.validate(validPrefix + validRoot) - expect(goodOutput.isValid).toBe(true) - }) - const badOutput = unit.validate('badprefix' + validRoot) - expect(badOutput.isValid).toBe(false) - })
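
// The exponent grammar exercised below accepts caret notation with integer
// exponents ('^2', '^543', '^-2') and Unicode superscript digits ('¹²³',
// '⁻³'), including the superscriptNumbers exported by the unit module itself.
const validExponents = [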
'^2', - '^543', - '¹²³', - ...unit.superscriptNumbers.slice(0, 3), - '^-2', - '⁻³', - ] - it('handles simple units with exponents', () => { - validExponents.forEach((exp) => { - const goodOutput = unit.validate(validRoot + exp) - expect(goodOutput.isValid).toBe(true) - }) - const invalidExponents = ['^^12', '142', '1', '0', '^.1', '^2.1'] - invalidExponents.forEach((exp) => { - const badOutput = unit.validate(validRoot + exp) - expect(badOutput.isValid).toBe(false) - }) - validExponents.slice(0, 3).forEach((exp) => { - const badOutput = unit.validate(exp) - expect(badOutput.isValid).toBe(false) - }) - }) - - it('handles derived units', () => { - const validUnits = ['T/m', 'N*m', 'm^2/s^2', 'mm/ms', 'kT³*nm²', 'm²/s²'] - validUnits.forEach((derivedUnit) => { - const goodOutput = unit.validate(derivedUnit) - expect(goodOutput.isValid).toBe(true) - }) - const invalidUnits = [ - `/${validRoot}`, - `*${validRoot}`, - `${validRoot}/`, - `${validRoot}*`, - `${validRoot}//${validRoot}`, - `${validRoot}///${validRoot}`, - `${validRoot}**${validRoot}`, - `${validRoot}***${validRoot}`, - `${roots.slice(0, 3).join('')}`, - ...validExponents.map((exp) => `${exp}${validRoot}`), - ] - invalidUnits.forEach((derivedUnit) => { - const badOutput = unit.validate(derivedUnit) - expect(badOutput.isValid).toBe(false) - }) - }) - - describe('edge cases', () => { - it('handles unavailable units', () => { - const unavailableUnit = 'n/a' - const goodOutput = unit.validate(unavailableUnit) - expect(goodOutput.isValid).toBe(true) - }) - it('handles percentages', () => { - const unavailableUnit = '%' - const goodOutput = unit.validate(unavailableUnit) - expect(goodOutput.isValid).toBe(true) - }) - }) -}) diff --git a/bids-validator/utils/array.js b/bids-validator/utils/array.js deleted file mode 100644 index c9ee84ac..00000000 --- a/bids-validator/utils/array.js +++ /dev/null @@ -1,63 +0,0 @@ -var array = { - /** - * Equals - * - * Takes two arrays and returns true if they're - * equal. Takes a third optional boolean argument - * to sort arrays before checking equality. - */ - equals: function (array1, array2, sort) { - // if the other array is a falsy value, return - if (!array1 || !array2) { - return false - } - - // compare lengths - if (array1.length != array2.length) { - return false - } - - // optionally sort arrays - if (sort) { - array1.sort() - array2.sort() - } - - for (var i = 0, l = array1.length; i < l; i++) { - // Check if we have nested arrays - if (array1[i] instanceof Array && array2[i] instanceof Array) { - // recurse into the nested arrays - if (!array.equals(array1[i], array2[i], sort)) { - return false - } - } else if (array1[i] != array2[i]) { - // Warning - two different object instances will never be equal: {x:20} != {x:20} - return false - } - } - return true - }, - - /** - * Takes two arrays and returns an array of two - * arrays containing the differences between - * the two inputs. - */ - diff: function (array1, array2) { - var diff1 = [], - diff2 = [] - for (var i = 0; i < array1.length; i++) { - var elem1 = array1[i] - var index = array2.indexOf(elem1) - if (index > -1) { - array2.splice(index, 1) - } else { - diff1.push(elem1) - } - } - diff2 = array2 - return [diff1, diff2] - }, -} - -export default array diff --git a/bids-validator/utils/bids_files.js b/bids-validator/utils/bids_files.js deleted file mode 100644 index 54fd90f4..00000000 --- a/bids-validator/utils/bids_files.js +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Utility functions for checking bids file structures - * eg. 
corresponding files - */ -// dependencies ------------------------------------------------------------------- -import type from './type.js' - -// implementations ---------------------------------------------------------------- -/** - * Verify that JSON sidecars have corresponding data files - */ -function checkSidecarForDatafiles(file, fileList) { - const noExt = file.relativePath.replace('.json', '') - const dictName = noExt.substring(noExt.lastIndexOf('/') + 1, noExt.length) - const args = { - dictArgs: dictName.split('_'), - dictName: dictName, - dictPath: noExt.substring(0, noExt.lastIndexOf('/') + 1), - file: file, - fileList: fileList, - noExt: noExt, - } - const idxs = Object.keys(fileList) - // Check each file in fileList for potential match - return true on first match - let dataFile = idxs.some(checkFileListForMatch, args) - return dataFile -} - -/** - * Check file list for first valid match for sidecar file - */ -function checkFileListForMatch(i) { - this.path = this.fileList[i].relativePath - let match = false - // Only check file if path includes the path to sidecar - const dictArgs = this.path.includes(this.dictPath) ? this.dictArgs : [] - // Set true if dictArgs exist and all dictArgs appear in the file path (except 'coordsystem') - let pathMatch = - dictArgs.length > 0 - ? dictArgs.every( - (arg) => arg === 'coordsystem' || this.path.includes(arg), - ) - : false - if (pathMatch) { - match = verifyDatafileMatch( - this.file.relativePath, - this.noExt, - this.fileList[i], - ) - } - return match -} - -/** - * Accepts the path to a sidecar file, the sidecar filename without extension, - * and a datafile that is a potential match. - * Returns a boolean indicating whether the file evaluates as a valid datafile. - */ -function verifyDatafileMatch(sidecarPath, noExt, matchFile) { - let match = false - let folderMatch = false - - // Make sure it's not the data dictionary itself - const isSelf = matchFile.relativePath === sidecarPath - if (!isSelf && type.file.isDatafile(matchFile.relativePath)) { - match = true - } - - // IEEG and MEG datafiles may be folders and therefore not contained in fileList, so check the paths instead - if (!isSelf && !match) { - if (noExt.endsWith('_meg') || noExt.endsWith('_coordsystem')) { - folderMatch = matchFile.relativePath.includes('_meg.ds') - } - if (noExt.endsWith('_ieeg') || noExt.endsWith('_coordsystem')) { - folderMatch = matchFile.relativePath.includes('_ieeg.mefd') - } - } - if (folderMatch) { - match = true - } - - return match -} - -export { checkSidecarForDatafiles } - -export default { - checkSidecarForDatafiles, -}
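
// A minimal usage sketch (added illustration, mirroring the passing case in
// bids_files.spec.js above); the dataset paths are hypothetical:
import { checkSidecarForDatafiles } from './bids_files.js'

const sidecar = {
  relativePath: 'ds001/sub-01/func/sub-01_task-rest_run-01_events.json',
}
const fileList = {
  0: {
    name: 'sub-01_task-rest_run-01_events.tsv',
    relativePath: 'ds001/sub-01/func/sub-01_task-rest_run-01_events.tsv',
  },
}
checkSidecarForDatafiles(sidecar, fileList) // => true

diff --git a/bids-validator/utils/common.js b/bids-validator/utils/common.js deleted file mode 100644 index a63f56e6..00000000 --- a/bids-validator/utils/common.js +++ /dev/null @@ -1,18 +0,0 @@ -// An index of rules documents to export as modules -// The Python module expects these to be within its tree, but we can just import them from there -import associated_data_rules from '../bids_validator/bids_validator/rules/associated_data_rules.json' - -import file_level_rules from '../bids_validator/bids_validator/rules/file_level_rules.json' -import phenotypic_rules from '../bids_validator/bids_validator/rules/phenotypic_rules.json' -import session_level_rules from '../bids_validator/bids_validator/rules/session_level_rules.json' -import subject_level_rules from '../bids_validator/bids_validator/rules/subject_level_rules.json' -import top_level_rules from '../bids_validator/bids_validator/rules/top_level_rules.json' - -export default { - associated_data_rules: 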
associated_data_rules, - file_level_rules: file_level_rules, - phenotypic_rules: phenotypic_rules, - session_level_rules: session_level_rules, - subject_level_rules: subject_level_rules, - top_level_rules: top_level_rules, -} diff --git a/bids-validator/utils/config.js b/bids-validator/utils/config.js deleted file mode 100644 index 2b9f8f18..00000000 --- a/bids-validator/utils/config.js +++ /dev/null @@ -1,150 +0,0 @@ -import minimatch from 'minimatch' - -var config = { - /** - * Ignored File - */ - ignoredFile: function (conf, filePath) { - if (conf.ignoredFiles) { - for (var i = 0; i < conf.ignoredFiles.length; i++) { - var ignoredPattern = conf.ignoredFiles[i] - if (minimatch(filePath, ignoredPattern)) { - return true - } - } - } - return false - }, - - /** - * Interpret Config - * - * Takes a list of triggered codes and a config object - * and creates a map of modified severities - */ - interpret: function (codes, conf) { - var severityMap = {} - - if (conf.ignore && conf.ignore.length > 0) { - var ignoreCodes = this.match(codes, conf.ignore) - for (var i = 0; i < ignoreCodes.length; i++) { - var ignoreCode = ignoreCodes[i] - severityMap[ignoreCode] = 'ignore' - } - } - - if (conf.warn && conf.warn.length > 0) { - var warnCodes = this.match(codes, conf.warn) - for (var j = 0; j < warnCodes.length; j++) { - var warnCode = warnCodes[j] - severityMap[warnCode] = 'warning' - } - } - - if (conf.error && conf.error.length > 0) { - var errorCodes = this.match(codes, conf.error) - for (var k = 0; k < errorCodes.length; k++) { - var errorCode = errorCodes[k] - severityMap[errorCode] = 'error' - } - } - - return severityMap - }, - - /** - * Match - * - * Takes a list of triggered codes and a config - * object and returns the matched codes. - */ - match: function (codes, conf) { - var matches = [] - for (var i = 0; i < conf.length; i++) { - var confCode = conf[i] - if (codes.indexOf(confCode) > -1) { - matches.push(confCode) - } else if ( - confCode.hasOwnProperty('and') && - this.andFulfilled(codes, confCode.and) - ) { - // 'and' array fulfilled - matches = matches.concat(this.flatten(confCode.and)) - } - } - return matches - }, - - /** - * Flatten - * - * Takes an array that may contain objects with - * 'and' or 'or' properties and flattens it. - */ - flatten: function (list) { - var codes = [] - for (var i = 0; i < list.length; i++) { - var code = list[i] - if (code.hasOwnProperty('and')) { - codes = codes.concat(this.flatten(code.and)) - } else if (code.hasOwnProperty('or')) { - codes = codes.concat(this.flatten(code.or)) - } else { - codes.push(code) - } - } - return codes - }, - - /** - * And Fulfilled - * - * Takes an array of triggered codes and an 'and' - * array, recursively checks if it's fulfilled - * and returns true if it is. - */ - andFulfilled: function (codes, and) { - for (var i = 0; i < and.length; i++) { - var andCode = and[i] - if (andCode.hasOwnProperty('and')) { - if (!this.andFulfilled(codes, andCode.and)) { - return false - } - } else if (andCode.hasOwnProperty('or')) { - if (!this.orFulfilled(codes, andCode.or)) { - return false - } - } else if (codes.indexOf(andCode) < 0) { - return false - } - } - return true - }, - - /** - * Or Fulfilled - * - * Takes an array of triggered codes and an 'or' - * array, recursively checks if it's fulfilled - * and returns true if it is. 
- */ - orFulfilled: function (codes, or) { - for (var i = 0; i < or.length; i++) { - var orCode = or[i] - if (orCode.hasOwnProperty('and')) { - if (this.andFulfilled(codes, orCode.and)) { - return true - } - } else if (orCode.hasOwnProperty('or')) { - if (this.orFulfilled(codes, orCode.or)) { - return true - } - } else if (codes.indexOf(orCode) > -1) { - return true - } - } - return false - }, -} - -export default config diff --git a/bids-validator/utils/consoleFormat.js b/bids-validator/utils/consoleFormat.js deleted file mode 100644 index cccd9e23..00000000 --- a/bids-validator/utils/consoleFormat.js +++ /dev/null @@ -1,179 +0,0 @@ -import colors from 'colors/safe' -import { table, getBorderCharacters } from 'table' -import pluralize from 'pluralize' -import bytes from 'bytes' - -export default { - issues: formatIssues, - summary: formatSummary, - logIssues, - unexpectedError, -} - -function unexpectedError(message) { - return colors.red(message) -} - -function formatIssues(issues, options = {}) { - var errors = issues.errors - var warnings = issues.warnings - var output = [] - if (errors && errors.length === 1 && errors[0].code === '61') { - output.push( - colors.red( - '[ERR] The given directory failed an initial Quick Test. This means the basic names and structure of the files and directories do not comply with BIDS specification. For more info go to https://bids.neuroimaging.io/', - ), - ) - } else if (issues.config && issues.config.length >= 1) { - output.push(colors.red('[ERR] Invalid Config File')) - for (var i = 0; i < issues.config.length; i++) { - var issue = issues.config[i] - issue.file.file = { relativePath: issue.file.path } - issue.files = [issue.file] - } - output = output.concat(logIssues(issues.config, 'red', options)) - } else if (errors.length >= 1 || warnings.length >= 1) { - output = output.concat(logIssues(errors, 'red', options)) - output = output.concat(logIssues(warnings, 'yellow', options)) - } else { - output.push(colors.green('This dataset appears to be BIDS compatible.')) - } - return output.join('\n') -} - -function logIssues(issues, color, options) { - const severity = color == 'red' ? 'ERR' : 'WARN' - const output = [] - for (var i = 0; i < issues.length; i++) { - const issue = issues[i] - const issueNumber = i + 1 - output.push( - '\t' + - colors[color]( - issueNumber + - ': ' + - `[${severity}] ` + - issue.reason + - ' (code: ' + - issue.code + - ' - ' + - issue.key + - ')', - ), - ) - for (var j = 0; j < issue.files.length; j++) { - var file = issues[i].files[j] - if (!file || !file.file) { - continue - } - let indent = '\t\t' - if (file.file.relativePath) { - output.push(`${indent}.` + file.file.relativePath) - indent = '\t\t\t' - } - if (options.verbose) { - output.push(indent + file.reason) - } - if (file.line) { - var msg = `${indent}@ line: ` + file.line - if (file.character) { - msg += ' character: ' + file.character - } - output.push(msg) - } - if (file.evidence) { - output.push(`${indent}Evidence: ` + file.evidence) - } - } - if (issue.additionalFileCount > 0) { - output.push( - '\t\t' + - colors[color]( - '... 
and ' + - issue.additionalFileCount + - ' more files having this issue (Use --verbose to see them all).', - ), - ) - } - output.push('') - if (issue.helpUrl) { - output.push( - colors.cyan( - '\t' + - 'Please visit ' + - issue.helpUrl + - ' for existing conversations about this issue.', - ), - ) - output.push('') - } - } - return output -} - -function formatSummary(summary) { - const output = [] - if (summary) { - var numSessions = summary.sessions.length > 0 ? summary.sessions.length : 1 - - // data - var column1 = [ - summary.totalFiles + - ' ' + - pluralize('File', summary.totalFiles) + - ', ' + - bytes(summary.size), - summary.subjects.length + - ' - ' + - pluralize('Subject', summary.subjects.length), - numSessions + ' - ' + pluralize('Session', numSessions), - ], - column2 = summary.tasks, - column3 = summary.modalities - - var longestColumn = Math.max(column1.length, column2.length, column3.length) - var pad = ' ' - - // headers - var headers = [ - pad, - colors.blue.underline('Summary:') + pad, - colors.blue.underline('Available Tasks:') + pad, - colors.blue.underline('Available Modalities:'), - ] - - // rows - var rows = [headers] - for (var i = 0; i < longestColumn; i++) { - var val1, val2, val3 - val1 = column1[i] ? column1[i] + pad : '' - val2 = column2[i] ? column2[i] + pad : '' - val3 = column3[i] ? column3[i] : '' - rows.push([pad, val1, val2, val3]) - } - output.push( - table(rows, { - border: getBorderCharacters(`void`), - columnDefault: { - paddingLeft: 0, - paddingRight: 1, - }, - drawHorizontalLine: () => { - return false - }, - }), - ) - - output.push('') - - //Neurostars message - output.push( - colors.cyan( - '\tIf you have any questions, please post on https://neurostars.org/tags/bids.', - ), - ) - - output.push('') - } - return output.join('\n') -} diff --git a/bids-validator/utils/filenamesOnly.js b/bids-validator/utils/filenamesOnly.js deleted file mode 100644 index 7b096e8b..00000000 --- a/bids-validator/utils/filenamesOnly.js +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Run validation against a list of input files from git pre-receive - */ -import readline from 'readline' -import path from 'path' -import { defaultIgnore } from './files/readDir.js' -import quickTest from '../validators/bids/quickTest.js' -import groupFileTypes from '../validators/bids/groupFileTypes.js' - -// Disable most tests that might access files -const defaultOptions = { - ignoreWarnings: true, - ignoreNiftiHeaders: true, - ignoreSymlinks: true, - ignoreSubjectConsistency: true, - verbose: false, - gitTreeMode: false, - remoteFiles: false, - gitRef: 'HEAD', - config: { ignore: [44], warn: [], error: [], ignoredFiles: [] }, -} - -async function generateFileObjects(stream) { - const ig = defaultIgnore() - const inputFiles = {} - let bidsIgnore = true - let index = 0 - for await (const line of stream) { - // Part 1, parse bidsignore until 0001 (git delimiter packet) - if (line === '0001') { - bidsIgnore = false - } else { - if (bidsIgnore) { - ig.add(line) - } else { - // Done with bidsignore, read filename data - const rootPath = `/${line}` - /** - * Simulated file object based on input - * File size is 1 to prevent 0 size errors but makes some checks inaccurate - */ - const file = { - name: path.basename(line), - path: rootPath, - relativePath: rootPath, - size: 1, - } - if (ig.ignores(line)) { - file.ignore = true - } - inputFiles[index] = file - index++ - } - } - } - return inputFiles -} - -/** - * Validate input from stdin as bidsignore + filenames - * - * Protocol uses `0001` line to 
separate the two streams - * .bidsignore lines are read first - * One filename per line is read in and bidsignore rules applied - * - * @param {AsyncIterable} stream Readline stream - */ -export async function validateFilenames(stream) { - const inputFiles = await generateFileObjects(stream) - const couldBeBIDS = quickTest(inputFiles) - if (couldBeBIDS) { - const files = groupFileTypes(inputFiles, defaultOptions) - if (files.invalid.length > 0) { - const invalidFiles = [] - for (const f of files.invalid) { - if (!f.ignore) { - invalidFiles.push(f) - } - } - if (invalidFiles.length > 0) { - // eslint-disable-next-line no-console - console.log( - 'Validation failed, some files are not valid BIDS filenames:', - ) - for (const ef of invalidFiles) { - // eslint-disable-next-line no-console - console.log(` ${ef.path}`) - } - return false - } - } - return true - } else { - // eslint-disable-next-line no-console - console.log( - 'This dataset failed a quick validation, please verify it is a BIDS dataset at the root of the git repository', - ) - return false - } -} - -export async function filenamesOnly() { - const rl = readline.createInterface({ - input: process.stdin, - }) - if (await validateFilenames(rl)) { - return 0 - } else { - return 1 - } -} diff --git a/bids-validator/utils/files/FileAPI.js b/bids-validator/utils/files/FileAPI.js deleted file mode 100644 index 3fc4e88a..00000000 --- a/bids-validator/utils/files/FileAPI.js +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Simulates some of the browser File API interface. - * https://developer.mozilla.org/en-US/docs/Web/API/File - * - * @param {string[]} parts - file contents as bytes - * @param {string} filename - filename without path info - * @param {Object} properties - unused Blob properties - */ -function NodeFile(parts, filename, properties) { - this.parts = parts - this.name = filename - this.properties = properties - this.size = parts.reduce(function (a, val) { - return a + val.length - }, 0) - // De facto mime-type for unknown content - this.type = 'application/octet-stream' - this.lastModified = 0 -} - -/** - * Return either a mock or a real FileAPI if one is available - */ -function FileAPI() { - return typeof File === 'undefined' ? 
NodeFile : File -} - -export default FileAPI diff --git a/bids-validator/utils/files/__tests__/generateMergedSidecarDict.spec.js b/bids-validator/utils/files/__tests__/generateMergedSidecarDict.spec.js deleted file mode 100644 index 2c7f13ad..00000000 --- a/bids-validator/utils/files/__tests__/generateMergedSidecarDict.spec.js +++ /dev/null @@ -1,49 +0,0 @@ -/*global globalThis*/ -import generateMergedSidecarDict from '../generateMergedSidecarDict.js' - -describe('generateMergedSidecarDict.js', () => { - describe('Object pollution test', () => { - beforeAll(() => { - // Simulate code that injects globalThis into every object - Object.defineProperty(Object.prototype, 'global', { - get: function () { - return globalThis - }, - configurable: true, - }) - }) - - afterAll(() => { - // Clean up the pollution - delete Object.prototype.global - }) - - it('trivial check', () => { - expect(generateMergedSidecarDict([], {})).toStrictEqual({}) - }) - - it('merges objects with global property', () => { - const potentialSidecars = ['/sidecar1.json', '/sidecar2.json'] - const jsonContents = { - '/sidecar1.json': { - RegularMetadata1: 'value1', - global: { - globalMetadata: 'value1', - }, - }, - '/sidecar2.json': { - RegularMetadata2: 'value2', - }, - } - expect( - generateMergedSidecarDict(potentialSidecars, jsonContents), - ).toStrictEqual({ - RegularMetadata1: 'value1', - RegularMetadata2: 'value2', - global: { - globalMetadata: 'value1', - }, - }) - }) - }) -}) diff --git a/bids-validator/utils/files/__tests__/readDir-examples.spec.js b/bids-validator/utils/files/__tests__/readDir-examples.spec.js deleted file mode 100644 index fc929b03..00000000 --- a/bids-validator/utils/files/__tests__/readDir-examples.spec.js +++ /dev/null @@ -1,76 +0,0 @@ -import readDir from '../readDir.js' - -describe('readDir.js - examples integration', () => { - describe('readDir()', () => { - it('returns expected files', async () => { - await readDir('bids-validator/tests/data/bids-examples/ds002/').then( - (files) => { - const filenames = Object.values(files).map((f) => f.name) - filenames.sort() - expect(filenames).toHaveLength(246) - expect(filenames[0]).toBe('CHANGES') - expect(filenames[25]).toBe( - 'sub-02_task-mixedeventrelatedprobe_run-01_bold.nii.gz', - ) - expect(filenames[200]).toBe( - 'sub-14_task-probabilisticclassification_run-02_events.tsv', - ) - }, - ) - }) - it('correctly follows symlinks for subjects with followSymbolicLink: true', async () => { - await readDir('bids-validator/tests/data/symlinked_subject', { - ignoreSymlinks: false, - }).then((files) => { - expect(Object.keys(files)).toHaveLength(12) - const filenames = Object.values(files).map((f) => f.name) - filenames.sort() - expect(filenames).toEqual([ - 'CHANGES', - 'README', - 'dataset_description.json', - 'participants.tsv', - 'sub-0-1_task-rhymejudgment_bold.nii.gz', - 'sub-01_T1w.nii', - 'sub-01_T1w.nii.gz', - 'sub-01_task-rhyme-judgment_bold.nii.gz', - 'sub-01_task-rhyme-judgment_events.tsv', - 'sub-01_task-rhyme_judgment_bold.nii.gz', - 'sub-01_task-rhyme_judgment_events.tsv', - 'task-rhymejudgment_bold.json', - ]) - }) - }) - it('correctly does not follow symlinks for subjects with followSymbolicLink: false', async () => { - await readDir('bids-validator/tests/data/symlinked_subject', { - ignoreSymlinks: true, - }).then((files) => { - expect(Object.keys(files)).toHaveLength(6) - const filenames = Object.values(files).map((f) => f.name) - filenames.sort() - expect(filenames).toEqual([ - 'CHANGES', - 'README', - 'dataset_description.json', - 
'participants.tsv', - 'sub-01', - 'task-rhymejudgment_bold.json', - ]) - }) - }) - it('returns file objects with the expected shape', async () => { - await readDir('bids-validator/tests/data/symlinked_subject', { - ignoreSymlinks: true, - }).then((files) => { - expect(Object.keys(files)).toHaveLength(6) - Object.values(files).forEach((f) => { - expect(Object.getOwnPropertyNames(f)).toEqual([ - 'name', - 'path', - 'relativePath', - ]) - }) - }) - }) - }) -}) diff --git a/bids-validator/utils/files/__tests__/readDir.spec.js b/bids-validator/utils/files/__tests__/readDir.spec.js deleted file mode 100644 index c383f7e4..00000000 --- a/bids-validator/utils/files/__tests__/readDir.spec.js +++ /dev/null @@ -1,45 +0,0 @@ -import readDir from '../readDir.js' - -describe('readDir.js', () => { - describe('fileArrayToObject', () => { - it('transforms an array to an object', () => { - expect( - readDir.fileArrayToObject([ - { name: 'one' }, - { name: 'two' }, - { name: 'three' }, - ]), - ).toEqual({ - 0: { - name: 'one', - }, - 1: { - name: 'two', - }, - 2: { - name: 'three', - }, - }) - }) - }) - describe('harmonizeRelativePath', () => { - it('harmonizes a basic POSIX path', () => { - expect(readDir.harmonizeRelativePath('test/a/path')).toEqual('/a/path') - }) - it('does not mangle absolute Windows paths', () => { - expect(readDir.harmonizeRelativePath('C:\\dataset\\directory')).toEqual( - '/dataset/directory', - ) - }) - it('does not mangle relative Windows paths', () => { - expect(readDir.harmonizeRelativePath('dataset\\directory')).toEqual( - '/directory', - ) - }) - it('does not mangle relative Windows paths with parent directories', () => { - expect( - readDir.harmonizeRelativePath('..\\..\\dataset\\directory'), - ).toEqual('/../dataset/directory') - }) - }) -}) diff --git a/bids-validator/utils/files/__tests__/remoteFiles.spec.js b/bids-validator/utils/files/__tests__/remoteFiles.spec.js deleted file mode 100644 index a309f6a7..00000000 --- a/bids-validator/utils/files/__tests__/remoteFiles.spec.js +++ /dev/null @@ -1,242 +0,0 @@ -import { assert } from 'chai' -import remoteFiles from '../remoteFiles' -import fs from 'fs' -import zlib from 'zlib' -const config = { - s3Params: { - Bucket: 'none', - }, - file: { - name: 'something', - }, -} - -describe('remoteFiles', () => { - beforeAll(() => { - // fetch mock - global.fetch = jest - .fn() - .mockImplementation(() => - Promise.resolve({ ok: true, buffer: () => 'buffer' }), - ) - }) - - beforeEach(() => { - delete process.env.AWS_ACCESS_KEY_ID - }) - - describe('accessRemoteFile', () => { - it('should return a promise', () => { - const promise = remoteFiles.accessRemoteFile(config) - expect(promise).toBeInstanceOf(Promise) - }) - it('should return the response of constructAwsRequest if successful', () => { - remoteFiles - .accessRemoteFile(config) - .then((res) => expect(res).toBe('buffer')) - }) - it('should return the issue of extractGzipBuffer if unzip is unsuccessful', () => { - config.file.name = 'something.gz' - return remoteFiles.accessRemoteFile(config).catch((issue) => { - expect(issue).toHaveProperty('code') - config.file.name = 'something' - }) - }) - }) - - describe('constructAwsRequest', () => { - it('should return a fetch resolution promise when aws creds are not present', async () => { - const response = remoteFiles.constructAwsRequest({ - s3Params: { Bucket: 'matters not' }, - }) - expect(response).toBeInstanceOf(Promise) - }) - it('should return the buffer() property of the fetch response', async () => { - remoteFiles - 
.constructAwsRequest({ - s3Params: { Bucket: 'matters not' }, - }) - .then((data) => { - assert.equal(data, 'buffer') - }) - }) - }) - - describe('extractGzipBuffer', () => { - it('should extract proper gzip files', async () => { - zlib.gzip('Some String', async (err, res) => { - const gzip = await remoteFiles.extractGzipBuffer(res, {}) - expect(gzip).toBeInstanceOf(Uint8Array) - }) - }) - it('should reject with an issue when gzip reading fails', async () => { - try { - const zip = 'bad data' - await remoteFiles.extractGzipBuffer(zip, {}) - } catch (e) { - expect(e).toHaveProperty('code') - expect(e.code).toEqual(28) - } - }) - }) - - describe('callGitAnnex', () => { - it('should return the string result of execSync', () => { - const resp = remoteFiles.callGitAnnex('echo test') - expect(resp.trim()).toBe('test') - }) - }) - - describe('getRemotesInfo', () => { - it('should return an empty array if callGitAnnex does not return contents of a metadata file', () => { - remoteFiles.callGitAnnex = jest.fn() - remoteFiles.callGitAnnex.mockReturnValue('bad_response') - const remotesInfo = remoteFiles.getRemotesInfo('some_directory', { - relativePath: 'some_file', - }) - assert.lengthOf(remotesInfo, 0) - }) - it('should return an empty array if file is not properly formatted', () => { - const remotesInfo = remoteFiles.getRemotesInfo('some_directory', {}) - assert.lengthOf(remotesInfo, 0) - }) - it('should return an empty array if directory is not properly formatted', () => { - const remotesInfo = remoteFiles.getRemotesInfo('bad directory', { - relativePath: 'some_path', - }) - assert.lengthOf(remotesInfo, 0) - }) - it('should return an array of remote objects if getRemoteData returns properly formatted remote metadata file', () => { - remoteFiles.getRemoteMetadata = jest.fn() - remoteFiles.getRemoteMetadata.mockReturnValue( - 'timestamp remoteuuid:commitinfo xversionId#fileName', - ) - const remotesInfo = remoteFiles.getRemotesInfo('some_directory', { - relativePath: 'some_file', - }) - remoteFiles.getRemoteMetadata.mockRestore() - assert.lengthOf(remotesInfo, 1) - }) - }) - - describe('getSingleRemoteInfo', () => { - it('returns an object with null Bucket property if the response does not contain remote info', () => { - remoteFiles.callGitAnnex = jest.fn() - remoteFiles.callGitAnnex.mockReturnValue('bad_response') - const singleRemoteInfo = remoteFiles.getSingleRemoteInfo( - 'some_dir', - 'some_uuid', - ) - expect(singleRemoteInfo).toHaveProperty('Bucket') - expect(singleRemoteInfo.Bucket).toBe(null) - }) - it('returns an object with a Bucket property if callGitAnnex returns an object with the Bucket field', () => { - remoteFiles.callGitAnnex = jest.fn() - remoteFiles.callGitAnnex.mockReturnValue( - 'good_response\nbucket: such_bucket\nawesome_line', - ) - const singleRemoteInfo = remoteFiles.getSingleRemoteInfo( - 'some_dir', - 'some_uuid', - ) - expect(singleRemoteInfo).toHaveProperty('Bucket') - expect(singleRemoteInfo.Bucket).toEqual('such_bucket') - }) - }) - - describe('getRemoteBucket', () => { - it('returns an object with a Bucket property if the response contains that field', () => { - const resp = 'something:something\nbucket: omg\nawesome:awesome' - const params = remoteFiles.getRemoteBucket(resp) - expect(params).toHaveProperty('Bucket') - expect(params.Bucket).toEqual('omg') - }) - it('returns an object with null Bucket property if the response does not contain the bucket field', () => { - const resp = 'wow_this_is_a_bad_response' - const params = remoteFiles.getRemoteBucket(resp) - 
expect(params).toHaveProperty('Bucket') - expect(params.Bucket).toBe(null) - }) - }) - - describe('processRemoteMetadata', () => { - it('properly parses a git-annex remote metadata file', () => { - const resp = 'timestamp remoteuuid:commitinfo xversionId#fileName' - const remotesInfo = remoteFiles.processRemoteMetadata(resp) - assert.lengthOf(remotesInfo, 1) - const remoteObj = remotesInfo[0] - expect(remoteObj).toHaveProperty('timestamp') - expect(remoteObj.timestamp).toEqual('timestamp') - expect(remoteObj).toHaveProperty('remoteUuid') - expect(remoteObj.remoteUuid).toEqual('remoteuuid') - expect(remoteObj).toHaveProperty('fileName') - expect(remoteObj.fileName).toEqual('fileName') - expect(remoteObj).toHaveProperty('versionId') - expect(remoteObj.versionId).toEqual('versionId') - }) - it('returns an empty array if there is an improperly formatted metadata file', () => { - let remotesInfo - const no_spaces = 'poorly_formatted_response' // contains no spaces - remotesInfo = remoteFiles.processRemoteMetadata(no_spaces) - assert.lengthOf(remotesInfo, 0) - const not_enough_items = 'one two' // does not contain enough "columns" - remotesInfo = remoteFiles.processRemoteMetadata(not_enough_items) - assert.lengthOf(remotesInfo, 0) - - // does not follow the proper 'one two:three xfour#five' format - const not_properly_formatted = 'one two:three four' - remotesInfo = remoteFiles.processRemoteMetadata(not_properly_formatted) - assert.lengthOf(remotesInfo, 0) - const not_the_right_separators = 'one two:three xfour:five' - remotesInfo = remoteFiles.processRemoteMetadata(not_the_right_separators) - assert.lengthOf(remotesInfo, 0) - }) - it('returns objects corresponding to any properly formatted line', () => { - const one_line_right = - 'properly formatted:response xwith#a\nline_that_is_not_properly_formatted' - const remotesInfo = remoteFiles.processRemoteMetadata(one_line_right) - assert.lengthOf(remotesInfo, 1) - }) - }) - - describe('isGitAnnex', () => { - it('returns false when fs.existsSync returns false', () => { - fs.existsSync = jest.fn() - fs.existsSync.mockReturnValue(false) - const isGitAnnex = remoteFiles.isGitAnnex('some-path') - expect(fs.existsSync).toHaveBeenCalled() - expect(isGitAnnex).toBe(false) - }) - it('returns true when fs.existsSync returns true', () => { - fs.existsSync = jest.fn() - fs.existsSync.mockReturnValue(true) - const isGitAnnex = remoteFiles.isGitAnnex('some-path') - expect(fs.existsSync).toHaveBeenCalled() - expect(isGitAnnex).toBe(true) - }) - }) - - describe('tryRemote', () => { - it('should resolve with the results of accessRemoteFile', (done) => { - remoteFiles.getSingleRemoteInfo = jest.fn() - remoteFiles.getSingleRemoteInfo.mockReturnValue({ Bucket: 'wow' }) - remoteFiles.accessRemoteFile = jest.fn() - remoteFiles.accessRemoteFile.mockReturnValue(Promise.resolve('data')) - remoteFiles - .tryRemote( - {}, - { dir: 'directory', file: { relativePath: 'wow', name: 'name' } }, - ) - .then((data) => { - expect(data) - done() - }) - .catch(done) - }) - }) - // reset the fs object back to its normal state - // so we don't break jest - afterAll(() => { - fs.existsSync.mockRestore() - }) -}) diff --git a/bids-validator/utils/files/collectDirectorySize.js b/bids-validator/utils/files/collectDirectorySize.js deleted file mode 100644 index ecea9af5..00000000 --- a/bids-validator/utils/files/collectDirectorySize.js +++ /dev/null @@ -1,23 +0,0 @@ -import isNode from '../isNode' -import getFileStats from './getFileStats' - -const collectDirectorySize = (fileList) => { - let 
size = 0 - const keys = Object.keys(fileList) - keys.forEach((key) => { - const file = fileList[key] - // collect file stats - if (file.size) { - // from File api in browser - size += file.size - // or from git-annex metadata when in gitTreeMode - if (isNode) file.stats = { size: file.size } - } else { - file.stats = getFileStats(file) - size += file.stats.size - } - }) - return size -} - -export default collectDirectorySize diff --git a/bids-validator/utils/files/generateMergedSidecarDict.js b/bids-validator/utils/files/generateMergedSidecarDict.js deleted file mode 100644 index a642eb21..00000000 --- a/bids-validator/utils/files/generateMergedSidecarDict.js +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Generate Merged Sidecar Dictionary - * - * Takes an array of potential sidecars and a - * master object dictionary of all JSON file - * content and returns a merged dictionary - * containing all values from the potential - * sidecars. - */ -function generateMergedSidecarDict(potentialSidecars, jsonContents) { - // Use a map to avoid potential conflicts with keys in Object.prototype - const mergedDictionary = new Map() - let valid = true - potentialSidecars.map((sidecarName) => { - const jsonObject = jsonContents[sidecarName] - if (jsonObject) { - for (const key of Object.keys(jsonObject)) { - if (jsonObject.hasOwnProperty(key)) { - mergedDictionary.set(key, jsonObject[key]) - } - } - } else if (jsonObject === null) { - valid = false - } - }) - const mergedDictionaryObj = Object.fromEntries(mergedDictionary) - if (!valid) { - mergedDictionaryObj.invalid = true - } - return mergedDictionaryObj -} - -export default generateMergedSidecarDict
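
// A brief usage sketch (added illustration, mirroring the unit tests):
// later sidecars in the list overwrite earlier ones on key collisions,
// which is how the BIDS inheritance principle resolves metadata. The
// filenames below are hypothetical.
import generateMergedSidecarDict from './generateMergedSidecarDict.js'

const potentialSidecars = [
  '/task-rest_bold.json',
  '/sub-01/sub-01_task-rest_bold.json',
]
const jsonContents = {
  '/task-rest_bold.json': { RepetitionTime: 2, TaskName: 'rest' },
  '/sub-01/sub-01_task-rest_bold.json': { RepetitionTime: 1.5 },
}
generateMergedSidecarDict(potentialSidecars, jsonContents)
// => { RepetitionTime: 1.5, TaskName: 'rest' }

diff --git a/bids-validator/utils/files/getBFileContent.js b/bids-validator/utils/files/getBFileContent.js deleted file mode 100644 index 6e1d6001..00000000 --- a/bids-validator/utils/files/getBFileContent.js +++ /dev/null @@ -1,17 +0,0 @@ -/** - * Get B-File Contents - * - * Takes an array of potential bval or bvec files - * and a master b-file contents dictionary and returns - * the contents of the desired file. 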
- */ -function getBFileContent(potentialBFiles, bContentsDict) { - for (var i = 0; i < potentialBFiles.length; i++) { - var potentialBFile = potentialBFiles[i] - if (bContentsDict.hasOwnProperty(potentialBFile)) { - return bContentsDict[potentialBFile] - } - } -} - -export default getBFileContent diff --git a/bids-validator/utils/files/getFileStats.js b/bids-validator/utils/files/getFileStats.js deleted file mode 100644 index 6e78ac14..00000000 --- a/bids-validator/utils/files/getFileStats.js +++ /dev/null @@ -1,15 +0,0 @@ -import fs from 'fs' - -function getFileStats(file) { - let stats - if (!file.stats) { - try { - stats = fs.statSync(file.path) - } catch (err) { - stats = { size: 0 } - } - } - return stats -} - -export default getFileStats diff --git a/bids-validator/utils/files/illegalCharacterTest.js b/bids-validator/utils/files/illegalCharacterTest.js deleted file mode 100644 index fe8b4dc5..00000000 --- a/bids-validator/utils/files/illegalCharacterTest.js +++ /dev/null @@ -1,52 +0,0 @@ -import Issue from '../../utils/issues' - -const re = { - task_re: - /sub-(.*?)_task-[a-zA-Z0-9]*[_-][a-zA-Z0-9]*(?:_acq-[a-zA-Z0-9-]*)?(?:_run-\d+)?_/g, - acq_re: - /sub-(.*?)(_task-\w+.\w+)?(_acq-[a-zA-Z0-9]*[_-][a-zA-Z0-9]*)(?:_run-\d+)?_/g, - sub_re: /sub-[a-zA-Z0-9]*[_-][a-zA-Z0-9]*_/g, // illegal character in sub - ses_re: /ses-[a-zA-Z0-9]*[_-][a-zA-Z0-9]*?_(.*?)/g, //illegal character in ses -} - -const illegalchar_regex_list = [ - [re.task_re, 58, 'task name contains illegal character:'], - [re.acq_re, 59, 'acq name contains illegal character:'], - [re.sub_re, 62, 'sub name contains illegal character:'], - [re.ses_re, 63, 'ses name contains illegal character:'], -] - -const illegalCharacterTest = (fileList) => { - const issues = [] - const fileKeys = Object.keys(fileList) - fileKeys.forEach((key) => { - const file = fileList[key] - const completename = file.relativePath - if ( - !( - completename.startsWith('/derivatives') || - completename.startsWith('/code') || - completename.startsWith('/sourcedata') - ) - ) { - illegalchar_regex_list.map((regex) => { - const err_regex = regex[0] - const err_code = regex[1] - const err_evidence = regex[2] - - if (err_regex.exec(completename)) { - issues.push( - new Issue({ - file: file, - code: err_code, - evidence: err_evidence + completename, - }), - ) - } - }) - } - }) - return issues -} - -export default illegalCharacterTest diff --git a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js deleted file mode 100644 index 237b940a..00000000 --- a/bids-validator/utils/files/index.js +++ /dev/null @@ -1,37 +0,0 @@ -// dependencies ------------------------------------------------------------------- -import FileAPI from './FileAPI' - -import newFile from './newFile' -import readFile from './readFile' -import readOMEFile from './readOMEFile' -import readBuffer from './readBuffer' -import readNiftiHeader from './readNiftiHeader' -import readDir from './readDir' -import potentialLocations from './potentialLocations' -import generateMergedSidecarDict from './generateMergedSidecarDict' -import getBFileContent from './getBFileContent' -import collectDirectorySize from './collectDirectorySize' -import illegalCharacterTest from './illegalCharacterTest' -import sessions from './sessions' -import remoteFiles from './remoteFiles' -import getFileStats from './getFileStats' - -// public API --------------------------------------------------------------------- - -export default { - FileAPI, - newFile, - readFile, - readDir, - readBuffer, - 
readOMEFile, - readNiftiHeader, - generateMergedSidecarDict, - potentialLocations, - getBFileContent, - collectDirectorySize, - illegalCharacterTest, - sessions, - remoteFiles, - getFileStats, -} diff --git a/bids-validator/utils/files/newFile.js b/bids-validator/utils/files/newFile.js deleted file mode 100644 index 9f9c9e5f..00000000 --- a/bids-validator/utils/files/newFile.js +++ /dev/null @@ -1,15 +0,0 @@ -import FileAPI from './FileAPI' - -/** - * New File - * - * Creates an empty File object - * - * @param {string} filename - the filename without path info - */ -function newFile(filename) { - var File = FileAPI() - return new File([''], filename) -} - -export default newFile diff --git a/bids-validator/utils/files/potentialLocations.js b/bids-validator/utils/files/potentialLocations.js deleted file mode 100644 index d1857934..00000000 --- a/bids-validator/utils/files/potentialLocations.js +++ /dev/null @@ -1,89 +0,0 @@ -const potentialLocations = (path) => { - //add a '/' at the beginning of the path if it doesn't exist yet - path = path.startsWith('/') ? path : '/' + path - const splitPath = path.split('/') - const filename = splitPath[splitPath.length - 1] // filename path component - const pathComponents = splitPath.splice(0, splitPath.length - 1) // all path components before - - // split the filename into separate components - const filenameComponents = filename.split('_') - - // create components object consisting of path + filename component lists - const components = { - path: pathComponents, - filename: filenameComponents, - } - - // generate relevant paths and put into closest -> root order - const potentials = potentialPaths(components) - if (potentials.indexOf(path) < 0) { - return [path].concat(potentials).reverse() - } else { - return potentials - } -} - -const potentialPaths = (components) => { - let filenameComponents = components.filename // get the underscore separated file components - let pathComponents = components.path // get the path components before file - const fileIndex = filenameComponents.length - 1 // index of the filename in file components - const file = filenameComponents[fileIndex] // filename (events.tsv, bold.json, etc) - const informationalFileComponents = filenameComponents.slice(0, fileIndex) // all non-filename file path components (ses-*, sub-*, task-*, etc) - - // filter filename components that are allowed only in a lower directory - // eg if we are root level we will not want sub-* included in the possible - // paths for this level. Also we do not want to include run in that list. 
- const nonPathSpecificFileComponents = informationalFileComponents.filter( - (component) => pathComponents.indexOf(component) < 0, - ) - - // loop through all the directory levels - root, sub, (ses), (datatype) - let paths = [] - pathComponents.map((component, i) => { - const activeDirectoryComponents = pathComponents.slice(0, i + 1) // the directory components in the current working level - const directoryString = activeDirectoryComponents.join('/') // path of active directory - - const prefixComponents = informationalFileComponents.filter( - (component) => activeDirectoryComponents.indexOf(component) > -1, - ) - - const prefix = prefixComponents.join('_') - for ( - let j = 0; - j < Math.pow(2, nonPathSpecificFileComponents.length); - j++ - ) { - const filename = nonPathSpecificFileComponents - .filter((value, index) => j & (1 << index)) - .concat([file]) - .join('_') - - // join directory + filepath strings together to get entire path - paths.push(constructFileName(directoryString, filename, prefix)) - } - }) - - // There is an exception to the inheritance principle when it comes - // to bold data .json sidecars - the potential locations *must* include - // the task- keyword. - if (filenameComponents.indexOf('bold.json') > -1) { - paths = removePathsWithoutTasknames(paths) - } - - return paths -} - -const constructFileName = (directoryString, filename, prefix) => { - // join the prefix + filename if prefix exists - const filePathString = prefix ? [prefix, filename].join('_') : filename - const newPath = directoryString + '/' + filePathString - return newPath -} - -const removePathsWithoutTasknames = (paths) => { - return paths.filter((path) => { - return path.indexOf('task') > -1 - }) -} - -export default potentialLocations
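
// An illustrative call (added example, not original source): for a bold
// sidecar, potentialLocations() generates every location the inheritance
// principle allows and keeps only candidates containing the task- entity.
// The dataset paths are hypothetical.
import potentialLocations from './potentialLocations.js'

potentialLocations('/sub-01/func/sub-01_task-rest_bold.json')
// => [
//   '/task-rest_bold.json',
//   '/sub-01/sub-01_task-rest_bold.json',
//   '/sub-01/func/sub-01_task-rest_bold.json',
// ]

diff --git a/bids-validator/utils/files/readBuffer.js b/bids-validator/utils/files/readBuffer.js deleted file mode 100644 index f9caa303..00000000 --- a/bids-validator/utils/files/readBuffer.js +++ /dev/null @@ -1,23 +0,0 @@ -import isNode from '../isNode' -import fs from 'fs' - -const readBuffer = (file) => { - return new Promise((resolve, reject) => { - if (isNode) { - resolve(fs.readFileSync(file.path)) - } else { - try { - const reader = new FileReader() - reader.onload = (event) => { - resolve(event.target.result) - } - - reader.readAsArrayBuffer(file) - } catch (e) { - reject(e) - } - } - }) -} - -export default readBuffer diff --git a/bids-validator/utils/files/readDir.js b/bids-validator/utils/files/readDir.js deleted file mode 100644 index 90211420..00000000 --- a/bids-validator/utils/files/readDir.js +++ /dev/null @@ -1,388 +0,0 @@ -import ignore from 'ignore' -import readFile from './readFile' -import path from 'path' -import fs from 'fs' -import * as child_proccess from 'child_process' -import isNode from '../isNode' - -/** - * Read Directory - * - * In node it takes a path to a directory and returns - * a Promise resolving to an object containing all of the files, - * organized in a similar structure to how chrome reads a directory. - * In the browser it processes the selected directory's file list directly. - * @param {String} dir Path to read - * @param {Object} options - * @param {boolean} options.ignoreSymlinks enable to prevent recursively following directory symlinks - * @returns {Promise} - */ -async function readDir(dir, options = {}) { - const ig = await getBIDSIgnore(dir) - const fileArray = isNode - ? 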
await preprocessNode(path.resolve(dir), ig, options) - : preprocessBrowser(dir, ig) - const files = fileArrayToObject(fileArray) - return files -} - -/** - * Transform array of file-like objects to one object with each file as a property - * @param {Array[Object]} fileArray - * @returns {Object} - */ -function fileArrayToObject(fileArray) { - const filesObj = {} - // converting array to object - for (let j = 0; j < fileArray.length; j++) { - filesObj[j] = fileArray[j] - } - return filesObj -} - -/** - * Preprocess file objects from a browser - * - * 1. Filters out ignored files and folders. - * 2. Adds 'relativePath' field of each file object. - */ -function preprocessBrowser(filesObj, ig) { - const filesList = [] - for (let i = 0; i < filesObj.length; i++) { - const fileObj = filesObj[i] - fileObj.relativePath = harmonizeRelativePath(fileObj.webkitRelativePath) - if (ig.ignores(path.relative('/', fileObj.relativePath))) { - fileObj.ignore = true - } - filesList.push(fileObj) - } - return filesList -} - -/** - * Harmonize Relative Path - * - * Takes a file and returns the browser style relative path - * based on the environment. - * - * Since this may be called in the browser, do not call Node.js modules - * - * @param {String} path Relative path to normalize - * @returns {String} - */ -function harmonizeRelativePath(path) { - // This normalizes relative paths so that command line calls to 'BIDS-examples/ds001/' and 'BIDS-examples/ds001' behave the same - if (path.indexOf('\\') !== -1) { - // This is likely a Windows path - Node.js - const pathParts = path.split('\\') - return '/' + pathParts.slice(1).join('/') - } else if (path[0] !== '/') { - // Bad POSIX path - Node.js - const pathParts = path.split('/') - return '/' + pathParts.slice(1).join('/') - } else { - // Already correct POSIX path - Browsers (all platforms) - return path - } -} - -/** - * Preprocess directory path from a Node CLI - * - * 1. Recursively traverses the directory tree - * 2. Filters out ignored files and folders. - * 3. 
Harmonizes the 'relativePath' field - */ -async function preprocessNode(dir, ig, options) { - const str = dir.substr(dir.lastIndexOf(path.sep) + 1) + '$' - const rootpath = dir.replace(new RegExp(str), '') - if (options.gitTreeMode) { - // if in gitTreeMode, attempt to get files from git-annex metadata - // before using fs - const files = await getFilesFromGitTree(dir, ig, options) - if (files !== null) return files - } - return await getFilesFromFs(dir, rootpath, ig, options) -} - -/** - * runs the command `git ls-tree -l -r <gitRef>` in the given directory - * @param {string} cwd path to dataset directory - * @param {string} gitRef git ref (commit hash, ref, 'HEAD', etc) - * @returns {string[]} - */ -const getGitLsTree = (cwd, gitRef) => - new Promise((resolve) => { - let output = '' - const gitProcess = child_proccess.spawn( - 'git', - ['ls-tree', '-l', '-r', gitRef], - { - cwd, - encoding: 'utf-8', - }, - ) - gitProcess.stdout.on('data', (data) => { - output += data.toString() - }) - gitProcess.stderr.on('data', () => { - resolve(null) - }) - gitProcess.on('close', () => { - resolve(output.trim().split('\n')) - }) - }) - -const readLsTreeLines = (gitTreeLines) => - gitTreeLines - .map((line) => { - const [metadata, path] = line.split('\t') - const [mode, objType, objHash, size] = metadata.split(/\s+/) - return { path, mode, objType, objHash, size } - }) - .filter( - ({ path, mode }) => - // skip git / datalad files and submodules - !/^\.git/.test(path) && - !/^\.datalad/.test(path) && - '.gitattributes' !== path && - mode !== '160000', - ) - .reduce( - ( - // accumulator - { files, symlinkFilenames, symlinkObjects }, - // git-tree line - { path, mode, objHash, size }, - ) => { - // read ls-tree line - if (mode === '120000') { - symlinkFilenames.push(path) - symlinkObjects.push(objHash) - } else { - files.push({ - path, - size: parseInt(size), - }) - } - return { files, symlinkFilenames, symlinkObjects } - }, - { files: [], symlinkFilenames: [], symlinkObjects: [] }, - ) - -/** - * runs `git cat-file --batch --buffer` in the given directory - * @param {string} cwd - * @param {string} input - * @returns {string[]} - */ -const getGitCatFile = (cwd, input) => - new Promise((resolve) => { - let output = '' - const gitProcess = child_proccess.spawn( - 'git', - ['cat-file', '--batch', '--buffer'], - { - cwd, - encoding: 'utf-8', - }, - ) - - // pass in symlink objects - gitProcess.stdin.write(input) - gitProcess.stdin.end() - - gitProcess.stdout.on('data', (data) => { - output += data.toString() - }) - gitProcess.stderr.on('data', () => { - resolve(null) - }) - gitProcess.on('close', () => { - resolve(output.trim().split('\n')) - }) - }) - -const readCatFileLines = (gitCatFileLines, symlinkFilenames) => - gitCatFileLines - // even lines contain unneeded metadata - .filter((_, i) => i % 2 === 1) - .map((line, i) => { - const path = symlinkFilenames[i] - const key = line.split('/').pop() - const size = parseInt(key.match(/-s(\d+)/)[1]) - return { - path, - size, - } - }) - -const processFiles = (dir, ig, ...fileLists) => - fileLists - .reduce((allFiles, files) => [...allFiles, ...files], []) - .map((file) => { - file.relativePath = path.normalize(`${path.sep}${file.path}`) - return file - }) - .filter((file) => { - const ignore = ig.ignores(file.relativePath.slice(1)) - return !ignore - }) - .map((file) => { - file.relativePath = harmonizeRelativePath(file.relativePath) - file.name = path.basename(file.path) - file.path = path.join(dir, file.relativePath) - return file - })
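
// Worked example (added for illustration): readCatFileLines recovers file
// sizes from git-annex keys without reading file contents. For the cat-file
// pair below, the '-s54' field of the annex key yields a 54-byte file (the
// symlink filename is hypothetical):
//
//   readCatFileLines(
//     [
//       'hash blob 140',
//       '.git/annex/objects/Mv/99/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json',
//     ],
//     ['sub-01/anat/sub-01_T1w.json'],
//   )
//   // => [{ path: 'sub-01/anat/sub-01_T1w.json', size: 54 }]

// The gitTreeMode pipeline below: git ls-tree enumerates regular files with
// their sizes, git cat-file resolves git-annex symlinks to annex keys so
// sizes can be recovered without checking out file contents, and
// processFiles applies the .bidsignore rules and normalizes paths.
async function 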
getFilesFromGitTree(dir, ig, options) { - const gitTreeLines = await getGitLsTree(dir, options.gitRef) - if ( - gitTreeLines === null || - (gitTreeLines.length === 1 && gitTreeLines[0] === '') - ) - return null - const { files, symlinkFilenames, symlinkObjects } = - readLsTreeLines(gitTreeLines) - - const gitCatFileLines = await getGitCatFile(dir, symlinkObjects.join('\n')) - // example gitCatFile output: - // .git/annex/objects/Mv/99/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json/SHA256E-s54--42c98d14dbe3d066d35897a61154e39ced478cd1f0ec6159ba5f2361c4919878.json - // .git/annex/objects/QV/mW/SHA256E-s99--bbef536348750373727d3b5856398d7377e5d7e23875eed026b83d12cee6f885.json/SHA256E-s99--bbef536348750373727d3b5856398d7377e5d7e23875eed026b83d12cee6f885.json - const symlinkFiles = readCatFileLines(gitCatFileLines, symlinkFilenames) - - return processFiles(dir, ig, files, symlinkFiles) -} - -/** - * Recursive helper function for 'preprocessNode' - */ -async function getFilesFromFs(dir, rootPath, ig, options, parent = []) { - const files = await fs.promises.readdir(dir, { withFileTypes: true }) - const filesAccumulator = parent - for (const file of files) { - const fullPath = path.join(dir, file.name) - const relativePath = harmonizeRelativePath( - path.relative(rootPath, fullPath), - ) - const ignore = ig.ignores(path.relative('/', relativePath)) - const fileObj = { - name: file.name, - path: fullPath, - relativePath, - } - if (ignore) { - fileObj.ignore = true - } - // Three cases to consider: directories, files, symlinks - if (file.isDirectory()) { - await getFilesFromFs(fullPath, rootPath, ig, options, filesAccumulator) - } else if (file.isSymbolicLink()) { - // Allow skipping symbolic links, which can lead to recursion - // Skipping them is a big performance advantage on high-latency - // storage, but following them is a good default for versatility - if (!options.ignoreSymlinks) { - try { - const targetPath = await fs.promises.realpath(fullPath) - const targetStat = await fs.promises.stat(targetPath) - // Either add or recurse from the target depending on its type - if (targetStat.isDirectory()) { - await getFilesFromFs( - targetPath, - rootPath, - ig, - options, - filesAccumulator, - ) - } else { - filesAccumulator.push(fileObj) - } - } catch (err) { - // Symlink points at an invalid target, skip it - return - } - } else { - // This branch assumes all symbolic links are not directories - filesAccumulator.push(fileObj) - } - } else { - filesAccumulator.push(fileObj) - } - } - return filesAccumulator -} - -export function defaultIgnore() { - return ignore() - .add('.*') - .add('!*.icloud') - .add('/derivatives') - .add('/sourcedata') - .add('/code') -} - -async function getBIDSIgnore(dir) { - const ig = defaultIgnore() - - const bidsIgnoreFileObj = getBIDSIgnoreFileObj(dir) - if (bidsIgnoreFileObj) { - const content = await readFile(bidsIgnoreFileObj) - ig.add(content) - } - return ig -} - -/** - * Get File object corresponding to the .bidsignore file - * @param dir - * @returns File object or null if not found - */ -function getBIDSIgnoreFileObj(dir) { - if (isNode) { - return getBIDSIgnoreFileObjNode(dir) - } else { - return getBIDSIgnoreFileObjBrowser(dir) - } -} - -function getBIDSIgnoreFileObjNode(dir) { - const path = dir + '/.bidsignore' - try { - fs.accessSync(path) - return { path: path, stats: { size: null } } - } catch (err) { - return null - } -}
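
// getBIDSIgnoreFileObjNode above probes the .bidsignore file directly on
// disk; in the browser the file is instead located by its relative path
// within the directory the user selected.

function getBIDSIgnoreFileObjBrowser(dir) { - for (var i = 0; i < dir.length; i++) { - const fileObj = dir[i] - const 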
relativePath = harmonizeRelativePath(fileObj.webkitRelativePath) - if (relativePath === '/.bidsignore') { - return fileObj - } - } -} - -export { - readDir, - getFilesFromFs, - fileArrayToObject, - harmonizeRelativePath, - readLsTreeLines, - readCatFileLines, - processFiles, -} - -export default Object.assign(readDir, { - readDir, - getFilesFromFs, - fileArrayToObject, - harmonizeRelativePath, - readLsTreeLines, - readCatFileLines, - processFiles, -}) diff --git a/bids-validator/utils/files/readFile.js b/bids-validator/utils/files/readFile.js deleted file mode 100644 index aebadff8..00000000 --- a/bids-validator/utils/files/readFile.js +++ /dev/null @@ -1,84 +0,0 @@ -import testFile from './testFile' -import Issue from '../../utils/issues' -import fs from 'fs' -import isNode from '../isNode' -import checkIfUtf8 from 'is-utf8' - -const JSONFilePattern = /.json$/ -const isJSONFile = (file) => - JSONFilePattern.test(isNode ? file.name : file.relativePath) - -// Work around JSDom not providing TextDecoder yet -if (typeof TextDecoder === 'undefined') { - const { TextDecoder } = require('util') - global.TextDecoder = TextDecoder -} - -/** - * checkEncoding - * @param {object | File} file - nodeJS fs file or browser File - * @param {buffer | Uint8Array} data - file content buffer - * @param {function} cb - returns { isUtf8 } - */ -const checkEncoding = (file, data, cb) => { - if (isJSONFile(file)) cb({ isUtf8: checkIfUtf8(data) }) -} - -/** - * readFile - * @param {object | File} file - nodeJS fs file or browser File - * @param {boolean} annexed - is the file currently annexed? - * @param {string} dir - path to directory containing dataset. Only used if - * annexed is true. - * - * A helper method for reading file contents. - * Takes a file object and a callback and calls - * the callback with the binary contents of the - * file as the only argument. - * - * In the browser the file should be a file object. - * In node the file should be a path to a file. 
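 * A minimal usage sketch (illustrative, not from the original source):
 * `fileObj` is a file object as produced by readDir (carrying `path` and
 * `stats`) and `handleIssue` is a hypothetical error handler.
 *
 *   readFile(fileObj, false, datasetDir)
 *     .then((text) => JSON.parse(text))
 *     .catch((issue) => handleIssue(issue)) // e.g. code 44 (unreadable) or 123 (not UTF-8)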
- * - */ -function readFile(file, annexed, dir) { - return new Promise((resolve, reject) => { - if (isNode) { - testFile(file, annexed, dir, function (issue, stats, remoteBuffer) { - if (issue) { - return reject(issue) - } - if (!remoteBuffer) { - fs.readFile(file.path, function (err, data) { - if (err) { - return reject(err) - } - checkEncoding(file, data, ({ isUtf8 }) => { - if (!isUtf8) reject(new Issue({ code: 123, file })) - }) - return resolve(data.toString('utf8')) - }) - } - if (remoteBuffer) { - return resolve(remoteBuffer.toString('utf8')) - } - }) - } else { - const reader = new FileReader() - reader.onloadend = (e) => { - if (e.target.readyState == FileReader.DONE) { - if (!e.target.result) { - return reject(new Issue({ code: 44, file: file })) - } - const buffer = new Uint8Array(e.target.result) - checkEncoding(file, buffer, ({ isUtf8 }) => { - if (!isUtf8) reject(new Issue({ code: 123, file })) - }) - return resolve(new TextDecoder().decode(buffer)) - } - } - reader.readAsArrayBuffer(file) - } - }) -} - -export default readFile diff --git a/bids-validator/utils/files/readNiftiHeader.js b/bids-validator/utils/files/readNiftiHeader.js deleted file mode 100644 index 984efd24..00000000 --- a/bids-validator/utils/files/readNiftiHeader.js +++ /dev/null @@ -1,145 +0,0 @@ -import nifti from 'nifti-js' -import pako from 'pako' -import fs from 'fs' -import testFile from './testFile' -import Issue from '../../utils/issues' -import isNode from '../isNode' - -/** - * Read Nifti Header - * - * Takes a file and returns a JSON-parsed NIfTI - * header without reading any extra bytes. - */ -function readNiftiHeader(file, annexed, dir, callback) { - if (isNode) { - nodeNiftiTest(file, annexed, dir, callback) - } else { - browserNiftiTest(file, callback) - } -} - -function nodeNiftiTest(file, annexed, dir, callback) { - testFile(file, annexed, dir, function (issue, stats, remoteBuffer) { - file.stats = stats - if (issue) { - callback({ error: issue }) - return - } - if (stats) { - if (stats.size < 348) { - callback({ error: new Issue({ code: 36, file: file }) }) - return - } - } - if (remoteBuffer) { - callback(parseNIfTIHeader(remoteBuffer, file)) - } else { - return extractNiftiFile(file, callback) - } - }) -} - -function extractNiftiFile(file, callback) { - const bytesRead = 1024 - const buffer = Buffer.alloc(bytesRead) - - fs.open(file.path, 'r', function (err, fd) { - if (err) { - callback({ error: new Issue({ code: 44, file: file }) }) - return - } else { - fs.read(fd, buffer, 0, bytesRead, 0, function () { - if (file.name.endsWith('.nii')) { - callback(parseNIfTIHeader(buffer, file)) - } else { - try { - const data = pako.inflate(buffer) - callback(parseNIfTIHeader(data, file)) - } catch (err) { - callback(handleGunzipError(buffer, file)) - } - } - }) - } - }) -} - -async function browserNiftiTest(file, callback) { - const bytesRead = 1024 - let blob - if ('slice' in file) { - // This is a real browser - blob = file.slice(0, bytesRead) - } else { - // Slice is undefined by the Deno adapter, this is likely Deno or a very confused browser - blob = await file.readBytes(0, bytesRead) - } - if (file.size == 0) { - callback({ error: new Issue({ code: 44, file: file }) }) - return - } - - // file size is smaller than nifti header size - if (file.size < 348) { - callback({ error: new Issue({ code: 36, file: file }) }) - return - } - const fileReader = constructBrowserFileReader(file, callback) - fileReader.readAsArrayBuffer(blob) -} - -function constructBrowserFileReader(file, callback) { -
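  // Reading flow for the browser path, spelled out (descriptive note, not in
  // the original source): once the sliced blob loads, the buffer is used
  // as-is for plain '.nii' files and inflated with pako for gzipped ones; if
  // inflation throws, handleGunzipError decides between issue 28 (file is
  // mislabeled .gz but parses as plain NIfTI) and issue 26 (unreadable).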
const fileReader = new FileReader() - fileReader.onloadend = function () { - const buffer = new Uint8Array(fileReader.result) - let unzipped - - try { - unzipped = file.name.endsWith('.nii') ? buffer : pako.inflate(buffer) - } catch (err) { - callback(handleGunzipError(buffer, file)) - return - } - - callback(parseNIfTIHeader(unzipped, file)) - } - return fileReader -} -/** - * Parse NIfTI Header (private) - * - * Attempts to parse a header buffer with - * nifti-js and handles errors. - */ -function parseNIfTIHeader(buffer, file) { - var header - try { - header = nifti.parseNIfTIHeader(buffer) - } catch (err) { - // file is unreadable - return { error: new Issue({ code: 26, file: file }) } - } - // file was not originally gzipped - return header -} - -/** - * Handle Gunzip Error (private) - * - * Used when unzipping fails. Tests if file was - * actually gzipped to begin with by trying to parse - * the original header. - */ -function handleGunzipError(buffer, file) { - try { - nifti.parseNIfTIHeader(buffer) - } catch (err) { - // file is unreadable - return { error: new Issue({ code: 26, file: file }) } - } - // file was not originally gzipped - return { error: new Issue({ code: 28, file: file }) } -} - -export default readNiftiHeader diff --git a/bids-validator/utils/files/readOMEFile.js b/bids-validator/utils/files/readOMEFile.js deleted file mode 100644 index 4871bff9..00000000 --- a/bids-validator/utils/files/readOMEFile.js +++ /dev/null @@ -1,17 +0,0 @@ -import ExifReader from 'exifreader' -const xml2js = require('xml2js') - -const readOMEFile = (buffer) => { - let tags = ExifReader.load(buffer) - let xml = tags['ImageDescription']['description'] - return new Promise((resolve, reject) => { - xml2js - .parseStringPromise(xml) - .then((result) => { - resolve(result) - }) - .catch((error) => reject(error)) - }) -} - -export default readOMEFile diff --git a/bids-validator/utils/files/remoteFiles.js b/bids-validator/utils/files/remoteFiles.js deleted file mode 100644 index de02bda6..00000000 --- a/bids-validator/utils/files/remoteFiles.js +++ /dev/null @@ -1,221 +0,0 @@ -import { S3Client } from '@aws-sdk/client-s3' -import fs from 'fs' -import cp from 'child_process' -import Issue from '../issues' -import pako from 'pako' -import isNode from '../isNode' - -/** - * Remote Files - * - * Helper functions for checking for and using remote file paths - * - */ - -const remoteFiles = { - // Initiates access of a remote file from git-annex remote - // Get remotes info the call to try successive remotes - // Called by testFile - getAnnexedFile: function (file, dir, limit, callback) { - // Build config object - const config = { - file: file, - dir: dir, - } - if (limit) config.limit = limit - config.remotesInfo = this.getRemotesInfo(dir, file) - - // try all the special git-annex remotes, and exit if there is an issue (reading / fetching files) - // if all remotes fail, throw issue code 97 - config.remotesInfo.map((remote, idx) => { - return this.tryRemote(remote, config) - .then((data) => callback(null, null, data)) - .catch((err) => { - if (err.code) { - return callback(err, null, null) - } - if (idx == config.remotesInfo.length) { - return callback( - new Issue({ code: 98, file: config.file }), - null, - null, - ) - } - }) - }) - }, - - // Try to access file from a remote - tryRemote: function (remote, config) { - // Get current remote - config.s3Params = this.getSingleRemoteInfo(config.dir, remote.remoteUuid) - const dir = config.dir.endsWith('/') ? 
config.dir.slice(0, -1) : config.dir - const datasetName = dir.split('/')[dir.split('/').length - 1] - const key = datasetName + config.file.relativePath - // Add additional parameters - config.s3Params['Key'] = key - config.s3Params['VersionId'] = remote.versionId - return this.accessRemoteFile(config) - }, - - // Download a remote file from its path - accessRemoteFile: function (config) { - if (config.limit) config.s3Params['Range'] = 'bytes=0-' + config.limit - return new Promise((resolve, reject) => { - this.constructAwsRequest(config) - .then((buffer) => { - if (config.file.name.endsWith('.gz')) { - this.extractGzipBuffer(buffer, config) - .then((data) => resolve(data)) - .catch((err) => reject(err)) - } else { - resolve(buffer) - } - }) - .catch(reject) - }) - }, - - constructAwsRequest: function (config) { - const hasCreds = isNode - ? Object.keys(process.env).indexOf('AWS_ACCESS_KEY_ID') > -1 - : false - if (hasCreds) { - const s3 = new S3Client() - return s3.getObject(config.s3Params).then((data) => data.Body) - } else { - let url = this.constructAwsUrl(config) - return fetch(url).then((resp) => { - if (resp.ok) { - return resp.buffer() - } else { - return Promise.reject( - new Error( - `HTTP response failed - ${resp.status} - ${resp.statusText}`, - ), - ) - } - }) - } - }, - - constructAwsUrl: function (config) { - // bucket + key url - let url = `http://s3.amazonaws.com/${config.s3Params.Bucket}/${config.s3Params.Key}` - - // add version to url, if exists - url = config.s3Params.VersionId - ? url + '?VersionId=' + config.s3Params.VersionId - : url - - // add range to url, if exists - url = config.s3Params.Range ? url + '?Range=' + config.s3Params.Range : url - return url - }, - - extractGzipBuffer: function (buffer, config) { - return new Promise((resolve, reject) => { - try { - resolve(pako.inflate(buffer)) - } catch (e) { - return reject(new Issue({ code: 28, file: config.file })) - } - }) - }, - - // Function for calling local git-annex - callGitAnnex: function (cmd, cwd) { - const stream = cp.execSync(cmd, { - shell: true, - cwd, - }) - return stream.toString() - }, - - // Ask git-annex for more information about a file - getRemotesInfo: function (dir, file) { - // Remove leading slash from relativePath - const relativePath = - file.relativePath && file.relativePath.startsWith('/') - ? 
file.relativePath.substring(1) - : file.relativePath - const lookupkey = this.getLookupKey(relativePath, dir) - const hashDirLower = this.getHashDirLower(lookupkey, dir) - const metadata = this.getRemoteMetadata(hashDirLower, lookupkey, dir) - const remotesInfo = this.processRemoteMetadata(metadata) - return remotesInfo - }, - - // get the key for a particular file's relative path - getLookupKey: function (relativePath, dir) { - const lookupKeyCmd = `git-annex lookupkey ${relativePath}` - return this.callGitAnnex(lookupKeyCmd, dir).trim() - }, - - // get hashdirlower property from the git-annex examinekey command - getHashDirLower: function (lookupkey, dir) { - try { - const examineKeyCmd = `git-annex examinekey --json ${lookupkey}` - const examineKey = JSON.parse(this.callGitAnnex(examineKeyCmd, dir)) - return examineKey.hashdirlower - } catch (e) { - return null - } - }, - - // get the remote metadata log content from git show command - getRemoteMetadata: function (hashDirLower, lookupkey, dir) { - const gitShowCmd = `git show git-annex:${hashDirLower}${lookupkey}.log.rmet` - return this.callGitAnnex(gitShowCmd, dir) - }, - - // Get info from a given git-annex remote - getSingleRemoteInfo: function (dir, uuid) { - const infoCmd = `cd ${dir} - git-annex info ${uuid}` - const resp = this.callGitAnnex(infoCmd) - return this.getRemoteBucket(resp) - }, - - // Obtain bucket field from git-annex info query - getRemoteBucket: function (resp) { - const params = { - Bucket: null, - } - for (let line of resp.split('\n')) { - if (line.includes('bucket: ')) { - params.Bucket = line.split(': ')[1] - } - } - return params - }, - - // Manipulate the response from git-annex lookupkey query - processRemoteMetadata: function (resp) { - const remotesInfo = [] - const lines = resp.split('\n') - lines.map((line) => { - const splitSpace = line.split(' ') - if (splitSpace.length == 3) { - const fileInfo = splitSpace[2].split('#') - const timestamp = splitSpace[0] - const annexInfo = splitSpace[1].split(':') - if (fileInfo.length == 2 && annexInfo.length == 2) { - const remoteUuid = annexInfo[0] - const fileName = fileInfo[1] - const versionId = fileInfo[0].substring(1) - const remoteInfo = { timestamp, remoteUuid, fileName, versionId } - remotesInfo.push(remoteInfo) - } - } - }) - return remotesInfo - }, - // Check if a local directory is a git-annex repo - isGitAnnex: function (path) { - if (isNode) return fs.existsSync(path + '/.git/annex') - return false - }, -} - -export default remoteFiles diff --git a/bids-validator/utils/files/sessions.js b/bids-validator/utils/files/sessions.js deleted file mode 100644 index c1083012..00000000 --- a/bids-validator/utils/files/sessions.js +++ /dev/null @@ -1,9 +0,0 @@ -export default { - sessionMatcher: new RegExp('(ses-.*?)/'), - - Subject: function () { - this.files = [] - this.sessions = [] - this.missingSessions = [] - }, -} diff --git a/bids-validator/utils/files/testFile.js b/bids-validator/utils/files/testFile.js deleted file mode 100644 index 117bb838..00000000 --- a/bids-validator/utils/files/testFile.js +++ /dev/null @@ -1,71 +0,0 @@ -import fs from 'fs' -import Issue from '../../utils/issues' -import remoteFiles from './remoteFiles' -import options from '../../utils/options' - -/** - * Test File - * - * Takes a file and callback and tests if it's viable for - * reading and is larger than 0 kb. Calls back with an error and stats if it isn't - * or null and stats if it is. 
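 * A hedged callback sketch (illustrative, not part of the original source;
 * `handleIssue` is a hypothetical handler):
 *
 *   testFile(fileObj, false, datasetDir, (issue, stats, remoteBuffer) => {
 *     if (issue) return handleIssue(issue) // e.g. codes 44, 99, 114
 *     // remoteBuffer is only set when content came from a git-annex remote
 *   })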
- */ -function testFile(file, annexed, dir, callback) { - fs.access(file.path, function (accessErr) { - if (!accessErr) { - // accessible - handleFsAccess(file, callback) - } else { - // inaccessible - fs.lstat(file.path, function (lstatErr, lstats) { - if (!lstatErr && lstats && lstats.isSymbolicLink()) { - // symlink - if (options.getOptions().remoteFiles) - // only follow symlinks when --remoteFiles option is on - handleRemoteAccess(file, annexed, dir, callback) - else - callback( - new Issue({ - code: 114, - file, - }), - file.stats, - ) - } else { - // inaccessible local file - callback(new Issue({ code: 44, file: file }), file.stats) - } - }) - } - }) -} - -function handleFsAccess(file, callback) { - process.nextTick(function () { - if (file.stats.size === 0) { - callback( - new Issue({ - code: 99, - file: file, - reason: `Empty files (${file.path}) not allowed.`, - }), - file.stats, - ) - } - callback(null, file.stats) - }) -} - -function handleRemoteAccess(file, annexed, dir, callback) { - if (annexed) { - // Set byte retrieval limits based on file type - const limit = file.name.includes('.nii') ? 500 : false - // Call process to get remote files - // It will call callback with content or error - remoteFiles.getAnnexedFile(file, dir, limit, callback) - } else { - callback(new Issue({ code: 43, file: file }), file.stats) - } -} - -export default testFile diff --git a/bids-validator/utils/files/validateMisc.js b/bids-validator/utils/files/validateMisc.js deleted file mode 100644 index d4ceed10..00000000 --- a/bids-validator/utils/files/validateMisc.js +++ /dev/null @@ -1,29 +0,0 @@ -import Issue from '../issues/issue' -import isNode from '../isNode' - -function createIssueForEmpty(file) { - const size = !isNode ? file.size : file.stats.size - var failsSizeRequirement = size <= 0 - // Exception misc files that can be valid although size==0 - // E.g., BadChannels and bad.segments in CTF data format (MEG modality) - const exceptionMiscs = ['BadChannels', 'bad.segments'] - if (exceptionMiscs.indexOf(file.name) > -1) { - failsSizeRequirement = false - } - - return failsSizeRequirement && new Issue({ code: 99, file: file }) -} -function clearNonIssues(x) { - return x instanceof Issue -} - -/** - * validateMisc - * - * takes a list of files and returns an issue for each file - */ -export default function validateMisc(miscFiles) { - return Promise.resolve( - miscFiles.map(createIssueForEmpty).filter(clearNonIssues), - ) -} diff --git a/bids-validator/utils/index.js b/bids-validator/utils/index.js deleted file mode 100644 index 4eb8f223..00000000 --- a/bids-validator/utils/index.js +++ /dev/null @@ -1,43 +0,0 @@ -import './prototype' -import array from './array' -import config from './config' -import files from './files' -import bids_files from './bids_files' -import issues from './issues' -import json from './json' -import modalities from './modalities' -import options from './options' -import type from './type' -import collectSummary from './summary/collectSummary' -import limit from './promise_limiter' -import unit from './unit' - -export { - array, - config, - files, - bids_files, - issues, - json, - modalities, - options, - type, - collectSummary, - limit, - unit, -} - -export default { - array, - config, - files, - bids_files, - issues, - json, - modalities, - options, - type, - collectSummary, - limit, - unit, -} diff --git a/bids-validator/utils/isNode.js b/bids-validator/utils/isNode.js deleted file mode 100644 index dfb3e6a8..00000000 --- a/bids-validator/utils/isNode.js +++ 
/dev/null @@ -1,7 +0,0 @@ -const isBrowserWorker = () => - // eslint-disable-next-line no-undef - typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope - -const isNode = () => typeof window === 'undefined' && !isBrowserWorker() - -export default isNode() diff --git a/bids-validator/utils/issues/index.js b/bids-validator/utils/issues/index.js deleted file mode 100644 index 88c0add6..00000000 --- a/bids-validator/utils/issues/index.js +++ /dev/null @@ -1,206 +0,0 @@ -import list from './list' -import Issue from './issue' -import config from '../config' - -var issues = { - /** - * List - * - * List of all validator issues. - */ - list: list, - - /** - * Issue - * - * Issue constructor - */ - Issue: Issue, - - /** - * Filter Fieldmaps - * - * Remove fieldmap related warnings if no fieldmaps - * are present. - */ - filterFieldMaps: function (issueList) { - var filteredIssueList = [] - var fieldmapRelatedCodes = [6, 7, 8, 9] - for (var i = 0; i < issueList.length; i++) { - var issue = issueList[i] - if (fieldmapRelatedCodes.indexOf(issue.code) < 0) { - filteredIssueList.push(issue) - } - } - return filteredIssueList - }, - - /** - * Format Issues - */ - format: function (issueList, summary, options) { - var errors = [], - warnings = [], - ignored = [] - - if (summary.modalities.indexOf('fieldmap') < 0) { - issueList = this.filterFieldMaps(issueList) - } - - // sort alphabetically by relative path of files - issueList.sort(function (a, b) { - var aPath = a.file ? a.file.relativePath : '' - var bPath = b.file ? b.file.relativePath : '' - return aPath > bPath ? 1 : bPath > aPath ? -1 : 0 - }) - - // organize by issue code - const categorized = {} - const codes = [] - for (var i = 0; i < issueList.length; i++) { - var issue = issueList[i] - - if ( - issue.file && - config.ignoredFile(options.config, issue.file.relativePath) - ) { - continue - } - - if (!categorized[issue.code]) { - codes.push(issue.key) - codes.push(issue.code) - categorized[issue.code] = list[issue.code] - categorized[issue.code].files = [] - categorized[issue.code].additionalFileCount = 0 - categorized[issue.code].helpUrl = issue.helpUrl - } - if (options.verbose || categorized[issue.code].files.length < 10) { - categorized[issue.code].files.push(issue) - } else { - categorized[issue.code].additionalFileCount++ - } - } - - const severityMap = config.interpret(codes, options.config) - - // organize by severity - for (const codePropertyName in categorized) { - if (!categorized.hasOwnProperty(codePropertyName)) { - continue - } - // Properties are always strings but error codes are always integers - const code = parseInt(codePropertyName) - issue = categorized[code] - issue.code = code - - if (severityMap.hasOwnProperty(issue.code)) { - issue.severity = severityMap[issue.code] - } - - if (severityMap.hasOwnProperty(issue.key)) { - issue.severity = severityMap[issue.key] - } - if (issue.severity === 'error') { - // Schema validation issues render the JSON file invalid; we should display them first to attract - // the user's attention. - if (code == 55) { - errors.unshift(issue) - } else { - errors.push(issue) - } - } else if (issue.severity === 'warning' && !options.ignoreWarnings) { - warnings.push(issue) - } else if (issue.severity === 'ignore') { - ignored.push(issue) - } - } - return { errors, warnings, ignored } - }, - - /** - * Error To Issue - * - * Takes an exception and returns an Issue - */ - errorToIssue: function (err, code = 0) { - const callStack = err.stack - ?
err.stack.split('\n').slice(1).join('\n').trim() - : '' - - return new Issue({ - file: callStack, - evidence: err.stack || '', - reason: `${err.message}; please help the BIDS team and community by opening an issue at (https://github.com/bids-standard/bids-validator/issues) with the evidence here.`, - code: code, - }) - }, - - /** - * isAnIssue - * - * takes an object and checks if it's an Issue - */ - isAnIssue: function (obj) { - const objKeys = Object.keys(obj) - return objKeys.includes('code') && objKeys.includes('reason') - }, - - /** - * Reformat - * - * Takes an already formatted set of issues, a - * summary and a config object and returns the - * same issues reformatted against the config. - */ - reformat: function (issueList, summary, config) { - var errors = issueList.errors ? issueList.errors : [], - warnings = issueList.warnings ? issueList.warnings : [], - ignored = issueList.ignored ? issueList.ignored : [] - - issueList = errors.concat(warnings).concat(ignored) - var unformatted = [] - for (var i = 0; i < issueList.length; i++) { - var issue = issueList[i] - for (var j = 0; j < issue.files.length; j++) { - var file = issue.files[j] - unformatted.push(file) - } - } - return issues.format(unformatted, summary, { config: config }) - }, - /** - * Exception Handler - * - * takes an error in fullTest.js catch - * converts it to an Issue and pushes it to the total list of issues - * formats issue list and returns it - */ - exceptionHandler: function (err, issueList, summary, options) { - // err here can be a validator Issue or an unknown exception - if (err.hasOwnProperty('key')) { - issueList.push(err) - } else { - issueList.push(this.errorToIssue(err)) - } - - // Format issues - const issues = this.format(issueList, summary, options) - return issues - }, - - /** - * Error/Issue redirector - * - * takes an error, resolve callback, and reject callback - */ - redirect: function (err, reject, resolveCB) { - if (this.isAnIssue(err)) { - resolveCB() - } else { - reject(err) - } - }, -} - -export default Object.assign(Issue, issues) diff --git a/bids-validator/utils/issues/issue.js b/bids-validator/utils/issues/issue.js deleted file mode 100644 index 7311c881..00000000 --- a/bids-validator/utils/issues/issue.js +++ /dev/null @@ -1,52 +0,0 @@ -import issues from './list' - -/** - * Help Url - * - * Construct a link to a helpful neurostars query, based on the - * issue key - */ -const constructHelpUrl = (issue) => { - const neurostarsPrefix = 'https://neurostars.org/' - const searchQuery = issue && issue.key ? 'search?q=' + issue.key : '' - const helpUrl = neurostarsPrefix + searchQuery - return helpUrl -} - -/** - * Issue - * - * A constructor for BIDS issues. - * - * @param {Object} options - * @param {string} options.key The descriptive string matching the issue code - * @param {number} options.code Issue code - see 'list.js' for definitions - * @param {File} [options.file] File object for the affected file - * @param {string} [options.evidence] The value throwing this issue - * @param {number} [options.line] The line of the affected file (if within a file) - * @param {number} [options.character] The character offset in the affected line - * @param {string} [options.severity] Is this an error or warning? - * @param {string} [options.reason] A descriptive - * @param {string} [options.helpUrl] A URL providing documentation to help solve this error - * @returns {Object} Issue object - */ -function Issue(options) { - const code = options.hasOwnProperty('code') ? 
options.code : null - const issue = issues[code] - - this.key = issue.key - this.code = code - this.file = options.hasOwnProperty('file') ? options.file : null - this.evidence = options.hasOwnProperty('evidence') ? options.evidence : null - this.line = options.hasOwnProperty('line') ? options.line : null - this.character = options.hasOwnProperty('character') - ? options.character - : null - this.severity = options.hasOwnProperty('severity') - ? options.severity - : issue.severity - this.reason = options.hasOwnProperty('reason') ? options.reason : issue.reason - this.helpUrl = constructHelpUrl(issue) -} - -export default Issue diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js deleted file mode 100644 index e1afa254..00000000 --- a/bids-validator/utils/issues/list.js +++ /dev/null @@ -1,1187 +0,0 @@ -/** - * Issues - * - * A list of all possible issues organized by - * issue code and including severity and reason - * agnostic to file specifics. - */ -export default { - 0: { - key: 'INTERNAL ERROR', - severity: 'error', - reason: 'Internal error. SOME VALIDATION STEPS MAY NOT HAVE OCCURRED', - }, - 1: { - key: 'NOT_INCLUDED', - severity: 'error', - reason: - 'Files with such a naming scheme are not part of the BIDS specification. This error is most commonly ' + - 'caused by typos in file names that make them not BIDS compatible. Please consult the specification and ' + - 'make sure your files are named correctly. If this is not a file naming issue (for example when including ' + - 'files not yet covered by the BIDS specification) you should include a ".bidsignore" file in your dataset (see' + - ' https://github.com/bids-standard/bids-validator#bidsignore for details). Please ' + - 'note that derived (processed) data should be placed in the /derivatives folder and source data (such as DICOMS ' + - 'or behavioural logs in proprietary formats) should be placed in the /sourcedata folder.', - }, - 2: { - key: 'REPETITION_TIME_GREATER_THAN', - severity: 'warning', - reason: - "'RepetitionTime' is greater than 100; are you sure it's expressed in seconds?", - }, - 3: { - key: 'ECHO_TIME_GREATER_THAN', - severity: 'warning', - reason: - "'EchoTime' is greater than 1; are you sure it's expressed in seconds?", - }, - 4: { - key: 'ECHO_TIME_DIFFERENCE_GREATER_THAN', - severity: 'warning', - reason: - "'EchoTimeDifference' is greater than 1; are you sure it's expressed in seconds?", - }, - 5: { - key: 'TOTAL_READOUT_TIME_GREATER_THAN', - severity: 'warning', - reason: - "'TotalReadoutTime' is greater than 10; are you sure it's expressed in seconds?", - }, - 6: { - key: 'ECHO_TIME_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'EchoTime' for this file. If you don't provide this information field map correction will not be possible.", - }, - 7: { - key: 'PHASE_ENCODING_DIRECTION_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'PhaseEncodingDirection' for this file. If you don't provide this information field map correction will not be possible.", - }, - 8: { - key: 'EFFECTIVE_ECHO_SPACING_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'EffectiveEchoSpacing' for this file. If you don't provide this information field map correction will not be possible.", - }, - 9: { - key: 'TOTAL_READOUT_TIME_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'TotalReadoutTime' for this file.
If you don't provide this information field map correction using TOPUP might not be possible.", - }, - 10: { - key: 'REPETITION_TIME_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'RepetitionTime' for this file.", - }, - 11: { - key: 'REPETITION_TIME_UNITS', - severity: 'error', - reason: - "Repetition time was not defined in seconds, milliseconds or microseconds in the scan's header.", - }, - 12: { - key: 'REPETITION_TIME_MISMATCH', - severity: 'error', - reason: - "Repetition time did not match between the scan's header and the associated JSON metadata file.", - }, - 13: { - key: 'SLICE_TIMING_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'SliceTiming' for this file. If you don't provide this information slice time correction will not be possible. 'Slice Timing' is the time at which each slice was acquired within each volume (frame) of the acquisition. Slice timing is not slice order -- rather, it is a list of times containing the time (in seconds) of each slice acquisition in relation to the beginning of volume acquisition.", - }, - 15: { - key: 'ECHO_TIME1-2_NOT_DEFINED', - severity: 'error', - reason: "You have to define 'EchoTime1' and 'EchoTime2' for this file.", - }, - 16: { - key: 'ECHO_TIME_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'EchoTime' for this file.", - }, - 17: { - key: 'UNITS_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'Units' for this file.", - }, - 18: { - key: 'PHASE_ENCODING_DIRECTION_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'PhaseEncodingDirection' for this file.", - }, - 19: { - key: 'TOTAL_READOUT_TIME_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'TotalReadoutTime' for this file.", - }, - 20: { - key: 'EVENTS_COLUMN_ONSET', - severity: 'error', - reason: "First column of the events file must be named 'onset'", - }, - 21: { - key: 'EVENTS_COLUMN_DURATION', - severity: 'error', - reason: "Second column of the events file must be named 'duration'", - }, - 22: { - key: 'TSV_EQUAL_ROWS', - severity: 'error', - reason: - 'All rows must have the same number of columns as there are headers.', - }, - 23: { - key: 'TSV_EMPTY_CELL', - severity: 'error', - reason: - 'Empty cell in TSV file detected: The proper way of labeling missing values is "n/a".', - }, - 24: { - key: 'TSV_IMPROPER_NA', - severity: 'warning', - reason: 'A proper way of labeling missing values is "n/a".', - }, - 25: { - key: 'EVENTS_TSV_MISSING', - severity: 'warning', - reason: - 'Task scans should have a corresponding events.tsv file. If this is a resting state scan you can ignore this warning or rename the task to include the word "rest".', - }, - 26: { - key: 'NIFTI_HEADER_UNREADABLE', - severity: 'error', - reason: - 'We were unable to parse header data from this NIfTI file. 
Please ensure it is not corrupted or mislabeled.', - }, - 27: { - key: 'JSON_INVALID', - severity: 'error', - reason: 'Not a valid JSON file.', - }, - 28: { - key: 'GZ_NOT_GZIPPED', - severity: 'error', - reason: 'This file ends in the .gz extension but is not actually gzipped.', - }, - 29: { - key: 'VOLUME_COUNT_MISMATCH', - severity: 'error', - reason: - 'The number of volumes in this scan does not match the number of volumes in the corresponding .bvec and .bval files.', - }, - 30: { - key: 'BVAL_MULTIPLE_ROWS', - severity: 'error', - reason: '.bval files should contain exactly one row of volumes.', - }, - 31: { - key: 'BVEC_NUMBER_ROWS', - severity: 'error', - reason: '.bvec files should contain exactly three rows of volumes.', - }, - 32: { - key: 'DWI_MISSING_BVEC', - severity: 'error', - reason: 'DWI scans should have a corresponding .bvec file.', - }, - 33: { - key: 'DWI_MISSING_BVAL', - severity: 'error', - reason: 'DWI scans should have a corresponding .bval file.', - }, - 36: { - key: 'NIFTI_TOO_SMALL', - severity: 'error', - reason: 'This file is too small to contain the minimal NIfTI header.', - }, - 37: { - key: 'INTENDED_FOR', - severity: 'error', - reason: "'IntendedFor' field needs to point to an existing file.", - }, - 38: { - key: 'INCONSISTENT_SUBJECTS', - severity: 'warning', - reason: - 'Not all subjects contain the same files. Each subject should contain the same number of files with ' + - 'the same naming unless some files are known to be missing.', - }, - 39: { - key: 'INCONSISTENT_PARAMETERS', - severity: 'warning', - reason: 'Not all subjects/sessions/runs have the same scanning parameters.', - }, - 40: { - key: 'NIFTI_DIMENSION', - severity: 'warning', - reason: - "NIfTI file's header field for dimension information blank or too short.", - }, - 41: { - key: 'NIFTI_UNIT', - severity: 'warning', - reason: - "NIfTI file's header field for unit information for x, y, z, and t dimensions empty or too short", - }, - 42: { - key: 'NIFTI_PIXDIM', - severity: 'warning', - reason: - "NIfTI file's header field for pixel dimension information empty or too short.", - }, - 43: { - key: 'ORPHANED_SYMLINK', - severity: 'error', - reason: - 'This file appears to be an orphaned symlink. Make sure it correctly points to its referent.', - }, - 44: { - key: 'FILE_READ', - severity: 'error', - reason: - 'We were unable to read this file. 
Make sure it contains data (file size > 0 kB) and is not corrupted, incorrectly named, or incorrectly symlinked.', - }, - 45: { - key: 'SUBJECT_FOLDERS', - severity: 'error', - reason: - 'There are no subject folders (labeled "sub-*") in the root of this dataset.', - }, - 46: { - key: 'BVEC_ROW_LENGTH', - severity: 'error', - reason: - 'Each row in a .bvec file should contain the same number of values.', - }, - 47: { - key: 'B_FILE', - severity: 'error', - reason: - '.bval and .bvec files must be single space delimited and contain only numerical values.', - }, - 48: { - key: 'PARTICIPANT_ID_COLUMN', - severity: 'error', - reason: - "Participants and phenotype .tsv files must have a 'participant_id' column.", - }, - 49: { - key: 'PARTICIPANT_ID_MISMATCH', - severity: 'error', - reason: - 'Participant labels found in this dataset did not match the values in the participant_id column of the participants.tsv file.', - }, - 50: { - key: 'TASK_NAME_MUST_DEFINE', - severity: 'error', - reason: "You have to define 'TaskName' for this file.", - }, - 51: { - key: 'PHENOTYPE_SUBJECTS_MISSING', - severity: 'error', - reason: - 'A phenotype/ .tsv file lists subjects that were not found in the dataset.', - }, - 52: { - key: 'STIMULUS_FILE_MISSING', - severity: 'error', - reason: 'A stimulus file was declared but not found in the dataset.', - }, - 53: { - key: 'NO_T1W', - severity: 'ignore', - reason: 'Dataset does not contain any T1w scans.', - }, - 54: { - key: 'BOLD_NOT_4D', - severity: 'error', - reason: 'Bold scans must be 4 dimensional.', - }, - 55: { - key: 'JSON_SCHEMA_VALIDATION_ERROR', - severity: 'error', - reason: - 'Invalid JSON file. The file is not formatted according to the schema.', - }, - 56: { - key: 'Participants age 89 or higher', - severity: 'warning', - reason: - 'As per section 164.514(C) of "The De-identification Standard" under HIPAA guidelines, participants with age 89 or higher should be tagged as 89+. More information can be found at https://www.hhs.gov/hipaa/for-professionals/privacy/special-topics/de-identification/#standard', - }, - 57: { - key: 'DATASET_DESCRIPTION_JSON_MISSING', - severity: 'error', - reason: - 'The compulsory file /dataset_description.json is missing. See Section 03 (Modality agnostic files) of the BIDS specification.', - }, - 58: { - key: 'TASK_NAME_CONTAIN_ILLEGAL_CHARACTER', - severity: 'error', - reason: - 'Task name contains an illegal character (hyphen or underscore). Please edit the filename as per the BIDS spec.', - }, - 59: { - key: 'ACQ_NAME_CONTAIN_ILLEGAL_CHARACTER', - severity: 'error', - reason: - 'acq name contains an illegal character (hyphen or underscore). Please edit the filename as per the BIDS spec.', - }, - 60: { - key: 'SFORM_AND_QFORM_IN_IMAGE_HEADER_ARE_ZERO', - severity: 'error', - reason: - 'sform_code and qform_code in the image header are 0. The image/file will be considered invalid or assumed to be in LAS orientation.', - }, - 61: { - key: 'QUICK_VALIDATION_FAILED', - severity: 'error', - reason: - 'Quick validation failed - the general folder structure does not resemble a BIDS dataset. Have you chosen the right folder (with "sub-*/" subfolders)? Check for structural/naming issues and presence of at least one subject.', - }, - 62: { - key: 'SUBJECT_VALUE_CONTAINS_ILLEGAL_CHARACTER', - severity: 'error', - reason: - 'Sub label contains an illegal character (hyphen or underscore).
Please edit the filename as per the BIDS spec.', - }, - 63: { - key: 'SESSION_VALUE_CONTAINS_ILLEGAL_CHARACTER', - severity: 'error', - reason: - 'Ses label contains an illegal character (hyphen or underscore). Please edit the filename as per the BIDS spec.', - }, - 64: { - key: 'SUBJECT_LABEL_IN_FILENAME_DOESNOT_MATCH_DIRECTORY', - severity: 'error', - reason: - "Subject label in the filename doesn't match the path of the file. The file seems to be saved in an incorrect subject directory.", - }, - 65: { - key: 'SESSION_LABEL_IN_FILENAME_DOESNOT_MATCH_DIRECTORY', - severity: 'error', - reason: - "Session label in the filename doesn't match the path of the file. The file seems to be saved in an incorrect session directory.", - }, - 66: { - key: 'SLICETIMING_VALUES_GREATOR_THAN_REPETITION_TIME', - severity: 'error', - reason: - '"SliceTiming" contains one or more invalid values greater than RepetitionTime. SliceTiming values should be in seconds, not milliseconds (a common mistake).', - }, - 67: { - key: 'NO_VALID_DATA_FOUND_FOR_SUBJECT', - severity: 'error', - reason: 'No BIDS compatible data found for at least one subject.', - }, - 68: { - key: 'FILENAME_COLUMN', - severity: 'error', - reason: "_scans.tsv files must have a 'filename' column.", - }, - 70: { - key: 'WRONG_NEW_LINE', - severity: 'error', - reason: - "All TSV files must use Line Feed '\\n' characters to denote new lines. This file uses Carriage Return '\\r'.", - }, - 71: { - key: 'MISSING_TSV_COLUMN_CHANNELS', - severity: 'error', - reason: - "The column names of the channels file must begin with ['name', 'type', 'units']", - }, - 72: { - key: 'MISSING_TSV_COLUMN_IEEG_CHANNELS', - severity: 'error', - reason: - "The column names of the channels file must begin with ['name', 'type', 'units', 'low_cutoff', 'high_cutoff']", - }, - 73: { - key: 'MISSING_TSV_COLUMN_IEEG_ELECTRODES', - severity: 'error', - reason: - "The column names of the electrodes file must begin with ['name', 'x', 'y', 'z', 'size']", - }, - 74: { - key: 'DUPLICATE_NIFTI_FILES', - severity: 'error', - reason: "A NIfTI file exists with both '.nii' and '.nii.gz' extensions.", - }, - 75: { - key: 'NIFTI_PIXDIM4', - severity: 'error', - reason: "NIfTI file's header is missing time dimension information.", - }, - 76: { - key: 'EFFECTIVEECHOSPACING_TOO_LARGE', - severity: 'error', - reason: "Abnormally high value of 'EffectiveEchoSpacing'.", - }, - 77: { - key: 'UNUSED_STIMULUS', - severity: 'warning', - reason: - 'There are files in the /stimuli directory that are not utilized in any _events.tsv file.', - }, - 78: { - key: 'CHANNELS_COLUMN_SFREQ', - severity: 'error', - reason: - "Fourth column of the channels file must be named 'sampling_frequency'", - }, - 79: { - key: 'CHANNELS_COLUMN_LOWCUT', - severity: 'error', - reason: "Third column of the channels file must be named 'low_cutoff'", - }, - 80: { - key: 'CHANNELS_COLUMN_HIGHCUT', - severity: 'error', - reason: "Third column of the channels file must be named 'high_cutoff'", - }, - 81: { - key: 'CHANNELS_COLUMN_NOTCH', - severity: 'error', - reason: "Third column of the channels file must be named 'notch'", - }, - 82: { - key: 'CUSTOM_COLUMN_WITHOUT_DESCRIPTION', - severity: 'warning', - reason: - 'Tabular file contains custom columns not described in a data dictionary', - }, - 83: { - key: 'ECHOTIME1_2_DIFFERENCE_UNREASONABLE', - severity: 'error', - reason: - 'The value of (EchoTime2 - EchoTime1) should be within the range of 0.0001 - 0.01.', - }, - 84: { - key: 'ACQTIME_FMT', - severity: 'error', - reason: - 'Entries in the
"acq_time" column of _scans.tsv should be expressed in the following format YYYY-MM-DDTHH:mm:ss[.000000] (year, month, day, hour (24h), minute, second, and optionally fractional second; this is equivalent to the RFC3339 "date-time" format.', - }, - 85: { - key: 'SUSPICIOUSLY_LONG_EVENT_DESIGN', - severity: 'warning', - reason: - 'The onset of the last event is after the total duration of the corresponding scan. This design is suspiciously long. ', - }, - 86: { - key: 'SUSPICIOUSLY_SHORT_EVENT_DESIGN', - severity: 'warning', - reason: - 'The onset of the last event is less than half the total duration of the corresponding scan. This design is suspiciously short. ', - }, - 87: { - key: 'SLICETIMING_ELEMENTS', - severity: 'warning', - reason: - "The number of elements in the SliceTiming array should match the 'k' dimension of the corresponding NIfTI volume.", - }, - 88: { - key: 'MALFORMED_BVEC', - severity: 'error', - reason: - 'The contents of this .bvec file are undefined or severely malformed. ', - }, - 89: { - key: 'MALFORMED_BVAL', - severity: 'error', - reason: - 'The contents of this .bval file are undefined or severely malformed. ', - }, - 90: { - key: 'SIDECAR_WITHOUT_DATAFILE', - severity: 'error', - reason: 'A json sidecar file was found without a corresponding data file', - }, - 91: { - key: '_FIELDMAP_WITHOUT_MAGNITUDE_FILE', - severity: 'error', - reason: - '_fieldmap.nii[.gz] file does not have accompanying _magnitude.nii[.gz] file. ', - }, - 92: { - key: 'MISSING_MAGNITUDE1_FILE', - severity: 'warning', - reason: - 'Each _phasediff.nii[.gz] file should be associated with a _magnitude1.nii[.gz] file.', - }, - 93: { - key: 'EFFECTIVEECHOSPACING_LARGER_THAN_TOTALREADOUTTIME', - severity: 'error', - reason: - 'EffectiveEchoSpacing should always be smaller than TotalReadoutTime. ', - }, - 94: { - key: 'MAGNITUDE_FILE_WITH_TOO_MANY_DIMENSIONS', - severity: 'error', - reason: - '_magnitude1.nii[.gz] and _magnitude2.nii[.gz] files must have exactly three dimensions. ', - }, - 95: { - key: 'T1W_FILE_WITH_TOO_MANY_DIMENSIONS', - severity: 'error', - reason: '_T1w.nii[.gz] files must have exactly three dimensions. ', - }, - 96: { - key: 'MISSING_TSV_COLUMN_EEG_ELECTRODES', - severity: 'error', - reason: - "The column names of the electrodes file must begin with ['name', 'x', 'y', 'z']", - }, - 97: { - key: 'MISSING_SESSION', - severity: 'warning', - reason: 'Not all subjects contain the same sessions.', - }, - 98: { - key: 'INACCESSIBLE_REMOTE_FILE', - severity: 'error', - reason: - 'This file appears to be a symlink to a remote annexed file but could not be accessed from any of the configured remotes.', - }, - 99: { - key: 'EMPTY_FILE', - severity: 'error', - reason: 'Empty files not allowed.', - }, - 100: { - key: 'BRAINVISION_LINKS_BROKEN', - severity: 'error', - reason: - 'Internal file pointers in BrainVision file triplet (*.eeg, *.vhdr, and *.vmrk) are broken or some files do not exist.', - }, - 101: { - key: 'README_FILE_MISSING', - severity: 'warning', - reason: - 'The recommended file /README is missing. See Section 03 (Modality agnostic files) of the BIDS specification.', - }, - 102: { - key: 'TOO_FEW_AUTHORS', - severity: 'warning', - reason: - 'The Authors field of dataset_description.json should contain an array of fields - with one author per field. This was triggered based on the presence of only one author field. 
Please ignore if all contributors are already properly listed.', - }, - 103: { - key: 'MULTIPLE_COMMAS_IN_AUTHOR_FIELD', - severity: 'error', - reason: - 'The Authors field of dataset_description.json should contain an array of fields - with one author per field. This was triggered based on the presence of multiple commas in a single author field. Please ensure your authors are properly formatted.', - }, - 104: { - key: 'HED_ERROR', - severity: 'error', - reason: 'The validation on this HED string returned an error.', - }, - 105: { - key: 'HED_WARNING', - severity: 'warning', - reason: 'The validation on this HED string returned a warning.', - }, - 106: { - key: 'HED_INTERNAL_ERROR', - severity: 'error', - reason: 'An internal error occurred during HED validation.', - }, - 107: { - key: 'HED_INTERNAL_WARNING', - severity: 'warning', - reason: 'An internal warning occurred during HED validation.', - }, - 108: { - key: 'HED_MISSING_VALUE_IN_SIDECAR', - severity: 'warning', - reason: - 'The json sidecar does not contain this column value as a possible key to a HED string.', - }, - 109: { - key: 'HED_VERSION_NOT_DEFINED', - severity: 'warning', - reason: - "You should define 'HEDVersion' for this file. If you don't provide this information, the HED validation will use the latest version available.", - }, - 113: { - key: 'NO_AUTHORS', - severity: 'warning', - reason: - 'The Authors field of dataset_description.json should contain an array of fields - with one author per field. This was triggered because there are no authors, which will make DOI registration from dataset metadata impossible.', - }, - 114: { - key: 'INCOMPLETE_DATASET', - severity: 'error', - reason: - 'This dataset contains remote files. If you would like to validate with remote files, use the --remoteFiles option.', - }, - 115: { - key: 'EMPTY_DATASET_NAME', - severity: 'warning', - reason: - 'The Name field of dataset_description.json is present but empty of visible characters.', - }, - 123: { - key: 'INVALID JSON ENCODING', - severity: 'error', - reason: 'JSON files must be valid utf-8.', - }, - 124: { - key: 'INVALID_TSV_UNITS', - severity: 'error', - reason: - 'Units in .tsv files must be valid SI units as described in the BIDS spec Appendix V (https://bids-specification.readthedocs.io/en/stable/99-appendices/05-units.html).', - }, - 125: { - key: 'CHANNELS_COLUMN_STATUS', - severity: 'error', - reason: - 'Status column in channels.tsv files must contain only one of two values: good or bad. 
Per the BIDS spec: (https://bids-specification.readthedocs.io/en/stable/04-modality-specific-files/04-intracranial-electroencephalography.html#channels-description-_channelstsv).', - }, - 126: { - key: 'MISSING_TSV_COLUMN_TIME', - severity: 'error', - reason: '*_blood.tsv require a time column.', - }, - 127: { - key: 'NOT_IN_USE', - severity: 'error', - reason: 'Code 127 is currently not used or reserved.', - }, - 128: { - key: 'NO_GENETIC_DATABASE', - severity: 'error', - reason: - 'A genetic_info.json file is present but no Database field present in Genetics object in dataset_description.json.', - }, - 129: { - key: 'SCANS_FILENAME_NOT_MATCH_DATASET', - severity: 'error', - reason: - 'The filename in scans.tsv file does not match what is present in the BIDS dataset.', - }, - 130: { - key: 'CHANNELS_COLUMN_TYPE_UPPER_CASE', - severity: 'error', - reason: - 'Type column in channels.tsv files should consist of upper-case characters.', - }, - 131: { - key: 'CHANNELS_COLUMN_TYPE', - severity: 'error', - reason: - 'Type column in channels.tsv files should only consist of values allowed in the specification for MEG/EEG/iEEG data.', - }, - 133: { - key: 'LABELING_TYPE_MUST_DEFINE', - severity: 'error', - reason: - "You should define 'ArterialSpinLabelingType' for this file. 'ArterialSpinLabelingType' can be CASL, PCASL, PASL.", - }, - 134: { - key: 'LABELING_DURATION_MUST_DEFINE', - severity: 'error', - reason: - "You should define 'LabelingDuration' for this file. 'LabelingDuration' is the total duration of the labeling pulse train, in seconds, corresponding to the temporal width of the labeling bolus for `(P)CASL`. In case all control-label volumes (or deltam or CBF) have the same `LabelingDuration`, a scalar must be specified. In case the control-label volumes (or deltam or cbf) have a different `LabelingDuration`, an array of numbers must be specified, for which any `m0scan` in the timeseries has a `LabelingDuration` of zero. In case an array of numbers is provided, its length should be equal to the number of volumes specified in `*_aslcontext.tsv`. Corresponds to DICOM Tag 0018,9258 `ASL Pulse Train Duration`.", - }, - 135: { - key: 'POST_LABELING_DELAY_MUST_DEFINE', - severity: 'error', - reason: - "You should define 'PostLabelingDelay' for this file. 'PostLabelingDelay' is the time, in seconds, after the end of the labeling (for (P)CASL) or middle of the labeling pulse (for PASL) until the middle of the excitation pulse applied to the imaging slab (for 3D acquisition) or first slice (for 2D acquisition). Can be a number (for a single-PLD time series) or an array of numbers (for multi-PLD and Look-Locker). In the latter case, the array of numbers contains the PLD of each volume (i.e. each 'control' and 'label') in the acquisition order. Any image within the time-series without a PLD (e.g. an 'm0scan') is indicated by a zero. Based on DICOM Tags 0018,9079 Inversion Times and 0018,0082 InversionTime.", - }, - 136: { - key: 'BACKGROUND_SUPPRESSION_MUST_DEFINE', - severity: 'error', - reason: - "You should define 'BackgroundSuppression' for this file. 'BackGroundSuppression' is a boolean indicating if background suppression is used.", - }, - 137: { - key: 'VASCULAR_CRUSHING_MUST_DEFINE', - severity: 'warning', - reason: - "It is recommended to define 'VascularCrushing' for this file. 
'VascularCrushing' is a boolean value indicating if an ASL crusher method is used.", - }, - 138: { - key: 'PULSE_SEQUENCE_DETAILS_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'PulseSequenceDetails' for this file. 'PulseSequenceDetails' is the information beyond pulse sequence type that identifies the specific pulse sequence used (for example, 'Standard Siemens Sequence distributed with the VB17 software', 'Siemens WIP ### version #.##', or 'Sequence written by X using a version compiled on MM/DD/YYYY').", - }, - 139: { - key: 'BLACKLISTED_MODALITY', - severity: 'error', - reason: - 'Found a modality that has been blacklisted through validator configuration.', - }, - 140: { - key: '140_EMPTY', - severity: 'warning', - reason: '', - }, - 141: { - key: '141_EMPTY', - severity: 'warning', - reason: '', - }, - 142: { - key: 'LABELING_SLAB_THICKNESS_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'LabelingSlabThickness' for this file. 'LabelingSlabThickness' is the thickness of the labeling slab in millimeters. For non-selective FAIR a zero is entered. Corresponds to DICOM Tag 0018,9254 ASL Slab Thickness.", - }, - 143: { - key: 'ACQUISITION_VOXELSIZE_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'AcquisitionVoxelSize' for this file. 'AcquisitionVoxelSize' is an array of numbers with a length of 3, in millimeters. This parameter denotes the original acquisition voxel size, excluding any inter-slice gaps and before any interpolation or resampling within reconstruction or image processing. Any point spread function effects (e.g. due to T2-blurring) that would decrease the effective resolution are not considered here.", - }, - 144: { - key: 'BACKGROUND_SUPPRESSION_PULSE_TIME_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'BackgroundSuppressionPulseTime' for this file, when the 'BackgroundSuppression' is set to true. 'BackGroundSuppressionPulseTime' is an array of numbers containing timing, in seconds, of the background suppression pulses with respect to the start of the labeling. In case of multi-PLD with different background suppression pulse times, only the pulse time of the first PLD should be defined.", - }, - 145: { - key: 'VASCULAR_CRUCHING_VENC_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'VascularCrushingVENC' for this file, when the 'VascularCrushing' is set to true. 'VascularCrushingVENC' is the crusher gradient strength, in centimeters per second. Specify either one number for the total time-series, or provide an array of numbers, for example when using QUASAR, using the value zero to identify volumes for which 'VascularCrushing' was turned off. Corresponds to DICOM Tag 0018,925A ASL Crusher Flow Limit.", - }, - 147: { - key: 'PASL_BOLUS_CUT_OFF_FLAG', - severity: 'error', - reason: - "You should define the 'BolusCutOffFlag' for this file. 'BolusCutOffFlag' is a boolean indicating if a bolus cut-off technique is used. Corresponds to DICOM Tag 0018,925C ASL Bolus Cut-off Flag.", - }, - 149: { - key: 'PASL_BOLUS_CUT_OFF_DELAY_TIME', - severity: 'error', - reason: - "It is required to define 'BolusCutOffDelayTime' for this file, when 'BolusCutOffFlag' is set to true. 'BolusCutOffDelayTime' is the duration between the end of the labeling and the start of the bolus cut-off saturation pulse(s), in seconds. 
This can be a number or array of numbers, of which the values must be non-negative and monotonically increasing, depending on the number of bolus cut-off saturation pulses. For Q2TIPS, only the values for the first and last bolus cut-off saturation pulses are provided. Based on DICOM Tag 0018,925F ASL Bolus Cut-off Delay Time.", - }, - 150: { - key: 'PASL_BOLUS_CUT_OFF_TECHNIQUE', - severity: 'error', - reason: - "It is required to define 'BolusCutOffTechnique' for this file, when 'BolusCutOffFlag' is set to true. 'BolusCutOffTechnique' is the name of the technique used (e.g. Q2TIPS, QUIPSS, QUIPSSII). Corresponds to DICOM Tag 0018,925E ASL Bolus Cut-off Technique.", - }, - 153: { - key: 'M0Type_NOT_SET', - severity: 'error', - reason: - "You should define the 'M0Type' for this file. 'M0Type' describes the presence of M0 information, as either: “Separate” when a separate `*_m0scan.nii[.gz]` is present, “Included” when an m0scan volume is contained within the current ‘*_asl.nii[.gz]’, “Estimate” when a single whole-brain M0 value is provided, or “Absent” when no specific M0 information is present.", - }, - 154: { - key: 'M0Type_SET_INCORRECTLY', - severity: 'error', - reason: - "M0Type was not defined correctly. If 'M0Type' is equal to included, the corresponding '*_aslcontext.tsv' should contain the 'm0scan' volume.", - }, - 155: { - key: 'MRACQUISITIONTYPE_MUST_DEFINE', - severity: 'error', - reason: - "You should define 'MRAcquisitionType' for this file. 'MRAcquistionType' is the type of sequence readout with possible values: `2D` or `3D`. Corresponds to DICOM Tag 0018,0023 `MR Acquisition Type`.", - }, - 156: { - key: 'ACQUISITION_VOXELSIZE_WRONG', - severity: 'warning', - reason: - "The 'AcquisitionVoxelSize' field length is not 3. 'AcquisitionVoxelSize' should be defined as an array of numbers with a length of 3, in millimeters. This parameter denotes the original acquisition voxel size, excluding any inter-slice gaps and before any interpolation or resampling within reconstruction or image processing. Any point spread function effects (e.g. due to T2-blurring) that would decrease the effective resolution are not considered here.", - }, - 157: { - key: 'LABELLING_DURATION_LENGTH_NOT_MATCHING_NIFTI', - severity: 'error', - reason: - "The number of values for 'LabelingDuration' for this file does not match the 4th dimension of the NIfTI header. 'LabelingDuration' is the total duration of the labeling pulse train, in seconds, corresponding to the temporal width of the labeling bolus for `(P)CASL`. In case all control-label volumes (or deltam or CBF) have the same `LabelingDuration`, a scalar must be specified. In case the control-label volumes (or deltam or cbf) have a different `LabelingDuration`, an array of numbers must be specified, for which any `m0scan` in the timeseries has a `LabelingDuration` of zero. In case an array of numbers is provided, its length should be equal to the number of volumes specified in `*_aslcontext.tsv`. Corresponds to DICOM Tag 0018,9258 `ASL Pulse Train Duration`.", - }, - 164: { - key: 'ASL_MANUFACTURER_MISSING', - severity: 'warning', - reason: - "It is recommended to define 'Manufacturer' for this file. 'Manufacturer' is the manufacturer of the equipment that produced the composite instances. 
-  165: {
-    key: 'ASLCONTEXT_TSV_NOT_CONSISTENT',
-    severity: 'error',
-    reason:
-      "The number of volumes in the '*_aslcontext.tsv' for this file does not match the number of volumes in the 4th dimension of the NIfTI header.",
-  },
-  166: {
-    key: 'LOOK_LOCKER_FLIP_ANGLE_MISSING',
-    severity: 'error',
-    reason:
-      "It is required to define 'FlipAngle' for this file in the case of a Look-Locker acquisition. 'FlipAngle' is the flip angle (FA) for the acquisition, specified in degrees. Corresponds to DICOM Tag 0018,1314 `Flip Angle`. The data type number may apply to files from any MRI modality concerned with a single value for this field, or to the files in a file collection where the value of this field is iterated using the flip entity. The data type array provides a value for each volume in a 4D dataset and should only be used when the volume timing is critical for interpretation of the data, such as in ASL or variable flip angle fMRI sequences.",
-  },
-  167: {
-    key: 'FLIP_ANGLE_MISSING',
-    severity: 'warning',
-    reason:
-      "It is recommended to define 'FlipAngle' for this file. 'FlipAngle' is the flip angle (FA) for the acquisition, specified in degrees. Corresponds to DICOM Tag 0018,1314 `Flip Angle`. The data type number may apply to files from any MRI modality concerned with a single value for this field, or to the files in a file collection where the value of this field is iterated using the flip entity. The data type array provides a value for each volume in a 4D dataset and should only be used when the volume timing is critical for interpretation of the data, such as in ASL or variable flip angle fMRI sequences.",
-  },
-  168: {
-    key: 'FLIP_ANGLE_NOT_MATCHING_NIFTI',
-    severity: 'error',
-    reason:
-      "The number of values for 'FlipAngle' for this file does not match the 4th dimension of the NIfTI header. 'FlipAngle' is the flip angle (FA) for the acquisition, specified in degrees. Corresponds to DICOM Tag 0018,1314 `Flip Angle`. The data type number may apply to files from any MRI modality concerned with a single value for this field, or to the files in a file collection where the value of this field is iterated using the flip entity. The data type array provides a value for each volume in a 4D dataset and should only be used when the volume timing is critical for interpretation of the data, such as in ASL or variable flip angle fMRI sequences.",
-  },
-  169: {
-    key: 'LABELING_DURATION_PASL_INCONSISTENT',
-    severity: 'error',
-    reason:
-      "For PASL ('ArterialSpinLabelingType' of 'PASL'), 'LabelingDuration' must either be unset or a numerical value set to zero. 'LabelingDuration' is the total duration of the labeling pulse train, in seconds, corresponding to the temporal width of the labeling bolus for `(P)CASL`. In case all control-label volumes (or deltam or CBF) have the same `LabelingDuration`, a scalar must be specified. In case the control-label volumes (or deltam or CBF) have a different `LabelingDuration`, an array of numbers must be specified, for which any `m0scan` in the timeseries has a `LabelingDuration` of zero. In case an array of numbers is provided, its length should be equal to the number of volumes specified in `*_aslcontext.tsv`. Corresponds to DICOM Tag 0018,9258 `ASL Pulse Train Duration`.",
-  },
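Issues 157, 165, and 168 all apply the same consistency rule: a scalar metadata value covers every volume, while an array must supply exactly one value per volume in the 4th NIfTI dimension (and per row of '*_aslcontext.tsv'). A minimal sketch of that rule, assuming a helper whose name is invented here and is not a validator internal:

```javascript
// Illustrative check: a scalar always passes; an array must match the volume count.
function matchesVolumeCount(value, nVolumes) {
  return !Array.isArray(value) || value.length === nVolumes
}

matchesVolumeCount(90, 40) // true: a scalar FlipAngle applies to all volumes
matchesVolumeCount([90, 85, 0], 40) // false: would trigger FLIP_ANGLE_NOT_MATCHING_NIFTI
```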
-  170: {
-    key: 'CONTINOUS_RECORDING_MISSING_JSON',
-    severity: 'error',
-    reason:
-      'Continuous recording data files are required to have an associated JSON metadata file.',
-  },
-  171: {
-    key: 'VOLUME_TIMING_MISSING_ACQUISITION_DURATION',
-    severity: 'error',
-    reason:
-      "The field 'VolumeTiming' requires 'AcquisitionDuration' or 'SliceTiming' to be defined.",
-  },
-  172: {
-    key: 'FLIP_ANGLE_NOT_MATCHING_ASLCONTEXT_TSV',
-    severity: 'error',
-    reason:
-      "The number of values for 'FlipAngle' for this file does not match the number of volumes in the 'sub-