From 2380a6e46675ca17bdf22be06bc7c6d138736e59 Mon Sep 17 00:00:00 2001
From: dianaml0 <82468439+dianaml0@users.noreply.github.com>
Date: Wed, 24 Nov 2021 18:02:34 -0800
Subject: [PATCH] Add pre-commit config and flake8 config (#2676)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Summary:
# Before submitting

Splitting https://github.com/fairinternal/fairseq-py/issues/2212 into separate PRs to keep things cleaner.

- [ ] Was this discussed/approved via a GitHub issue? (no need for typos, doc improvements)
- [ ] Did you read the [contributor guideline](https://github.com/pytorch/fairseq/blob/main/CONTRIBUTING.md)?
- [ ] Did you make sure to update the docs?
- [ ] Did you write any new necessary tests?

## What does this PR do?
Fixes # (issue).

## PR review
Anyone in the community is free to review the PR once the tests have passed.
If we didn't discuss your PR in GitHub issues there's a high chance it will not be merged.

## Did you have fun?
Make sure you had fun coding 🙃

Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/2676

Reviewed By: alexeib

Differential Revision: D32653505

Pulled By: dianaml0

fbshipit-source-id: fd338289d23b340ef56b0188f9c73b37c367d6ca
---
 .isort.cfg              |  2 ++
 .pre-commit-config.yaml | 40 ++++++++++++++++++++++++++++++
 CONTRIBUTING.md         | 54 +++++++++++++++++++++++++++++++++++++++++
 setup.cfg               |  3 +++
 4 files changed, 99 insertions(+)
 create mode 100644 .isort.cfg
 create mode 100644 .pre-commit-config.yaml
 create mode 100644 setup.cfg

diff --git a/.isort.cfg b/.isort.cfg
new file mode 100644
index 0000000000..aed482f47e
--- /dev/null
+++ b/.isort.cfg
@@ -0,0 +1,2 @@
+[settings]
+known_third_party = _cffi_backend,agg_results,aml,bitarray,boto3,botocore,dump_hubert_feature,dynamicconv_cuda,editdistance,faiss,fasttext,feature_utils,ffmpeg,g2p_en,h5py,hydra,hypothesis,indicnlp,inflect,iopath,joblib,kaldi_io,kenlm,libfb,librosa,lightconv_cuda,matplotlib,misc,mmpt,mmpt_cli,model,nltk,npy_append_array,numpy,omegaconf,pandas,pathbuilder,preprocessing,progressbar,pythainlp,random_sequence_shuffler,regex,sacrebleu,sacremoses,scipy,sentencepiece,setuptools,six,sklearn,soundfile,sweep,sweep_wmt_en2de_transformer_big_common,tabulate,torch,torchaudio,tqdm,unidecode,utils,videoreader,wav2vec_cluster_faiss,wget,yaml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..f9efb2e96e
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,40 @@
+exclude: 'build|stubs'
+
+default_language_version:
+    python: python3
+
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.0.1
+  hooks:
+  - id: trailing-whitespace
+  - id: check-ast
+  - id: check-merge-conflict
+  - id: no-commit-to-branch
+    args: ['--branch=master']
+  - id: check-added-large-files
+    args: ['--maxkb=500']
+  - id: end-of-file-fixer
+
+- repo: https://github.com/ambv/black
+  rev: 20.8b1
+  hooks:
+  - id: black
+    language_version: python3.8
+
+- repo: https://gitlab.com/pycqa/flake8
+  rev: 3.9.2
+  hooks:
+  - id: flake8
+    args: [
+      # only error for syntax errors and undefined names
+      "--select=E9,F63,F7,F82",
+    ]
+
+- repo: https://github.com/pycqa/isort
+  rev: 5.10.1
+  hooks:
+  - id: isort
+    exclude: README.md
+    additional_dependencies: [toml]
+    args: ["--profile", "black"]
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3930c46196..60e9025887 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -26,3 +26,57 @@ clear and has sufficient instructions to be able to reproduce the issue.
 By contributing to Facebook AI Research Sequence-to-Sequence Toolkit (fairseq),
 you agree that your contributions will be licensed under the LICENSE file in
 the root directory of this source tree.
+
+## Pre-commit hooks
+To help ensure your code lints cleanly, the repository provides pre-commit hooks which you can install.
+Once installed, they run automatically each time you commit.
+An abbreviated guide is given below; for more information, refer to [the official pre-commit documentation](https://pre-commit.com/).
+
+### Installation
+```
+pip install pre-commit
+pre-commit install
+```
+
+### Usage
+Just commit your changes:
+```
+git commit -m "My informative commit message"
+```
+
+If a hook fails, you will get feedback like this:
+```
+[INFO] Initializing environment for https://github.com/PyCQA/flake8.
+[INFO] Installing environment for https://github.com/pre-commit/pre-commit-hooks.
+[INFO] Once installed this environment will be reused.
+[INFO] This may take a few minutes...
+[INFO] Installing environment for https://github.com/PyCQA/flake8.
+[INFO] Once installed this environment will be reused.
+[INFO] This may take a few minutes...
+Trim Trailing Whitespace.................................................Failed
+- hook id: trailing-whitespace
+- exit code: 1
+- files were modified by this hook
+Fixing examples/nllb/modeling/wmt15_benchmark/eval_langs2.sh
+Fix End of Files.........................................................Failed
+- hook id: end-of-file-fixer
+- exit code: 1
+- files were modified by this hook
+Fixing examples/few_shot/scripts/schedule_jobs_few_shot.py
+flake8...................................................................Passed
+```
+
+Certain hooks modify your files so that they comply.
+To include these modifications, you will need to stage them (i.e. `git add ...`) and commit again.
+
+If all is well, you should see something like:
+```
+Trim Trailing Whitespace.................................................Passed
+Fix End of Files.........................................................Passed
+flake8...................................................................Passed
+[gshard-fix-ci 8698644e1] Fix lint, add pre-commit hooks
+ 10 files changed, 148 insertions(+), 110 deletions(-)
+ create mode 100644 .flake8
+ create mode 100644 .pre-commit-config.yaml
+ rename examples/nllb/modeling/wmt15_benchmark/{eval_langs2.py => eval_langs2.sh} (99%)
+```
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000000..3ea6243324
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 127
+extend-ignore = E203, W503
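
Not part of the patch itself, but a quick way to exercise the hooks this change configures is to run them once against the whole tree. The snippet below is a minimal sketch; it assumes the commands are run from the repository root with `pre-commit` and `flake8` installed in the active Python environment.
```
# Install the hook runner and register the git hook, as described
# in the CONTRIBUTING.md section above
pip install pre-commit
pre-commit install

# Run every configured hook (whitespace fixers, black, flake8, isort)
# against all files once, instead of waiting for the next commit
pre-commit run --all-files

# Reproduce the flake8 gate by hand: the hook only fails on syntax errors
# and undefined names (E9, F63, F7, F82)
flake8 . --select=E9,F63,F7,F82
```
A plain `flake8 .` run will instead pick up the `[flake8]` section added to setup.cfg, i.e. a 127-character line limit with E203 and W503 ignored.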