.PHONY: clean test lint init docs build-docker build-docker-full build-docker-mitie-en build-docker-spacy-en build-docker-spacy-de
JOBS ?= 1
INTEGRATION_TEST_FOLDER = tests/integration_tests/
INTEGRATION_TEST_PYTEST_MARKERS ?= "sequential or not sequential"
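
# JOBS and INTEGRATION_TEST_PYTEST_MARKERS use ?=, so both can be overridden per
# invocation or via the environment. Illustrative example (the value 4 is arbitrary):
#   make test JOBS=4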

help:
	@echo "make"
	@echo "    clean"
	@echo "        Remove Python/build artifacts."
	@echo "    install"
	@echo "        Install rasa."
	@echo "    install-full"
	@echo "        Install rasa with all extras (transformers, tensorflow_text, spacy, jieba)."
	@echo "    formatter"
	@echo "        Apply black formatting to code."
	@echo "    lint"
	@echo "        Lint code with flake8 and check if the black formatter should be applied."
	@echo "    lint-docstrings"
	@echo "        Check docstring conventions in changed files."
	@echo "    types"
	@echo "        Check for type errors using mypy."
	@echo "    static-checks"
	@echo "        Run all Python static checks."
	@echo "    prepare-tests-ubuntu"
	@echo "        Install system requirements for running tests on Ubuntu and Debian-based systems."
	@echo "    prepare-tests-macos"
	@echo "        Install system requirements for running tests on macOS."
	@echo "    prepare-tests-windows"
	@echo "        Install system requirements for running tests on Windows."
	@echo "    prepare-tests-files"
	@echo "        Download all additional project files needed to run tests."
	@echo "    prepare-spacy"
	@echo "        Download all additional resources needed to use spaCy as part of Rasa."
	@echo "    prepare-mitie"
	@echo "        Download all additional resources needed to use MITIE as part of Rasa."
	@echo "    prepare-transformers"
	@echo "        Download all models needed for testing LanguageModelFeaturizer."
	@echo "    test"
	@echo "        Run pytest on tests/."
	@echo "        Use the JOBS environment variable to configure the number of workers (default: 1)."
	@echo "    test-integration"
	@echo "        Run integration tests using pytest."
	@echo "        Use the JOBS environment variable to configure the number of workers (default: 1)."
	@echo "    livedocs"
	@echo "        Build and serve the docs locally."
	@echo "    release"
	@echo "        Prepare a release."
	@echo "    build-docker"
	@echo "        Build Rasa Open Source Docker image."
	@echo "    run-integration-containers"
	@echo "        Run the integration test containers."
	@echo "    stop-integration-containers"
	@echo "        Stop the integration test containers."

clean:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	rm -rf build/
	rm -rf .mypy_cache/
	rm -rf dist/
	rm -rf docs/build
	rm -rf docs/.docusaurus

install:
	poetry run python -m pip install -U pip
	poetry install

install-mitie:
	poetry run python -m pip install -U git+https://github.com/tmbo/MITIE.git#egg=mitie

install-full: install install-mitie
	poetry install -E full

install-docs:
	cd docs/ && yarn install

formatter:
	poetry run black rasa tests

lint:
# Ignore docstring errors when running on the entire project.
	poetry run flake8 rasa tests --extend-ignore D
	poetry run black --check rasa tests
	make lint-docstrings

# Compare against `main` if no branch was provided
BRANCH ?= main

lint-docstrings:
# Lint docstrings only against the diff to avoid too many errors.
# Check only production code. Ignore other flake8 errors, which are captured by `lint`.
# Diff of committed changes (shows only changes introduced by your branch).
ifneq ($(strip $(BRANCH)),)
	git diff $(BRANCH)...HEAD -- rasa | poetry run flake8 --select D --diff
endif
# Diff of uncommitted changes, for running locally.
	git diff HEAD -- rasa | poetry run flake8 --select D --diff
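
# Illustrative usage (the branch name is hypothetical): make lint-docstrings BRANCH=my-feature-branch
# With no BRANCH given, docstrings are checked against the diff to `main`.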

lint-security:
	poetry run bandit -ll -ii -r --config bandit.yml rasa/*

types:
	poetry run mypy rasa

static-checks: lint lint-security types

prepare-spacy:
	poetry install -E spacy
	poetry run python -m spacy download en_core_web_md
	poetry run python -m spacy download de_core_news_sm

prepare-mitie:
# Both branches leave the English model at data/total_word_feature_extractor.dat.
	wget --progress=dot:giga -N -P data/ https://github.com/mit-nlp/MITIE/releases/download/v0.4/MITIE-models-v0.2.tar.bz2
ifeq ($(OS),Windows_NT)
	7z x data/MITIE-models-v0.2.tar.bz2 -bb3
	7z x MITIE-models-v0.2.tar -bb3
	cp MITIE-models/english/total_word_feature_extractor.dat data/
	rm -r MITIE-models
	rm MITIE-models-v0.2.tar
else
	tar -xvjf data/MITIE-models-v0.2.tar.bz2 --strip-components 2 -C data/ MITIE-models/english/total_word_feature_extractor.dat
endif
	rm data/MITIE*.bz2

prepare-transformers:
# Download the models listed in data/test/hf_transformers_models.txt into the
# transformers cache. The file is read as alternating lines: a download URL
# followed by the cache file name to store it under. On CI (when $(CI) is set),
# the first five entries are skipped; locally everything is downloaded.
	CACHE_DIR=$(HOME)/.cache/torch/transformers;\
	mkdir -p "$$CACHE_DIR";\
	i=0;\
	while read -r URL; do read -r CACHE_FILE; if { [ $(CI) ] && [ $$i -gt 4 ]; } || ! [ $(CI) ]; then wget $$URL -O $$CACHE_DIR/$$CACHE_FILE; fi; i=$$((i + 1)); done < "data/test/hf_transformers_models.txt"

prepare-tests-files: prepare-spacy prepare-mitie prepare-transformers

prepare-wget-macos:
	brew install wget || true

prepare-wget-windows:
	choco install wget

prepare-tests-macos: prepare-wget-macos prepare-tests-files
	brew install graphviz || true

prepare-tests-ubuntu: prepare-tests-files
	sudo apt-get -y install graphviz graphviz-dev python-tk

prepare-tests-windows: prepare-wget-windows prepare-tests-files
	choco install graphviz

test: clean
# OMP_NUM_THREADS=1 keeps TensorFlow to one thread per process, which avoids CPU oversubscription when pytest runs workers in parallel.
	OMP_NUM_THREADS=1 poetry run pytest tests -n $(JOBS) --cov rasa --ignore $(INTEGRATION_TEST_FOLDER)

test-integration:
# OMP_NUM_THREADS=1 keeps TensorFlow to one thread per process, which avoids CPU oversubscription when pytest runs workers in parallel.
# If tests_deployment/.env exists, its variables are exported before running the tests.
ifeq (,$(wildcard tests_deployment/.env))
	OMP_NUM_THREADS=1 poetry run pytest $(INTEGRATION_TEST_FOLDER) -n $(JOBS) -m $(INTEGRATION_TEST_PYTEST_MARKERS)
else
	set -o allexport; source tests_deployment/.env && OMP_NUM_THREADS=1 poetry run pytest $(INTEGRATION_TEST_FOLDER) -n $(JOBS) -m $(INTEGRATION_TEST_PYTEST_MARKERS) && set +o allexport
endif
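
# Illustrative: run only tests carrying a single marker (the marker shown is taken
# from the default expression above): make test-integration INTEGRATION_TEST_PYTEST_MARKERS=sequential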

test-cli: PYTEST_MARKER=category_cli
test-cli: test-marker

test-core-featurizers: PYTEST_MARKER=category_core_featurizers
test-core-featurizers: test-marker

test-policies: PYTEST_MARKER=category_policies
test-policies: test-marker

test-nlu-featurizers: PYTEST_MARKER=category_nlu_featurizers
test-nlu-featurizers: test-marker

test-nlu-predictors: PYTEST_MARKER=category_nlu_predictors
test-nlu-predictors: test-marker

test-full-model-training: PYTEST_MARKER=category_full_model_training
test-full-model-training: test-marker

test-other-unit-tests: PYTEST_MARKER=category_other_unit_tests
test-other-unit-tests: test-marker

test-performance: PYTEST_MARKER=category_performance
test-performance: test-marker

test-marker: clean
# OMP_NUM_THREADS=1 keeps TensorFlow to one thread per process, which avoids CPU oversubscription when pytest runs workers in parallel.
	OMP_NUM_THREADS=1 poetry run pytest tests -n $(JOBS) --cov rasa -m "$(PYTEST_MARKER)" --ignore $(INTEGRATION_TEST_FOLDER)
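
# The test-<category> targets above each set PYTEST_MARKER and then delegate to
# test-marker, e.g. `make test-policies` runs only tests marked category_policies.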

generate-pending-changelog:
	poetry run python -c "from scripts import release; release.generate_changelog('major.minor.patch')"

cleanup-generated-changelog:
# Helper to clean up your local git status after running "make test-docs".
# It is not run on CI at the moment.
	git status --porcelain | sed -n '/^D */s///p' | xargs git reset HEAD
	git reset HEAD CHANGELOG.mdx
	git ls-files --deleted | xargs git checkout
	git checkout CHANGELOG.mdx

test-docs: generate-pending-changelog docs
	poetry run pytest tests/docs/*
	cd docs && yarn mdx-lint

prepare-docs:
	cd docs/ && poetry run yarn pre-build

docs: prepare-docs
	cd docs/ && yarn build

livedocs:
	cd docs/ && poetry run yarn start

release:
	poetry run python scripts/release.py

build-docker:
	export IMAGE_NAME=rasa && \
	docker buildx use default && \
	docker buildx bake -f docker/docker-bake.hcl base && \
	docker buildx bake -f docker/docker-bake.hcl base-poetry && \
	docker buildx bake -f docker/docker-bake.hcl base-builder && \
	docker buildx bake -f docker/docker-bake.hcl default

build-docker-full:
	export IMAGE_NAME=rasa && \
	docker buildx use default && \
	docker buildx bake -f docker/docker-bake.hcl base-images && \
	docker buildx bake -f docker/docker-bake.hcl base-builder && \
	docker buildx bake -f docker/docker-bake.hcl full

build-docker-mitie-en:
	export IMAGE_NAME=rasa && \
	docker buildx use default && \
	docker buildx bake -f docker/docker-bake.hcl base-images && \
	docker buildx bake -f docker/docker-bake.hcl base-builder && \
	docker buildx bake -f docker/docker-bake.hcl mitie-en

build-docker-spacy-en:
	export IMAGE_NAME=rasa && \
	docker buildx use default && \
	docker buildx bake -f docker/docker-bake.hcl base && \
	docker buildx bake -f docker/docker-bake.hcl base-poetry && \
	docker buildx bake -f docker/docker-bake.hcl base-builder && \
	docker buildx bake -f docker/docker-bake.hcl spacy-en

build-docker-spacy-de:
	export IMAGE_NAME=rasa && \
	docker buildx use default && \
	docker buildx bake -f docker/docker-bake.hcl base && \
	docker buildx bake -f docker/docker-bake.hcl base-poetry && \
	docker buildx bake -f docker/docker-bake.hcl base-builder && \
	docker buildx bake -f docker/docker-bake.hcl spacy-de

build-tests-deployment-env: ## Create environment files (.env) for docker-compose.
	cd tests_deployment && \
	test -f .env || cat .env.example >> .env

run-integration-containers: build-tests-deployment-env ## Run the integration test containers.
	cd tests_deployment && \
	docker-compose -f docker-compose.integration.yml up &

stop-integration-containers: ## Stop the integration test containers.
	cd tests_deployment && \
	docker-compose -f docker-compose.integration.yml down
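
# A possible end-to-end flow for the integration suite (a sketch; assumes Docker and
# docker-compose are available locally):
#   make run-integration-containers
#   make test-integration
#   make stop-integration-containers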