Store multiple crawls in a single database #359
Workflow file for this run
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---
# CI workflow: runs on every pull request.
# Steps: check out → set up Python 3.12 → install lxml build deps →
# restore pip cache → install requirements → black format check →
# verify the crawler test fixture matches the sample DB → run pytest.
name: Test

on: [pull_request]

jobs:
  test:
    runs-on: ubuntu-24.04
    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted so YAML doesn't read it as the float 3.12
          python-version: "3.12"

      - name: Install packages to support building lxml from source
        # `update` first so stale package lists don't fail the install;
        # `-y` because apt-get aborts at the confirmation prompt on
        # non-interactive CI runners.
        run: |
          sudo apt-get update
          sudo apt-get install -y python3.12-dev libxml2-dev libxslt-dev

      - name: Configure pip caching
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          # Cache key changes whenever any requirements file changes
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements/*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-

      - name: Install dependencies
        run: |
          pip install \
            -r requirements/base.txt \
            -r requirements/test.txt

      - name: Check formatting
        run: black . --check

      - name: The test fixture should be kept in sync with the sample database
        # diff exits non-zero (failing the job) if dumpdata output drifts
        # from the committed fixture.
        run: ./manage.py dumpdata --indent=2 crawler | diff crawler/fixtures/sample.json -

      - name: Run Python tests
        run: pytest