fix: lint, re-enable workflows
NiklasKoehneckeAA committed Apr 10, 2024
1 parent dd031a4 commit ecd81a6
Showing 3 changed files with 52 additions and 41 deletions.
82 changes: 41 additions & 41 deletions .github/workflows/daily.yml
@@ -4,10 +4,10 @@ on:
  # TODO temporary
  push:
  workflow_dispatch:
-  # Scheduled workflows will only run on the default branch.
+  # Scheduled workflows will only run on the default branch.
  schedule:
    - cron: '0 0 * * *' # runs once a day at midnight in the timezone of your GitHub repository

defaults:
  run:
    shell: bash
@@ -49,42 +49,42 @@ jobs:
        run: |
          poetry run python -c "import nltk; nltk.download('punkt')"
          poetry run pytest -n 10 -m "not docker"
-  # run-notebooks:
-  #   strategy:
-  #     fail-fast: false
-  #     matrix:
-  #       os: [macos-latest, windows-latest]
-  #   runs-on: ${{matrix.os}}
-  #   steps:
-  #     - name: Checkout repository
-  #       uses: actions/checkout@v4
-  #     - uses: actions/setup-python@v5
-  #       with:
-  #         python-version: "3.10"
-  #     - name: Install and configure Poetry
-  #       uses: snok/install-poetry@v1
-  #       with:
-  #         virtualenvs-create: true
-  #         virtualenvs-in-project: true
-  #         installer-parallel: true
-  #     - name: Install dependencies
-  #       run: |
-  #         poetry config installer.max-workers 10
-  #         poetry install --no-interaction
-  #     - name: Configure Poetry for notebooks and run
-  #       env:
-  #         AA_TOKEN: ${{ secrets.AA_TOKEN }}
-  #         HUGGING_FACE_TOKEN: ${{ secrets.HUGGING_FACE_TOKEN }}
-  #         ARGILLA_API_URL: "http://localhost:6900/"
-  #         ARGILLA_API_KEY: "argilla.apikey"
-  #         CLIENT_URL: "https://api.aleph-alpha.com"
-  #       run: |
-  #         [ -f .env ] && source .env
-  #         export AA_TOKEN
-  #         # Find all .ipynb files in the directory and pass them to xargs for parallel execution
-  #         rm -rf src/examples/.ipynb_checkpoints
-  #         rm -rf src/examples/how_tos/.ipynb_checkpoints
-  #         find src/examples -name "*.nbconvert.ipynb" -type f -delete
-  #         find src/examples -name "*.ipynb" ! -name "performance_tips.ipynb" ! -name "human_evaluation.ipynb" ! -name "how_to_human_evaluation_via_argilla.ipynb" | xargs -n 1 -P 6 poetry run jupyter nbconvert --to notebook --execute
-  #         find src/examples -name "*.nbconvert.ipynb" -type f -delete
+  run-notebooks:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-latest, windows-latest]
+    runs-on: ${{matrix.os}}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+          installer-parallel: true
+      - name: Install dependencies
+        run: |
+          poetry config installer.max-workers 10
+          poetry install --no-interaction
+      - name: Configure Poetry for notebooks and run
+        env:
+          AA_TOKEN: ${{ secrets.AA_TOKEN }}
+          HUGGING_FACE_TOKEN: ${{ secrets.HUGGING_FACE_TOKEN }}
+          ARGILLA_API_URL: "http://localhost:6900/"
+          ARGILLA_API_KEY: "argilla.apikey"
+          CLIENT_URL: "https://api.aleph-alpha.com"
+        run: |
+          [ -f .env ] && source .env
+          export AA_TOKEN
+          # Find all .ipynb files in the directory and pass them to xargs for parallel execution
+          rm -rf src/examples/.ipynb_checkpoints
+          rm -rf src/examples/how_tos/.ipynb_checkpoints
+          find src/examples -name "*.nbconvert.ipynb" -type f -delete
+          find src/examples -name "*.ipynb" ! -name "performance_tips.ipynb" ! -name "human_evaluation.ipynb" ! -name "how_to_human_evaluation_via_argilla.ipynb" | xargs -n 1 -P 6 poetry run jupyter nbconvert --to notebook --execute
+          find src/examples -name "*.nbconvert.ipynb" -type f -delete
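
For readers who don't speak find/xargs, here is a minimal Python sketch of what the re-enabled run-notebooks job does, assuming the paths, exclusions, and six-way parallelism named in the workflow above; the script and its helper names are illustrative, not part of the repository:

    import shutil
    import subprocess
    from concurrent.futures import ThreadPoolExecutor
    from pathlib import Path

    EXAMPLES = Path("src/examples")
    # The three notebooks the workflow excludes from execution.
    EXCLUDED = {
        "performance_tips.ipynb",
        "human_evaluation.ipynb",
        "how_to_human_evaluation_via_argilla.ipynb",
    }


    def execute(notebook: Path) -> int:
        # Mirrors `poetry run jupyter nbconvert --to notebook --execute <nb>`.
        cmd = ["poetry", "run", "jupyter", "nbconvert",
               "--to", "notebook", "--execute", str(notebook)]
        return subprocess.run(cmd, check=False).returncode


    # Drop checkpoint folders and stale *.nbconvert.ipynb outputs first,
    # as the rm -rf / find -delete lines above do (extended to any depth).
    for checkpoints in EXAMPLES.rglob(".ipynb_checkpoints"):
        shutil.rmtree(checkpoints, ignore_errors=True)
    for stale in EXAMPLES.rglob("*.nbconvert.ipynb"):
        stale.unlink()

    notebooks = [nb for nb in EXAMPLES.rglob("*.ipynb") if nb.name not in EXCLUDED]
    # xargs -n 1 -P 6 runs six notebooks at a time; a thread pool does the same.
    with ThreadPoolExecutor(max_workers=6) as pool:
        failures = [code for code in pool.map(execute, notebooks) if code != 0]

    if failures:
        raise SystemExit(f"{len(failures)} notebook(s) failed to execute")

The workflow then deletes the generated *.nbconvert.ipynb files again, so a run leaves the working tree clean.
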
9 changes: 9 additions & 0 deletions tests/connectors/argilla/test_argilla_client.py
@@ -111,13 +111,15 @@ def long_qa_records(
argilla_client.add_record(qa_dataset_id, record)
    return records

+
@pytest.mark.docker
def test_error_on_non_existent_dataset(
argilla_client: DefaultArgillaClient,
) -> None:
with pytest.raises(HTTPError):
        list(argilla_client.records("non_existent_dataset_id"))

+
@pytest.mark.docker
def test_records_returns_records_previously_added(
argilla_client: DefaultArgillaClient,
@@ -131,6 +133,7 @@ def test_records_returns_records_previously_added(
key=lambda r: r.example_id,
    )

+
@pytest.mark.docker
def test_evaluations_returns_evaluation_results(
argilla_client: DefaultArgillaClient,
@@ -158,6 +161,7 @@ def test_evaluations_returns_evaluation_results(
evaluations, key=lambda e: e.record_id
    )

+
@pytest.mark.docker
def test_split_dataset_works(
argilla_client: DefaultArgillaClient,
@@ -182,6 +186,7 @@ def test_split_dataset_works(
del new_metadata["split"] # type: ignore
    assert old_metadata == new_metadata

+
@pytest.mark.docker
def test_split_dataset_twice_works(
argilla_client: DefaultArgillaClient,
@@ -204,6 +209,7 @@ def test_split_dataset_twice_works(
metadata_properties = response["items"][0]
    assert len(metadata_properties["settings"]["values"]) == 1

+
@pytest.mark.docker
def test_split_dataset_works_with_uneven_splits(
argilla_client: DefaultArgillaClient,
@@ -221,6 +227,7 @@ def test_split_dataset_works_with_uneven_splits(
)
    assert n_records_per_split == [9, 9, 9, 9, 8, 8, 8]

+
@pytest.mark.docker
def test_add_record_adds_multiple_records_with_same_content(
argilla_client: DefaultArgillaClient,
@@ -241,6 +248,7 @@ def test_add_record_adds_multiple_records_with_same_content(
argilla_client.add_record(qa_dataset_id, second_data)
    assert len(list(argilla_client.records(qa_dataset_id))) == 2

+
@pytest.mark.docker
def test_add_record_does_not_put_example_id_into_metadata(
argilla_client: DefaultArgillaClient,
@@ -264,6 +272,7 @@ def test_add_record_does_not_put_example_id_into_metadata(
assert "example_id" not in record.metadata.keys()
    assert record.example_id == "0"

+
@pytest.mark.docker
def test_split_dataset_can_split_long_dataset(
argilla_client: DefaultArgillaClient,
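
All nine additions to this file are blank lines; they address the lint half of the commit. PEP 8 expects two blank lines between top-level definitions, and pycodestyle reports E302 when only one is present. A minimal sketch with hypothetical test names showing the spacing the diff establishes:

    import pytest


    @pytest.mark.docker
    def test_one_thing() -> None:  # hypothetical name, for spacing only
        ...


    @pytest.mark.docker
    def test_another_thing() -> None:  # the two blank lines above satisfy E302
        ...
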
2 changes: 2 additions & 0 deletions tests/core/test_tracer.py
@@ -305,6 +305,7 @@ def test_in_memory_tracer_trace_viewer_doesnt_crash_if_it_cant_reach() -> None:
expected = InMemoryTracer()
    expected._ipython_display_()

+
@pytest.mark.docker()
def test_open_telemetry_tracer_check_consistency_in_trace_ids(
open_telemetry_tracer: tuple[str, OpenTelemetryTracer],
@@ -321,6 +322,7 @@ def test_open_telemetry_tracer_check_consistency_in_trace_ids(
for span in spans:
        assert _get_trace_id_from_span(span) == expected_trace_id

+
@pytest.mark.docker()
def test_open_telemetry_tracer_loggs_input_and_output(
open_telemetry_tracer: tuple[str, OpenTelemetryTracer],
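
The tests touched in this commit carry @pytest.mark.docker, which is why the workflow's pytest -n 10 -m "not docker" invocation above deselects them. A hedged sketch of how such a custom marker is typically registered in a conftest.py so pytest doesn't warn about it (the project may instead declare it in pyproject.toml; the marker description below is an assumption):

    import pytest


    def pytest_configure(config: pytest.Config) -> None:
        # Registering the marker silences pytest's unknown-marker warning and
        # lets -m "not docker" filter docker-dependent tests out of a run.
        config.addinivalue_line(
            "markers", "docker: tests that require running docker services"
        )
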
