From 817f3c78f0207f1be39427368e55f36624ad6bb6 Mon Sep 17 00:00:00 2001
From: ydshieh
Date: Thu, 24 Oct 2024 16:07:37 +0200
Subject: [PATCH] run

---
 .github/workflows/self-scheduled-caller.yml | 58 +--------------------
 tests/models/vit/test_modeling_vit.py       |  3 ++
 utils/notification_service.py               | 19 +++----
 utils/split_model_tests.py                  |  1 +
 4 files changed, 15 insertions(+), 66 deletions(-)

diff --git a/.github/workflows/self-scheduled-caller.yml b/.github/workflows/self-scheduled-caller.yml
index 75ea3bb24bc7fa..33d6163783f727 100644
--- a/.github/workflows/self-scheduled-caller.yml
+++ b/.github/workflows/self-scheduled-caller.yml
@@ -7,7 +7,7 @@ on:
     - cron: "17 2 * * *"
   push:
     branches:
-      - run_scheduled_ci*
+      - simplify
 
 jobs:
   model-ci:
@@ -20,59 +20,3 @@ jobs:
       docker: huggingface/transformers-all-latest-gpu
       ci_event: Daily CI
     secrets: inherit
-
-  torch-pipeline:
-    name: Torch pipeline CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_pipelines_torch_gpu
-      slack_report_channel: "#transformers-ci-daily-pipeline-torch"
-      runner: daily-ci
-      docker: huggingface/transformers-pytorch-gpu
-      ci_event: Daily CI
-    secrets: inherit
-
-  tf-pipeline:
-    name: TF pipeline CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_pipelines_tf_gpu
-      slack_report_channel: "#transformers-ci-daily-pipeline-tf"
-      runner: daily-ci
-      docker: huggingface/transformers-tensorflow-gpu
-      ci_event: Daily CI
-    secrets: inherit
-
-  example-ci:
-    name: Example CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_examples_gpu
-      slack_report_channel: "#transformers-ci-daily-examples"
-      runner: daily-ci
-      docker: huggingface/transformers-all-latest-gpu
-      ci_event: Daily CI
-    secrets: inherit
-
-  deepspeed-ci:
-    name: DeepSpeed CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_torch_cuda_extensions_gpu
-      slack_report_channel: "#transformers-ci-daily-deepspeed"
-      runner: daily-ci
-      docker: huggingface/transformers-pytorch-deepspeed-latest-gpu
-      ci_event: Daily CI
-      working-directory-prefix: /workspace
-    secrets: inherit
-
-  quantization-ci:
-    name: Quantization CI
-    uses: ./.github/workflows/self-scheduled.yml
-    with:
-      job: run_quantization_torch_gpu
-      slack_report_channel: "#transformers-ci-daily-quantization"
-      runner: daily-ci
-      docker: huggingface/transformers-quantization-latest-gpu
-      ci_event: Daily CI
-    secrets: inherit
diff --git a/tests/models/vit/test_modeling_vit.py b/tests/models/vit/test_modeling_vit.py
index cace1d377034fb..4921a55a13432e 100644
--- a/tests/models/vit/test_modeling_vit.py
+++ b/tests/models/vit/test_modeling_vit.py
@@ -208,6 +208,9 @@ class ViTModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
     test_resize_embeddings = False
     test_head_masking = False
 
+    def test_foo(self):
+        assert 1 == 2
+
     def setUp(self):
         self.model_tester = ViTModelTester(self)
         self.config_tester = ConfigTester(self, config_class=ViTConfig, has_text_modality=False, hidden_size=37)
diff --git a/utils/notification_service.py b/utils/notification_service.py
index 629b793337889a..056e1b0cf4102c 100644
--- a/utils/notification_service.py
+++ b/utils/notification_service.py
@@ -530,13 +530,13 @@ def payload(self) -> str:
 
         # upload results to Hub dataset
         file_path = os.path.join(os.getcwd(), f"ci_results_{job_name}/new_model_failures.txt")
-        commit_info = api.upload_file(
-            path_or_fileobj=file_path,
-            path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/new_model_failures.txt",
-            repo_id="hf-internal-testing/transformers_daily_ci",
-            repo_type="dataset",
-            token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
-        )
+        # commit_info = api.upload_file(
+        #     path_or_fileobj=file_path,
+        #     path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/new_model_failures.txt",
+        #     repo_id="hf-internal-testing/transformers_daily_ci",
+        #     repo_type="dataset",
+        #     token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
+        # )
         url = f"https://huggingface.co/datasets/hf-internal-testing/transformers_daily_ci/raw/{commit_info.oid}/{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/new_model_failures.txt"
 
         # extra processing to save to json format
@@ -556,13 +556,14 @@ def payload(self) -> str:
 
         # upload results to Hub dataset
         file_path = os.path.join(os.getcwd(), f"ci_results_{job_name}/new_model_failures.json")
-        _ = api.upload_file(
+        commit_info = api.upload_file(
             path_or_fileobj=file_path,
             path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/new_model_failures.json",
             repo_id="hf-internal-testing/transformers_daily_ci",
             repo_type="dataset",
             token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
         )
+        url = f"https://huggingface.co/datasets/hf-internal-testing/transformers_daily_ci/raw/{commit_info.oid}/{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/new_model_failures.json"
 
         block = {
             "type": "section",
@@ -1250,7 +1251,7 @@ def prepare_reports(title, header, reports, to_truncate=True):
     )
 
     prev_ci_artifacts = None
-    if is_scheduled_ci_run:
+    if True:
         if job_name == "run_models_gpu":
             # Get the last previously completed CI's failure tables
             artifact_names = [f"ci_results_{job_name}"]
diff --git a/utils/split_model_tests.py b/utils/split_model_tests.py
index e5083aaeb46fa5..958b872c4a50cd 100644
--- a/utils/split_model_tests.py
+++ b/utils/split_model_tests.py
@@ -62,4 +62,5 @@
         start = end
         end = start + num_jobs_per_splits + (1 if idx < num_jobs % args.num_splits else 0)
         model_splits.append(d[start:end])
+    model_splits = [["models/vit"]]
     print(model_splits)