diff --git a/.github/workflows/slack-report.yml b/.github/workflows/slack-report.yml index 0d1197a05d122a..0fe86f056ee314 100644 --- a/.github/workflows/slack-report.yml +++ b/.github/workflows/slack-report.yml @@ -19,6 +19,8 @@ on: required: true type: string +env: + TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN: ${{ secrets.TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN }} jobs: send_results: diff --git a/utils/notification_service.py b/utils/notification_service.py index c57f9cdf899341..4a0de14367601d 100644 --- a/utils/notification_service.py +++ b/utils/notification_service.py @@ -13,22 +13,26 @@ # limitations under the License. import ast import collections +import datetime import functools import json import operator import os import re import sys import time + +import requests +from huggingface_hub import HfApi from typing import Dict, List, Optional, Union -import requests from get_ci_error_statistics import get_jobs from get_previous_daily_ci import get_last_daily_ci_reports from slack_sdk import WebClient +api = HfApi() client = WebClient(token=os.environ["CI_SLACK_BOT_TOKEN"]) NON_MODEL_TEST_MODULES = [ @@ -1154,12 +1158,25 @@ def prepare_reports(title, header, reports, to_truncate=True): if not os.path.isdir(os.path.join(os.getcwd(), f"ci_results_{job_name}")): os.makedirs(os.path.join(os.getcwd(), f"ci_results_{job_name}")) + target_workflow = "huggingface/transformers/.github/workflows/self-scheduled-caller.yml@refs/heads/main" + is_scheduled_ci_run = os.environ.get("CI_WORKFLOW_REF") == target_workflow + # Only the model testing job is concerned: this condition is to avoid other jobs to upload the empty list as # results. 
if job_name == "run_models_gpu": with open(f"ci_results_{job_name}/model_results.json", "w", encoding="UTF-8") as fp: json.dump(model_results, fp, indent=4, ensure_ascii=False) + # upload results to Hub dataset (only for the scheduled daily CI run on `main`) + if is_scheduled_ci_run: + api.upload_file( + path_or_fileobj=f"ci_results_{job_name}/model_results.json", + path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/model_results.json", + repo_id="hf-internal-testing/transformers_daily_ci", + repo_type="dataset", + token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None), + ) + # Must have the same keys as in `additional_results`. # The values are used as the file names where to save the corresponding CI job results. test_to_result_name = { @@ -1172,10 +1189,19 @@ def prepare_reports(title, header, reports, to_truncate=True): with open(f"ci_results_{job_name}/{test_to_result_name[job]}_results.json", "w", encoding="UTF-8") as fp: json.dump(job_result, fp, indent=4, ensure_ascii=False) + # upload results to Hub dataset (only for the scheduled daily CI run on `main`) + if is_scheduled_ci_run: + api.upload_file( + path_or_fileobj=f"ci_results_{job_name}/{test_to_result_name[job]}_results.json", + path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/{test_to_result_name[job]}_results.json", + repo_id="hf-internal-testing/transformers_daily_ci", + repo_type="dataset", + token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None), + ) + prev_ci_artifacts = None - if job_name == "run_models_gpu": - target_workflow = "huggingface/transformers/.github/workflows/self-scheduled-caller.yml@refs/heads/main" - if os.environ.get("CI_WORKFLOW_REF") == target_workflow: + if is_scheduled_ci_run: + if job_name == "run_models_gpu": # Get the last previously completed CI's failure tables artifact_names = [f"ci_results_{job_name}"] output_dir = os.path.join(os.getcwd(), "previous_reports")