diff --git a/.github/cases/blastoise/subsystem.json b/.github/cases/blastoise/subsystem.json
new file mode 100644
index 000000000..16eadb3af
--- /dev/null
+++ b/.github/cases/blastoise/subsystem.json
@@ -0,0 +1,17 @@
+{
+  "rvv-vp-intrinsic-add-mlir": 291,
+  "rvv-vp-intrinsic-add-scalable-mlir": 448,
+  "hello-mlir": 72,
+  "stripmining-mlir": 23635,
+  "conv2d-less-m2-intrinsic": 27600,
+  "linear_normalization-intrinsic": 1,
+  "matmul-intrinsic": 1,
+  "softmax-intrinsic": 1,
+  "uarttest-intrinsic": 1,
+  "fpsmoke-asm": 1,
+  "memcpy-asm": 1,
+  "mmm-asm": 61401,
+  "smoke-asm": 7539,
+  "strlen-asm": 1,
+  "utf8-count-asm": 1
+}
diff --git a/.github/cases/v1024-l8-b2/subsystem.json b/.github/cases/v1024-l8-b2/subsystem.json
new file mode 100644
index 000000000..16eadb3af
--- /dev/null
+++ b/.github/cases/v1024-l8-b2/subsystem.json
@@ -0,0 +1,17 @@
+{
+  "rvv-vp-intrinsic-add-mlir": 291,
+  "rvv-vp-intrinsic-add-scalable-mlir": 448,
+  "hello-mlir": 72,
+  "stripmining-mlir": 23635,
+  "conv2d-less-m2-intrinsic": 27600,
+  "linear_normalization-intrinsic": 1,
+  "matmul-intrinsic": 1,
+  "softmax-intrinsic": 1,
+  "uarttest-intrinsic": 1,
+  "fpsmoke-asm": 1,
+  "memcpy-asm": 1,
+  "mmm-asm": 61401,
+  "smoke-asm": 7539,
+  "strlen-asm": 1,
+  "utf8-count-asm": 1
+}
diff --git a/.github/scripts/ci.sc b/.github/scripts/ci.sc
index 09186e024..09de76329 100755
--- a/.github/scripts/ci.sc
+++ b/.github/scripts/ci.sc
@@ -51,7 +51,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = {
       .toSeq
   })
   // Initialize a list of buckets
-  val cargo = (0 until bucketSize).map(_ => new BucketBuffer())
+  val size = bucketSize.min(allCycleData.size)
+  val cargo = (0 until size).map(_ => new BucketBuffer())
   // _2 is the cycle number
   val (unProcessedData, normalData) = allCycleData.partition(_._2 <= 0)
   // Group tests that have cycle data into subset by their cycle size
@@ -63,8 +64,8 @@ def scheduleTasks(allTasksFile: Seq[os.Path], bucketSize: Int): Seq[String] = {
   })
   // For unprocessed data, just split them into subset that have equal size
   cargo.zipWithIndex.foreach { case(buffer, i) =>
-    val startIdx = i * bucketSize
-    val endIdx = math.min((i + 1) * bucketSize, unProcessedData.length)
+    val startIdx = i * size
+    val endIdx = math.min((i + 1) * size, unProcessedData.length)
     unProcessedData.slice(startIdx, endIdx).foreach { case(name, cycle) => buffer.push_back(name, cycle) }
   }
   cargo.map(_.mkString).toSeq
@@ -80,18 +81,15 @@ def toMatrixJson(buckets: Seq[String]) =
 // Read default tests information from '.github/cases/default.txt' file, and use that information to generate GitHub CI matrix.
 // The result will be printed to stdout, and should be pipe into $GITHUB_OUTPUT
 @main
-def generateCiMatrix(
-  runnersAmount: Int,
-) = {
-  val defaultCases = os.pwd / os.RelPath(".github/cases/default.txt")
-  println(toMatrixJson(
-    scheduleTasks(
-      os.read
-        .lines(defaultCases)
-        .map(defaultCases / os.up / os.RelPath(_)),
-      runnersAmount
-    ),
-  ))
+def generateCiMatrix(defaultCases: String = "default.json", runnersAmount: Int) = {
+  println(
+    toMatrixJson(
+      scheduleTasks(
+        os.walk(os.pwd/".github"/"cases").filter(_.last == defaultCases),
+        runnersAmount
+      ),
+    )
+  )
 }
 
 // Resolve all the executable test and filter out unpassed tests, appending perf testcases
@@ -229,8 +227,11 @@ def runTests(jobs: String, runTarget: String = "ip", resultDir: Option[os.Path],
     case(failed, (job, i)) => {
       val Array(config, caseName) = job.split(",")
       System.err.println(s"\n\n\n>>>[${i+1}/${totalJobs.length}] Running test case $config,$caseName")
+      val args = Seq("scripts/run-test.py", runTarget, "-c", config, "--base-out-dir", testRunDir.toString) ++
+        { if (runTarget == "ip") Seq("--no-log") else Seq() } ++
+        Seq(caseName)
       val handle = os
-        .proc("scripts/run-test.py", runTarget, "-c", config, "--no-log", "--base-out-dir", testRunDir, caseName)
+        .proc(args)
         .call(check=false)
       if (handle.exitCode != 0) {
         val outDir = testRunDir / config / caseName
@@ -238,7 +239,10 @@ def runTests(jobs: String, runTarget: String = "ip", resultDir: Option[os.Path],
         os.write(actualResultDir / "failed-logs" / s"$job.txt", handle.out.text)
         failed :+ job
       } else {
-        writeCycleUpdates(job, testRunDir, actualResultDir)
+        if (runTarget == "ip") {
+          writeCycleUpdates(job, testRunDir, actualResultDir)
+        }
+
         failed
       }
     }
diff --git a/.github/workflows/subsystem.yml b/.github/workflows/subsystem.yml
index f30401adb..238636e8d 100644
--- a/.github/workflows/subsystem.yml
+++ b/.github/workflows/subsystem.yml
@@ -12,14 +12,12 @@ concurrency:
 jobs:
   build-emulators:
     name: "Build Emulators"
-    runs-on: [self-hosted, linux, nixos]
+    runs-on: [self-hosted, linux, nixos, AMD Ryzen 9 7940HS w/ Radeon 780M Graphics]
     strategy:
       matrix:
         config:
           - "v1024-l8-b2"
           - "v1024-l8-b2-fp"
-          - "v4096-l8-b4"
-          - "v4096-l8-b4-fp"
     steps:
       - uses: actions/checkout@v4
         with:
@@ -52,19 +50,18 @@ jobs:
         name: "Generate test matrix"
         run: |
           echo -n matrix= >> "$GITHUB_OUTPUT"
-          nix shell ".#ammonite" -c .github/scripts/ci.sc generateCiMatrix --runnersAmount "$RUNNERS" >> "$GITHUB_OUTPUT"
+          nix shell ".#ammonite" -c .github/scripts/ci.sc generateCiMatrix subsystem.json "$RUNNERS" >> "$GITHUB_OUTPUT"
 
   build-trace-emulators:
     name: "Build trace emulator"
-    runs-on: [self-hosted, linux, nixos]
+    needs: [gen-matrix]
+    runs-on: [self-hosted, linux, nixos, AMD Ryzen 9 7940HS w/ Radeon 780M Graphics]
     strategy:
      fail-fast: false
       matrix:
         config:
           - "v1024-l8-b2"
           - "v1024-l8-b2-fp"
-          - "v4096-l8-b4"
-          - "v4096-l8-b4-fp"
     steps:
       - uses: actions/checkout@v4
         with:
@@ -115,8 +112,6 @@
           name: test-reports-${{ matrix.id }}
           path: |
             test-results-*/failed-tests.md
-            test-results-*/cycle-updates.md
-            test-results-*/*_cycle.json
 
       - uses: actions/upload-artifact@v4
         if: failure()
@@ -202,22 +197,3 @@
           echo -e "\n## Failed tests\n" >> $GITHUB_STEP_SUMMARY
           shopt -s nullglob
           cat test-results-*/failed-tests.md >> $GITHUB_STEP_SUMMARY
-          echo -e "\n## Cycle updates\n" >> $GITHUB_STEP_SUMMARY
-          shopt -s nullglob
-          cat test-results-*/cycle-updates.md >> $GITHUB_STEP_SUMMARY
-      - name: "Commit cycle updates"
-        run: |
-          nix shell ".#ammonite" -c .github/scripts/ci.sc mergeCycleData
-          git config user.name github-actions
-          git config user.email github-actions@github.com
-          changed_cases=$(git diff --name-only '.github/cases/**/default.json')
-
-          if [ -n "$changed_cases" ]; then
-            echo "changed cases: $changed_cases"
-            git add '.github/cases/**/default.json'
-            git commit -m "[ci] update test case cycle data"
-            git push origin ${{ github.head_ref }}
-          else
-            echo "No cycle change detect"
-          fi
-
diff --git a/scripts/run-test.py b/scripts/run-test.py
index 2917049d6..d44fed177 100755
--- a/scripts/run-test.py
+++ b/scripts/run-test.py
@@ -179,11 +179,11 @@ def load_elf_from_dir(config, cases_dir, case_name, use_individual_drv, force_x8
         )
     else:
         nix_args.append(f".#t1.{config}.{cases_attr_name}.all")
-    logger.info(f'Run "{" ".join(nix_args)}"')
+    logger.info(f'Get ELFs dir by cmd: "{" ".join(nix_args)}"')
     cases_dir = subprocess.check_output(nix_args).strip().decode("UTF-8")
 
     cases_dir = Path(cases_dir)
-    logger.info(f"Running cases in {cases_dir}")
+    logger.info(f"Reading ELFs in {cases_dir}")
     case_config_path = (
         cases_dir
         / f"{case_name}.json"
@@ -244,7 +244,7 @@ def run_test(args):
         "-t",
         f"{args.out_dir}",
     ]
-    logger.info(f'Run "{" ".join(configgen_args)}"')
+    logger.info(f'Get emulator config by cmd: "{" ".join(configgen_args)}"')
     subprocess.Popen(configgen_args).wait()
     assert (
         elaborate_config_path.exists()
@@ -319,7 +319,7 @@ def optionals(cond, items):
             ]
         )
         + emu_args
-    logger.info(f'Run "{" ".join(process_args)}"')
+    logger.info(f'Run {emu_type} emulator with cmd: "{" ".join(process_args)}"')
     return_code = subprocess.Popen(process_args).wait()
     if return_code != 0:
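For context on the `scheduleTasks` change above, here is a minimal, self-contained Scala sketch of the bucket-clamping idea. It is not part of the patch: `Bucket` is a simplified stand-in for the script's `BucketBuffer`, the greedy largest-first placement of tests with known cycle counts is an assumption for illustration, and the sample test names are only borrowed from the new subsystem.json files. Only the `bucketSize.min(...)` clamp and the slice bounds mirror the diff.

```scala
object ScheduleSketch {
  // Simplified stand-in (assumption) for the BucketBuffer used in .github/scripts/ci.sc.
  final class Bucket {
    private val names = scala.collection.mutable.ArrayBuffer.empty[String]
    private var total = 0L
    def push_back(name: String, cycle: Int): Unit = { names += name; total += cycle.max(0) }
    def totalCycles: Long = total
    def contents: String = names.mkString(";")
  }

  def scheduleTasks(allCycleData: Seq[(String, Int)], bucketSize: Int): Seq[String] = {
    // The clamp from the patch: never create more buckets than there are tasks,
    // so the generated CI matrix contains no empty runner entries.
    val size = bucketSize.min(allCycleData.size)
    val cargo = (0 until size).map(_ => new Bucket)
    val (unProcessedData, normalData) = allCycleData.partition(_._2 <= 0)
    // Assumption for illustration: place tests with known cycle counts, largest
    // first, into whichever bucket is currently lightest.
    normalData.sortBy { case (_, cycle) => -cycle }.foreach { case (name, cycle) =>
      cargo.minBy(_.totalCycles).push_back(name, cycle)
    }
    // Mirror of the patched slicing: spread tests without cycle data across
    // the clamped number of buckets in equal-sized slices.
    cargo.zipWithIndex.foreach { case (buffer, i) =>
      val startIdx = i * size
      val endIdx = math.min((i + 1) * size, unProcessedData.length)
      unProcessedData.slice(startIdx, endIdx).foreach { case (name, cycle) => buffer.push_back(name, cycle) }
    }
    cargo.map(_.contents)
  }

  def main(args: Array[String]): Unit = {
    val data = Seq("mmm-asm" -> 61401, "smoke-asm" -> 7539, "hello-mlir" -> 72, "strlen-asm" -> 1)
    // Requesting 16 runners for only 4 tasks now yields 4 buckets instead of 16.
    scheduleTasks(data, bucketSize = 16).foreach(println)
  }
}
```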