diff --git a/.github/workflows/BuildStatusDataRecorder/action.yaml b/.github/workflows/BuildStatusDataRecorder/action.yaml
index 54bbbb9e5..2c2106844 100644
--- a/.github/workflows/BuildStatusDataRecorder/action.yaml
+++ b/.github/workflows/BuildStatusDataRecorder/action.yaml
@@ -26,6 +26,9 @@ inputs:
   arch-name:
     description: "Workflow matrix build arch name"
     required: true
+  branch-name:
+    description: "Build branch name"
+    required: true
 
 runs:
   using: "composite"
@@ -83,7 +86,7 @@ runs:
         # Record build status data for current workflow.
         echo "Proceeding with data recording..."
         cp ${DASH_DIR}/record_builds.py .
-        python record_builds.py --workflow ${{ inputs.workflow-name }} --${{ inputs.record-mode }} --${{ inputs.status }} --run-id ${{ inputs.run-id }} --arch ${{ inputs.arch-name }}
+        python record_builds.py --workflow ${{ inputs.workflow-name }} --${{ inputs.record-mode }} --${{ inputs.status }} --run-id ${{ inputs.run-id }} --arch ${{ inputs.arch-name }} --branch ${{ inputs.branch-name }}
         cp build-status.db ${DASH_DIR}/
         rm -f record_builds.py
         echo "Removing ${{ inputs.workflow-name }} ${{ inputs.arch-name }} data lock."
diff --git a/.github/workflows/nightly-musl-builder.yml b/.github/workflows/nightly-musl-builder.yml
index ab92b99e8..1c7cc7935 100644
--- a/.github/workflows/nightly-musl-builder.yml
+++ b/.github/workflows/nightly-musl-builder.yml
@@ -34,6 +34,7 @@ jobs:
           status: "fail"
           run-id: ${{github.run_id}}
           arch-name: "x86-64"
+          branch-name: ${{ github.ref_name }}
 
       - name: Install dependencies
         shell: bash
@@ -110,6 +111,7 @@ jobs:
           status: "pass"
           run-id: ${{github.run_id}}
           arch-name: "x86-64"
+          branch-name: ${{ github.ref_name }}
 
   test-musl:
     strategy:
@@ -137,6 +139,7 @@ jobs:
           status: "fail"
           run-id: ${{github.run_id}}
           arch-name: ${{ matrix.arch }}
+          branch-name: ${{ github.ref_name }}
 
       - uses: ./llvm-project/llvm/tools/eld/.github/workflows/FetchNightlyToolset
 
@@ -213,3 +216,4 @@ jobs:
           status: "pass"
           run-id: ${{github.run_id}}
           arch-name: ${{ matrix.arch }}
+          branch-name: ${{ github.ref_name }}
diff --git a/.github/workflows/picolibc-builder.yml b/.github/workflows/picolibc-builder.yml
index 76dfb8c95..479edd8c4 100644
--- a/.github/workflows/picolibc-builder.yml
+++ b/.github/workflows/picolibc-builder.yml
@@ -82,6 +82,7 @@ jobs:
           status: "fail"
           run-id: ${{github.run_id}}
           arch-name: ${{ matrix.arch.name }}
+          branch-name: ${{ github.ref_name }}
 
       - name: Configure LLVM toolchain for ${{ matrix.arch.name }}
         run: |
@@ -245,3 +246,4 @@ jobs:
           status: "pass"
           run-id: ${{github.run_id}}
           arch-name: ${{ matrix.arch.name }}
+          branch-name: ${{ github.ref_name }}
diff --git a/.github/workflows/scripts/record_builds.py b/.github/workflows/scripts/record_builds.py
index 6d13bb26a..f3a75cb26 100644
--- a/.github/workflows/scripts/record_builds.py
+++ b/.github/workflows/scripts/record_builds.py
@@ -55,7 +55,7 @@ def createBuildDataTables(workflow):
     create_table = (
         " CREATE TABLE IF NOT EXISTS "
         + workflow
-        + " (build_count INTEGER PRIMARY KEY AUTOINCREMENT, run_id INTEGER, state TEXT, build_date TEXT, build_time TEXT, arch TEXT, UNIQUE(run_id, arch));"
+        + " (build_count INTEGER PRIMARY KEY AUTOINCREMENT, run_id INTEGER, state TEXT, build_date TEXT, build_time TEXT, arch TEXT, branch TEXT, build_end_time TEXT, UNIQUE(run_id, arch));"
     )
     try:
         cursor.execute(create_table)
@@ -74,13 +74,14 @@ def addNewBuildData(args):
         cursor.execute(
             "INSERT INTO "
             + workflow_table
-            + " (run_id, state, build_date, build_time, arch) VALUES (?, ?, ?, ?, ?)",
+            + " (run_id, state, build_date, build_time, arch, branch) VALUES (?, ?, ?, ?, ?, ?)",
             (
                 args.run_id,
                 "pass" if args.build_status else "fail",
                 str(date.today()),
                 datetime.now().strftime("%H:%M"),
                 args.workflow_arch,
+                args.build_branch,
            ),
        )
    except Exception as e:
@@ -170,16 +171,9 @@ def emitJSData(args):
     all_data = []
     all_states_data = []
     try:
-        if args.workflow_build.lower() == "picolibc":
-            cursor.execute(
-                "SELECT run_id, state, build_date, build_time, arch FROM "
-                + workflow
-                + ";"
-            )
-        else:
-            cursor.execute(
-                "SELECT run_id, state, build_date, build_time FROM " + workflow + ";"
-            )
+        cursor.execute(
+            "SELECT run_id, state, build_date, build_time, arch, branch FROM " + workflow + ";"
+        )
         all_data = cursor.fetchall()
     except Exception as e:
         print(
@@ -194,6 +188,7 @@
                 "date": data[2],
                 "time": data[3],
                 "arch": data[4],
+                "branch": data[5],
             }
         else:
             new_state = {
@@ -201,6 +196,8 @@
                 "state": data[1],
                 "date": data[2],
                 "time": data[3],
+                "arch": data[4],
+                "branch": data[5],
             }
 
         all_states_data.append(new_state)
@@ -239,6 +236,13 @@
         required=False,
         help="The workflow build architecture name.",
     )
+    parser.add_argument(
+        "--branch",
+        "-b",
+        dest="build_branch",
+        required=False,
+        help="The build branch name.",
+    )
     parser.add_argument(
         "--run-id", dest="run_id", required=False, help="The github workflow run ID."
     )
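
Not part of the patch: a minimal sketch for checking locally that the new branch column is populated, assuming a build-status.db produced by record_builds.py; the table name "musl" below is only a placeholder for whatever --workflow value the calling workflow passes.

import sqlite3

# Hypothetical table name; record_builds.py creates one table per --workflow value.
TABLE = "musl"

conn = sqlite3.connect("build-status.db")
try:
    # Show the most recent recorded builds with their arch and branch.
    rows = conn.execute(
        f"SELECT run_id, state, arch, branch FROM {TABLE} ORDER BY build_count DESC LIMIT 5;"
    ).fetchall()
    for run_id, state, arch, branch in rows:
        print(run_id, state, arch, branch)
finally:
    conn.close()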