diff --git a/.dockerignore b/.dockerignore
index 336912aa6..3c4962a11 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -13,5 +13,10 @@
/local_data
/performance_results
/scripts
+**/target
/target
+!/target/debug/iggy
+!/target/debug/iggy-server
+!/target/debug/iggy-mcp
+!/target/debug/iggy-connectors
/web
diff --git a/.github/actions/csharp-dotnet/post-merge/action.yml b/.github/actions/csharp-dotnet/post-merge/action.yml
new file mode 100644
index 000000000..7d9ca5af6
--- /dev/null
+++ b/.github/actions/csharp-dotnet/post-merge/action.yml
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: csharp-dotnet-post-merge
+description: .NET post-merge publishing github iggy actions
+
+inputs:
+ version:
+ description: "Version for publishing"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "8.0.x"
+
+ - name: Restore dependencies
+ run: |
+ cd foreign/csharp
+ dotnet restore
+ shell: bash
+
+ - name: Build Release
+ run: |
+ cd foreign/csharp
+
+ # Build in Release mode
+ dotnet build -c Release --no-restore
+
+ # List build output
+ echo "Build output:"
+ find . -name "*.dll" -path "*/bin/Release/*" | head -20 || echo "No DLLs found in Release folders"
+ shell: bash
+
+ - name: Pack NuGet packages
+ run: |
+ cd foreign/csharp
+
+ # Set version if provided
+ if [ -n "${{ inputs.version }}" ]; then
+ dotnet pack ./Iggy_SDK -c Release \
+ -p:PackageVersion=${{ inputs.version }} \
+ -o ./nupkgs \
+ --no-build
+ else
+          echo "❌ Version is required for packing"
+ exit 1
+ fi
+
+ # List packages
+ echo "NuGet packages:"
+ ls -la ./nupkgs/ || echo "No packages found"
+ shell: bash
+
+ - name: Publish to NuGet
+ env:
+ NUGET_API_KEY: ${{ env.NUGET_API_KEY }}
+ run: |
+ cd foreign/csharp
+
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+          echo "🔍 Dry run - would publish these packages:"
+ ls -la ./nupkgs/*.nupkg
+
+ # Validate packages
+ for package in ./nupkgs/*.nupkg; do
+ echo "Validating: $package"
+ dotnet nuget locals all --clear
+ # Extract package info
+ unzip -l "$package" | head -20
+ done
+ else
+ if [ -z "$NUGET_API_KEY" ]; then
+            echo "❌ NUGET_API_KEY is not set"
+ exit 1
+ fi
+
+          echo "📦 Publishing packages to NuGet..."
+ # Push to NuGet
+ for package in ./nupkgs/*.nupkg; do
+ echo "Publishing: $(basename $package)"
+ dotnet nuget push "$package" \
+ --api-key "$NUGET_API_KEY" \
+ --source https://api.nuget.org/v3/index.json \
+ --skip-duplicate
+ done
+          echo "✅ Publishing completed"
+ fi
+ shell: bash
diff --git a/.github/actions/csharp-dotnet/pre-merge/action.yml b/.github/actions/csharp-dotnet/pre-merge/action.yml
new file mode 100644
index 000000000..b96b379c5
--- /dev/null
+++ b/.github/actions/csharp-dotnet/pre-merge/action.yml
@@ -0,0 +1,123 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# TODO(hubcio): Currently, C# tests don't need server. They use testcontainers with 'edge' image.
+# We should change this to use server-start/stop action, so that code from PR is tested.
+
+name: csharp-dotnet-pre-merge
+description: .NET pre-merge testing github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (lint, test, build, e2e)"
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: "8.0.x"
+
+ - name: Setup Rust with cache
+ if: inputs.task == 'test' || inputs.task == 'e2e'
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+
+ - name: Install netcat
+ if: inputs.task == 'e2e'
+ run: sudo apt-get update && sudo apt-get install -y netcat-openbsd
+ shell: bash
+
+ - name: Restore dependencies
+ run: |
+ cd foreign/csharp
+ dotnet restore
+ shell: bash
+
+ - name: Build
+ if: inputs.task == 'test' || inputs.task == 'build' || inputs.task == 'lint'
+ run: |
+ cd foreign/csharp
+ dotnet build --no-restore
+ shell: bash
+
+ - name: Lint (Code Analysis)
+ if: inputs.task == 'lint'
+ run: |
+ cd foreign/csharp
+
+ # Run code analysis
+ dotnet build --no-restore /p:EnforceCodeStyleInBuild=true /p:TreatWarningsAsErrors=false
+
+ # TODO: make format check blocking (requires dotnet-format tool)
+ dotnet format --verify-no-changes --verbosity diagnostic || true
+
+ shell: bash
+
+ - name: Test
+ if: inputs.task == 'test'
+ run: |
+ cd foreign/csharp
+
+ # Run unit tests
+ dotnet test Iggy_SDK_Tests --no-build --verbosity normal
+
+ # Run integration tests
+ dotnet test Iggy_SDK.Tests.Integration --no-build --verbosity normal
+
+ shell: bash
+
+ # TODO(hubcio): currently, C# tests don't need server. They use testcontainers with 'edge' image.
+ # instead, they should use server-start/stop action to test the actual code from PR.
+ # - name: Start Iggy server
+ # id: iggy
+ # if: inputs.task == 'e2e'
+ # uses: ./.github/actions/utils/server-start
+ # with:
+ # mode: cargo
+ # cargo-bin: iggy-server
+ # port: 8090
+
+ - name: Run integration tests
+ if: inputs.task == 'e2e'
+ run: |
+ cd foreign/csharp
+ dotnet test Iggy_SDK.Tests.Integration --no-build --verbosity normal
+ shell: bash
+
+    # - name: Stop Iggy server
+    #   if: inputs.task == 'e2e'
+    #   uses: ./.github/actions/utils/server-stop
+    #   with:
+    #     pid-file: ${{ steps.iggy.outputs.pid_file }}
+    #     log-file: ${{ steps.iggy.outputs.log_file }}
+
+ - name: Build Release
+ if: inputs.task == 'build'
+ run: |
+ cd foreign/csharp
+
+ # Build in Release mode
+ dotnet build -c Release --no-restore
+
+ # List build output
+ echo "Build output:"
+ find . -name "*.dll" -path "*/bin/Release/*" | head -20 || echo "No DLLs found in Release folders"
+ shell: bash
diff --git a/.github/actions/go/post-merge/action.yml b/.github/actions/go/post-merge/action.yml
new file mode 100644
index 000000000..22bded265
--- /dev/null
+++ b/.github/actions/go/post-merge/action.yml
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: go-post-merge
+description: Go post-merge tag preparation github iggy actions
+
+inputs:
+ version:
+ description: "Version for tagging (without 'v' prefix)"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Validate version format
+ run: |
+ VERSION="${{ inputs.version }}"
+
+ # Check if version matches semantic versioning
+ if ! echo "$VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?(\+[a-zA-Z0-9.-]+)?$' > /dev/null; then
+          echo "❌ Invalid version format: $VERSION"
+ echo "Expected format: X.Y.Z or X.Y.Z-prerelease+metadata"
+ exit 1
+ fi
+
+        echo "✅ Version format valid: $VERSION"
+ shell: bash
+
+ - name: Prepare Go module for tagging
+ run: |
+ VERSION="${{ inputs.version }}"
+ TAG="foreign/go/v${VERSION}"
+
+        echo "📦 Go Module Publishing Information"
+ echo "===================================="
+ echo "Version: v${VERSION}"
+ echo "Git tag: ${TAG}"
+ echo ""
+
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+          echo "🔍 DRY RUN MODE - No tag will be created"
+ echo ""
+ echo "Would create tag: ${TAG}"
+ echo ""
+ echo "After tagging, users could import using:"
+ echo " go get github.com/${{ github.repository }}/foreign/go@v${VERSION}"
+ echo ""
+ echo "Or add to go.mod:"
+ echo " require github.com/${{ github.repository }}/foreign/go v${VERSION}"
+ else
+          echo "✅ Go module ready for tagging"
+ echo ""
+ echo "Tag will be created: ${TAG}"
+ echo "This will be handled by the create-tags job in the publish workflow"
+ echo ""
+ echo "After the tag is pushed, users can import using:"
+ echo " go get github.com/${{ github.repository }}/foreign/go@v${VERSION}"
+ echo ""
+ echo "Or add to go.mod:"
+ echo " require github.com/${{ github.repository }}/foreign/go v${VERSION}"
+ fi
+
+ # Verify the go.mod file exists
+ if [ ! -f "foreign/go/go.mod" ]; then
+          echo "⚠️ Warning: foreign/go/go.mod not found"
+ echo "Make sure the Go module is properly initialized"
+ else
+ echo ""
+ echo "Module information:"
+ grep "^module" foreign/go/go.mod || echo "Module declaration not found"
+ fi
+ shell: bash
+
+ - name: Output tag information
+ id: tag-info
+ run: |
+ VERSION="${{ inputs.version }}"
+ TAG="foreign/go/v${VERSION}"
+
+ # Set outputs for use in other jobs
+ echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
+ echo "version=v${VERSION}" >> "$GITHUB_OUTPUT"
+
+ # Summary for GitHub Actions
+ {
+          echo "## 🏷️ Go Module Tag Information"
+ echo ""
+ echo "| Property | Value |"
+ echo "|----------|-------|"
+ echo "| **Version** | \`v${VERSION}\` |"
+ echo "| **Git Tag** | \`${TAG}\` |"
+ echo "| **Import Path** | \`github.com/${{ github.repository }}/foreign/go\` |"
+ echo ""
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ echo "**Note:** This is a dry run - no actual tag will be created"
+ else
+ echo "**Note:** Tag will be created by the publish workflow"
+ fi
+ } >> "$GITHUB_STEP_SUMMARY"
+ shell: bash
diff --git a/.github/actions/go/pre-merge/action.yml b/.github/actions/go/pre-merge/action.yml
new file mode 100644
index 000000000..f8628d2ba
--- /dev/null
+++ b/.github/actions/go/pre-merge/action.yml
@@ -0,0 +1,139 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: go-pre-merge
+description: Go pre-merge testing github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (lint, test, build, e2e)"
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Go
+ uses: actions/setup-go@v5
+ with:
+ go-version: "1.23.0"
+ cache-dependency-path: |
+ foreign/go/go.sum
+ bdd/go/go.sum
+ examples/go/go.sum
+
+ - name: Download dependencies
+ run: |
+ if [ -f "foreign/go/go.mod" ]; then
+ cd foreign/go && go mod download
+ cd ..
+ fi
+
+ if [ -f "bdd/go/go.mod" ]; then
+ cd bdd/go && go mod download
+ cd ..
+ fi
+
+ if [ -f "examples/go/go.mod" ]; then
+ cd examples/go && go mod download
+ fi
+ shell: bash
+
+ - name: Tidy Check
+ shell: bash
+ if: inputs.task == 'test' || inputs.task == 'lint'
+ run: |
+ cd foreign/go
+ go mod tidy
+ git diff --exit-code go.mod go.sum
+
+ - name: Test
+ shell: bash
+ if: inputs.task == 'test'
+ run: |
+ cd foreign/go
+
+ # Create reports directory
+ mkdir -p ../../reports
+
+ # Run tests with coverage
+ go test -v -race -coverprofile=../../reports/go-coverage.out ./...
+
+ # Generate coverage report
+ go tool cover -html=../../reports/go-coverage.out -o ../../reports/go-coverage.html
+
+ # Run tests with JSON output for better reporting
+ go test -v -json ./... > ../../reports/go-tests.json
+
+
+ - name: Lint
+ if: inputs.task == 'lint'
+ uses: golangci/golangci-lint-action@v6
+ with:
+ version: v1.61
+ working-directory: foreign/go
+ args: --timeout=5m
+
+ - name: Lint BDD
+ if: inputs.task == 'lint' && hashFiles('bdd/go/go.mod') != ''
+ uses: golangci/golangci-lint-action@v6
+ with:
+ version: v1.61
+ working-directory: bdd/go
+ args: --timeout=5m
+
+ - name: Lint Examples
+ if: inputs.task == 'lint' && hashFiles('examples/go/go.mod') != ''
+ uses: golangci/golangci-lint-action@v6
+ with:
+ version: v1.61
+ working-directory: examples/go
+ args: --timeout=5m
+
+ - name: Build
+ shell: bash
+ if: inputs.task == 'build'
+ run: |
+ cd foreign/go
+
+ # Build the module
+ go build -v ./...
+
+ - name: Setup server for e2e tests
+ if: inputs.task == 'e2e'
+ uses: ./.github/actions/utils/server-start
+
+ - name: Run e2e tests
+ shell: bash
+ if: inputs.task == 'e2e'
+ run: |
+        echo "🧪 Running Go e2e tests..."
+
+ # Install ginkgo
+ echo "Installing ginkgo..."
+ go install github.com/onsi/ginkgo/v2/ginkgo@latest
+
+ # Ensure ginkgo is in PATH
+ export PATH="${PATH}:$(go env GOPATH)/bin"
+
+ # Run foreign/go e2e tests
+ echo "Running foreign/go e2e tests..."
+ cd foreign/go
+ go test -v -race ./...
+
+ - name: Stop server after e2e tests
+ if: always() && inputs.task == 'e2e'
+ uses: ./.github/actions/utils/server-stop
diff --git a/.github/actions/java-gradle/post-merge/action.yml b/.github/actions/java-gradle/post-merge/action.yml
new file mode 100644
index 000000000..5ef672412
--- /dev/null
+++ b/.github/actions/java-gradle/post-merge/action.yml
@@ -0,0 +1,133 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: java-gradle-post-merge
+description: Java Gradle post-merge publishing github iggy actions
+
+inputs:
+ version:
+ description: "Version for publishing"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Java
+ uses: actions/setup-java@v4
+ with:
+ distribution: "temurin"
+ java-version: "17"
+ cache: "gradle"
+
+ - name: Setup Gradle
+ uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
+
+ - name: Grant execute permission for gradlew
+ shell: bash
+ run: |
+ if [ -f "foreign/java/gradlew" ]; then
+ chmod +x foreign/java/gradlew
+ fi
+
+ - name: Build for publishing
+ shell: bash
+ run: |
+        echo "📦 Building Java SDK for publishing..."
+ foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest
+ BUILD_EXIT_CODE=$?
+
+ # List artifacts only if build succeeded
+ if [ $BUILD_EXIT_CODE -eq 0 ]; then
+ echo ""
+ echo "Build artifacts:"
+ find foreign/java -path "*/build/libs/*.jar" -type f 2>/dev/null | head -20 || echo "No jar artifacts found in build/libs directories"
+ else
+          echo "❌ Build failed with exit code $BUILD_EXIT_CODE"
+ exit $BUILD_EXIT_CODE
+ fi
+
+ - name: Publish to Maven Nexus
+ shell: bash
+ env:
+ NEXUS_USER: ${{ env.NEXUS_USER }}
+ NEXUS_PASSWORD: ${{ env.NEXUS_PASSWORD }}
+ run: |
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+          echo "🔍 Dry run - would publish to Maven Nexus:"
+ echo ""
+
+ # Extract version from build.gradle.kts
+ gradle_version=$(foreign/java/gradlew -p foreign/java properties -q | grep "version:" | cut -d: -f2 | tr -d ' ')
+ echo "Version from gradle: $gradle_version"
+ echo "Input version: ${{ inputs.version }}"
+
+ # Verify versions match
+ if [ "$gradle_version" != "${{ inputs.version }}" ]; then
+            echo "⚠️ Warning: Gradle version ($gradle_version) doesn't match input version (${{ inputs.version }})"
+ fi
+
+ echo ""
+ echo "Would publish artifacts:"
+ echo " Group ID: org.apache.iggy"
+ echo " Artifact ID: iggy"
+ echo " Version: ${{ inputs.version }}"
+ echo ""
+ echo "Maven coordinates: org.apache.iggy:iggy:${{ inputs.version }}"
+
+ # Show what would be published
+ echo ""
+ echo "Artifacts that would be published:"
+ find foreign/java -path "*/build/libs/*.jar" -type f 2>/dev/null | while read jar; do
+ echo " - $(basename $jar)"
+ done
+ else
+ if [ -z "$NEXUS_USER" ] || [ -z "$NEXUS_PASSWORD" ]; then
+            echo "❌ NEXUS_USER or NEXUS_PASSWORD not set"
+ exit 1
+ fi
+
+          echo "📦 Publishing to Maven Nexus..."
+ echo "Version: ${{ inputs.version }}"
+ echo ""
+
+ # Run the publish task
+ foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest publish
+ PUBLISH_EXIT_CODE=$?
+
+ if [ $PUBLISH_EXIT_CODE -eq 0 ]; then
+ echo ""
+            echo "✅ Successfully published to Maven Nexus"
+ echo "Maven coordinates: org.apache.iggy:iggy:${{ inputs.version }}"
+ echo ""
+ echo "Users can now add to their build.gradle:"
+ echo " implementation 'org.apache.iggy:iggy:${{ inputs.version }}'"
+ echo ""
+ echo "Or to their pom.xml:"
+ echo " "
+ echo " org.apache.iggy"
+ echo " iggy"
+ echo " ${{ inputs.version }}"
+ echo " "
+ else
+            echo "❌ Publishing failed with exit code $PUBLISH_EXIT_CODE"
+ exit $PUBLISH_EXIT_CODE
+ fi
+ fi
diff --git a/.github/actions/java-gradle/pre-merge/action.yml b/.github/actions/java-gradle/pre-merge/action.yml
new file mode 100644
index 000000000..c939652e1
--- /dev/null
+++ b/.github/actions/java-gradle/pre-merge/action.yml
@@ -0,0 +1,86 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: java-gradle-pre-merge
+description: Java Gradle pre-merge testing github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (lint, test, build)"
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Java
+ uses: actions/setup-java@v4
+ with:
+ distribution: "temurin"
+ java-version: "17"
+ cache: "gradle"
+
+ - name: Setup Gradle
+ uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
+
+ - name: Grant execute permission for gradlew
+ shell: bash
+ run: |
+ if [ -f "foreign/java/gradlew" ]; then
+ chmod +x foreign/java/gradlew
+ fi
+
+ - name: Lint
+ if: inputs.task == 'lint'
+ shell: bash
+ run: |
+ # TODO(hubcio): make this blocking when Java lints are fixed
+ foreign/java/dev-support/checks/build.sh check -x test || true
+
+ - name: Build
+ shell: bash
+ if: inputs.task == 'build'
+ run: |
+ foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest
+ BUILD_EXIT_CODE=$?
+
+ # List artifacts only if build succeeded
+ if [ $BUILD_EXIT_CODE -eq 0 ]; then
+ echo ""
+ echo "Build artifacts:"
+ find foreign/java -path "*/build/libs/*.jar" -type f 2>/dev/null | head -20 || echo "No jar artifacts found in build/libs directories"
+ fi
+
+ # Exit with build exit code
+ exit $BUILD_EXIT_CODE
+
+ - name: Test
+ if: inputs.task == 'test'
+ shell: bash
+ run: |
+ foreign/java/dev-support/checks/build.sh test
+
+ # Copy test reports
+ if [ -d "foreign/java/build/test-results" ]; then
+ mkdir -p reports
+ cp -r foreign/java/build/test-results reports/java-tests
+ fi
+
+ # Generate test report if jacoco is configured
+ if grep -q "jacoco" foreign/java/build.gradle 2>/dev/null; then
+ cd foreign/java
+ ./gradlew jacocoTestReport --no-daemon
+ fi
diff --git a/.github/actions/node-npm/post-merge/action.yml b/.github/actions/node-npm/post-merge/action.yml
new file mode 100644
index 000000000..3e0295008
--- /dev/null
+++ b/.github/actions/node-npm/post-merge/action.yml
@@ -0,0 +1,186 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: node-npm-post-merge
+description: Node.js post-merge npm publishing github iggy actions
+
+inputs:
+ version:
+ description: "Version for publishing"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: "23"
+ registry-url: "https://registry.npmjs.org"
+ cache: "npm"
+ cache-dependency-path: foreign/node/package-lock.json
+
+ - name: Install dependencies
+ run: |
+ cd foreign/node
+ npm ci --ignore-scripts
+ shell: bash
+
+ - name: Build for publishing
+ run: |
+ cd foreign/node
+        echo "📦 Building Node.js SDK for publishing..."
+ npm run build
+
+ # Verify build output
+ if [ -d "dist" ]; then
+ echo ""
+ echo "Build output:"
+ ls -la dist/
+
+ # Count files
+ FILE_COUNT=$(find dist -type f | wc -l)
+ echo ""
+ echo "Total files in dist: $FILE_COUNT"
+ else
+          echo "❌ Error: dist directory not found after build"
+ exit 1
+ fi
+ shell: bash
+
+ - name: Prepare package version
+ run: |
+ cd foreign/node
+
+ # Get current version from package.json
+ CURRENT_VERSION=$(node -p "require('./package.json').version")
+ NEW_VERSION="${{ inputs.version }}"
+
+ echo "Current package.json version: $CURRENT_VERSION"
+ echo "Target version: $NEW_VERSION"
+
+ if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
+ if [ "${{ inputs.dry_run }}" = "false" ]; then
+ echo "Updating version to $NEW_VERSION..."
+ npm version "$NEW_VERSION" --no-git-tag-version --allow-same-version
+ else
+ echo "Would update version from $CURRENT_VERSION to $NEW_VERSION"
+ fi
+ else
+ echo "Version already matches target"
+ fi
+ shell: bash
+
+ - name: Validate package
+ run: |
+ cd foreign/node
+
+        echo "🔍 Package validation:"
+ echo ""
+
+ # Check package.json required fields
+ node -e "
+ const pkg = require('./package.json');
+ const required = ['name', 'version', 'description', 'main', 'types', 'license'];
+ const missing = required.filter(field => !pkg[field]);
+ if (missing.length > 0) {
+          console.error('❌ Missing required fields:', missing.join(', '));
+ process.exit(1);
+ }
+        console.log('✅ All required fields present');
+ console.log('');
+ console.log('Package info:');
+ console.log(' Name:', pkg.name);
+ console.log(' Version:', pkg.version);
+ console.log(' Description:', pkg.description);
+ console.log(' License:', pkg.license);
+ console.log(' Main:', pkg.main);
+ console.log(' Types:', pkg.types);
+ "
+
+ # Check if files to be published exist
+ echo ""
+ echo "Files to be published:"
+ # npm pack might output non-JSON lines before the JSON, so we need to extract only the JSON part
+ npm pack --dry-run --json 2>/dev/null | tail -1 | jq -r '.[0].files[].path' 2>/dev/null | head -20 || echo " (Unable to list files, but continuing...)"
+ shell: bash
+
+ - name: Publish to npm
+ env:
+ NODE_AUTH_TOKEN: ${{ env.NPM_TOKEN }}
+ run: |
+ cd foreign/node
+
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+          echo "🔍 Dry run - would publish to npm:"
+ echo ""
+
+ # Show what would be published
+ npm pack --dry-run
+
+ echo ""
+ echo "Package details:"
+ node -e "
+ const pkg = require('./package.json');
+ console.log(' Name: ' + pkg.name);
+ console.log(' Version: ${{ inputs.version }}');
+ console.log(' Registry: https://registry.npmjs.org');
+ "
+
+ echo ""
+ echo "After publishing, users could install with:"
+ echo " npm install apache-iggy@${{ inputs.version }}"
+ echo ""
+ echo "Or add to package.json:"
+ echo ' "apache-iggy": "^${{ inputs.version }}"'
+ else
+ if [ -z "$NODE_AUTH_TOKEN" ]; then
+            echo "❌ NPM_TOKEN is not set"
+ exit 1
+ fi
+
+          echo "📦 Publishing to npm registry..."
+ echo "Version: ${{ inputs.version }}"
+ echo ""
+
+ # Publish with provenance for supply chain security
+ npm publish --provenance --access public
+
+ if [ $? -eq 0 ]; then
+ echo ""
+            echo "✅ Successfully published to npm"
+ echo ""
+ echo "Package: apache-iggy@${{ inputs.version }}"
+ echo "Registry: https://registry.npmjs.org/apache-iggy"
+ echo ""
+ echo "Users can now install with:"
+ echo " npm install apache-iggy@${{ inputs.version }}"
+ echo ""
+ echo "Or add to package.json:"
+ echo ' "apache-iggy": "^${{ inputs.version }}"'
+ echo ""
+ echo "View on npm: https://www.npmjs.com/package/apache-iggy/v/${{ inputs.version }}"
+ else
+            echo "❌ Publishing failed"
+ exit 1
+ fi
+ fi
+ shell: bash
diff --git a/.github/actions/node-npm/pre-merge/action.yml b/.github/actions/node-npm/pre-merge/action.yml
new file mode 100644
index 000000000..be9a7ed6c
--- /dev/null
+++ b/.github/actions/node-npm/pre-merge/action.yml
@@ -0,0 +1,109 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: node-npm-pre-merge
+description: Node.js pre-merge testing github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (lint, test, build, e2e)"
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: "23"
+ registry-url: "https://registry.npmjs.org"
+ cache: "npm"
+ cache-dependency-path: foreign/node/package-lock.json
+
+ - name: Setup Rust with cache
+ if: inputs.task == 'e2e'
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+
+ - name: Install netcat
+ if: inputs.task == 'e2e'
+ run: sudo apt-get update && sudo apt-get install -y netcat-openbsd
+ shell: bash
+
+ - name: Install dependencies
+ run: |
+ cd foreign/node
+ npm ci --ignore-scripts
+ shell: bash
+
+ - name: Lint
+ if: inputs.task == 'lint'
+ run: |
+ cd foreign/node
+ npm run lint
+ shell: bash
+
+ - name: Build
+ if: inputs.task == 'build'
+ run: |
+ cd foreign/node
+ npm run build
+
+ # Verify build output
+ if [ -d "dist" ]; then
+ echo "Build output:"
+ ls -la dist/
+ else
+ echo "Warning: dist directory not found after build"
+ fi
+ shell: bash
+
+ - name: Test
+ if: inputs.task == 'test'
+ run: |
+ cd foreign/node
+ npm run test:unit
+ shell: bash
+
+ - name: Start Iggy server
+ id: iggy
+ if: inputs.task == 'e2e'
+ uses: ./.github/actions/utils/server-start
+ with:
+ mode: cargo
+ cargo-bin: iggy-server
+ host: 127.0.0.1
+ port: 8090
+ wait-timeout-seconds: 45
+
+ - name: E2E tests
+ if: inputs.task == 'e2e'
+ run: |
+ cd foreign/node
+ npm run test:e2e
+ env:
+ IGGY_SERVER_HOST: 127.0.0.1
+ IGGY_SERVER_TCP_PORT: 8090
+ shell: bash
+
+ - name: Stop Iggy server
+ if: inputs.task == 'e2e'
+ uses: ./.github/actions/utils/server-stop
+ with:
+ pid-file: ${{ steps.iggy.outputs.pid_file }}
+ log-file: ${{ steps.iggy.outputs.log_file }}
diff --git a/.github/actions/python-maturin/post-merge/action.yml b/.github/actions/python-maturin/post-merge/action.yml
new file mode 100644
index 000000000..bb2d73f69
--- /dev/null
+++ b/.github/actions/python-maturin/post-merge/action.yml
@@ -0,0 +1,161 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: python-maturin-post-merge
+description: Python post-merge PyPI publishing with pre-built wheels
+
+inputs:
+ version:
+ description: "Version for publishing"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+ wheels_artifact:
+ description: "Name of the artifact containing the wheels"
+ required: false
+ default: "python-wheels-all"
+ wheels_path:
+ description: "Path where wheels should be downloaded"
+ required: false
+ default: "dist"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Validate version format
+ run: |
+ VERSION="${{ inputs.version }}"
+
+ # Check if version matches Python/PEP 440 versioning
+ if ! echo "$VERSION" | grep -qE '^[0-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?(\.[a-z0-9]+)?$'; then
+ echo "β Invalid version format: $VERSION"
+ echo "Expected format: X.Y.Z or X.Y.Z[a|b|rc]N or X.Y.Z.devN"
+ exit 1
+ fi
+
+          echo "✅ Version format valid: $VERSION"
+ shell: bash
+
+ - name: Download pre-built wheels
+ uses: actions/download-artifact@v4
+ with:
+ name: ${{ inputs.wheels_artifact }}
+ path: ${{ inputs.wheels_path }}
+
+ - name: Validate downloaded wheels
+ run: |
+ WHEELS_PATH="${{ inputs.wheels_path }}"
+
+ echo "π Validating downloaded wheels and source distribution:"
+ echo ""
+
+ # Check if directory exists and has files
+ if [ ! -d "$WHEELS_PATH" ]; then
+ echo "β Wheels directory not found: $WHEELS_PATH"
+ exit 1
+ fi
+
+ # Count artifacts
+ WHEEL_COUNT=$(ls -1 "$WHEELS_PATH"/*.whl 2>/dev/null | wc -l || echo "0")
+ SDIST_COUNT=$(ls -1 "$WHEELS_PATH"/*.tar.gz 2>/dev/null | wc -l || echo "0")
+
+ if [ "$WHEEL_COUNT" -eq 0 ] && [ "$SDIST_COUNT" -eq 0 ]; then
+ echo "β No wheels or source distributions found in $WHEELS_PATH"
+ exit 1
+ fi
+
+ echo "Found artifacts:"
+ echo " - Wheels: $WHEEL_COUNT"
+ echo " - Source distributions: $SDIST_COUNT"
+ echo ""
+
+ # List all artifacts with details
+ echo "Artifacts to publish:"
+ for file in "$WHEELS_PATH"/*.whl "$WHEELS_PATH"/*.tar.gz; do
+ if [ -f "$file" ]; then
+ filename=$(basename "$file")
+ size=$(du -h "$file" | cut -f1)
+
+ # Determine platform from filename
+ if [[ "$filename" == *"linux"* ]]; then platform="Linux"
+ elif [[ "$filename" == *"macosx"* ]]; then platform="macOS"
+ elif [[ "$filename" == *"win"* ]]; then platform="Windows"
+ elif [[ "$filename" == *.tar.gz ]]; then platform="Source"
+ else platform="Universal"; fi
+
+ # Determine architecture from filename
+ if [[ "$filename" == *"x86_64"* ]] || [[ "$filename" == *"amd64"* ]]; then arch="x86_64"
+ elif [[ "$filename" == *"aarch64"* ]] || [[ "$filename" == *"arm64"* ]] || [[ "$filename" == *"universal2"* ]]; then arch="arm64"
+ elif [[ "$filename" == *"i686"* ]] || [[ "$filename" == *"win32"* ]]; then arch="x86"
+ elif [[ "$filename" == *.tar.gz ]]; then arch="N/A"
+ else arch="any"; fi
+
+ echo " - $filename ($platform/$arch, $size)"
+ fi
+ done
+
+ echo ""
+          echo "✅ All packages ready for publishing"
+ shell: bash
+
+ - name: Display publishing information (dry run)
+ if: inputs.dry_run == 'true'
+ run: |
+ WHEELS_PATH="${{ inputs.wheels_path }}"
+ VERSION="${{ inputs.version }}"
+
+ echo "π DRY RUN - Would publish to PyPI:"
+ echo ""
+ echo "Package: apache-iggy"
+ echo "Version: $VERSION"
+ echo "Registry: https://pypi.org"
+ echo ""
+
+ echo "Packages that would be published:"
+ for file in "$WHEELS_PATH"/*.whl "$WHEELS_PATH"/*.tar.gz; do
+ if [ -f "$file" ]; then
+ echo " - $(basename "$file")"
+ fi
+ done
+
+ echo ""
+ echo "Platform coverage:"
+ echo " - Linux: x86_64, aarch64"
+ echo " - macOS: x86_64, arm64"
+ echo " - Windows: x64"
+ echo " - Source distribution: included"
+
+ echo ""
+ echo "After publishing, users could install with:"
+ echo " pip install apache-iggy==$VERSION"
+ echo ""
+ echo "Or add to requirements.txt:"
+ echo " apache-iggy==$VERSION"
+ echo ""
+ echo "View on PyPI: https://pypi.org/project/apache-iggy/$VERSION/"
+ shell: bash
+
+ - name: Publish to PyPI using maturin
+ if: inputs.dry_run == 'false'
+ uses: PyO3/maturin-action@v1
+ env:
+ MATURIN_PYPI_TOKEN: ${{ env.PYPI_API_TOKEN }}
+ with:
+ command: upload
+ args: --non-interactive --skip-existing ${{ inputs.wheels_path }}/*
diff --git a/.github/actions/python-maturin/pre-merge/action.yml b/.github/actions/python-maturin/pre-merge/action.yml
new file mode 100644
index 000000000..75b70665e
--- /dev/null
+++ b/.github/actions/python-maturin/pre-merge/action.yml
@@ -0,0 +1,178 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: python-maturin-pre-merge
+description: Python pre-merge testing with maturin github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (lint, test, build)"
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Setup Rust with cache
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+
+ - name: Cache pip
+ uses: actions/cache@v4
+ with:
+ path: ~/.cache/pip
+ key: pip-${{ runner.os }}-${{ hashFiles('foreign/python/pyproject.toml') }}
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip wheel setuptools
+
+ # Install maturin for building
+ pip install 'maturin>=1.2,<2.0'
+
+ # Install testing dependencies from pyproject.toml
+ cd foreign/python
+ pip install -e ".[testing,examples,dev]"
+ cd ../..
+
+ # Install additional CI dependencies for Docker testing if needed
+ if [ "${{ inputs.task }}" = "test" ]; then
+ pip install 'testcontainers[docker]>=3.7.0,<5.0' || true
+ fi
+ shell: bash
+
+ - name: Lint and format check
+ if: inputs.task == 'lint'
+ run: |
+ # Build list of directories to check using absolute paths
+ DIRS_TO_CHECK="${GITHUB_WORKSPACE}/foreign/python/tests ${GITHUB_WORKSPACE}/foreign/python"
+ STUB_FILE="${GITHUB_WORKSPACE}/foreign/python/apache_iggy.pyi"
+
+ echo "Directories to check: $DIRS_TO_CHECK"
+ echo "Stub file: $STUB_FILE"
+
+ echo "ruff check --select I $DIRS_TO_CHECK"
+ ruff check --select I $DIRS_TO_CHECK
+ echo "ruff version: $(ruff --version)"
+
+ echo "ruff format --check $DIRS_TO_CHECK"
+ ruff format --check $DIRS_TO_CHECK
+
+ # TODO(hubcio): make this blocking when mypy lints are fixed
+ echo "mypy --explicit-package-bases $STUB_FILE $DIRS_TO_CHECK"
+ mypy --explicit-package-bases "$STUB_FILE" $DIRS_TO_CHECK || true
+ echo "mypy version: $(mypy --version)"
+ shell: bash
+
+ - name: Build Python wheel for testing
+ if: inputs.task == 'test' || inputs.task == 'build'
+ run: |
+ cd foreign/python
+
+ # Build the module
+ echo "Building Python wheel..."
+ maturin build -o dist
+
+ if [ "${{ inputs.task }}" = "test" ]; then
+ # Install the built wheel for testing
+ echo "Installing built wheel..."
+ pip install dist/*.whl --force-reinstall
+
+ # Create symlink for examples directory if needed
+ if [ -d "python_examples" ] && [ ! -e "examples" ]; then
+ ln -s python_examples examples
+ fi
+ fi
+
+ if [ "${{ inputs.task }}" = "build" ]; then
+ # List built artifacts
+ echo ""
+ echo "Build artifacts:"
+ ls -la dist/
+ fi
+ shell: bash
+
+ - name: Start Iggy server
+ if: inputs.task == 'test'
+ id: iggy
+ uses: ./.github/actions/utils/server-start
+ with:
+ mode: cargo
+ cargo-bin: iggy-server
+ host: 127.0.0.1
+ port: 8090
+ wait-timeout-seconds: 45
+ continue-on-error: true
+
+ - name: Run Python integration tests
+ if: inputs.task == 'test' && steps.iggy.outcome == 'success'
+ run: |
+ cd foreign/python
+
+ echo "Running integration tests with Iggy server at ${{ steps.iggy.outputs.address }}..."
+
+ # Run all tests with server connection
+ IGGY_SERVER_HOST=127.0.0.1 \
+ IGGY_SERVER_TCP_PORT=8090 \
+ pytest tests/ -v \
+ --junitxml=../../reports/python-junit.xml \
+ --tb=short \
+ --capture=no || TEST_EXIT_CODE=$?
+
+ # Exit with test result
+ exit ${TEST_EXIT_CODE:-0}
+ shell: bash
+
+ - name: Run Python unit tests only (fallback)
+ if: inputs.task == 'test' && steps.iggy.outcome != 'success'
+ run: |
+ cd foreign/python
+
+ echo "β οΈ Server failed to start, running unit tests only..."
+
+ # Run unit tests only (exclude integration tests)
+ pytest tests/ -v \
+ -m "not integration" \
+ --junitxml=../../reports/python-junit.xml \
+ --tb=short || TEST_EXIT_CODE=$?
+
+ # Exit with test result (allow some failures in unit-only mode)
+ exit ${TEST_EXIT_CODE:-0}
+ shell: bash
+
+ - name: Stop Iggy server
+ if: always() && inputs.task == 'test'
+ uses: ./.github/actions/utils/server-stop
+ with:
+ pid-file: ${{ steps.iggy.outputs.pid_file }}
+ log-file: ${{ steps.iggy.outputs.log_file }}
+
+ - name: Upload test artifacts
+ if: always() && inputs.task == 'test'
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-test-results-${{ github.run_id }}-${{ github.run_attempt }}
+ path: |
+ reports/python-junit.xml
+ foreign/python/dist/*.whl
+ retention-days: 7
+ if-no-files-found: ignore
diff --git a/.github/actions/rust/post-merge/action.yml b/.github/actions/rust/post-merge/action.yml
new file mode 100644
index 000000000..1255ae469
--- /dev/null
+++ b/.github/actions/rust/post-merge/action.yml
@@ -0,0 +1,172 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: rust-post-merge
+description: Rust post-merge crates.io publishing github iggy actions
+
+inputs:
+ package:
+ description: "Package name for publishing (e.g., iggy, iggy_common, iggy_binary_protocol)"
+ required: true
+ version:
+ description: "Version for publishing"
+ required: true
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Rust with cache
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+ show-stats: false # Don't need stats for publishing
+
+ - name: Validate package
+ run: |
+ PACKAGE="${{ inputs.package }}"
+ VERSION="${{ inputs.version }}"
+
+ echo "π¦ Validating Rust crate: $PACKAGE"
+ echo "Version: $VERSION"
+ echo ""
+
+ # Check if package exists in workspace
+ if ! cargo metadata --format-version 1 | jq -e ".packages[] | select(.name == \"$PACKAGE\")" > /dev/null; then
+ echo "β Package '$PACKAGE' not found in workspace"
+ echo ""
+ echo "Available packages:"
+ cargo metadata --format-version 1 | jq -r '.packages[].name' | sort
+ exit 1
+ fi
+
+ # Get package information
+ CARGO_VERSION=$(cargo metadata --format-version 1 | jq -r ".packages[] | select(.name == \"$PACKAGE\") | .version")
+ CARGO_PATH=$(cargo metadata --format-version 1 | jq -r ".packages[] | select(.name == \"$PACKAGE\") | .manifest_path")
+
+ echo "Current Cargo.toml version: $CARGO_VERSION"
+ echo "Target version: $VERSION"
+ echo "Manifest path: $CARGO_PATH"
+
+ # Check version consistency
+ if [ "$CARGO_VERSION" != "$VERSION" ]; then
+ echo "β οΈ Warning: Cargo.toml version ($CARGO_VERSION) doesn't match target version ($VERSION)"
+ echo "Make sure to update Cargo.toml before publishing"
+ fi
+
+ # Show package dependencies
+ echo ""
+ echo "Package dependencies:"
+ cargo tree -p "$PACKAGE" --depth 1 | head -20
+ shell: bash
+
+ - name: Build package
+ run: |
+ PACKAGE="${{ inputs.package }}"
+
+ echo "π¨ Building package: $PACKAGE"
+ cargo build -p "$PACKAGE" --release
+
+ # Verify the build
+ if [ $? -eq 0 ]; then
+            echo "✅ Package built successfully"
+ else
+ echo "β Build failed"
+ exit 1
+ fi
+ shell: bash
+
+ - name: Verify package contents
+ run: |
+ PACKAGE="${{ inputs.package }}"
+
+ echo "π Package contents verification:"
+ echo ""
+
+ # List what would be included in the package
+ cargo package -p "$PACKAGE" --list | head -50
+
+ echo ""
+ echo "Package size estimate:"
+ cargo package -p "$PACKAGE" --list | wc -l
+ echo "files would be included"
+ shell: bash
+
+ - name: Publish to crates.io
+ env:
+ CARGO_REGISTRY_TOKEN: ${{ env.CARGO_REGISTRY_TOKEN }}
+ run: |
+ PACKAGE="${{ inputs.package }}"
+ VERSION="${{ inputs.version }}"
+
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ echo "π Dry run - would publish crate: $PACKAGE"
+ echo ""
+
+ # Run cargo publish in dry-run mode
+ cargo publish --dry-run -p "$PACKAGE"
+
+ echo ""
+ echo "Would publish:"
+ echo " Package: $PACKAGE"
+ echo " Version: $VERSION"
+ echo " Registry: crates.io"
+ echo ""
+ echo "After publishing, users could use:"
+ echo ' [dependencies]'
+ echo " $PACKAGE = \"$VERSION\""
+ echo ""
+ echo "Or with cargo add:"
+ echo " cargo add $PACKAGE@$VERSION"
+ else
+ if [ -z "$CARGO_REGISTRY_TOKEN" ]; then
+ echo "β CARGO_REGISTRY_TOKEN is not set"
+ exit 1
+ fi
+
+ echo "π¦ Publishing crate to crates.io..."
+ echo "Package: $PACKAGE"
+ echo "Version: $VERSION"
+ echo ""
+
+ # Publish the package
+ cargo publish -p "$PACKAGE"
+
+ if [ $? -eq 0 ]; then
+ echo ""
+            echo "✅ Successfully published to crates.io"
+ echo ""
+ echo "Package: $PACKAGE v$VERSION"
+ echo "Registry: https://crates.io/crates/$PACKAGE"
+ echo ""
+ echo "Users can now use:"
+ echo ' [dependencies]'
+ echo " $PACKAGE = \"$VERSION\""
+ echo ""
+ echo "Or with cargo add:"
+ echo " cargo add $PACKAGE@$VERSION"
+ echo ""
+ echo "View on crates.io: https://crates.io/crates/$PACKAGE/$VERSION"
+ else
+ echo "β Publishing failed"
+ exit 1
+ fi
+ fi
+ shell: bash
diff --git a/.github/actions/rust/pre-merge/action.yml b/.github/actions/rust/pre-merge/action.yml
new file mode 100644
index 000000000..e380b69f8
--- /dev/null
+++ b/.github/actions/rust/pre-merge/action.yml
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: rust-pre-merge
+description: Rust pre-merge testing and linting github iggy actions
+
+inputs:
+ task:
+ description: "Task to run (check, fmt, clippy, sort, machete, doctest, test, compat)"
+ required: true
+ component:
+ description: "Component name (for context)"
+ required: false
+ default: ""
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Rust with cache
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+ show-stats: true
+
+ - name: Install tools for specific tasks
+ run: |
+ case "${{ inputs.task }}" in
+ sort)
+ cargo install cargo-sort --locked
+ ;;
+ machete)
+ cargo install cargo-machete --locked
+ ;;
+ esac
+ shell: bash
+
+ # Individual lint tasks for parallel execution
+ - name: Cargo check
+ if: inputs.task == 'check'
+ run: cargo check --all --all-features
+ shell: bash
+
+ - name: Cargo fmt
+ if: inputs.task == 'fmt'
+ run: cargo fmt --all -- --check
+ shell: bash
+
+ - name: Cargo clippy
+ if: inputs.task == 'clippy'
+ run: cargo clippy --all-targets --all-features -- -D warnings
+ shell: bash
+
+ - name: Cargo sort
+ if: inputs.task == 'sort'
+ run: cargo sort --check --workspace
+ shell: bash
+
+ - name: Cargo machete
+ if: inputs.task == 'machete'
+ run: cargo machete --with-metadata
+ shell: bash
+
+ - name: Doc test
+ if: inputs.task == 'doctest'
+ run: |
+ cargo test --locked --doc
+ cargo doc --no-deps --all-features --quiet
+ shell: bash
+
+ - name: Install dependencies for Rust tests
+ if: inputs.task == 'test' && runner.os == 'Linux'
+ run: |
+ sudo apt-get update --yes && sudo apt-get install --yes musl-tools gnome-keyring keyutils
+ rm -f $HOME/.local/share/keyrings/*
+ echo -n "test" | gnome-keyring-daemon --unlock
+ shell: bash
+
+ - name: Build and test
+ if: inputs.task == 'test'
+ run: |
+ # Build all targets first
+ cargo build --locked --all-targets
+
+ # Run tests with nextest for better parallelization and output
+ # nextest automatically uses all available cores and provides better test output
+ cargo nextest run --locked --no-fail-fast
+ shell: bash
+
+ - name: Backwards compatibility check
+ if: inputs.task == 'compat'
+ run: |
+ scripts/check-backwards-compat.sh \
+ --master-ref master \
+ --pr-ref ${{ github.sha }} \
+ --port 8090 --wait-secs 180
+ shell: bash
diff --git a/.github/actions/utils/docker-buildx/action.yml b/.github/actions/utils/docker-buildx/action.yml
new file mode 100644
index 000000000..ac2565585
--- /dev/null
+++ b/.github/actions/utils/docker-buildx/action.yml
@@ -0,0 +1,303 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: docker-buildx
+description: Multi-arch Docker build and push
+inputs:
+ task:
+ description: "Task to run (build/publish/docker)"
+ required: false
+ default: "build"
+ component:
+ description: "Component key from .github/config/publish.yml (e.g. rust-server)"
+ required: false
+ default: ""
+ version:
+ description: "Version tag (use 'edge' for master post-merge)"
+ required: false
+ default: "test"
+ context:
+ description: "Build context"
+ required: false
+ default: "."
+ dry_run:
+ description: "Dry run mode"
+ required: false
+ default: "false"
+ libc:
+ description: "Libc to use (glibc/musl)"
+ required: false
+ default: "musl"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Resolve component from publish.yml
+ id: config
+ shell: bash
+ run: |
+ set -euxo pipefail
+
+ comp="${{ inputs.component }}"
+ image=""
+ dockerfile=""
+ cfg_platforms=""
+
+ if [ -n "$comp" ]; then
+ if ! command -v yq >/dev/null 2>&1; then
+ YQ_VERSION="v4.47.1"
+ YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
+ wget -qO /tmp/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
+ echo "${YQ_CHECKSUM} /tmp/yq" | sha256sum -c - || exit 1
+ chmod +x /tmp/yq
+ sudo mv /tmp/yq /usr/local/bin/yq
+ fi
+
+ components_b64="$(yq -o=json -I=0 '.components' .github/config/publish.yml | base64 -w0)"
+
+ JSON_B64="$components_b64" COMP="$comp" node -e '
+ const b64 = process.env.JSON_B64 || "";
+ const comp = process.env.COMP || "";
+ if (!b64) { console.error("Missing publish.yml components"); process.exit(1); }
+ const cfg = JSON.parse(Buffer.from(b64, "base64").toString("utf8"));
+ const e = cfg[comp];
+ if (!e) { console.error(`Component not found in publish.yml: ${comp || ""}`); process.exit(1); }
+ const out = {
+ image: e.image || "",
+ dockerfile: e.dockerfile || "Dockerfile",
+ platforms: Array.isArray(e.platforms) ? e.platforms.join(",") : (e.platforms || "")
+ };
+ process.stdout.write(JSON.stringify(out));
+ ' > /tmp/_dockercfg.json
+
+ image="$(jq -r .image /tmp/_dockercfg.json)"
+ dockerfile="$(jq -r .dockerfile /tmp/_dockercfg.json)"
+ cfg_platforms="$(jq -r .platforms /tmp/_dockercfg.json)"
+ fi
+
+ should_push="false"
+ if [ "${{ inputs.task }}" = "publish" ] && [ "${{ inputs.dry_run }}" != "true" ]; then
+ should_push="true"
+ fi
+
+ echo "image=$image" >> "$GITHUB_OUTPUT"
+ echo "dockerfile=$dockerfile" >> "$GITHUB_OUTPUT"
+ echo "cfg_platforms=$cfg_platforms" >> "$GITHUB_OUTPUT"
+ echo "should_push=$should_push" >> "$GITHUB_OUTPUT"
+
+ echo "π³ Config:"
+ echo " component: ${comp:-}"
+ echo " image: ${image:-}"
+          echo " dockerfile: ${dockerfile:-}"
+ echo " push: ${should_push}"
+ echo " version: ${{ inputs.version }}"
+
+ if [ -n "$comp" ] && [ -z "$image" ]; then
+ echo "Component '${comp}' missing image mapping in publish.yml" >&2
+ exit 1
+ fi
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+ with:
+ platforms: all
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver-opts: |
+ network=host
+ image=moby/buildkit:latest
+
+ - name: Login to Docker Hub
+ if: steps.config.outputs.should_push == 'true'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ env.DOCKERHUB_USERNAME }}
+ password: ${{ env.DOCKERHUB_TOKEN }}
+
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ steps.config.outputs.image }}
+ tags: |
+ type=raw,value=${{ inputs.version }}
+ type=raw,value=latest,enable=${{ steps.config.outputs.should_push == 'true' && inputs.version != 'test' && inputs.version != 'edge' }}
+ type=sha,enable=${{ inputs.version == 'test' }}
+
+ - name: Determine platforms
+ id: platforms
+ shell: bash
+ run: |
+ p_cfg="${{ steps.config.outputs.cfg_platforms }}"
+ if [ -n "$p_cfg" ]; then
+ platforms="$p_cfg"
+ elif [ "${{ steps.config.outputs.should_push }}" = "true" ]; then
+ platforms="linux/amd64,linux/arm64"
+ else
+ platforms="linux/amd64"
+ fi
+ echo "platforms=$platforms" >> "$GITHUB_OUTPUT"
+ echo "π₯οΈ Platforms: $platforms"
+
+
+ - name: Compose cache config (no registry on dry-run)
+ id: cachecfg
+ shell: bash
+ run: |
+ set -euo pipefail
+ comp="${{ inputs.component }}"
+ os="${{ runner.os }}"
+ img="${{ steps.config.outputs.image }}"
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ # dry-run/forks: avoid Docker Hub completely
+ CACHE_TO=$(
+ printf '%s\n' \
+ "type=gha,scope=buildkit-${comp}-${os},mode=max" \
+ "type=gha,scope=buildkit-${comp},mode=max" \
+ "type=inline"
+ )
+ CACHE_FROM=$(
+ printf '%s\n' \
+ "type=gha,scope=buildkit-${comp}-${os}" \
+ "type=gha,scope=buildkit-${comp}"
+ )
+ else
+ # CI on upstream: use registry + gha + inline
+ CACHE_TO=$(
+ printf '%s\n' \
+ "type=registry,ref=${img}:buildcache,mode=max" \
+ "type=registry,ref=${img}:buildcache-${os},mode=max" \
+ "type=inline" \
+ "type=gha,scope=buildkit-${comp}-${os},mode=max"
+ )
+ CACHE_FROM=$(
+ printf '%s\n' \
+ "type=registry,ref=${img}:buildcache" \
+ "type=registry,ref=${img}:buildcache-${os}" \
+ "type=registry,ref=${img}:latest" \
+ "type=gha,scope=buildkit-${comp}-${os}" \
+ "type=gha,scope=buildkit-${comp}"
+ )
+ fi
+
+          # Export multi-line values to GITHUB_ENV using the heredoc delimiter syntax
+          {
+            echo 'CACHE_TO<<__EOF__'
+            printf '%s\n' "$CACHE_TO"
+            echo '__EOF__'
+            echo 'CACHE_FROM<<__EOF__'
+            printf '%s\n' "$CACHE_FROM"
+            echo '__EOF__'
+          } >> "$GITHUB_ENV"
+
+ echo "Computed cache-to:"
+ printf '%s\n' "$CACHE_TO"
+ echo "Computed cache-from:"
+ printf '%s\n' "$CACHE_FROM"
+
+
+ - name: Determine Rust toolchain version (labels)
+ shell: bash
+ run: |
+ if [[ -f rust-toolchain.toml ]]; then
+ ver="$(sed -En 's/^[[:space:]]*channel[[:space:]]*=[[:space:]]*"([^"]+)".*/\1/p' rust-toolchain.toml | head -1)"
+ : "${ver:=unknown}"
+ echo "RUST_VERSION=$ver" >> "$GITHUB_ENV"
+ echo "Using toolchain: $ver"
+ else
+ echo "RUST_VERSION=unknown" >> "$GITHUB_ENV"
+ echo "No rust-toolchain.toml found; labeling as unknown"
+ fi
+
+ - name: Compose build args
+ id: bargs
+ shell: bash
+ run: |
+ set -euo pipefail
+ args="VERSION=${{ inputs.version }}
+ BUILD_DATE=${{ github.event.repository.updated_at }}
+ VCS_REF=${{ github.sha }}
+ RUST_VERSION=${{ env.RUST_VERSION }}
+ LIBC=${{ inputs.libc }}
+ IGGY_CI_BUILD=true"
+ {
+ echo "all<<__EOF__"
+ echo "$args"
+ echo "__EOF__"
+ } >> "$GITHUB_OUTPUT"
+ echo "Build args:"
+ printf '%s\n' "$args"
+
+ - name: Determine build context
+ id: ctx
+ shell: bash
+ run: |
+ # For web-ui, the context should be the web directory
+ if [ "${{ inputs.component }}" = "web-ui" ]; then
+ context="web"
+ else
+ context="${{ inputs.context }}"
+ fi
+ echo "context=$context" >> "$GITHUB_OUTPUT"
+ echo "π Build context: $context"
+
+ - name: Build and push
+ id: build
+ uses: docker/build-push-action@v6
+ with:
+ context: ${{ steps.ctx.outputs.context }}
+ file: ${{ steps.config.outputs.dockerfile }}
+ platforms: ${{ steps.platforms.outputs.platforms }}
+ push: ${{ steps.config.outputs.should_push }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ cache-from: ${{ env.CACHE_FROM }}
+ cache-to: ${{ env.CACHE_TO }}
+ build-args: |
+ ${{ steps.bargs.outputs.all }}
+
+ - name: Export image (if not pushing)
+ if: steps.config.outputs.should_push == 'false' && inputs.task != 'publish'
+ shell: bash
+ run: |
+ echo "Built: ${{ steps.config.outputs.image }}:${{ inputs.version }}"
+ echo "To save: docker save ${{ steps.config.outputs.image }}:${{ inputs.version }} -o image.tar"
+
+ - name: Summary
+ if: always()
+ shell: bash
+ run: |
+ {
+ echo "## π³ Docker Build Summary"
+ echo ""
+ echo "| Property | Value |"
+ echo "|----------|-------|"
+ echo "| Image | \`${{ steps.config.outputs.image }}\` |"
+ echo "| Version | \`${{ inputs.version }}\` |"
+ echo "| Dockerfile | \`${{ steps.config.outputs.dockerfile }}\` |"
+ echo "| Platforms | \`${{ steps.platforms.outputs.platforms }}\` |"
+ echo "| Pushed | ${{ steps.config.outputs.should_push }} |"
+ if [ "${{ steps.config.outputs.should_push }}" = "true" ]; then
+ echo "| Digest | \`${{ steps.build.outputs.digest }}\` |"
+ echo ""
+ echo "### Pull"
+ echo '```bash'
+ echo "docker pull ${{ steps.config.outputs.image }}:${{ inputs.version }}"
+ echo '```'
+ fi
+ } >> "$GITHUB_STEP_SUMMARY"
diff --git a/.github/actions/utils/server-start/action.yml b/.github/actions/utils/server-start/action.yml
new file mode 100644
index 000000000..1ebe5350d
--- /dev/null
+++ b/.github/actions/utils/server-start/action.yml
@@ -0,0 +1,181 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: server-start
+description: Start Iggy server and wait for readiness
+inputs:
+ mode:
+ description: "How to run the server: cargo|bin"
+ required: false
+ default: "cargo"
+ cargo-bin:
+ description: "Cargo binary name (when mode=cargo)"
+ required: false
+ default: "iggy-server"
+ cargo-profile:
+ description: "Cargo profile: release|debug"
+ required: false
+ default: "debug"
+ bin:
+ description: "Path to server binary (when mode=bin)"
+ required: false
+ default: ""
+ working-directory:
+ description: "Working directory for building/running"
+ required: false
+ default: "."
+ host:
+ description: "Bind host to test readiness against"
+ required: false
+ default: "127.0.0.1"
+ port:
+ description: "TCP port to check readiness"
+ required: false
+ default: "8090"
+ wait-timeout-seconds:
+ description: "Max seconds to wait until the server is ready"
+ required: false
+ default: "45"
+ log-file:
+ description: "Where to write server logs"
+ required: false
+ default: ""
+ pid-file:
+ description: "Where to write the PID"
+ required: false
+ default: ""
+ fail-if-busy:
+ description: "Fail if port is already in use"
+ required: false
+ default: "true"
+outputs:
+ pid:
+ description: "PID of the background server process"
+ value: ${{ steps.out.outputs.pid }}
+ pid_file:
+ description: "Path to PID file"
+ value: ${{ steps.out.outputs.pid_file }}
+ log_file:
+ description: "Path to log file"
+ value: ${{ steps.out.outputs.log_file }}
+ address:
+ description: "Host:port used for checks"
+ value: ${{ steps.out.outputs.address }}
+runs:
+ using: "composite"
+ steps:
+ - id: prep
+ shell: bash
+ run: |
+ set -euo pipefail
+ : "${RUNNER_TEMP:?}"
+ LOG_FILE="${{ inputs.log-file || '' }}"
+ PID_FILE="${{ inputs.pid-file || '' }}"
+ [ -n "$LOG_FILE" ] || LOG_FILE="$RUNNER_TEMP/iggy-server.log"
+ [ -n "$PID_FILE" ] || PID_FILE="$RUNNER_TEMP/iggy-server.pid"
+ echo "LOG_FILE=$LOG_FILE" >> "$GITHUB_ENV"
+ echo "PID_FILE=$PID_FILE" >> "$GITHUB_ENV"
+
+ - id: resolve-bin
+ shell: bash
+ working-directory: ${{ inputs.working-directory }}
+ run: |
+ set -euo pipefail
+ MODE="${{ inputs.mode }}"
+ BIN_PATH=""
+ if [[ "$MODE" == "cargo" ]]; then
+ PROFILE="${{ inputs.cargo-profile }}"
+ NAME="${{ inputs.cargo-bin }}"
+ if [[ "$PROFILE" == "release" ]]; then
+ OUT="target/release/$NAME"
+ else
+ OUT="target/debug/$NAME"
+ fi
+ if [[ ! -x "$OUT" ]]; then
+            echo "Building $NAME with cargo ($PROFILE)..."
+ cargo build --locked --bin "$NAME" $([[ "$PROFILE" == "release" ]] && echo "--release")
+ fi
+ BIN_PATH="$OUT"
+ else
+ BIN_PATH="${{ inputs.bin }}"
+ [[ -x "$BIN_PATH" ]] || { echo "Binary not found or not executable: $BIN_PATH"; exit 1; }
+ fi
+ echo "bin=$BIN_PATH" >> "$GITHUB_OUTPUT"
+
+ - id: busy
+ shell: bash
+ run: |
+ set -euo pipefail
+ HOST="${{ inputs.host }}"
+ PORT="${{ inputs.port }}"
+ # true if socket is already open
+ if command -v nc >/dev/null 2>&1; then
+ if nc -z "$HOST" "$PORT" 2>/dev/null; then busy=1; else busy=0; fi
+ else
+            if timeout 1 bash -lc ":</dev/tcp/$HOST/$PORT" 2>/dev/null; then busy=1; else busy=0; fi
+ fi
+ echo "BUSY=$busy" >> "$GITHUB_ENV"
+
+ - if: env.BUSY == '1' && inputs.fail-if-busy == 'true'
+ shell: bash
+ run: |
+ echo "Port ${{ inputs.host }}:${{ inputs.port }} is already in use." >&2
+ exit 1
+
+ - name: Start server
+ shell: bash
+ working-directory: ${{ inputs.working-directory }}
+ run: |
+ set -euo pipefail
+ nohup "${{ steps.resolve-bin.outputs.bin }}" >"$LOG_FILE" 2>&1 &
+ echo $! > "$PID_FILE"
+ echo "Started server PID $(cat "$PID_FILE") β logs: $LOG_FILE"
+
+ - name: Wait for readiness
+ shell: bash
+ run: |
+ set -euo pipefail
+ HOST="${{ inputs.host }}"
+ PORT="${{ inputs.port }}"
+ DEADLINE=$(( $(date +%s) + ${{ inputs.wait-timeout-seconds }} ))
+ until (( $(date +%s) > DEADLINE )); do
+ if command -v nc >/dev/null 2>&1; then
+ nc -z "$HOST" "$PORT" 2>/dev/null && ready=1 || ready=0
+ else
+            timeout 1 bash -lc ":</dev/tcp/$HOST/$PORT" 2>/dev/null && ready=1 || ready=0
+ fi
+ if [[ "$ready" == "1" ]]; then
+ echo "Server is ready on $HOST:$PORT"
+ break
+ fi
+ sleep 1
+ done
+ if [[ "${ready:-0}" != "1" ]]; then
+ echo "Server did not become ready within ${{ inputs.wait-timeout-seconds }}s." >&2
+ echo "---- last 100 lines of log ----"
+ tail -n 100 "$LOG_FILE" || true
+ kill "$(cat "$PID_FILE")" 2>/dev/null || true
+ exit 1
+ fi
+
+ - id: out
+ shell: bash
+ run: |
+ echo "pid=$(cat "$PID_FILE")" >> "$GITHUB_OUTPUT"
+ echo "pid_file=$PID_FILE" >> "$GITHUB_OUTPUT"
+ echo "log_file=$LOG_FILE" >> "$GITHUB_OUTPUT"
+ echo "address=${{ inputs.host }}:${{ inputs.port }}" >> "$GITHUB_OUTPUT"
diff --git a/.github/actions/utils/server-stop/action.yml b/.github/actions/utils/server-stop/action.yml
new file mode 100644
index 000000000..3c694a1e9
--- /dev/null
+++ b/.github/actions/utils/server-stop/action.yml
@@ -0,0 +1,67 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: server-stop
+description: Stop Iggy server by PID or PID file
+inputs:
+ pid:
+ description: "PID to stop (optional if pid-file is provided)"
+ required: false
+ default: ""
+ pid-file:
+ description: "Path to PID file"
+ required: false
+ default: ""
+ log-file:
+ description: "Path to log file (for final tail)"
+ required: false
+ default: ""
+ kill-timeout-seconds:
+ description: "Graceful timeout before SIGKILL"
+ required: false
+ default: "10"
+runs:
+ using: "composite"
+ steps:
+ - shell: bash
+ run: |
+ set -euo pipefail
+ PID_INPUT="${{ inputs.pid }}"
+ PID_FILE="${{ inputs.pid-file }}"
+ LOG_FILE="${{ inputs.log-file }}"
+ if [[ -z "$PID_INPUT" && -n "$PID_FILE" && -f "$PID_FILE" ]]; then
+ PID_INPUT="$(cat "$PID_FILE" || true)"
+ fi
+ if [[ -z "$PID_INPUT" ]]; then
+ echo "No PID provided/found; nothing to stop."
+ exit 0
+ fi
+
+ if ps -p "$PID_INPUT" > /dev/null 2>&1; then
+ kill "$PID_INPUT" || true
+ for i in $(seq 1 ${{ inputs.kill-timeout-seconds }}); do
+ ps -p "$PID_INPUT" > /dev/null 2>&1 || exit 0
+ sleep 1
+ done
+ echo "Process still alive after grace; sending SIGKILL"
+ kill -9 "$PID_INPUT" || true
+ fi
+
+ if [[ -n "$LOG_FILE" && -f "$LOG_FILE" ]]; then
+ echo "---- last 50 lines of server log ----"
+ tail -n 50 "$LOG_FILE" || true
+ fi
diff --git a/.github/actions/utils/setup-rust-with-cache/action.yml b/.github/actions/utils/setup-rust-with-cache/action.yml
new file mode 100644
index 000000000..bf2df2831
--- /dev/null
+++ b/.github/actions/utils/setup-rust-with-cache/action.yml
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE: this action sets up the Rust toolchain + swatinem/rust-cache for dependencies + sccache for compilation,
+# it is a convenience wrapper, so that we can use it in all workflows.
+
+name: setup-rust-with-cache
+description: Setup Rust toolchain and comprehensive caching (rust-cache for dependencies + sccache for compilation)
+inputs:
+ enabled:
+ description: "Whether to enable caching"
+ required: false
+ default: "true"
+ show-stats:
+ description: "Whether to show sccache statistics at the end"
+ required: false
+ default: "false"
+ cache-targets:
+ description: "Whether to cache target directory (passed to rust-cache)"
+ required: false
+ default: "false"
+
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Rust toolchain
+ run: |
+ echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
+ shell: bash
+
+ - name: Setup Rust dependencies cache
+ if: inputs.enabled == 'true'
+ uses: Swatinem/rust-cache@v2
+ with:
+ cache-targets: ${{ inputs.cache-targets }}
+
+ - name: Setup sccache cache
+ if: inputs.enabled == 'true'
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.cache/sccache
+ ~/Library/Caches/sccache
+ ~/.local/share/sccache
+ key: sccache-${{ runner.os }}-${{ github.job }}-${{ hashFiles('**/Cargo.lock', 'rust-toolchain.toml') }}
+ restore-keys: |
+ sccache-${{ runner.os }}-${{ github.job }}-
+ sccache-${{ runner.os }}-
+
+ - name: Install sccache
+ if: inputs.enabled == 'true'
+ run: |
+ # Check if sccache is already installed
+ if ! command -v sccache &> /dev/null; then
+ echo "Installing sccache..."
+ SCCACHE_VERSION="v0.8.2"
+ SCCACHE_URL="https://github.com/mozilla/sccache/releases/download/${SCCACHE_VERSION}/sccache-${SCCACHE_VERSION}-x86_64-unknown-linux-musl.tar.gz"
+
+ curl -L "$SCCACHE_URL" | tar xz
+ sudo mv sccache-${SCCACHE_VERSION}-x86_64-unknown-linux-musl/sccache /usr/local/bin/
+ rm -rf sccache-${SCCACHE_VERSION}-x86_64-unknown-linux-musl
+ fi
+ shell: bash
+ continue-on-error: true
+
+ - name: Install cargo-nextest
+ run: |
+ # Check if cargo-nextest is already installed
+ if ! command -v cargo-nextest &> /dev/null; then
+ echo "Installing cargo-nextest..."
+ # Use the install script for the fastest installation
+ curl -LsSf https://get.nexte.st/latest/linux | tar xzf - -C ${CARGO_HOME:-~/.cargo}/bin
+ echo "✅ cargo-nextest installed successfully"
+ else
+ echo "✅ cargo-nextest already installed"
+ fi
+
+ # Verify installation
+ cargo nextest --version || true
+ shell: bash
+ continue-on-error: true
+
+ - name: Configure Rust for sccache
+ if: inputs.enabled == 'true'
+ run: |
+ # Only use sccache if it was successfully installed
+ if command -v sccache &> /dev/null; then
+ # Configure sccache to use local disk cache
+ export SCCACHE_DIR="${HOME}/.cache/sccache"
+ mkdir -p "$SCCACHE_DIR"
+
+ echo "SCCACHE_DIR=$SCCACHE_DIR" >> $GITHUB_ENV
+ echo "SCCACHE_CACHE_SIZE=2G" >> $GITHUB_ENV
+ echo "SCCACHE_ERROR_BEHAVIOR=warn" >> $GITHUB_ENV
+ echo "SCCACHE_IGNORE_SERVER_IO_ERROR=1" >> $GITHUB_ENV
+
+ # Don't use GHA cache backend - use local disk cache with actions/cache
+ # This provides better control and persistence
+ echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV
+
+ # Start sccache server
+ sccache --stop-server 2>/dev/null || true
+ sccache --start-server || true
+
+ # Test if sccache is working
+ if sccache --show-stats >/dev/null 2>&1; then
+ echo "✅ sccache configured with local cache at $SCCACHE_DIR"
+
+ # Show initial stats
+ echo "Initial cache stats:"
+ sccache --show-stats || true
+ else
+ echo "⚠️ sccache installed but not functioning, continuing without cache"
+ fi
+ else
+ echo "⚠️ sccache not available, continuing without cache"
+ fi
+ shell: bash
+
+ - name: Show sccache stats on completion
+ if: always() && inputs.enabled == 'true' && inputs.show-stats == 'true'
+ run: |
+ if command -v sccache &> /dev/null; then
+ echo "### Final sccache Statistics ###"
+ sccache --show-stats || true
+ fi
+ shell: bash
diff --git a/.github/changed-files-config.json b/.github/changed-files-config.json
deleted file mode 100644
index 87113c31b..000000000
--- a/.github/changed-files-config.json
+++ /dev/null
@@ -1,100 +0,0 @@
-{
- "rust": [
- "Dockerfile",
- "Dockerfile.debug",
- "core/(bench|cli|examples|integration|sdk|server|tools|connectors|ai).*\\.rs",
- "bdd/rust/.*\\.rs",
- "core/Cargo.toml",
- "Cargo.toml",
- "Cargo.lock",
- ".github/workflows/ci-.*-rust.yml",
- ".github/workflows/ci-check-pr.yml"
- ],
- "shell": [
- "Dockerfile",
- "Dockerfile.debug",
- ".github/scripts/.*\\.sh",
- "scripts/.*\\.sh",
- ".github/workflows/ci-.*-shell.yml",
- ".github/workflows/ci-check-pr.yml"
- ],
- "rust-prod": [
- "Dockerfile",
- "Dockerfile.debug",
- "core/(bench|cli|examples|integration|sdk|server|tools|connectors|ai).*\\.rs",
- "bdd/rust/.*\\.rs",
- "core/Cargo.toml",
- "Cargo.toml",
- "Cargo.lock",
- "Dockerfile.*",
- ".github/workflows/ci-prod-rust.yml"
- ],
- "go-sdk": [
- "Dockerfile",
- "Dockerfile.debug",
- "foreign/go/.*\\.go",
- "foreign/go/go.mod",
- "foreign/go/go.sum",
- "bdd/go/.*\\.go",
- "bdd/go/go.mod",
- "bdd/go/go.sum",
- "examples/go/.*\\.go",
- "examples/go/go.mod",
- "examples/go/go.sum",
- ".github/workflows/ci-check-go-sdk.yml"
- ],
- "java-sdk": [
- "Dockerfile",
- "Dockerfile.debug",
- "foreign/java/.*\\.java",
- "foreign/java/.*\\.kts",
- "foreign/java/.*\\.sh",
- ".github/workflows/ci-check-java-sdk.yml"
- ],
- "python-sdk": [
- "Dockerfile",
- "Dockerfile.debug",
- "foreign/python/.*\\.py",
- "foreign/python/.*\\.rs",
- "bdd/python/.*\\.py",
- "foreign/python/pyproject.toml",
- "foreign/python/Cargo.toml",
- "foreign/python/docker-compose.test.yml",
- "foreign/python/Dockerfile.test",
- "foreign/python/.*\\.toml",
- "foreign/python/.*\\.ini",
- "foreign/python/.*\\.md",
- ".github/workflows/ci-check-python-sdk.yml"
- ],
- "node-sdk": [
- "Dockerfile",
- "Dockerfile.debug",
- "foreign/node/.*\\.ts",
- "foreign/node/.*\\.js",
- "foreign/node/.*\\.json",
- ".github/workflows/ci-check-node-sdk.yml"
- ],
- "csharp-sdk": [
- "Dockerfile",
- "Dockerfile.debug",
- "foreign/csharp/.*\\.cs",
- "foreign/csharp/.*\\.csproj",
- ".github/workflows/ci-check-csharp-sdk.yml"
- ],
- "bdd": [
- "Dockerfile",
- "Dockerfile.debug",
- "bdd/.*\\.feature",
- "bdd/.*\\.py",
- "bdd/.*\\.rs",
- "bdd/.*\\.go",
- "bdd/go/go.mod",
- "bdd/go/go.sum",
- "bdd/.*Dockerfile",
- "bdd/docker-compose.yml",
- "scripts/run-bdd-tests.sh",
- ".github/workflows/ci-test-bdd.yml",
- "core/.*\\.rs",
- "foreign/csharp/Iggy_SDK.Tests.BDD/.*\\.cs"
- ]
-}
diff --git a/.github/config/components.yml b/.github/config/components.yml
new file mode 100644
index 000000000..de2d40b4f
--- /dev/null
+++ b/.github/config/components.yml
@@ -0,0 +1,278 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+components:
+ # Workspace-level changes that affect all Rust components
+ rust-workspace:
+ paths:
+ - "Cargo.toml"
+ - "Cargo.lock"
+ - "rust-toolchain.toml"
+ - ".cargo/**"
+
+ # CI/CD infrastructure changes that require full regression
+ ci-infrastructure:
+ paths:
+ - ".github/**"
+
+ # Core library components that others depend on
+ rust-sdk:
+ depends_on:
+ - "rust-workspace" # SDK is affected by workspace changes
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "core/sdk/**"
+
+ rust-common:
+ depends_on:
+ - "rust-workspace" # Common is affected by workspace changes
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "core/common/**"
+
+ rust-binary-protocol:
+ depends_on:
+ - "rust-workspace" # Protocol is affected by workspace changes
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "core/binary_protocol/**"
+
+ rust-server:
+ depends_on:
+ - "rust-workspace" # Server is affected by workspace changes
+ - "ci-infrastructure" # CI changes trigger full regression
+ - "rust-common"
+ - "rust-binary-protocol"
+ paths:
+ - "core/server/**"
+
+ # Main Rust workspace testing
+ rust:
+ depends_on:
+ - "rust-workspace"
+ - "rust-sdk"
+ - "rust-common"
+ - "rust-binary-protocol"
+ - "rust-server"
+ - "rust-tools"
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "Dockerfile*"
+ - "!foreign/**" # Exclude foreign SDKs
+ - "!web/**" # Exclude web UI
+ tasks:
+ - "check"
+ - "fmt"
+ - "clippy"
+ - "sort"
+ - "doctest"
+ - "machete"
+ - "test"
+ - "compat"
+
+ # CLI component
+ rust-cli:
+ depends_on:
+ - "rust-sdk"
+ - "rust-binary-protocol"
+ paths:
+ - "core/cli/**"
+
+ # Benchmark component
+ rust-bench:
+ depends_on:
+ - "rust-sdk"
+ paths:
+ - "core/bench/**"
+
+ # Connectors runtime
+ rust-connectors:
+ depends_on:
+ - "rust-sdk"
+ paths:
+ - "core/connectors/**"
+
+ # MCP AI component
+ rust-mcp:
+ depends_on:
+ - "rust-sdk"
+ paths:
+ - "core/ai/mcp/**"
+
+ # Integration tests
+ rust-integration:
+ depends_on:
+ - "rust-sdk"
+ - "rust-common"
+ - "rust-binary-protocol"
+ - "rust-server"
+ paths:
+ - "core/integration/**"
+
+ sdk-python:
+ depends_on:
+ - "rust-sdk" # Python SDK wraps the Rust SDK
+ - "rust-server" # For integration tests
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "foreign/python/**"
+ tasks: ["lint", "test", "build"]
+
+ sdk-node:
+ depends_on:
+ - "rust-sdk" # Node SDK depends on core SDK
+ - "rust-server" # For integration tests
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "foreign/node/**"
+ tasks: ["lint", "test", "build", "e2e"]
+
+ sdk-go:
+ depends_on:
+ - "rust-sdk" # Go SDK depends on core SDK
+ - "rust-server" # For integration tests
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "foreign/go/**"
+ - "bdd/go/**"
+ - "examples/go/**"
+ tasks: ["lint", "test", "build", "e2e"]
+
+ sdk-java:
+ depends_on:
+ - "rust-sdk" # Java SDK depends on core SDK
+ - "rust-server" # For integration tests
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "foreign/java/**"
+ tasks: ["lint", "test", "build"]
+
+ sdk-csharp:
+ depends_on:
+ - "rust-sdk" # C# SDK depends on core SDK
+ - "rust-server" # For integration tests
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "foreign/csharp/**"
+ tasks: ["lint", "test", "build"]
+
+ # Individual BDD tests per SDK - only run when specific SDK changes
+ bdd-rust:
+ depends_on:
+ - "rust-sdk"
+ - "rust-server"
+ - "ci-infrastructure"
+ paths:
+ - "bdd/rust/**"
+ - "bdd/scenarios/**"
+ tasks: ["bdd-rust"]
+
+ bdd-python:
+ depends_on:
+ - "rust-server"
+ - "rust-sdk" # All SDKs depend on core SDK changes
+ - "sdk-python"
+ - "ci-infrastructure"
+ paths:
+ - "bdd/python/**"
+ - "bdd/scenarios/**"
+ tasks: ["bdd-python"]
+
+ bdd-go:
+ depends_on:
+ - "rust-server"
+ - "rust-sdk" # All SDKs depend on core SDK changes
+ - "sdk-go"
+ - "ci-infrastructure"
+ paths:
+ - "bdd/go/**"
+ - "bdd/scenarios/**"
+ tasks: ["bdd-go"]
+
+ bdd-node:
+ depends_on:
+ - "rust-server"
+ - "rust-sdk" # All SDKs depend on core SDK changes
+ - "sdk-node"
+ - "ci-infrastructure"
+ paths:
+ - "bdd/node/**"
+ - "bdd/scenarios/**"
+ tasks: ["bdd-node"]
+
+ bdd-csharp:
+ depends_on:
+ - "rust-server"
+ - "rust-sdk" # All SDKs depend on core SDK changes
+ - "sdk-csharp"
+ - "ci-infrastructure"
+ paths:
+ - "bdd/csharp/**"
+ - "bdd/scenarios/**"
+ tasks: ["bdd-csharp"]
+
+ # Meta component for running all BDD tests (used for scripts/docker-compose changes)
+ bdd-suite:
+ depends_on:
+ - "bdd-rust"
+ - "bdd-python"
+ - "bdd-go"
+ - "bdd-node"
+ - "bdd-csharp"
+ paths:
+ - "bdd/docker-compose.yml"
+ - "bdd/Dockerfile"
+ - "scripts/run-bdd-tests.sh"
+ tasks: ["bdd-rust", "bdd-python", "bdd-go", "bdd-node", "bdd-csharp"]
+
+ examples-suite:
+ depends_on:
+ - "rust-sdk"
+ - "rust-server"
+ - "sdk-go"
+ - "ci-infrastructure" # CI changes trigger full regression
+ paths:
+ - "examples/**"
+ - "scripts/run-rust-examples-from-readme.sh"
+ - "scripts/run-go-examples-from-readme.sh"
+ tasks: ["examples-rust", "examples-go"]
+
+ web-ui:
+ paths:
+ - "web/**"
+ tasks: ["lint", "build"]
+
+ rust-bench-dashboard:
+ paths:
+ - "core/bench/dashboard/**"
+ - "scripts/dashboard/**"
+ tasks: ["build"]
+
+ # Core tools component
+ rust-tools:
+ depends_on:
+ - "rust-workspace"
+ paths:
+ - "core/tools/**"
+
+ # CI/CD workflow monitoring (informational, no tasks)
+ ci-workflows:
+ paths:
+ - ".github/workflows/**/*.yml"
+ - ".github/actions/**/*.yml"
+ - ".github/ci/**/*.yml"
+ tasks: ["validate"] # Could run workflow validation
diff --git a/.github/config/publish.yml b/.github/config/publish.yml
new file mode 100644
index 000000000..f536cb9e3
--- /dev/null
+++ b/.github/config/publish.yml
@@ -0,0 +1,123 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+components:
+
+ # ββ Rust crates (crates.io) ββββββββββββββββββββββββββββββββββββββββββββββββ
+ rust-sdk:
+ tag_pattern: "^iggy-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: crates
+ package: iggy
+ version_file: "core/sdk/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-cli:
+ tag_pattern: "^cli-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: crates
+ package: iggy-cli
+ version_file: "core/cli/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-binary-protocol:
+ tag_pattern: "^binary-protocol-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: crates
+ package: iggy_binary_protocol
+ version_file: "core/binary_protocol/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-common:
+ tag_pattern: "^common-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: crates
+ package: iggy_common
+ version_file: "core/common/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ # ββ DockerHub images βββββββββββββββββββββββββββββββββββββββββββββββββββββββ
+ rust-server:
+ tag_pattern: "^server-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: dockerhub
+ image: apache/iggy
+ dockerfile: core/server/Dockerfile
+ platforms: ["linux/amd64", "linux/arm64"]
+ version_file: "core/server/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-mcp:
+ tag_pattern: "^mcp-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: dockerhub
+ image: apache/iggy-mcp
+ dockerfile: core/ai/mcp/Dockerfile
+ platforms: ["linux/amd64", "linux/arm64"]
+ version_file: "core/ai/mcp/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-bench-dashboard:
+ tag_pattern: "^bench-dashboard-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: dockerhub
+ image: apache/iggy-bench-dashboard
+ dockerfile: core/bench/dashboard/server/Dockerfile
+ platforms: ["linux/amd64"]
+ version_file: "core/bench/dashboard/server/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ rust-connectors:
+ tag_pattern: "^connectors-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: dockerhub
+ image: apache/iggy-connect
+ dockerfile: core/connectors/runtime/Dockerfile
+ platforms: ["linux/amd64", "linux/arm64"]
+ version_file: "core/connectors/runtime/Cargo.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ web-ui:
+ tag_pattern: "^web-ui-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: dockerhub
+ image: apache/iggy-web-ui
+ dockerfile: web/Dockerfile
+ platforms: ["linux/amd64"]
+ version_file: "web/package.json"
+ version_regex: '"version"\s*:\s*"([^"]+)"'
+
+ # ββ Other SDKs βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
+ sdk-python:
+ tag_pattern: "^python-sdk-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: pypi
+ version_file: "foreign/python/pyproject.toml"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ sdk-node:
+ tag_pattern: "^node-sdk-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: npm
+ version_file: "foreign/node/package.json"
+ version_regex: '"version"\s*:\s*"([^"]+)"'
+
+ sdk-java:
+ tag_pattern: "^java-sdk-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: maven
+ version_file: "foreign/java/java-sdk/build.gradle.kts"
+ version_regex: '(?m)^\s*version\s*=\s*"([^"]+)"'
+
+ sdk-csharp:
+ tag_pattern: "^csharp-sdk-([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: nuget
+ version_file: "foreign/csharp/Iggy_SDK/Iggy_SDK.csproj"
+ version_regex: '(?ms)<(?:PackageVersion|Version)>\s*([^<]+)\s*</(?:PackageVersion|Version)>'
+
+ # ββ Go (tag only, no registry publish) βββββββββββββββββββββββββββββββββββββ
+ sdk-go:
+ tag_pattern: "^foreign/go/v([0-9]+\\.[0-9]+\\.[0-9]+(?:-[0-9A-Za-z.-]+)?(?:\\+[0-9A-Za-z.-]+)?)$"
+ registry: none
diff --git a/.github/scripts/analyze_changed_files.py b/.github/scripts/analyze_changed_files.py
deleted file mode 100644
index ad2c8da58..000000000
--- a/.github/scripts/analyze_changed_files.py
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python3
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-import argparse
-import json
-import sys
-import re
-import os
-import pprint
-
-
-def load_config(config_file):
- """Load the configuration from a JSON file."""
- try:
- with open(config_file, 'r') as f:
- return json.load(f)
- except Exception as e:
- print(f"Error loading config file: {e}")
- sys.exit(1)
-
-
-def categorize_files(files, config):
- """Categorize files based on the configuration."""
- results = {}
- matched_files = {}
-
- for category, patterns in config.items():
- print(f"Checking files for category: {category}")
- results[category] = False
- matched_files[category] = []
- for file in files:
- print(f" Checking {file}")
- for pattern in patterns:
- if re.match(f"^{pattern}$", file) is not None:
- print(f" Matched pattern: {pattern}")
- results[category] = True
- matched_files[category].append(file)
- break
-
- return results, matched_files
-
-
-def parse_arguments():
- """Parse command line arguments."""
- parser = argparse.ArgumentParser(
- description=
- 'Analyze changed files and categorize them based on patterns from a config file.\
- Outputs environment variables using GITHUB_ENV and GITHUB_OUTPUT for GitHub Actions to indicate\
- which categories have changed files. Environment variables are in the format:\
- CATEGORY_FILES_CHANGED=true and CATEGORY_FILES=file1.py\\nfile2.py\
- where CATEGORY is the name of the category in uppercase from the config file.'
- )
-
- parser.add_argument(
- 'changed_files',
- metavar='changed_files',
- default=None,
- help='String with list of file changed in the PR or commit')
-
- parser.add_argument(
- 'config_file',
- default=None,
- help='Path to the JSON configuration file with file patterns and groups'
- )
-
- parser.add_argument('-v',
- '--verbose',
- action='store_true',
- default=False,
- help='Enable verbose output for debugging')
-
- return parser.parse_args()
-
-
-def main():
- """Main function."""
- args = parse_arguments()
-
- changed_files = args.changed_files.splitlines()
- config_file = args.config_file
- verbose = args.verbose
- config = load_config(config_file)
-
- if verbose:
- print("Changed files:")
- for file in changed_files:
- print(f" {file}")
- print("\nConfig file:", config_file)
- pprint.pp(config)
- print("")
-
- # Categorize files
- results, matched_files = categorize_files(changed_files, config)
-
- # Set GitHub environment variables
- github_env = os.environ.get('GITHUB_ENV')
- github_output = os.environ.get('GITHUB_OUTPUT')
-
- # Output results
- print("\nChanged Files Analysis results:")
-
- for category, has_changes in results.items():
- category_upper = category.upper()
- print(f" {category_upper}_FILES_CHANGED={str(has_changes).lower()}")
-
- if github_env:
- with open(github_env, 'a') as f:
- f.write(
- f"{category_upper}_FILES_CHANGED={str(has_changes).lower()}\n"
- )
-
- if has_changes:
- f.write(f"{category_upper}_FILES</dev/null; then
- echo "python3 is required but not installed, please install it"
- exit 1
-fi
-
-if [ -z "${GITHUB_EVENT_NAME:-}" ]; then
- # If the script is not running in a GitHub Actions environment (e.g., running locally),
- # get the changed files based on the last commit
- echo "The script is not running in a GitHub Actions environment"
- CHANGED_FILES=$(git diff --name-only HEAD^)
-else
- # If the script is running in a GitHub Actions environment, check the event type
- # Get the changed files based on the event type
- if [[ "${GITHUB_EVENT_NAME}" == "push" ]]; then
- # If the event type is push, get the changed files based on the last commit
- echo "The script is running in a GitHub Actions environment for a push event"
- CHANGED_FILES=$(git diff --name-only HEAD^)
- else
- # If the event type is not push (assuming pull request), get the changed files based
- # on the base and head refs of the pull request. If the GITHUB_BASE_REF and GITHUB_HEAD_REF
- # environment variables are not set, exit the script with an error message.
- if [[ -z "${GITHUB_BASE_REF:-}" || -z "${GITHUB_HEAD_REF:-}" ]]; then
- echo "The GITHUB_BASE_REF or GITHUB_HEAD_REF environment variable is not set"
- exit 1
- fi
- # Get the changed files based on the base and head refs of the pull request
- echo "The script is running in a GitHub Actions environment for a pull request event"
- echo "Base ref: origin/${GITHUB_BASE_REF}, Head ref: HEAD"
-
- # Fetch the base branch to ensure we have it
- git fetch origin "${GITHUB_BASE_REF}" --depth=50 || true
-
- # Get the merge base to ensure we're comparing the right commits
- MERGE_BASE=$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)
- echo "Merge base: ${MERGE_BASE}"
-
- # Get changed files from the merge base to HEAD
- CHANGED_FILES=$(git diff --name-only "${MERGE_BASE}" HEAD)
-
- if [ -z "$CHANGED_FILES" ]; then
- echo "No changed files detected. Trying alternative method..."
- # Alternative: compare against the target branch directly
- CHANGED_FILES=$(git diff --name-only "origin/${GITHUB_BASE_REF}...HEAD")
- fi
-
- echo "Changed files detected:"
- echo "$CHANGED_FILES"
- fi
-fi
-
-# Analyze the changed files
-# shellcheck disable=SC2086
-python3 .github/scripts/analyze_changed_files.py ${VERBOSE_FLAG} "$CHANGED_FILES" .github/changed-files-config.json
diff --git a/.github/workflows/_build_python_wheels.yml b/.github/workflows/_build_python_wheels.yml
new file mode 100644
index 000000000..40d83413e
--- /dev/null
+++ b/.github/workflows/_build_python_wheels.yml
@@ -0,0 +1,282 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# .github/workflows/_build_python_wheels.yml
+name: _build_python_wheels
+on:
+ workflow_call:
+ inputs:
+ version:
+ type: string
+ required: false
+ default: ""
+ upload_artifacts:
+ type: boolean
+ required: false
+ default: true
+ use_latest_ci:
+ type: boolean
+ required: false
+ default: true
+ description: "Use latest CI configuration and scripts from master branch"
+ outputs:
+ artifact_name:
+ description: "Name of the uploaded artifact containing wheels"
+ value: ${{ jobs.collect.outputs.artifact_name }}
+
+jobs:
+ linux:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ target: [x86_64, aarch64]
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "β
Saved latest CI and scripts from master branch"
+
+ - uses: actions/checkout@v4
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "π Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/ 2>/dev/null || true
+ cp -r /tmp/latest-github/* .github/
+ if [ -d scripts ]; then chmod +x scripts/*.sh 2>/dev/null || true; fi
+ echo "β
Latest CI and scripts applied"
+
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Setup Rust with cache
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false
+
+ - name: Build wheels
+ uses: PyO3/maturin-action@v1
+ with:
+ target: ${{ matrix.target }}
+ working-directory: foreign/python
+ before-script-linux: |
+ # manylinux_2_28 uses yum, not dnf
+ if command -v dnf &> /dev/null; then
+ dnf install -y perl-IPC-Cmd
+ elif command -v yum &> /dev/null; then
+ yum install -y perl-IPC-Cmd
+ fi
+ python3 -m pip --version || python3 -m ensurepip
+ manylinux: "2_28"
+ args: --release --out dist --interpreter python3.8 python3.9 python3.10 python3.11 python3.12 python3.13
+ sccache: "true"
+
+ - name: Upload wheels
+ if: inputs.upload_artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: wheels-linux-${{ matrix.target }}
+ path: foreign/python/dist
+ retention-days: 7
+
+ macos:
+ runs-on: ${{ matrix.target == 'x86_64' && 'macos-13' || 'macos-14' }}
+ strategy:
+ matrix:
+ target: [x86_64, aarch64]
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "β
Saved latest CI and scripts from master branch"
+
+ - uses: actions/checkout@v4
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "π Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/ 2>/dev/null || true
+ cp -r /tmp/latest-github/* .github/
+ if [ -d scripts ]; then chmod +x scripts/*.sh 2>/dev/null || true; fi
+ echo "β
Latest CI and scripts applied"
+
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Build wheels
+ uses: PyO3/maturin-action@v1
+ with:
+ target: ${{ matrix.target }}
+ working-directory: foreign/python
+ args: --release --out dist --interpreter python3.8 python3.9 python3.10 python3.11 python3.12 python3.13
+ sccache: "true"
+
+ - name: Upload wheels
+ if: inputs.upload_artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: wheels-macos-${{ matrix.target }}
+ path: foreign/python/dist
+ retention-days: 7
+
+ windows:
+ runs-on: windows-latest
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ shell: bash
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "β
Saved latest CI and scripts from master branch"
+
+ - uses: actions/checkout@v4
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ shell: bash
+ run: |
+ echo "π Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/ 2>/dev/null || true
+ cp -r /tmp/latest-github/* .github/
+ if [ -d scripts ]; then chmod +x scripts/*.sh 2>/dev/null || true; fi
+ echo "β
Latest CI and scripts applied"
+
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ architecture: x64
+
+ - name: Build wheels
+ uses: PyO3/maturin-action@v1
+ with:
+ target: x86_64
+ working-directory: foreign/python
+ args: --release --out dist --interpreter python3.9 python3.10 python3.11 python3.12 python3.13
+ sccache: "true"
+
+ - name: Upload wheels
+ if: inputs.upload_artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: wheels-windows-x64
+ path: foreign/python/dist
+ retention-days: 7
+
+ sdist:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "β
Saved latest CI and scripts from master branch"
+
+ - uses: actions/checkout@v4
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "π Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/ 2>/dev/null || true
+ cp -r /tmp/latest-github/* .github/
+ if [ -d scripts ]; then chmod +x scripts/*.sh 2>/dev/null || true; fi
+ echo "β
Latest CI and scripts applied"
+
+ - name: Build sdist
+ uses: PyO3/maturin-action@v1
+ with:
+ command: sdist
+ working-directory: foreign/python
+ args: --out dist
+
+ - name: Upload sdist
+ if: inputs.upload_artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: wheels-sdist
+ path: foreign/python/dist
+ retention-days: 7
+
+ collect:
+ name: Collect all wheels
+ needs: [linux, macos, windows, sdist]
+ runs-on: ubuntu-latest
+ outputs:
+ artifact_name: ${{ steps.output.outputs.artifact_name }}
+ steps:
+ - name: Download all wheels
+ uses: actions/download-artifact@v4
+ with:
+ pattern: wheels-*
+ merge-multiple: true
+ path: dist
+
+ - name: List wheels
+ run: |
+ echo "## π¦ Built Python Wheels" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "| Platform | Architecture | File |" >> $GITHUB_STEP_SUMMARY
+ echo "|----------|-------------|------|" >> $GITHUB_STEP_SUMMARY
+
+ for wheel in dist/*.whl; do
+ filename=$(basename "$wheel")
+ if [[ "$filename" == *"linux"* ]]; then platform="Linux"
+ elif [[ "$filename" == *"macosx"* ]]; then platform="macOS"
+ elif [[ "$filename" == *"win"* ]]; then platform="Windows"
+ else platform="Universal"; fi
+
+ if [[ "$filename" == *"x86_64"* ]] || [[ "$filename" == *"amd64"* ]]; then arch="x86_64"
+ elif [[ "$filename" == *"aarch64"* ]] || [[ "$filename" == *"arm64"* ]]; then arch="arm64"
+ elif [[ "$filename" == *"i686"* ]]; then arch="x86"
+ else arch="any"; fi
+
+ echo "| $platform | $arch | \`$filename\` |" >> $GITHUB_STEP_SUMMARY
+ done
+
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "**Total wheels built:** $(ls -1 dist/*.whl | wc -l)" >> $GITHUB_STEP_SUMMARY
+
+ - name: Upload combined artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-wheels-all
+ path: dist
+ retention-days: 30
+
+ - id: output
+ run: echo "artifact_name=python-wheels-all" >> $GITHUB_OUTPUT
diff --git a/.github/workflows/_common.yml b/.github/workflows/_common.yml
new file mode 100644
index 000000000..199dfcbe8
--- /dev/null
+++ b/.github/workflows/_common.yml
@@ -0,0 +1,478 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: _common
+on:
+ workflow_call:
+ inputs:
+ skip_pr_title:
+ type: boolean
+ required: false
+ default: false
+ description: "Skip PR title check (for push events)"
+
+permissions:
+ contents: read
+ pull-requests: read
+
+jobs:
+ rust-versions:
+ name: Check Rust versions sync
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Check Rust versions are synchronized
+ run: |
+ # Use the sync-rust-version.sh script in check mode
+ if ! bash scripts/sync-rust-version.sh --check; then
+ echo ""
+ echo "β Rust versions are not synchronized!"
+ echo ""
+ echo "To fix this issue, run:"
+ echo " ./scripts/sync-rust-version.sh --fix"
+ echo ""
+ echo "This script will automatically update all Dockerfiles to match rust-toolchain.toml"
+ exit 1
+ fi
+
+ pr-title:
+ name: Check PR Title
+ if: github.event_name == 'pull_request' && !inputs.skip_pr_title
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Validate PR Title
+ uses: amannn/action-semantic-pull-request@v5
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ types: |
+ feat
+ fix
+ docs
+ style
+ refactor
+ perf
+ test
+ build
+ ci
+ chore
+ revert
+ repo
+ deps
+
+ license-headers:
+ name: Check license headers
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Check Apache license headers
+ run: |
+ echo "π Checking license headers..."
+
+ # Pull the addlicense image
+ docker pull ghcr.io/google/addlicense:latest
+
+ # Run the check
+ if docker run --rm -v ${{ github.workspace }}:/src -w /src \
+ ghcr.io/google/addlicense:latest \
+ -check -f ASF_LICENSE.txt . > missing_files.txt 2>&1; then
+ echo "β
All files have proper license headers"
+ else
+ file_count=$(wc -l < missing_files.txt)
+ echo "β Found $file_count files missing license headers:"
+ echo ""
+ cat missing_files.txt | sed 's/^/ β’ /'
+ echo ""
+ echo "π‘ Run 'addlicense -f ASF_LICENSE.txt .' to fix automatically"
+
+ # Add to summary
+ echo "## β License Headers Missing" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "The following files are missing Apache license headers:" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ cat missing_files.txt >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ echo 'Please call `just licenses-fix` to fix automatically.' >> $GITHUB_STEP_SUMMARY
+
+ exit 1
+ fi
+
+ license-list:
+ name: Check licenses list
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup Rust toolchain
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ enabled: "false" # Don't need cache for just checking licenses
+
+ - run: scripts/licenses-list.sh --check
+
+ markdown:
+ name: Markdown lint
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: '23'
+
+ - name: Install markdownlint-cli
+ run: npm install -g markdownlint-cli
+
+ - name: Run markdownlint
+ run: |
+ echo "π Checking markdown files..."
+
+ # Create config if it doesn't exist
+ if [ ! -f ".markdownlint.yml" ]; then
+ cat > .markdownlint.yml << 'EOF'
+ # Markdown lint configuration
+ default: true
+ MD013:
+ line_length: 120
+ tables: false
+ MD033:
+ allowed_elements: [details, summary, img]
+ MD041: false # First line in file should be a top level heading
+ EOF
+ fi
+
+ # Run the linter
+ if markdownlint '**/*.md' --ignore-path .gitignore; then
+ echo "β
All markdown files are properly formatted"
+ else
+ echo "β Markdown linting failed"
+ echo "π‘ Run 'markdownlint **/*.md --fix' to auto-fix issues"
+ exit 1
+ fi
+
+ shellcheck:
+ name: Shell scripts lint
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Install shellcheck
+ run: |
+ sudo apt-get update --yes && sudo apt-get install --yes shellcheck
+
+ - name: Check shell scripts
+ run: |
+ echo "π Checking shell scripts..."
+
+ # Find all shell scripts excluding certain directories
+ if find . -type f -name "*.sh" \
+ -not -path "./target/*" \
+ -not -path "./node_modules/*" \
+ -not -path "./.git/*" \
+ -not -path "./foreign/node/node_modules/*" \
+ -not -path "./foreign/python/.venv/*" \
+ -exec shellcheck -S warning {} +; then
+ echo "β
All shell scripts passed shellcheck"
+ else
+ echo "β Shellcheck found issues in shell scripts"
+ echo "π‘ Fix the issues reported above"
+ exit 1
+ fi
+
+ trailing-whitespace:
+ name: Check trailing whitespace
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Need full history to get diff
+
+ - name: Check for trailing whitespace in changed files
+ run: |
+ echo "π Checking for trailing whitespace in changed files..."
+
+ # Get list of changed files in PR
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ git fetch --no-tags --depth=1 origin ${{ github.event.pull_request.base.ref }}:${{ github.event.pull_request.base.ref }} || true
+ BASE_SHA="${{ github.event.pull_request.base.sha }}"
+ CHANGED_FILES=$(git diff --name-only --diff-filter=ACM "$BASE_SHA"...HEAD || true)
+ else
+ CHANGED_FILES=$(git diff --name-only --diff-filter=ACM HEAD~1)
+ fi
+
+ if [ -z "$CHANGED_FILES" ]; then
+ echo "No files changed to check"
+ exit 0
+ fi
+
+ echo "Files to check:"
+ echo "$CHANGED_FILES" | sed 's/^/ β’ /'
+ echo ""
+
+ # Check each changed file for trailing whitespace
+ FILES_WITH_TRAILING=""
+ for file in $CHANGED_FILES; do
+ # Skip if file doesn't exist (might be deleted)
+ if [ ! -f "$file" ]; then
+ continue
+ fi
+
+ # Skip binary files
+ if file "$file" | grep -qE "binary|data|executable|compressed"; then
+ continue
+ fi
+
+ # Check for trailing whitespace
+ if grep -q '[[:space:]]$' "$file" 2>/dev/null; then
+ FILES_WITH_TRAILING="$FILES_WITH_TRAILING $file"
+ fi
+ done
+
+ if [ -z "$FILES_WITH_TRAILING" ]; then
+ echo "β
No trailing whitespace found in changed files"
+ else
+ echo "β Found trailing whitespace in the following changed files:"
+ echo ""
+ for file in $FILES_WITH_TRAILING; do
+ echo " β’ $file"
+ # Show lines with trailing whitespace (limit to first 5 occurrences per file)
+ grep -n '[[:space:]]$' "$file" | head -5 | while IFS=: read -r line_num content; do
+ # Show the line with visible whitespace markers
+ visible_content=$(echo "$content" | sed 's/ /Β·/g; s/\t/β/g')
+ echo " Line $line_num: '${visible_content}'"
+ done
+ TOTAL_LINES=$(grep -c '[[:space:]]$' "$file")
+ if [ "$TOTAL_LINES" -gt 5 ]; then
+ echo " ... and $((TOTAL_LINES - 5)) more lines"
+ fi
+ echo ""
+ done
+
+ echo "π‘ To fix trailing whitespace in these files:"
+ echo " β’ VSCode: Enable 'files.trimTrailingWhitespace' setting"
+ echo " β’ Fix specific file: sed -i 's/[[:space:]]*$//' "
+ echo " β’ Fix all changed files:"
+ echo " for f in$FILES_WITH_TRAILING; do sed -i 's/[[:space:]]*$//' \$f; done"
+ exit 1
+ fi
+
+ trailing-newline:
+ name: Check trailing newline
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Need full history to get diff
+
+ - name: Check for trailing newline in changed text files
+ run: |
+ echo "π Checking for trailing newline in changed text files..."
+
+ # Get list of changed files in PR
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ git fetch --no-tags --depth=1 origin ${{ github.event.pull_request.base.ref }}:${{ github.event.pull_request.base.ref }} || true
+ BASE_SHA="${{ github.event.pull_request.base.sha }}"
+ CHANGED_FILES=$(git diff --name-only --diff-filter=ACM "$BASE_SHA"...HEAD || true)
+ else
+ CHANGED_FILES=$(git diff --name-only --diff-filter=ACM HEAD~1)
+ fi
+
+ if [ -z "$CHANGED_FILES" ]; then
+ echo "No files changed to check"
+ exit 0
+ fi
+
+ echo "Files to check:"
+ echo "$CHANGED_FILES" | sed 's/^/ β’ /'
+ echo ""
+
+ # Check each changed file for missing trailing newline
+ FILES_WITHOUT_NEWLINE=""
+ for file in $CHANGED_FILES; do
+ # Skip if file doesn't exist (might be deleted)
+ if [ ! -f "$file" ]; then
+ continue
+ fi
+
+ # Skip binary files
+ if file "$file" | grep -qE "binary|data|executable|compressed"; then
+ continue
+ fi
+
+ # Skip empty files
+ if [ ! -s "$file" ]; then
+ continue
+ fi
+
+ # Check if file ends with a newline
+ # Use tail to get last byte and od to check if it's a newline (0x0a)
+ if [ -n "$(tail -c 1 "$file" | od -An -tx1 | grep -v '0a')" ]; then
+ FILES_WITHOUT_NEWLINE="$FILES_WITHOUT_NEWLINE $file"
+ fi
+ done
+
+ if [ -z "$FILES_WITHOUT_NEWLINE" ]; then
+ echo "β
All changed text files have trailing newlines"
+ else
+ echo "β Found text files without trailing newline:"
+ echo ""
+ for file in $FILES_WITHOUT_NEWLINE; do
+ echo " β’ $file"
+ # Show last few characters of the file for context
+ echo -n " Last characters: '"
+ tail -c 20 "$file" | tr '\n' 'β΅' | sed 's/\t/β/g'
+ echo "'"
+ echo ""
+ done
+
+ echo "π‘ To add trailing newlines to these files:"
+ echo " β’ VSCode: Enable 'files.insertFinalNewline' setting"
+ echo " β’ Fix specific file: echo >> "
+ echo " β’ Fix all files:"
+ echo " for f in$FILES_WITHOUT_NEWLINE; do [ -n \"\$(tail -c 1 \"\$f\")\" ] && echo >> \"\$f\"; done"
+ exit 1
+ fi
+
+ summary:
+ name: Common checks summary
+ needs: [rust-versions, pr-title, license-headers, license-list, markdown, shellcheck, trailing-whitespace, trailing-newline]
+ if: always()
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Summary
+ run: |
+ echo "## π Common Checks Summary" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "| Check | Status | Description |" >> $GITHUB_STEP_SUMMARY
+ echo "|-------|--------|-------------|" >> $GITHUB_STEP_SUMMARY
+
+ # PR-specific checks
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ PR_TITLE="${{ needs.pr-title.result }}"
+
+ # Add emoji based on status
+ if [ "$PR_TITLE" = "success" ]; then
+ echo "| β
PR Title | success | Follows conventional format |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$PR_TITLE" = "failure" ]; then
+ echo "| β PR Title | failure | Must follow conventional format |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ PR Title | $PR_TITLE | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+ else
+ echo "| βοΈ PR Title | skipped | Not a pull request |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ # Always-run checks
+ RUST_VERSIONS="${{ needs.rust-versions.result }}"
+ LICENSE_HEADERS="${{ needs.license-headers.result }}"
+ LICENSE_LIST="${{ needs.license-list.result }}"
+ MARKDOWN="${{ needs.markdown.result }}"
+
+ if [ "$RUST_VERSIONS" = "success" ]; then
+ echo "| β
Rust Versions | success | All Rust versions synchronized |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$RUST_VERSIONS" = "failure" ]; then
+ echo "| β Rust Versions | failure | Rust versions mismatch in Dockerfiles |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ Rust Versions | $RUST_VERSIONS | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ if [ "$LICENSE_HEADERS" = "success" ]; then
+ echo "| β
License Headers | success | All files have Apache headers |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$LICENSE_HEADERS" = "failure" ]; then
+ echo "| β License Headers | failure | Missing Apache license headers |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ License Headers | $LICENSE_HEADERS | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ if [ "$LICENSE_LIST" = "success" ]; then
+ echo "| β
License List | success | Dependencies licenses validated |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$LICENSE_LIST" = "failure" ]; then
+ echo "| β License List | failure | License list needs update |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ License List | $LICENSE_LIST | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ if [ "$MARKDOWN" = "success" ]; then
+ echo "| β
Markdown Lint | success | All markdown files are valid |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$MARKDOWN" = "failure" ]; then
+ echo "| β Markdown Lint | failure | Markdown formatting issues found |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ Markdown Lint | $MARKDOWN | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ SHELLCHECK="${{ needs.shellcheck.result }}"
+ if [ "$SHELLCHECK" = "success" ]; then
+ echo "| β
Shellcheck | success | All shell scripts are valid |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$SHELLCHECK" = "failure" ]; then
+ echo "| β Shellcheck | failure | Shell script issues found |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ Shellcheck | $SHELLCHECK | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ TRAILING="${{ needs.trailing-whitespace.result }}"
+ if [ "$TRAILING" = "success" ]; then
+ echo "| β
Trailing Whitespace | success | No trailing whitespace found |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$TRAILING" = "failure" ]; then
+ echo "| β Trailing Whitespace | failure | Trailing whitespace detected |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ Trailing Whitespace | $TRAILING | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ TRAILING_NL="${{ needs.trailing-newline.result }}"
+ if [ "$TRAILING_NL" = "success" ]; then
+ echo "| β
Trailing Newline | success | All text files have trailing newlines |" >> $GITHUB_STEP_SUMMARY
+ elif [ "$TRAILING_NL" = "failure" ]; then
+ echo "| β Trailing Newline | failure | Missing trailing newlines detected |" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "| βοΈ Trailing Newline | $TRAILING_NL | Check skipped |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ # Overall status
+ if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
+ echo "### β Some checks failed" >> $GITHUB_STEP_SUMMARY
+ echo "Please review the failed checks above and fix the issues." >> $GITHUB_STEP_SUMMARY
+ elif [[ "${{ contains(needs.*.result, 'skipped') }}" == "true" ]]; then
+ echo "### β οΈ Some checks were skipped" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "### β
All checks passed!" >> $GITHUB_STEP_SUMMARY
+ fi
diff --git a/.github/workflows/_detect.yml b/.github/workflows/_detect.yml
new file mode 100644
index 000000000..0d11c5e3d
--- /dev/null
+++ b/.github/workflows/_detect.yml
@@ -0,0 +1,382 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# .github/workflows/_detect.yml
+name: _detect
+on:
+ workflow_call:
+ outputs:
+ rust_matrix:
+ description: "Matrix for Rust components"
+ value: ${{ jobs.detect.outputs.rust_matrix }}
+ python_matrix:
+ description: "Matrix for Python SDK"
+ value: ${{ jobs.detect.outputs.python_matrix }}
+ node_matrix:
+ description: "Matrix for Node SDK"
+ value: ${{ jobs.detect.outputs.node_matrix }}
+ go_matrix:
+ description: "Matrix for Go SDK"
+ value: ${{ jobs.detect.outputs.go_matrix }}
+ java_matrix:
+ description: "Matrix for Java SDK"
+ value: ${{ jobs.detect.outputs.java_matrix }}
+ csharp_matrix:
+ description: "Matrix for C# SDK"
+ value: ${{ jobs.detect.outputs.csharp_matrix }}
+ bdd_matrix:
+ description: "Matrix for BDD tests"
+ value: ${{ jobs.detect.outputs.bdd_matrix }}
+ examples_matrix:
+ description: "Matrix for examples tests"
+ value: ${{ jobs.detect.outputs.examples_matrix }}
+ other_matrix:
+ description: "Matrix for other components"
+ value: ${{ jobs.detect.outputs.other_matrix }}
+
+jobs:
+ detect:
+ runs-on: ubuntu-latest
+ env:
+ IGGY_CI_BUILD: true
+ outputs:
+ rust_matrix: ${{ steps.mk.outputs.rust_matrix }}
+ python_matrix: ${{ steps.mk.outputs.python_matrix }}
+ node_matrix: ${{ steps.mk.outputs.node_matrix }}
+ go_matrix: ${{ steps.mk.outputs.go_matrix }}
+ java_matrix: ${{ steps.mk.outputs.java_matrix }}
+ csharp_matrix: ${{ steps.mk.outputs.csharp_matrix }}
+ bdd_matrix: ${{ steps.mk.outputs.bdd_matrix }}
+ examples_matrix: ${{ steps.mk.outputs.examples_matrix }}
+ other_matrix: ${{ steps.mk.outputs.other_matrix }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Get changed files (PR or push)
+ id: changed
+ uses: Ana06/get-changed-files@25f79e676e7ea1868813e21465014798211fad8c
+ with:
+ format: json
+
+ - name: Load components config
+ id: config
+ run: |
+ if ! command -v yq &> /dev/null; then
+ YQ_VERSION="v4.47.1"
+ YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
+ wget -qO /tmp/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
+ echo "${YQ_CHECKSUM} /tmp/yq" | sha256sum -c - || exit 1
+ chmod +x /tmp/yq
+ sudo mv /tmp/yq /usr/local/bin/yq
+ fi
+ echo "components=$(yq -o=json -I=0 '.' .github/config/components.yml | jq -c)" >> $GITHUB_OUTPUT
+
+ - name: Build matrices
+ id: mk
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const componentsJson = `${{ steps.config.outputs.components }}`;
+ const changedFilesJson = `${{ steps.changed.outputs.all || '[]' }}`;
+
+ let componentsCfg = { components: {} };
+ try { componentsCfg = JSON.parse(componentsJson); } catch {}
+ const components = componentsCfg.components || {};
+
+ let changedFiles = [];
+ try { changedFiles = JSON.parse(changedFilesJson); } catch {}
+ const files = changedFiles.map(p => p.replace(/\\/g, '/'));
+
+ const escapeRe = s => s.replace(/([.+^${}()|[\]\\])/g, '\\$1');
+
+ const globToRegex = (pattern) => {
+ // Handle exclusion patterns
+ const isExclusion = pattern.startsWith('!');
+ const cleanPattern = isExclusion ? pattern.slice(1) : pattern;
+
+ let s = escapeRe(cleanPattern);
+
+ // First, replace ** patterns with placeholders to protect them
+ s = s.replace(/\*\*\/+/g, '___DOUBLESTAR_SLASH___'); // '**/' -> placeholder
+ s = s.replace(/\/\*\*$/g, '___SLASH_DOUBLESTAR___'); // '/**' at end -> placeholder
+ s = s.replace(/\*\*/g, '___DOUBLESTAR___'); // remaining '**' -> placeholder
+
+ // Now handle single * and ? (they won't match our placeholders)
+ s = s.replace(/\*/g, '[^/]*'); // '*' -> match any except /
+ s = s.replace(/\?/g, '[^/]'); // '?' -> match single char except /
+
+ // Finally, replace placeholders with actual regex patterns
+ s = s.replace(/___DOUBLESTAR_SLASH___/g, '(?:.*/)?'); // '**/' -> any subpath (optional)
+ s = s.replace(/___SLASH_DOUBLESTAR___/g, '(?:/.*)?'); // '/**' at end -> rest of path
+ s = s.replace(/___DOUBLESTAR___/g, '.*'); // remaining '**' -> match anything
+
+ return { regex: new RegExp(`^${s}$`), exclude: isExclusion };
+ };
+
+ const compiled = new Map();
+ const toRx = (p) => {
+ if (!compiled.has(p)) compiled.set(p, globToRegex(p));
+ return compiled.get(p).regex;
+ };
+ const test = (file, patterns) => {
+ const inc = patterns.filter(p => !p.startsWith('!')).map(toRx);
+ const exc = patterns.filter(p => p.startsWith('!')).map(p => toRx(p.slice(1)));
+ const included = inc.some(rx => rx.test(file));
+ const excluded = exc.some(rx => rx.test(file));
+ return included && !excluded;
+ };
+
+ // Build dependency graph and resolve affected components
+ const affectedComponents = new Set();
+ const componentTriggers = new Map(); // Track what triggered each component
+ const dependencyGraph = new Map();
+
+ // First pass: build dependency graph
+ for (const [name, cfg] of Object.entries(components)) {
+ if (cfg.depends_on) {
+ dependencyGraph.set(name, cfg.depends_on);
+ }
+ }
+
+ // Second pass: check which components are directly affected by file changes
+ console.log('=== Change Detection ===');
+ console.log(`Analyzing ${files.length} changed files...`);
+ console.log('');
+
+ for (const [name, cfg] of Object.entries(components)) {
+ const pats = (cfg.paths || []);
+ const matchedFiles = files.filter(f => test(f, pats));
+ if (matchedFiles.length > 0) {
+ affectedComponents.add(name);
+ componentTriggers.set(name, {
+ reason: 'direct',
+ files: matchedFiles,
+ dependency: null
+ });
+
+ // Log direct matches
+ console.log(`β ${name} directly affected by:`);
+ if (matchedFiles.length <= 5) {
+ matchedFiles.forEach(f => console.log(` - ${f}`));
+ } else {
+ matchedFiles.slice(0, 3).forEach(f => console.log(` - ${f}`));
+ console.log(` ... and ${matchedFiles.length - 3} more files`);
+ }
+ }
+ }
+
+ // Third pass: resolve transitive dependencies
+ console.log('');
+ console.log('=== Dependency Resolution ===');
+
+ const resolveDependent = (componentName, depth = 0) => {
+ for (const [dependent, dependencies] of dependencyGraph.entries()) {
+ if (dependencies.includes(componentName) && !affectedComponents.has(dependent)) {
+ affectedComponents.add(dependent);
+
+ // Track why this component was added
+ if (!componentTriggers.has(dependent)) {
+ componentTriggers.set(dependent, {
+ reason: 'dependency',
+ files: [],
+ dependency: componentName
+ });
+ console.log(`${' '.repeat(depth)}β ${dependent} (depends on ${componentName})`);
+ }
+
+ resolveDependent(dependent, depth + 1); // Recursively add dependents
+ }
+ }
+ };
+
+ // Apply dependency resolution
+ const initialAffected = [...affectedComponents];
+ for (const comp of initialAffected) {
+ resolveDependent(comp, 1);
+ }
+
+ // Summary output
+ console.log('');
+ console.log('=== Summary ===');
+ console.log(`Initially affected: ${initialAffected.length} components`);
+ console.log(`After dependencies: ${affectedComponents.size} components`);
+
+ if (files.length <= 10) {
+ console.log('');
+ console.log('Changed files:');
+ files.forEach(f => console.log(` - ${f}`));
+ } else {
+ console.log(`Total files changed: ${files.length}`);
+ }
+
+ const groups = { rust:[], python:[], node:[], go:[], java:[], csharp:[], bdd:[], examples:[], other:[] };
+
+ // Process affected components and generate tasks
+ console.log('');
+ console.log('=== Task Generation ===');
+
+ const skippedComponents = [];
+ const taskedComponents = [];
+
+ for (const name of affectedComponents) {
+ const cfg = components[name];
+ const trigger = componentTriggers.get(name);
+
+ if (!cfg || !cfg.tasks || cfg.tasks.length === 0) {
+ skippedComponents.push(name);
+ continue;
+ }
+
+ const entries = cfg.tasks.map(task => ({ component: name, task }));
+ taskedComponents.push({ name, tasks: cfg.tasks, trigger });
+
+ if (name === 'rust') groups.rust.push(...entries);
+ else if (name === 'sdk-python') groups.python.push(...entries);
+ else if (name === 'sdk-node') groups.node.push(...entries);
+ else if (name === 'sdk-go') groups.go.push(...entries);
+ else if (name === 'sdk-java') groups.java.push(...entries);
+ else if (name === 'sdk-csharp') groups.csharp.push(...entries);
+ else if (name.startsWith('bdd-')) {
+ // Individual BDD tests should run separately with proper Docker setup
+ groups.bdd.push(...entries);
+ }
+ else if (name === 'examples-suite') {
+ // Examples should run separately
+ groups.examples.push(...entries);
+ }
+ else if (name === 'shell-scripts') groups.other.push(...entries);
+ else if (name === 'ci-workflows') groups.other.push(...entries);
+ else groups.other.push(...entries);
+ }
+
+ // Log components with tasks
+ if (taskedComponents.length > 0) {
+ console.log('Components with tasks to run:');
+ taskedComponents.forEach(({ name, tasks, trigger }) => {
+ const reason = trigger.reason === 'direct'
+ ? `directly triggered by ${trigger.files.length} file(s)`
+ : `dependency of ${trigger.dependency}`;
+ console.log(` ✓ ${name}: ${tasks.length} task(s) - ${reason}`);
+ });
+ }
+
+ // Log skipped components (no tasks defined)
+ if (skippedComponents.length > 0) {
+ console.log('');
+ console.log('Components triggered but skipped (no tasks):');
+ skippedComponents.forEach(name => {
+ const trigger = componentTriggers.get(name);
+ const reason = trigger.reason === 'direct'
+ ? `directly triggered by ${trigger.files.length} file(s)`
+ : `dependency of ${trigger.dependency}`;
+ console.log(` ✗ ${name} - ${reason}`);
+ });
+ }
+
+ // On master push, run everything
+ if (context.eventName === 'push' && context.ref === 'refs/heads/master') {
+ // Clear existing groups to avoid duplicates - we'll run everything anyway
+ groups.rust = [];
+ groups.python = [];
+ groups.node = [];
+ groups.go = [];
+ groups.java = [];
+ groups.csharp = [];
+ groups.bdd = [];
+ groups.examples = [];
+ groups.other = [];
+
+ for (const [name, cfg] of Object.entries(components)) {
+ if (!cfg.tasks || cfg.tasks.length === 0) continue;
+ const entries = cfg.tasks.map(task => ({ component: name, task }));
+ if (name === 'rust') groups.rust.push(...entries);
+ else if (name === 'sdk-python') groups.python.push(...entries);
+ else if (name === 'sdk-node') groups.node.push(...entries);
+ else if (name === 'sdk-go') groups.go.push(...entries);
+ else if (name === 'sdk-java') groups.java.push(...entries);
+ else if (name === 'sdk-csharp') groups.csharp.push(...entries);
+ else if (name.startsWith('bdd-')) groups.bdd.push(...entries);
+ else if (name === 'examples-suite') groups.examples.push(...entries);
+ else groups.other.push(...entries);
+ }
+ }
+
+ // Deduplicate entries in each group (in case of any edge cases)
+ const dedupeGroup = (group) => {
+ const seen = new Set();
+ return group.filter(item => {
+ const key = `${item.component}-${item.task}`;
+ if (seen.has(key)) return false;
+ seen.add(key);
+ return true;
+ });
+ };
+
+ // Apply deduplication to all groups
+ Object.keys(groups).forEach(key => {
+ groups[key] = dedupeGroup(groups[key]);
+ });
+
+ const matrix = a => a.length ? { include: a } : { include: [{ component: 'noop', task: 'noop' }] };
+
+ // Final summary of what will run
+ console.log('');
+ console.log('=== Test Jobs to Run ===');
+
+ const jobSummary = [
+ { name: 'Rust', tasks: groups.rust },
+ { name: 'Python SDK', tasks: groups.python },
+ { name: 'Node SDK', tasks: groups.node },
+ { name: 'Go SDK', tasks: groups.go },
+ { name: 'Java SDK', tasks: groups.java },
+ { name: 'C# SDK', tasks: groups.csharp },
+ { name: 'BDD Tests', tasks: groups.bdd },
+ { name: 'Examples', tasks: groups.examples },
+ { name: 'Other', tasks: groups.other }
+ ];
+
+ jobSummary.forEach(({ name, tasks }) => {
+ if (tasks.length > 0) {
+ const uniqueTasks = [...new Set(tasks.map(t => t.task))];
+ console.log(` ✓ ${name}: ${uniqueTasks.join(', ')}`);
+ } else {
+ console.log(` ✗ ${name}: SKIPPED (no changes detected)`);
+ }
+ });
+
+ // Overall stats
+ const totalTasks = Object.values(groups).reduce((sum, g) => sum + g.length, 0);
+ console.log('');
+ console.log(`Total: ${affectedComponents.size} components affected, ${totalTasks} tasks to run`);
+
+ // Use environment files instead of deprecated setOutput
+ const setOutput = (name, value) => {
+ const output = `${name}=${value}`;
+ require('fs').appendFileSync(process.env.GITHUB_OUTPUT, `${output}\n`);
+ };
+
+ setOutput('rust_matrix', JSON.stringify(matrix(groups.rust)));
+ setOutput('python_matrix', JSON.stringify(matrix(groups.python)));
+ setOutput('node_matrix', JSON.stringify(matrix(groups.node)));
+ setOutput('go_matrix', JSON.stringify(matrix(groups.go)));
+ setOutput('java_matrix', JSON.stringify(matrix(groups.java)));
+ setOutput('csharp_matrix', JSON.stringify(matrix(groups.csharp)));
+ setOutput('bdd_matrix', JSON.stringify(matrix(groups.bdd)));
+ setOutput('examples_matrix', JSON.stringify(matrix(groups.examples)));
+ setOutput('other_matrix', JSON.stringify(matrix(groups.other)));
diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml
new file mode 100644
index 000000000..d496ef7b0
--- /dev/null
+++ b/.github/workflows/_test.yml
@@ -0,0 +1,132 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: _test
+on:
+ workflow_call:
+ inputs:
+ component:
+ type: string
+ required: true
+ description: "Component to test"
+ task:
+ type: string
+ required: true
+ description: "Task to run"
+
+permissions:
+ contents: read
+ security-events: write
+
+jobs:
+ run:
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Skip noop
+ if: inputs.component == 'noop'
+ run: echo "No changes detected, skipping tests"
+
+ # Rust
+ - name: Run Rust task
+ if: startsWith(inputs.component, 'rust')
+ uses: ./.github/actions/rust/pre-merge
+ with:
+ task: ${{ inputs.task }}
+ component: ${{ inputs.component }}
+
+ # Python SDK
+ - name: Set up Docker Buildx for Python
+ if: inputs.component == 'sdk-python' && inputs.task == 'test'
+ uses: docker/setup-buildx-action@v3
+
+ - name: Run Python SDK task
+ if: inputs.component == 'sdk-python'
+ uses: ./.github/actions/python-maturin/pre-merge
+ with:
+ task: ${{ inputs.task }}
+
+ # Node SDK
+ - name: Run Node SDK task
+ if: inputs.component == 'sdk-node'
+ uses: ./.github/actions/node-npm/pre-merge
+ with:
+ task: ${{ inputs.task }}
+
+ # Go SDK
+ - name: Run Go SDK task
+ if: inputs.component == 'sdk-go'
+ uses: ./.github/actions/go/pre-merge
+ with:
+ task: ${{ inputs.task }}
+
+ # Java SDK
+ - name: Run Java SDK task
+ if: inputs.component == 'sdk-java'
+ uses: ./.github/actions/java-gradle/pre-merge
+ with:
+ task: ${{ inputs.task }}
+
+ # C# SDK
+ - name: Run C# SDK task
+ if: inputs.component == 'sdk-csharp'
+ uses: ./.github/actions/csharp-dotnet/pre-merge
+ with:
+ task: ${{ inputs.task }}
+
+ # Web UI
+ - name: Run Web UI task
+ if: inputs.component == 'web-ui'
+ run: |
+ cd web
+ npm ci
+ if [ "${{ inputs.task }}" = "lint" ]; then
+ # TODO(hubcio): make this blocking once Web UI lints are fixed
+ npm run lint || true
+ elif [ "${{ inputs.task }}" = "build" ]; then
+ npm run build
+ fi
+
+ # CI workflow validation
+ - name: Validate CI workflows
+ if: inputs.component == 'ci-workflows' && inputs.task == 'validate'
+ run: |
+ echo "Validating GitHub Actions workflows..."
+ # Basic YAML validation
+ for workflow in .github/workflows/*.yml; do
+ echo "Checking $workflow"
+ python -c "import yaml; yaml.safe_load(open('$workflow'))" || exit 1
+ done
+ echo "All workflows are valid YAML"
+
+ # Upload reports
+ - name: Upload reports
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ inputs.component }}-${{ inputs.task }}-reports-${{ github.run_id }}-${{ github.run_attempt }}
+ path: |
+ reports/**
+ target/llvm-cov/**
+ coverage.lcov
+ if-no-files-found: ignore
+ retention-days: 7
diff --git a/.github/workflows/_test_bdd.yml b/.github/workflows/_test_bdd.yml
new file mode 100644
index 000000000..6a1c25e69
--- /dev/null
+++ b/.github/workflows/_test_bdd.yml
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: _test_bdd
+on:
+ workflow_call:
+ inputs:
+ component:
+ type: string
+ required: true
+ description: "Component to test (e.g., 'bdd-rust', 'bdd-python', etc.)"
+ task:
+ type: string
+ required: true
+ description: "Task to run (e.g., 'bdd-rust', 'bdd-python', etc.)"
+
+permissions:
+ contents: read
+ security-events: write
+
+jobs:
+ run:
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Skip noop
+ if: inputs.component == 'noop'
+ run: echo "No changes detected, skipping tests"
+
+ - name: Setup Rust with cache for BDD
+ if: startsWith(inputs.component, 'bdd-') && startsWith(inputs.task, 'bdd-')
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+
+ - name: Build server for BDD tests
+ if: startsWith(inputs.component, 'bdd-') && startsWith(inputs.task, 'bdd-')
+ run: |
+ echo "Building server binary and CLI for BDD tests with sccache..."
+ cargo build --locked --bin iggy-server --bin iggy
+
+ echo "Server binary built at: target/debug/iggy-server"
+ ls -lh target/debug/iggy-server
+
+ echo "CLI binary built at: target/debug/iggy"
+ ls -lh target/debug/iggy
+
+ # Verify the binary exists and is executable
+ if [ ! -f "target/debug/iggy-server" ]; then
+ echo "ERROR: Server binary not found at target/debug/iggy-server"
+ exit 1
+ fi
+
+ if [ ! -f "target/debug/iggy" ]; then
+ echo "ERROR: CLI binary not found at target/debug/iggy"
+ exit 1
+ fi
+
+ - name: Run BDD tests
+ if: startsWith(inputs.component, 'bdd-') && startsWith(inputs.task, 'bdd-')
+ run: |
+ # Extract SDK name from task (format: bdd-<sdk>)
+ SDK_NAME=$(echo "${{ inputs.task }}" | sed 's/bdd-//')
+
+ echo "Running BDD tests for SDK: $SDK_NAME"
+ echo "Current directory: $(pwd)"
+ echo "Server binary location: $(ls -lh target/debug/iggy-server)"
+ echo "CLI binary location: $(ls -lh target/debug/iggy)"
+
+ # Export path to the pre-built server and cli binaries (relative to repo root)
+ export IGGY_SERVER_PATH="target/debug/iggy-server"
+ export IGGY_CLI_PATH="target/debug/iggy"
+ ./scripts/run-bdd-tests.sh "$SDK_NAME"
+
+ - name: Clean up Docker resources (BDD)
+ if: always() && startsWith(inputs.component, 'bdd-') && startsWith(inputs.task, 'bdd-')
+ run: |
+ ./scripts/run-bdd-tests.sh clean || true
+ docker system prune -f || true
+
+ - name: Upload reports
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ inputs.component }}-${{ inputs.task }}-reports-${{ github.run_id }}-${{ github.run_attempt }}
+ path: |
+ reports/**
+ target/llvm-cov/**
+ coverage.lcov
+ if-no-files-found: ignore
+ retention-days: 7
diff --git a/.github/workflows/_test_examples.yml b/.github/workflows/_test_examples.yml
new file mode 100644
index 000000000..5f23aa999
--- /dev/null
+++ b/.github/workflows/_test_examples.yml
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: _test_examples
+on:
+ workflow_call:
+ inputs:
+ component:
+ type: string
+ required: true
+ description: "Component to test (should be 'examples-suite')"
+ task:
+ type: string
+ required: true
+ description: "Task to run (e.g., 'examples-rust', 'examples-go')"
+
+permissions:
+ contents: read
+ security-events: write
+
+jobs:
+ run:
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ env:
+ IGGY_CI_BUILD: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Skip noop
+ if: inputs.component == 'noop'
+ run: echo "No changes detected, skipping tests"
+
+ - name: Setup Rust with cache for examples
+ if: inputs.component == 'examples-suite'
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false # Only cache registry and git deps, not target dir (sccache handles that)
+
+ - name: Build common binaries for all examples
+ if: inputs.component == 'examples-suite'
+ run: |
+ echo "Building common binaries for all examples tests..."
+ echo "Current directory: $(pwd)"
+
+ # Build server (needed by both Rust and Go examples)
+ echo "Building iggy-server..."
+ cargo build --locked --bin iggy-server
+
+ # For Rust examples, also build CLI and example binaries
+ if [[ "${{ inputs.task }}" == "examples-rust" ]]; then
+ echo "Building additional binaries for Rust examples..."
+ cargo build --locked --bin iggy --examples
+ fi
+
+ # Verify server binary was built (needed by all examples)
+ echo "Verifying server binary:"
+ if [ -f "target/debug/iggy-server" ]; then
+ echo "β
Server binary found at target/debug/iggy-server"
+ ls -lh target/debug/iggy-server
+ else
+ echo "β Server binary NOT found at target/debug/iggy-server"
+ echo "Checking target directory structure:"
+ find target -name "iggy-server" -type f 2>/dev/null || true
+ exit 1
+ fi
+
+ # For Rust examples, verify CLI was built
+ if [[ "${{ inputs.task }}" == "examples-rust" ]]; then
+ if [ -f "target/debug/iggy" ]; then
+ echo "β
CLI binary found at target/debug/iggy"
+ ls -lh target/debug/iggy
+ else
+ echo "β CLI binary NOT found at target/debug/iggy"
+ exit 1
+ fi
+ fi
+
+ - name: Run Rust examples
+ if: inputs.component == 'examples-suite' && inputs.task == 'examples-rust'
+ run: |
+ echo "Running Rust examples tests..."
+ # Run the examples script which will use the prebuilt binaries
+ ./scripts/run-rust-examples-from-readme.sh
+
+ - name: Run Go examples
+ if: inputs.component == 'examples-suite' && inputs.task == 'examples-go'
+ run: |
+ echo "Running Go examples tests..."
+ # Run the examples script which will use the prebuilt server binary
+ ./scripts/run-go-examples-from-readme.sh
+
+ - name: Upload reports
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ inputs.component }}-${{ inputs.task }}-reports-${{ github.run_id }}-${{ github.run_attempt }}
+ path: |
+ reports/**
+ target/llvm-cov/**
+ coverage.lcov
+ if-no-files-found: ignore
+ retention-days: 7
diff --git a/.github/workflows/ci-check-common.yml b/.github/workflows/ci-check-common.yml
deleted file mode 100644
index 711bd186b..000000000
--- a/.github/workflows/ci-check-common.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#
-# -------------------------------------------------------------
-#
-# CI Check Common Workflow
-#
-# This workflow runs common code quality checks that are not specific
-# to any programming language. Checks include:
-# - PR title validation using conventional commits
-# - license header verification
-# - license list validation
-# - markdown linting using markdownlint
-#
-name: ci-check-common
-
-on:
- workflow_call:
- inputs:
- commits-from:
- description: 'Lower end of the commit range to check'
- required: true
- default: HEAD~1
- type: string
- commits-to:
- description: 'Upper end of the new commit range to check'
- required: true
- default: HEAD
- type: string
-
-jobs:
- check-pr-title:
- name: Check PR Title
- runs-on: ubuntu-latest
- steps:
- - name: Validate PR Title
- uses: amannn/action-semantic-pull-request@v5
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- # Configure types as needed (optional, defaults to common ones)
- types: |
- feat
- fix
- docs
- style
- refactor
- perf
- test
- build
- ci
- chore
- revert
- repo
-
- ci-check-licenses:
- name: Check license headers
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - name: Pull addlicense Docker image
- run: docker pull ghcr.io/google/addlicense:latest
-
- - name: Check license headers
- run: |
- echo "π Checking license headers..."
-
- if docker run --rm -v ${{ github.workspace }}:/src -w /src ghcr.io/google/addlicense:latest -check -f ASF_LICENSE.txt . > missing_files.txt 2>&1; then
- echo "β
All files have proper license headers"
- else
- file_count=$(wc -l < missing_files.txt)
- echo "β Found $file_count files missing license headers:"
- echo ""
- cat missing_files.txt | sed 's/^/ • /'
- echo ""
- echo "π‘ Run 'addlicense -f ASF_LICENSE.txt .' to fix automatically"
- exit 1
- fi
-
- ci-check-licenses-list:
- name: Check licenses list
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - run: scripts/licenses-list.sh --check
-
- markdownlint:
- name: markdownlint
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Setup Node.js
- uses: actions/setup-node@v4
- with:
- node-version: '18'
- - name: Install markdownlint-cli
- run: |
- npm install -g markdownlint-cli
- - name: Run markdownlint
- run: |
- markdownlint '**/*.md' --ignore-path .gitignore
diff --git a/.github/workflows/ci-check-csharp-sdk.yml b/.github/workflows/ci-check-csharp-sdk.yml
deleted file mode 100644
index 5cb857116..000000000
--- a/.github/workflows/ci-check-csharp-sdk.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: ci-check-csharp-sdk
-
-on:
- workflow_dispatch:
- workflow_call:
-
-jobs:
- build:
- name: csharp build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
- - name: Setup .NET 8
- uses: actions/setup-dotnet@v3
- with:
- dotnet-version: '8.0.x'
- - name: Restore dependencies
- working-directory: foreign/csharp
- run: dotnet restore
- - name: Build
- working-directory: foreign/csharp
- run: dotnet build --no-restore
-
- test:
- name: Unit test
- needs: build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
- - name: Setup .NET 8
- uses: actions/setup-dotnet@v3
- with:
- dotnet-version: '8.0.x'
- - name: Restore dependencies
- working-directory: foreign/csharp
- run: dotnet restore
- - name: Build
- working-directory: foreign/csharp
- run: dotnet build --no-restore
- - name: Test
- working-directory: foreign/csharp
- run: |
- dotnet test Iggy_SDK_Tests --no-build --verbosity normal
- dotnet test Iggy_SDK.Tests.Integration --no-build --verbosity normal
diff --git a/.github/workflows/ci-check-go-sdk.yml b/.github/workflows/ci-check-go-sdk.yml
deleted file mode 100644
index 08b2abd5f..000000000
--- a/.github/workflows/ci-check-go-sdk.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-name: ci-check-go-sdk
-
-on:
- workflow_dispatch:
- workflow_call:
-
-jobs:
- build-and-test:
- name: Go Build And Test
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
-
- - name: Setup Go
- uses: actions/setup-go@v5
- with:
- go-version: '1.23.0'
-
- - name: Tidy Check
- working-directory: foreign/go
- run: |
- go mod tidy
- git diff --exit-code
-
- - name: Build Go Modules
- working-directory: foreign/go
- run: go build ./...
-
- - name: Unit Test
- working-directory: foreign/go
- run: go test -v ./...
-
- - name: Run and Verify CLI Commands from Go SDK Examples README
- run: ./scripts/run-go-examples-from-readme.sh
-
- lint:
- strategy:
- matrix:
- module: [ foreign/go, bdd/go, examples/go ]
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-go@v5
- with:
- go-version: '1.23.0'
- - name: golangci-lint
- uses: golangci/golangci-lint-action@v8
- with:
- version: v2.1
- working-directory: ${{ matrix.module }}
\ No newline at end of file
diff --git a/.github/workflows/ci-check-java-sdk.yml b/.github/workflows/ci-check-java-sdk.yml
deleted file mode 100644
index a10498f42..000000000
--- a/.github/workflows/ci-check-java-sdk.yml
+++ /dev/null
@@ -1,81 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check Java Workflow
-#
-# This workflow runs checks for Java code.
-# Checks include:
-# - Java Gradle build
-# - TODO: Add checkstyle verification, will be done after #1753
-# - TODO: Add findbugs
-# - TODO: License header check
-# - TODO: Add code coverage
-# - TODO: Add shell check for scripts under foreign/java
-#
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-check-java-sdk
-
-on:
- workflow_dispatch:
- workflow_call:
-
-jobs:
- build:
- name: Java build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
-
- - name: Setup Java
- uses: actions/setup-java@v4
- with:
- java-version: '17'
- distribution: 'temurin'
-
- # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies.
- # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md
- - name: Setup Gradle
- uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
-
- - name: Build
- run: foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest
-
- test:
- name: Unit test
- needs: build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
-
- - name: Setup Java
- uses: actions/setup-java@v4
- with:
- java-version: '17'
- distribution: 'temurin'
-
- # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies.
- # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md
- - name: Setup Gradle
- uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
-
- - name: Run tests
- run: foreign/java/dev-support/checks/build.sh test
diff --git a/.github/workflows/ci-check-node-sdk.yml b/.github/workflows/ci-check-node-sdk.yml
deleted file mode 100644
index bb4ecfbd7..000000000
--- a/.github/workflows/ci-check-node-sdk.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check Node Workflow
-#
-# This workflow runs checks for nodejs sdk code.
-# Checks include:
-# - npm install dependencies
-# - commit-lint
-# - typescript build
-# - eslint
-# - unit test
-# - TODO: e2e test
-#
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-check-node-sdk
-
-on:
- workflow_dispatch:
- workflow_call:
-
-jobs:
- install:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-node@v4
- with:
- cache-dependency-path: foreign/node/package-lock.json
- node-version: 22
- cache: "npm"
-
- - uses: actions/cache@v4
- env:
- cache-name: cache-node-modules
- with:
- # npm cache files are stored in `~/.npm` on Linux/macOS
- path: ~/.npm
- key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
- restore-keys: |
- ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
-
- - name: install dependencies
- run: cd foreign/node/ && npm ci --ignore-scripts # no husky hooks when in ci
-
- # - name: Validate current commit (last commit) message with commitlint
- # if: github.event_name == 'push'
- # run: npx commitlint --last --verbose
-
- # - name: Validate PR commits messages with commitlint
- # if: github.event_name == 'pull_request'
- # run: npx commitlint --from ${{ github.event.pull_request.base.sha }} --to ${{ github.event.pull_request.head.sha }} --verbose
-
- - name: lint typescript code
- run: cd foreign/node/ && npm run lint
-
- - name: build typescript
- run: cd foreign/node/ && npm run build
-
- - name: run unit test
- run: cd foreign/node/ && npm run test:unit
-
- ### Integration tests (e2e)
-
- # - name: Setup iggy
- # uses: iggy-rs/setup-iggy@v1
-
- # - name: Perform integration tests
- # run: npm run test:e2e
diff --git a/.github/workflows/ci-check-pr.yml b/.github/workflows/ci-check-pr.yml
deleted file mode 100644
index 543c1329f..000000000
--- a/.github/workflows/ci-check-pr.yml
+++ /dev/null
@@ -1,197 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check PR Workflow
-#
-# This workflow validates pull requests to the master branch by detecting changed files
-# and running appropriate checks and tests.
-#
-# Flow:
-# 1. pr-file-changes: Detects which file types were modified (mandatory)
-# 2. ci-check-common: Validates commit message (mandatory)
-# 3. Conditional jobs based on file changes:
-# - For Rust changes: ci-check-rust β ci-test-rust β ci-test-rust-optional & ci-compatibility-rust
-# - For shell changes: ci-check-shell
-# 4. finalize-pr: Determines final PR status based on all job results (mandatory)
-#
-# Dependencies:
-# - ci-check-rust depends on pr-file-changes (outputs.trigger-rust)
-# - ci-test-rust and ci-compatibility-rust depend on ci-check-rust success
-# - ci-check-shell depends on pr-file-changes (outputs.trigger-shell)
-# - finalize-pr depends on all other jobs
-#
-# The workflow fails if any mandatory job fails.
-# Workflow can be triggered manually or on pull request events.
-
-name: ci-check-pr
-
-on:
- workflow_dispatch:
- pull_request:
- branches:
- - master
- types: [ opened, synchronize, reopened ]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- pr-file-changes:
- name: pr-file-changes
- runs-on: ubuntu-latest
- outputs:
- trigger-rust: ${{ steps.changed-files.outputs.RUST_FILES_CHANGED }}
- trigger-shell: ${{ steps.changed-files.outputs.SHELL_FILES_CHANGED }}
- trigger-java-sdk: ${{ steps.changed-files.outputs.JAVA-SDK_FILES_CHANGED }}
- trigger-python-sdk: ${{ steps.changed-files.outputs.PYTHON-SDK_FILES_CHANGED }}
- trigger-go-sdk: ${{ steps.changed-files.outputs.GO-SDK_FILES_CHANGED }}
- trigger-node-sdk: ${{ steps.changed-files.outputs.NODE-SDK_FILES_CHANGED }}
- trigger-csharp-sdk: ${{ steps.changed-files.outputs.CSHARP-SDK_FILES_CHANGED }}
- trigger-bdd: ${{ steps.changed-files.outputs.BDD_FILES_CHANGED }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
- - name: Install python3
- run: |
- sudo apt-get update
- sudo apt-get install -y python3
- python3 --version
- - name: Detect file changes
- id: changed-files
- run: |
- .github/scripts/detect-changed-files.sh master
- - name: List all changed files
- run: |
- if [ "${{ steps.changed-files.outputs.RUST_FILES_CHANGED }}" == "true" ]; then
- echo "One or more rust file(s) has changed."
- echo "List all the files that have changed: ${{ steps.changed-files.outputs.RUST_FILES }}"
- fi
- if [ "${{ steps.changed-files.outputs.SHELL_FILES_CHANGED }}" == "true" ]; then
- echo "One or more shell file(s) has changed."
- echo "List all the files that have changed: ${{ steps.changed-files.outputs.SHELL_FILES }}"
- fi
-
- ci-check-common:
- name: ci-check-common
- uses: ./.github/workflows/ci-check-common.yml
- with:
- commits-from: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || 'HEAD~1' }}
- commits-to: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || 'HEAD' }}
-
- ci-check-rust:
- name: ci-check-rust
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-rust == 'true' }}
- uses: ./.github/workflows/ci-check-rust.yml
-
- ci-test-rust:
- name: ci-test-rust
- needs: ci-check-rust
- if: ${{ needs.ci-check-rust.result == 'success' }}
- uses: ./.github/workflows/ci-test-rust.yml
-
- ci-test-rust-optional:
- name: ci-test-rust-optional
- needs: ci-check-rust
- if: ${{ needs.ci-check-rust.result == 'success' }}
- uses: ./.github/workflows/ci-test-rust-optional.yml
-
- ci-compatibility-rust:
- name: ci-compatibility-rust
- needs: ci-check-rust
- if: ${{ needs.ci-check-rust.result == 'success' }}
- uses: ./.github/workflows/ci-compatibility-rust.yml
- with:
- pr_body: ${{ github.event.pull_request.body }}
-
- ci-check-shell:
- name: ci-check-shell
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-shell == 'true' }}
- uses: ./.github/workflows/ci-check-shell.yml
-
- ci-check-java-sdk:
- name: ci-check-java-sdk
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-java-sdk == 'true' }}
- uses: ./.github/workflows/ci-check-java-sdk.yml
-
- ci-check-python-sdk:
- name: ci-check-python-sdk
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-python-sdk == 'true' }}
- uses: ./.github/workflows/ci-check-python-sdk.yml
-
- ci-check-go-sdk:
- name: ci-check-go-sdk
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-go-sdk == 'true' }}
- uses: ./.github/workflows/ci-check-go-sdk.yml
- ci-check-node-sdk:
- name: ci-check-node-sdk
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-node-sdk == 'true' }}
- uses: ./.github/workflows/ci-check-node-sdk.yml
-
- ci-check-csharp-sdk:
- name: ci-check-csharp-sdk
- needs: pr-file-changes
- if: ${{ needs.pr-file-changes.outputs.trigger-csharp-sdk == 'true' }}
- uses: ./.github/workflows/ci-check-csharp-sdk.yml
-
- ci-test-bdd:
- name: ci-test-bdd
- needs: pr-file-changes
- if: ${{
- needs.pr-file-changes.outputs.trigger-rust == 'true' ||
- needs.pr-file-changes.outputs.trigger-java-sdk == 'true' ||
- needs.pr-file-changes.outputs.trigger-python-sdk == 'true' ||
- needs.pr-file-changes.outputs.trigger-go-sdk == 'true' ||
- needs.pr-file-changes.outputs.trigger-node-sdk == 'true' ||
- needs.pr-file-changes.outputs.trigger-csharp-sdk == 'true' ||
- needs.pr-file-changes.outputs.trigger-bdd == 'true' }}
- uses: ./.github/workflows/ci-test-bdd.yml
-
- finalize_pr:
- runs-on: ubuntu-latest
- needs:
- - pr-file-changes
- - ci-check-common
- - ci-check-rust
- - ci-test-rust
- - ci-compatibility-rust
- - ci-check-shell
- - ci-check-java-sdk
- - ci-check-python-sdk
- - ci-check-go-sdk
- - ci-check-node-sdk
- - ci-check-csharp-sdk
- - ci-test-bdd
- if: always()
- steps:
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Some tests failed
- if: ${{ contains(needs.*.result, 'failure') }}
- run: exit 1
diff --git a/.github/workflows/ci-check-python-sdk.yml b/.github/workflows/ci-check-python-sdk.yml
deleted file mode 100644
index 4f7287ce2..000000000
--- a/.github/workflows/ci-check-python-sdk.yml
+++ /dev/null
@@ -1,165 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check Python SDK Workflow
-#
-# This workflow is integrated from the maturin-generated python.yml
-# and adapted to work within the monorepo structure.
-#
-name: ci-check-python-sdk
-
-on:
- workflow_dispatch:
- workflow_call:
- push:
- tags:
- - "python-v*"
-
-permissions:
- contents: read
-
-jobs:
- linux:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- target: [x86_64, aarch64]
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - name: Cache Rust dependencies
- uses: Swatinem/rust-cache@v2
- with:
- workspaces: |
- . -> target
- foreign/python -> target
- cache-on-failure: true
- key: python-${{ matrix.target }}
- - name: Build wheels
- uses: PyO3/maturin-action@v1
- with:
- target: ${{ matrix.target }}
- working-directory: foreign/python
- before-script-linux: "dnf install -y perl-IPC-Cmd && (python3 -m pip --version || python3 -m ensurepip)"
- manylinux: "2_28"
- args: --release --out dist --interpreter python3.8 python3.9 python3.10 python3.11 python3.12 python3.13 python3.14
- sccache: "true"
- - name: Set up Docker Buildx
- if: matrix.target == 'x86_64'
- uses: docker/setup-buildx-action@v3
- - name: Run tests
- if: matrix.target == 'x86_64'
- working-directory: foreign/python
- run: |
- # Run tests using Docker Compose with caching
- DOCKER_BUILDKIT=1 docker compose -f docker-compose.test.yml up --build --abort-on-container-exit --exit-code-from python-tests
-
- # Clean up
- docker compose -f docker-compose.test.yml down -v
- - name: Upload wheels
- uses: actions/upload-artifact@v4
- with:
- name: wheels-linux-${{ matrix.target }}
- path: foreign/python/dist
-
- # windows:
- # runs-on: windows-latest
- # strategy:
- # matrix:
- # target: [x64]
- # steps:
- # - uses: actions/checkout@v4
- # - uses: actions/setup-python@v5
- # with:
- # python-version: "3.10"
- # architecture: ${{ matrix.target }}
- # - name: Set up NASM # Action disallowed
- # uses: ilammy/setup-nasm@v1
- # - name: Build wheels
- # uses: PyO3/maturin-action@v1
- # with:
- # target: ${{ matrix.target }}
- # working-directory: foreign/python
- # args: --release --out dist --find-interpreter
- # sccache: "true"
- # - name: Upload wheels
- # uses: actions/upload-artifact@v4
- # with:
- # name: wheels-windows-${{ matrix.target }}
- # path: foreign/python/dist
-
- macos:
- runs-on: macos-latest
- strategy:
- matrix:
- target: [x86_64, aarch64]
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-python@v5
- with:
- python-version: "3.10"
- - name: Build wheels
- uses: PyO3/maturin-action@v1
- with:
- target: ${{ matrix.target }}
- working-directory: foreign/python
- args: --release --out dist --find-interpreter
- sccache: "true"
- - name: Upload wheels
- uses: actions/upload-artifact@v4
- with:
- name: wheels-macos-${{ matrix.target }}
- path: foreign/python/dist
-
- sdist:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - name: Build sdist
- uses: PyO3/maturin-action@v1
- with:
- command: sdist
- working-directory: foreign/python
- args: --out dist
- - name: Upload sdist
- uses: actions/upload-artifact@v4
- with:
- name: wheels-sdist
- path: foreign/python/dist
-
- release:
- name: Release
- runs-on: ubuntu-latest
- if: startsWith(github.ref, 'refs/tags/python-v')
- needs: [linux, macos, sdist]
- steps:
- - uses: actions/download-artifact@v4
- with:
- pattern: wheels-*
- merge-multiple: true
- path: dist
- - name: Publish to PyPI
- uses: PyO3/maturin-action@v1
- env:
- MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
- with:
- command: upload
- args: --non-interactive --skip-existing dist/*
diff --git a/.github/workflows/ci-check-rust.yml b/.github/workflows/ci-check-rust.yml
deleted file mode 100644
index b8e45eb67..000000000
--- a/.github/workflows/ci-check-rust.yml
+++ /dev/null
@@ -1,164 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check Rust Workflow
-#
-# This workflow runs checks for Rust code using cargo commands.
-# Checks include:
-# - static analysis using `cargo check`
-# - code formatting using `cargo fmt`
-# - linting using `cargo clippy`
-# - sorted dependencies check using `cargo sort`
-# - documentation tests using `cargo test --doc`
-# - documentation generation using `cargo doc`
-# - unused dependencies check using `cargo machete`
-#
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-check-rust
-
-on:
- workflow_dispatch:
- workflow_call:
-
-env:
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
-
-jobs:
- check-rust-version-sync:
- name: check Rust version sync
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Check Rust versions are synchronized
- run: |
- # rust-toolchain.toml -> 1.89(.0) -> 1.89
- TOOLCHAIN_VERSION=$(sed -En 's/^[[:space:]]*channel[[:space:]]*=[[:space:]]*"([^"]+)".*/\1/p' rust-toolchain.toml)
- to_minor() { echo "$1" | sed -E 's/^([0-9]+)\.([0-9]+).*/\1.\2/'; }
- TOOLCHAIN_MINOR=$(to_minor "$TOOLCHAIN_VERSION")
-
- # Extract X[.Y[.Z]] from Dockerfiles (ignores suffixes like -slim-bookworm), then -> X.Y
- BDD_VERSION=$(sed -En 's/^FROM[[:space:]]+rust:([0-9.]+).*/\1/p' bdd/rust/Dockerfile | head -1)
- BENCH_VERSION=$(sed -En 's/^FROM[[:space:]]+rust:([0-9.]+).*/\1/p' core/bench/dashboard/server/Dockerfile | head -1)
- BDD_MINOR=$(to_minor "$BDD_VERSION")
- BENCH_MINOR=$(to_minor "$BENCH_VERSION")
-
- # Fail if mismatched
- if [ "$TOOLCHAIN_MINOR" != "$BDD_MINOR" ]; then
- echo "ERROR: bdd/rust/Dockerfile uses $BDD_VERSION (-> $BDD_MINOR) but rust-toolchain.toml specifies $TOOLCHAIN_VERSION (-> $TOOLCHAIN_MINOR)" >&2
- exit 1
- fi
-
- if [ "$TOOLCHAIN_MINOR" != "$BENCH_MINOR" ]; then
- echo "ERROR: core/bench/dashboard/server/Dockerfile uses $BENCH_VERSION (-> $BENCH_MINOR) but rust-toolchain.toml specifies $TOOLCHAIN_VERSION (-> $TOOLCHAIN_MINOR)" >&2
- exit 1
- fi
-
- echo "β All Rust versions are synchronized at $TOOLCHAIN_VERSION"
-
- check:
- name: cargo check
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Run cargo check
- run: |
- cargo check
-
- fmt:
- name: cargo fmt
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Run cargo fmt
- run: |
- cargo fmt --all -- --check
-
- clippy:
- name: cargo clippy
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Run cargo clippy
- run: |
- cargo clippy --all-targets --all-features -- -D warnings
-
- sort:
- name: cargo sort
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Install cargo-sort
- uses: taiki-e/install-action@v2
- with:
- tool: cargo-sort
- - name: Run cargo sort
- run: |
- cargo sort --check --workspace
-
- doctest:
- name: cargo test docs
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Run cargo test (doc)
- run: |
- cargo test --doc
- - name: Run cargo doc
- run: |
- cargo doc --no-deps --all-features --quiet
-
- unused_dependencies:
- name: cargo machete
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Install cargo-machete
- uses: taiki-e/install-action@v2
- with:
- tool: cargo-machete
- - name: Run cargo machete
- run: |
- cargo machete --with-metadata
diff --git a/.github/workflows/ci-check-shell.yml b/.github/workflows/ci-check-shell.yml
deleted file mode 100644
index f09d549d5..000000000
--- a/.github/workflows/ci-check-shell.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Check Shell Workflow
-#
-# This workflow runs shellcheck on shell scripts in the repository.
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-check-shell
-on:
- workflow_dispatch:
- workflow_call:
-
-jobs:
- shellcheck:
- name: shellcheck
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Install shellcheck on Linux
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes shellcheck
- - name: Check shell scripts
- run: find scripts -type f -name "*.sh" -exec shellcheck {} +
diff --git a/.github/workflows/ci-compatibility-rust.yml b/.github/workflows/ci-compatibility-rust.yml
deleted file mode 100644
index 6a1d4c96d..000000000
--- a/.github/workflows/ci-compatibility-rust.yml
+++ /dev/null
@@ -1,188 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Compatibility Rust Workflow
-#
-# This workflow runs compatibility tests for Rust code on the master branch for pull requests.
-# This workflow is triggered by the ci-check-pr workflow. It checks if BREAKING_CHANGE in the PR body
-# and commit messages and skips the compatibility tests if found.
-#
-name: backwards_compatibility
-
-on:
- workflow_call:
- inputs:
- pr_body:
- description: 'Pull Request body'
- required: true
- type: string
-
-jobs:
- check_commit_message:
- runs-on: ubuntu-latest
- outputs:
- should_skip: ${{ steps.check_skip.outputs.skip == 'true' }}
- steps:
- - uses: actions/checkout@v4
- with:
- ref: ${{ github.ref }}
- fetch-depth: 0
-
- - name: Print the event payload
- run: cat "$GITHUB_EVENT_PATH"
-
- - name: Check if BREAKING_CHANGE is present in any commits body in the PR
- id: check_commits
- run: |
- BREAKING_CHANGE_FOUND=false
- COMMIT_RANGE=origin/master...HEAD
-
- for COMMIT in $(git log --format=%H $COMMIT_RANGE); do
- COMMIT_MSG=$(git log --format=%B -n 1 $COMMIT)
- COMMIT_MSG_SUBJECT=$(echo "$COMMIT_MSG" | head -n 1)
- COMMIT_MSG_BODY=$(echo "$COMMIT_MSG" | tail -n +3)
- echo "Commit $COMMIT, subject: $COMMIT_MSG_SUBJECT, body: $COMMIT_MSG_BODY"
- if echo "$COMMIT_MSG_BODY" | grep -q "BREAKING_CHANGE"; then
- BREAKING_CHANGE_FOUND=true
- break
- fi
- done
-
- if $BREAKING_CHANGE_FOUND; then
- echo "skip=true" >> $GITHUB_OUTPUT
- echo "'BREAKING_CHANGE' found in commit message, setting skip=true"
- else
- echo "skip=false" >> $GITHUB_OUTPUT
- echo "'BREAKING_CHANGE' not found in commit message, setting skip=false"
- fi
-
- - name: Check if BREAKING_CHANGE is present in pull request body
- id: check_pr_body
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- ENV_PR_BODY: ${{ inputs.pr_body }}
- run: |
- PR_BODY="$ENV_PR_BODY"
- echo "Pull Request body: $PR_BODY"
- if [[ "$PR_BODY" == *"BREAKING_CHANGE"* ]]; then
- echo "skip=true" >> $GITHUB_OUTPUT
- echo "'BREAKING_CHANGE' found in pull request body, setting skip=true"
- else
- echo "skip=false" >> $GITHUB_OUTPUT
- echo "'BREAKING_CHANGE' not found in pull request body, setting skip=false"
- fi
- - name: Check For Skip Condition
- id: check_skip
- run: |
- if ${{ steps.check_commits.outputs.skip == 'true' }} || ${{ steps.check_pr_body.outputs.skip == 'true' }}; then
- echo "skip=true" >> $GITHUB_OUTPUT
- else
- echo "skip=false" >> $GITHUB_OUTPUT
- fi
-
- build_and_test:
- runs-on: ubuntu-latest
- needs: check_commit_message
- if: ${{ needs.check_commit_message.outputs.should_skip != 'true' }}
- env:
- BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
- steps:
- - run: echo "${{ needs.check_commit_message.outputs.should_skip == 'true' }}"
-
- - name: Checkout code (origin/master)
- uses: actions/checkout@v4
- with:
- ref: master
- fetch-depth: 0
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "backwards-compatibility"
-
- - name: Build iggy-server (origin/master)
- run: IGGY_CI_BUILD=true cargo build
-
- - uses: JarvusInnovations/background-action@v1
- name: Run iggy-server in background (origin/master)
- with:
- run: |
- target/debug/iggy-server &
- wait-on: tcp:localhost:8090
- wait-for: 1m
- log-output: true
- log-output-if: timeout
- tail: true
-
- - name: Run producer bench (origin/master)
- timeout-minutes: 1
- run: target/debug/iggy-bench --verbose --message-batches 50 --messages-per-batch 100 pinned-producer tcp
-
- - name: Run consumer bench (origin/master)
- timeout-minutes: 1
- run: target/debug/iggy-bench --verbose --message-batches 50 --messages-per-batch 100 pinned-consumer tcp
-
- - name: Stop iggy-server (origin/master)
- timeout-minutes: 1
- run: pkill -15 iggy-server && while pgrep -l iggy-server; do sleep 2; done;
-
- - name: Print iggy-server logs (origin/master)
- run: cat local_data/logs/iggy*
-
- - name: Remove iggy-server logs (origin/master)
- run: rm local_data/logs/iggy*
-
- - name: Copy local_data directory (origin/master)
- run: cp -r local_data ..
-
- - name: Checkout code (PR)
- uses: actions/checkout@v4
- with:
- repository: ${{ github.event.pull_request.head.repo.full_name }}
- ref: ${{ github.event.pull_request.head.ref }}
- fetch-depth: 0
- clean: false
-
- - name: Build iggy-server (PR)
- run: IGGY_CI_BUILD=true cargo build
-
- - name: Restore local_data directory (PR)
- run: cp -r ../local_data .
-
- - uses: JarvusInnovations/background-action@v1
- name: Run iggy-server in background (PR)
- with:
- run: |
- target/debug/iggy-server &
- wait-on: tcp:localhost:8090
- wait-for: 1m
- log-output: true
- log-output-if: timeout
- tail: true
-
- - name: Run consumer bench (PR)
- timeout-minutes: 1
- run: target/debug/iggy-bench --verbose --message-batches 50 --messages-per-batch 100 pinned-consumer tcp
-
- - name: Stop iggy-server (PR)
- timeout-minutes: 1
- run: pkill iggy-server && while pgrep -l iggy-server; do sleep 1; done;
-
- - name: Print server logs (PR)
- run: cat local_data/logs/iggy*
diff --git a/.github/workflows/ci-coverage-rust.yml b/.github/workflows/ci-coverage-rust.yml
deleted file mode 100644
index b7af211c5..000000000
--- a/.github/workflows/ci-coverage-rust.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Coverage Rust Workflow
-#
-# This workflow runs tests for Rust code and generates coverage report.
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-coverage-rust
-
-on:
- workflow_dispatch:
- workflow_call:
-
-env:
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
- GITHUB_BOT_CONTEXT_STRING: "coveralls coverage reporting job"
-
-jobs:
- coverage:
- name: coverage
- runs-on: ubuntu-latest
- timeout-minutes: 30
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "coverage"
-
- - name: Install gnome-keyring, keyutils and lcov
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes gnome-keyring keyutils lcov
- rm -f $HOME/.local/share/keyrings/*
- echo -n "test" | gnome-keyring-daemon --unlock
-
- - name: Install cargo-llvm-cov
- uses: taiki-e/install-action@v2
- with:
- tool: cargo-llvm-cov
-
- - name: Build
- run: source <(cargo llvm-cov show-env --export-prefix) && cargo build
-
- - name: Test
- run: source <(cargo llvm-cov show-env --export-prefix) && cargo test
-
- - name: Generate code coverage
- run: source <(cargo llvm-cov show-env --export-prefix) && cargo llvm-cov report --lcov --output-path coverage.lcov --ignore-filename-regex '(bench\/|integration\/|tools\/|tpc\/)'
-
- - name: Display code coverage
- run: lcov --summary coverage.lcov
-
- - name: Upload code to Coveralls
- # Do not upload coverage for user triggered workflows
- if: ${{ github.event_name == 'workflow_call' }}
- uses: coverallsapp/github-action@v2
- with:
- fail-on-error: false
- github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ci-prod-rust.yml b/.github/workflows/ci-prod-rust.yml
deleted file mode 100644
index d823bed60..000000000
--- a/.github/workflows/ci-prod-rust.yml
+++ /dev/null
@@ -1,170 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Production Rust Workflow
-#
-# This workflow runs production actions for Rust code.
-# It is triggered by changes to Rust files. It builds and
-# iggy-server and iggy-cli binaries and creates Docker images
-# for x86_64 and aarch64 architectures, then images are pushed to
-# Docker Hub by digest. In the next step, the workflow creates
-# edge tag based on the digests and pushes the manifest list
-# to Docker Hub.
-#
-# This workflow can be triggered only oby other workflows.
-#
-name: ci-prod-rust
-
-on:
- workflow_call:
- secrets:
- DOCKERHUB_USER:
- required: true
- DOCKERHUB_TOKEN:
- required: true
-
-env:
- DOCKERHUB_REGISTRY_NAME: apache/iggy
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- docker-edge-build:
- name: ${{ matrix.platform.target }}
- runs-on: ubuntu-latest
- timeout-minutes: 30
- strategy:
- matrix:
- platform:
- - target: x86_64-unknown-linux-musl
- docker_arch: linux/amd64
- qemu: false
- qemu_arch: ""
- - target: aarch64-unknown-linux-musl
- docker_arch: linux/arm64/v8
- qemu: true
- qemu_arch: "arm64"
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "${{ matrix.platform.target }}"
- - name: Install musl-tools, gnome-keyring and keyutils
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes musl-tools gnome-keyring keyutils
- rm -f $HOME/.local/share/keyrings/*
- echo -n "test" | gnome-keyring-daemon --unlock
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Install cross tool
- uses: taiki-e/install-action@v2
- with:
- tool: cross
- - name: Set verbose flag
- shell: bash
- run: echo "VERBOSE_FLAG=$([[ "${RUNNER_DEBUG}" = "1" ]] && echo "--verbose" || echo "")" >> $GITHUB_ENV
- - name: Build iggy-server ${{ matrix.platform.target }} release binary
- run: cross +stable build ${{ env.VERBOSE_FLAG }} --release --target ${{ matrix.platform.target }} --bin iggy-server
- - name: Build iggy-cli ${{ matrix.platform.target }} release binary
- run: cross +stable build ${{ env.VERBOSE_FLAG }} --release --no-default-features --target ${{ matrix.platform.target }} --bin iggy
- - name: Print build ready message
- run: echo "::notice ::Build binary artifacts for ${{ matrix.platform.target }}"
- - name: Set up QEMU
- if: ${{ matrix.platform.qemu == true }}
- uses: docker/setup-qemu-action@v3
- with:
- platforms: ${{ matrix.platform.qemu_arch }}
- - name: Set up Docker
- uses: docker/setup-docker-action@v4
- - name: Set up Docker buildx
- uses: docker/setup-buildx-action@v3
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Docker meta
- id: meta
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- - name: Build and push by digest
- id: build
- uses: docker/build-push-action@v6
- with:
- context: .
- file: Dockerfile.ci
- platforms: ${{ matrix.platform.docker_arch }}
- labels: ${{ steps.meta.outputs.labels }}
- outputs: type=image,name=${{ env.DOCKERHUB_REGISTRY_NAME }},push-by-digest=true,name-canonical=true
- push: true
- build-args: |
- IGGY_CMD_PATH=target/${{ matrix.platform.target }}/release/iggy
- IGGY_SERVER_PATH=target/${{ matrix.platform.target }}/release/iggy-server
- - name: Export digest
- run: |
- mkdir -p /tmp/digests
- digest="${{ steps.build.outputs.digest }}"
- touch "/tmp/digests/${digest#sha256:}"
- ls -l /tmp/digests
- - name: Upload digest
- uses: actions/upload-artifact@v4
- with:
- name: digests-${{ matrix.platform.target }}
- path: /tmp/digests/*
- if-no-files-found: error
- retention-days: 1
-
- docker_edge_publish:
- name: Docker edge tag publish
- runs-on: ubuntu-latest
- needs: docker-edge-build
- steps:
- - name: Download digests
- uses: actions/download-artifact@v4
- with:
- pattern: "digests-*"
- merge-multiple: true
- path: /tmp/digests
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
- - name: Create Docker meta
- if: ${{ needs.tag.outputs.tag_created == '' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: edge
- - name: Create manifest list and push
- working-directory: /tmp/digests
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ env.DOCKERHUB_REGISTRY_NAME }}@sha256:%s ' *)
- - name: Inspect image
- run: |
- docker buildx imagetools inspect ${{ env.DOCKERHUB_REGISTRY_NAME }}:edge
diff --git a/.github/workflows/ci-prod.yml b/.github/workflows/ci-prod.yml
deleted file mode 100644
index 04239ea9d..000000000
--- a/.github/workflows/ci-prod.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Production Workflow
-#
-name: ci-prod
-
-on:
- push:
- branches:
- - master
-
-jobs:
- merge-file-changes:
- name: merge-file-changes
- runs-on: ubuntu-latest
- outputs:
- trigger-rust: ${{ steps.changed-files.outputs.RUST-PROD_FILES_CHANGED }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
- - name: Install python3
- run: |
- sudo apt-get update
- sudo apt-get install -y python3
- python3 --version
- - name: Detect file changes
- id: changed-files
- run: |
- .github/scripts/detect-changed-files.sh master
- - name: List all changed files
- run: |
- if [ "${{ steps.changed-files.outputs.RUST-PROD_FILES_CHANGED }}" == "true" ]; then
- echo "One or more rust file(s) has changed."
- echo "List all the files that have changed: ${{ steps.changed-files.outputs.RUST-PROD_FILES }}"
- fi
-
- ci-prod-rust:
- name: ci-prod-rust
- needs: merge-file-changes
- if: ${{ needs.merge-file-changes.outputs.trigger-rust == 'true' }}
- uses: ./.github/workflows/ci-prod-rust.yml
- secrets:
- DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
- DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-
- finalize-prod:
- runs-on: ubuntu-latest
- needs:
- - ci-prod-rust
- if: always()
- steps:
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Some tests failed
- if: ${{ contains(needs.*.result, 'failure') }}
- run: exit 1
diff --git a/.github/workflows/ci-publish-go.yml b/.github/workflows/ci-publish-go.yml
deleted file mode 100644
index 8b52a66a4..000000000
--- a/.github/workflows/ci-publish-go.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: ci-publish-go
-on:
- push:
- tags:
- - "foreign/go/v*"
-
-jobs:
- validate:
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- if [[ ! "$TAG" =~ ^foreign/go/v[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z]+([.-][0-9A-Za-z]+)*)?$ ]]; then
- echo "Tag $TAG does not match required format: foreign/go/vX.Y.Z"
- exit 1
- fi
- echo "Valid Go module tag: $TAG"
-
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Check go.mod module path in foreign/go
- run: |
- MOD_PATH=$(grep '^module ' foreign/go/go.mod | awk '{print $2}')
- if [[ "$MOD_PATH" != "github.com/${{ github.repository }}/foreign/go" ]]; then
- echo "go.mod module path ($MOD_PATH) does not match expected path: github.com/${{ github.repository }}/foreign/go"
- exit 1
- fi
- echo "go.mod module path is correct: $MOD_PATH"
-
- - name: Check if go.mod version matches tag
- run: |
- TAG_VERSION="${TAG#foreign/go/}"
- # Check if go.mod contains this version in replace or require (not strictly necessary, but can be used for extra checks)
- echo "Tag version is $TAG_VERSION (no strict version field in go.mod to check for single module)"
- # (No error here, just informative)
-
- - name: Run go mod tidy
- working-directory: foreign/go
- run: |
- go mod tidy
- git diff --exit-code || (echo 'Please run "go mod tidy" and commit the changes before tagging.' && exit 1)
-
- - name: Run Go tests
- working-directory: foreign/go
- run: go test -v ./...
-
diff --git a/.github/workflows/ci-test-bdd.yml b/.github/workflows/ci-test-bdd.yml
deleted file mode 100644
index 2f024d04c..000000000
--- a/.github/workflows/ci-test-bdd.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: Cross-SDK BDD Tests
-
-on:
- workflow_call:
-
-jobs:
- bdd-tests:
- name: BDD tests (${{ matrix.sdk }})
- runs-on: ubuntu-latest
- strategy:
- matrix:
- sdk: [rust, python, go, node, csharp]
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache Rust dependencies
- uses: Swatinem/rust-cache@v2
- with:
- workspaces: |
- . -> target
- foreign/python -> target
- cache-on-failure: true
- key: bdd-${{ matrix.sdk }}
-
- - name: Run BDD tests for ${{ matrix.sdk }}
- run: ./scripts/run-bdd-tests.sh ${{ matrix.sdk }}
-
- - name: Clean up Docker resources
- if: always()
- run: ./scripts/run-bdd-tests.sh clean
diff --git a/.github/workflows/ci-test-rust-optional.yml b/.github/workflows/ci-test-rust-optional.yml
deleted file mode 100644
index cff1e5e1e..000000000
--- a/.github/workflows/ci-test-rust-optional.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Test Rust Optional Workflow
-#
-# This workflow runs tests for Rust code on aarch64-apple-darwin target and generates coverage report.
-# This workflow is optional and can be triggered manually or by other workflows.
-#
-name: ci-test-rust-optional
-
-on:
- workflow_dispatch:
- workflow_call:
-
-env:
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- ci-coverage-rust:
- name: ci-coverage-rust
- uses: ./.github/workflows/ci-coverage-rust.yml
-
- aarch64-apple-darwin:
- name: aarch64-apple-darwin
- runs-on: macos-latest
- timeout-minutes: 30
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "aarch64-apple-darwin"
- - name: Prepare aarch64-apple-darwin toolchain
- run: |
- rustup target add aarch64-apple-darwin
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Set verbose flag
- shell: bash
- run: echo "VERBOSE_FLAG=$([[ "${RUNNER_DEBUG}" = "1" ]] && echo "--verbose" || echo "")" >> $GITHUB_ENV
- - name: Build binary
- run: cargo build ${{ env.VERBOSE_FLAG }} --target aarch64-apple-darwin
- - name: Run tests
- run: cargo test ${{ env.VERBOSE_FLAG }} --target aarch64-apple-darwin
- - name: Check if workspace is clean
- run: git status --porcelain
diff --git a/.github/workflows/ci-test-rust.yml b/.github/workflows/ci-test-rust.yml
deleted file mode 100644
index 39ba91a23..000000000
--- a/.github/workflows/ci-test-rust.yml
+++ /dev/null
@@ -1,105 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# -------------------------------------------------------------
-#
-# CI Test Rust Workflow
-#
-# This workflow runs tests for Rust code.
-# All tests are run on multiple targets:
-# - x86_64-unknown-linux-musl
-# - x86_64-unknown-linux-gnu
-# - x86_64-pc-windows-msvc
-#
-# This workflow can be triggered manually or by other workflows.
-#
-name: ci-test-rust
-
-on:
- workflow_dispatch:
- workflow_call:
-
-env:
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- x86_64-unknown-linux:
- name: ${{ matrix.target }}
- runs-on: ubuntu-latest
- timeout-minutes: 30
- strategy:
- matrix:
- target: [x86_64-unknown-linux-musl, x86_64-unknown-linux-gnu]
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "${{ matrix.target }}"
- - name: Install musl-tools, gnome-keyring and keyutils
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes musl-tools gnome-keyring keyutils
- rm -f $HOME/.local/share/keyrings/*
- echo -n "test" | gnome-keyring-daemon --unlock
- - name: Prepare ${{ matrix.target }} toolchain
- run: |
- rustup target add ${{ matrix.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Set verbose flag
- shell: bash
- run: echo "VERBOSE_FLAG=$([[ "${RUNNER_DEBUG}" = "1" ]] && echo "--verbose" || echo "")" >> $GITHUB_ENV
- - name: Build binary ${{ matrix.target }}
- run: cargo build ${{ env.VERBOSE_FLAG }} --target ${{ matrix.target }}
- - name: Run tests ${{ matrix.target }}
- run: cargo test ${{ env.VERBOSE_FLAG }} --target ${{ matrix.target }}
- - name: Check CLI examples from README
- run: ./scripts/run-rust-examples-from-readme.sh ${{ matrix.target }}
- - name: Check if workspace is clean
- run: git status --porcelain
-
- x86_64-pc-windows-msvc:
- name: x86_64-pc-windows-msvc
- runs-on: windows-latest
- timeout-minutes: 30
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "x86_64-pc-windows-msvc"
- - name: Prepare x86_64-pc-windows-msvc toolchain
- run: |
- rustup target add x86_64-pc-windows-msvc
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
- - name: Set verbose flag
- shell: pwsh
- run: |
- if ($env:RUNNER_DEBUG -eq "1") {
- echo "VERBOSE_FLAG=--verbose" >> $env:GITHUB_ENV
- } else {
- echo "VERBOSE_FLAG=" >> $env:GITHUB_ENV
- }
- - name: Build iggy package
- run: cargo build ${{ env.VERBOSE_FLAG }} --target x86_64-pc-windows-msvc -p iggy
- - name: Build iggy-cli binary
- run: cargo build ${{ env.VERBOSE_FLAG }} --target x86_64-pc-windows-msvc --bin iggy
- - name: Check if workspace is clean
- run: git status --porcelain
diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml
new file mode 100644
index 000000000..03a83295e
--- /dev/null
+++ b/.github/workflows/post-merge.yml
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: Post-merge
+
+on:
+ push:
+ branches: [master]
+
+permissions:
+ contents: read
+ packages: write
+ id-token: write
+
+concurrency:
+ group: post-merge-${{ github.ref }}
+ cancel-in-progress: false
+
+env:
+ IGGY_CI_BUILD: true
+
+jobs:
+ plan:
+ name: Plan dockerhub components
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.mk.outputs.matrix }}
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Load publish config (base64)
+ id: cfg
+ shell: bash
+ run: |
+ if ! command -v yq >/dev/null 2>&1; then
+ YQ_VERSION="v4.47.1"
+ YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
+ curl -sSL -o /usr/local/bin/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
+ echo "${YQ_CHECKSUM} /usr/local/bin/yq" | sha256sum -c - || exit 1
+ chmod +x /usr/local/bin/yq
+ fi
+ echo "components_b64=$(yq -o=json -I=0 '.components' .github/config/publish.yml | base64 -w0)" >> "$GITHUB_OUTPUT"
+
+ - name: Build matrix
+ id: mk
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const b64 = `${{ steps.cfg.outputs.components_b64 }}` || '';
+ if (!b64) {
+ core.setOutput('matrix', JSON.stringify({ include: [{ component: 'noop' }] }));
+ return;
+ }
+ const comps = JSON.parse(Buffer.from(b64, 'base64').toString('utf8'));
+ const include = Object.entries(comps)
+ .filter(([_, v]) => v && v.registry === 'dockerhub')
+ .map(([k]) => ({ component: k }));
+ const uniq = Array.from(new Map(include.map(i => [i.component, i])).values());
+ core.setOutput('matrix', JSON.stringify(uniq.length ? { include: uniq } : { include: [{ component: 'noop' }] }));
+
+ docker-edge:
+ name: ${{ matrix.component }}
+ needs: plan
+ if: ${{ fromJson(needs.plan.outputs.matrix).include[0].component != 'noop' }}
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.plan.outputs.matrix) }}
+ env:
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: ./.github/actions/utils/docker-buildx
+ with:
+ task: publish
+ libc: musl
+ component: ${{ matrix.component }}
+ version: edge
+ dry_run: ${{ github.event.repository.fork }} # forks: always dry-run
diff --git a/.github/workflows/post_publish_server.yml b/.github/workflows/post_publish_server.yml
deleted file mode 100644
index d82ac7b0f..000000000
--- a/.github/workflows/post_publish_server.yml
+++ /dev/null
@@ -1,105 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: post_publish_server
-on:
- workflow_dispatch:
- workflow_run:
- workflows: [ "publish_server" ]
- types:
- - completed
-
-jobs:
- post_publish_server:
- runs-on: ubuntu-latest
- if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch'}}
- steps:
- - uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "post-publish-server"
-
- - name: Build binary
- run: cargo build
-
- - uses: addnab/docker-run-action@v3
- name: Spin up Docker Container
- with:
- image: apache/iggy:latest
- options: -d -p 8090:8090
- run: /iggy/iggy-server
-
- - name: Wait till iggy-server has bound to TCP 8090 port
- timeout-minutes: 1
- run: |
- while ! nc -z 127.0.0.1 8090; do
- sleep 1
- done
- sleep 1
- ss -tuln | grep :8090
-
- - name: Test Benchmark - Producer
- timeout-minutes: 1
- run: |
- ./target/debug/iggy-bench --skip-server-start --message-batches 100 --messages-per-batch 100 pinned-producer tcp --server-address 127.0.0.1:8090
-
- - name: Test Benchmark - Consumer
- timeout-minutes: 1
- run: |
- ./target/debug/iggy-bench --skip-server-start --message-batches 100 --messages-per-batch 100 pinned-consumer tcp --server-address 127.0.0.1:8090
-
- - name: Test Benchmark - Producer and Consumer
- timeout-minutes: 1
- run: |
- ./target/debug/iggy-bench --skip-server-start --message-batches 100 --messages-per-batch 100 pinned-producer-and-consumer tcp --server-address 127.0.0.1:8090
-
- - name: Check if number of messages is correct
- timeout-minutes: 1
- run: |
- STATS=$(./target/debug/iggy -u iggy -p iggy stats)
- echo "$STATS"
- MESSAGE_COUNT=$(./target/debug/iggy -u iggy -p iggy -q stats -o json | jq '.messages_count')
- readonly EXPECTED_MESSAGE_COUNT=160000
- if [ "$MESSAGE_COUNT" -ne "$EXPECTED_MESSAGE_COUNT" ]; then
- echo "Expected message count to be $EXPECTED_MESSAGE_COUNT, but got $MESSAGE_COUNT"
- exit 1
- fi
-
- - name: Clean up
- run: docker rm -f iggy_container
-
- finalize_post_publish_server:
- runs-on: ubuntu-latest
- needs: [ post_publish_server ]
- if: always()
- steps:
- - uses: actions/checkout@v4
-
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name == 'workflow_run' }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "post release docker container test"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/pre-merge.yml b/.github/workflows/pre-merge.yml
new file mode 100644
index 000000000..5364afe6f
--- /dev/null
+++ b/.github/workflows/pre-merge.yml
@@ -0,0 +1,426 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: Pre-merge
+on:
+ pull_request:
+ branches: [master]
+ # TODO(hubcio): remove push, no need to do it
+ push:
+ branches: [master]
+ workflow_dispatch:
+
+env:
+ IGGY_CI_BUILD: true
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}
+
+permissions:
+ contents: read
+ security-events: write
+ pull-requests: read
+
+jobs:
+ # Common checks - always run
+ common:
+ name: Common checks
+ uses: ./.github/workflows/_common.yml
+ permissions:
+ contents: read
+ pull-requests: read
+
+ # Detect changes and build matrices
+ detect:
+ name: Detect changes
+ uses: ./.github/workflows/_detect.yml
+
+ # Rust components
+ test-rust:
+ name: Rust • ${{ matrix.component }}/${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.rust_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.rust_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Python SDK
+ test-python:
+ name: Python • ${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.python_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.python_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Node SDK
+ test-node:
+ name: Node • ${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.node_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.node_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Go SDK
+ test-go:
+ name: Go • ${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.go_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.go_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Java SDK
+ test-java:
+ name: Java • ${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.java_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.java_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # C# SDK
+ test-csharp:
+ name: C# • ${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.csharp_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.csharp_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Other components
+ test-other:
+ name: Other • ${{ matrix.component }}/${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.other_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.other_matrix) }}
+ uses: ./.github/workflows/_test.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # BDD Tests
+ test-bdd:
+ name: BDD • ${{ matrix.component }}/${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.bdd_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.bdd_matrix) }}
+ uses: ./.github/workflows/_test_bdd.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Examples Tests
+ test-examples:
+ name: Examples • ${{ matrix.component }}/${{ matrix.task }}
+ needs: detect
+ if: ${{ fromJson(needs.detect.outputs.examples_matrix).include[0].component != 'noop' }}
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.detect.outputs.examples_matrix) }}
+ uses: ./.github/workflows/_test_examples.yml
+ with:
+ component: ${{ matrix.component }}
+ task: ${{ matrix.task }}
+
+ # Final status check
+ status:
+ name: CI Status
+ runs-on: ubuntu-latest
+ needs: [common, detect, test-rust, test-python, test-node, test-go, test-java, test-csharp, test-bdd, test-examples, test-other]
+ if: always()
+ steps:
+ - name: Get job execution times
+ id: times
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const jobs = await github.rest.actions.listJobsForWorkflowRun({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: context.runId
+ });
+
+ const jobTimes = {};
+ const jobStatuses = {};
+
+ const formatDuration = (ms) => {
+ const seconds = Math.floor(ms / 1000);
+ const minutes = Math.floor(seconds / 60);
+ const hours = Math.floor(minutes / 60);
+ if (hours > 0) {
+ return `${hours}h ${minutes % 60}m ${seconds % 60}s`;
+ } else if (minutes > 0) {
+ return `${minutes}m ${seconds % 60}s`;
+ } else if (seconds > 0) {
+ return `${seconds}s`;
+ } else {
+ return '< 1s';
+ }
+ };
+
+ // Log job names for debugging
+ console.log('Job names found:');
+ for (const job of jobs.data.jobs) {
+ console.log(` - ${job.name}: ${job.status} (conclusion: ${job.conclusion || 'N/A'})`);
+ jobStatuses[job.name] = job.conclusion || job.status;
+
+ // Only show duration for jobs that actually ran
+ if (job.started_at && job.completed_at) {
+ const start = new Date(job.started_at);
+ const end = new Date(job.completed_at);
+ const duration = end - start;
+ jobTimes[job.name] = formatDuration(duration);
+ } else if (job.started_at && !job.completed_at) {
+ // Job is still running
+ const start = new Date(job.started_at);
+ const duration = Date.now() - start;
+ jobTimes[job.name] = formatDuration(duration) + ' ⏳';
+ } else {
+ // Job was skipped or hasn't started
+ jobTimes[job.name] = null;
+ }
+ }
+
+ // Helper to find job info for a component
+ const findJobInfo = (prefix) => {
+ for (const [name, status] of Object.entries(jobStatuses)) {
+ if (name.startsWith(prefix)) {
+ return {
+ time: jobTimes[name],
+ status: status
+ };
+ }
+ }
+ return { time: null, status: 'skipped' };
+ };
+
+ // Format duration based on job status
+ const formatJobDuration = (info) => {
+ if (info.status === 'skipped') return '-';
+ if (info.time === null) return '-';
+ return info.time;
+ };
+
+ // Set outputs for each component
+ const rust = findJobInfo('Rust •');
+ const python = findJobInfo('Python •');
+ const node = findJobInfo('Node •');
+ const go = findJobInfo('Go •');
+ const java = findJobInfo('Java •');
+ const csharp = findJobInfo('C# •');
+ const bdd = findJobInfo('BDD •');
+ const examples = findJobInfo('Examples •');
+ const other = findJobInfo('Other •');
+
+ // For non-matrix jobs, check by exact name
+ const common = jobStatuses['Common checks']
+ ? { time: jobTimes['Common checks'], status: jobStatuses['Common checks'] }
+ : { time: null, status: 'skipped' };
+ const detect = jobStatuses['Detect changes']
+ ? { time: jobTimes['Detect changes'], status: jobStatuses['Detect changes'] }
+ : { time: null, status: 'skipped' };
+
+ // Output formatted durations
+ core.setOutput('rust_time', formatJobDuration(rust));
+ core.setOutput('python_time', formatJobDuration(python));
+ core.setOutput('node_time', formatJobDuration(node));
+ core.setOutput('go_time', formatJobDuration(go));
+ core.setOutput('java_time', formatJobDuration(java));
+ core.setOutput('csharp_time', formatJobDuration(csharp));
+ core.setOutput('bdd_time', formatJobDuration(bdd));
+ core.setOutput('examples_time', formatJobDuration(examples));
+ core.setOutput('other_time', formatJobDuration(other));
+ core.setOutput('common_time', formatJobDuration(common));
+ core.setOutput('detect_time', formatJobDuration(detect));
+
+ // Calculate total time - find the earliest job start time
+ let earliestStart = null;
+ let latestEnd = null;
+ for (const job of jobs.data.jobs) {
+ if (job.started_at) {
+ const start = new Date(job.started_at);
+ if (!earliestStart || start < earliestStart) {
+ earliestStart = start;
+ }
+ }
+ if (job.completed_at) {
+ const end = new Date(job.completed_at);
+ if (!latestEnd || end > latestEnd) {
+ latestEnd = end;
+ }
+ }
+ }
+
+ if (earliestStart && latestEnd) {
+ const totalDuration = latestEnd - earliestStart;
+ core.setOutput('total_time', formatDuration(totalDuration));
+ } else if (earliestStart) {
+ const totalDuration = Date.now() - earliestStart;
+ core.setOutput('total_time', formatDuration(totalDuration) + ' (running)');
+ } else {
+ core.setOutput('total_time', '-');
+ }
+
+ return jobTimes;
+
+ - name: Check status
+ run: |
+ set -euxo pipefail
+ echo "## CI Summary" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ # Helper function to format status with appropriate emoji
+ format_status() {
+ local status="$1"
+ local duration="$2"
+
+ case "$status" in
+ "success")
+ if [[ "$duration" == "-" ]]; then
+ echo "✅ success"
+ else
+ echo "✅ success"
+ fi
+ ;;
+ "failure")
+ echo "❌ failure"
+ ;;
+ "cancelled")
+ echo "🚫 cancelled"
+ ;;
+ "skipped")
+ echo "⏭️ skipped"
+ ;;
+ *)
+ echo "⏸️ $status"
+ ;;
+ esac
+ }
+
+ # Language test results with timing
+ echo "### Test Results" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "| Component | Status | Duration |" >> $GITHUB_STEP_SUMMARY
+ echo "|-----------|--------|----------|" >> $GITHUB_STEP_SUMMARY
+
+ # Detection and Common checks always run
+ detect_status=$(format_status "${{ needs.detect.result }}" "${{ steps.times.outputs.detect_time }}")
+ common_status=$(format_status "${{ needs.common.result }}" "${{ steps.times.outputs.common_time }}")
+ echo "| 🔍 Detection | $detect_status | ${{ steps.times.outputs.detect_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 📋 Common Checks | $common_status | ${{ steps.times.outputs.common_time }} |" >> $GITHUB_STEP_SUMMARY
+
+ # Language/component tests
+ rust_status=$(format_status "${{ needs.test-rust.result }}" "${{ steps.times.outputs.rust_time }}")
+ python_status=$(format_status "${{ needs.test-python.result }}" "${{ steps.times.outputs.python_time }}")
+ node_status=$(format_status "${{ needs.test-node.result }}" "${{ steps.times.outputs.node_time }}")
+ go_status=$(format_status "${{ needs.test-go.result }}" "${{ steps.times.outputs.go_time }}")
+ java_status=$(format_status "${{ needs.test-java.result }}" "${{ steps.times.outputs.java_time }}")
+ csharp_status=$(format_status "${{ needs.test-csharp.result }}" "${{ steps.times.outputs.csharp_time }}")
+ bdd_status=$(format_status "${{ needs.test-bdd.result }}" "${{ steps.times.outputs.bdd_time }}")
+ examples_status=$(format_status "${{ needs.test-examples.result }}" "${{ steps.times.outputs.examples_time }}")
+ other_status=$(format_status "${{ needs.test-other.result }}" "${{ steps.times.outputs.other_time }}")
+
+ echo "| 🦀 Rust | $rust_status | ${{ steps.times.outputs.rust_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 🐍 Python | $python_status | ${{ steps.times.outputs.python_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 🟢 Node | $node_status | ${{ steps.times.outputs.node_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 🐹 Go | $go_status | ${{ steps.times.outputs.go_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| ☕ Java | $java_status | ${{ steps.times.outputs.java_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 🔷 C# | $csharp_status | ${{ steps.times.outputs.csharp_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 🧪 BDD | $bdd_status | ${{ steps.times.outputs.bdd_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 📚 Examples | $examples_status | ${{ steps.times.outputs.examples_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "| 📦 Other | $other_status | ${{ steps.times.outputs.other_time }} |" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "**Total workflow time:** ${{ steps.times.outputs.total_time }}" >> $GITHUB_STEP_SUMMARY
+
+ # Check for failures
+ if [[ "${{ needs.common.result }}" == "failure" ]] || \
+ [[ "${{ needs.detect.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-rust.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-python.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-node.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-go.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-java.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-csharp.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-bdd.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-examples.result }}" == "failure" ]] || \
+ [[ "${{ needs.test-other.result }}" == "failure" ]]; then
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "❌ **CI Failed** - Please check the logs for details." >> $GITHUB_STEP_SUMMARY
+ exit 1
+ elif [[ "${{ needs.common.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.detect.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-rust.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-python.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-node.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-go.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-java.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-csharp.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-bdd.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-examples.result }}" == "cancelled" ]] || \
+ [[ "${{ needs.test-other.result }}" == "cancelled" ]]; then
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "⚠️ **CI Cancelled** - The workflow was cancelled." >> $GITHUB_STEP_SUMMARY
+ exit 1
+ else
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "✅ **CI Passed** - All checks completed successfully!" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ finalize_pr:
+ runs-on: ubuntu-latest
+ needs:
+ - status
+ if: always()
+ steps:
+ - name: Everything is fine
+ if: ${{ !(contains(needs.*.result, 'failure')) }}
+ run: exit 0
+
+ - name: Some tests failed
+ if: ${{ contains(needs.*.result, 'failure') }}
+ run: exit 1
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 000000000..162d48c73
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,947 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: Publish
+
+on:
+ workflow_dispatch:
+ inputs:
+ dry_run:
+ description: "Dry run (build/test only, no actual publish)"
+ type: boolean
+ default: true
+ use_latest_ci:
+ description: "Use latest CI configuration and scripts from master branch (recommended for compatibility)"
+ type: boolean
+ required: false
+ default: true
+ commit:
+ description: "Commit SHA to publish from"
+ type: string
+ required: true
+ publish_crates:
+ description: "Rust crates to publish (comma-separated: rust-sdk, rust-cli, rust-binary-protocol, rust-common)"
+ type: string
+ required: false
+ default: ""
+ publish_dockerhub:
+ description: "Docker images to publish (comma-separated: rust-server, rust-mcp, rust-bench-dashboard, rust-connectors, web-ui)"
+ type: string
+ required: false
+ default: ""
+ publish_other:
+ description: "Other SDKs to publish (comma-separated: python, node, java, csharp, go:VERSION)"
+ type: string
+ required: false
+ default: ""
+
+env:
+ IGGY_CI_BUILD: true
+
+permissions:
+ contents: write # For tag creation
+ packages: write
+ id-token: write
+
+concurrency:
+ group: publish-${{ github.run_id }}
+ cancel-in-progress: false
+
+jobs:
+ validate:
+ name: Validate inputs
+ runs-on: ubuntu-latest
+ outputs:
+ commit: ${{ steps.resolve.outputs.commit }}
+ has_targets: ${{ steps.check.outputs.has_targets }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Check if any targets specified
+ id: check
+ run: |
+ if [ -z "${{ inputs.publish_crates }}" ] && \
+ [ -z "${{ inputs.publish_dockerhub }}" ] && \
+ [ -z "${{ inputs.publish_other }}" ]; then
+ echo "has_targets=false" >> "$GITHUB_OUTPUT"
+ else
+ echo "has_targets=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Resolve commit
+ id: resolve
+ run: |
+ COMMIT="${{ inputs.commit }}"
+ if [ -z "$COMMIT" ]; then
+ echo "❌ No commit specified"
+ exit 1
+ fi
+
+ if ! git rev-parse --verify "$COMMIT^{commit}" >/dev/null 2>&1; then
+ echo "❌ Invalid commit: $COMMIT"
+ exit 1
+ fi
+
+ # Verify commit is on master branch
+ echo "🔍 Verifying commit is on master branch..."
+ git fetch origin master --depth=1000
+
+ if git merge-base --is-ancestor "$COMMIT" origin/master; then
+ echo "✅ Commit is on master branch"
+ else
+ echo "❌ ERROR: Commit $COMMIT is not on the master branch!"
+ echo ""
+ echo "Publishing is only allowed from commits on the master branch."
+ echo "Please ensure your commit has been merged to master before publishing."
+ echo ""
+ echo "To check which branch contains this commit, run:"
+ echo " git branch -r --contains $COMMIT"
+ exit 1
+ fi
+
+ echo "commit=$COMMIT" >> "$GITHUB_OUTPUT"
+ echo "✅ Will publish from commit: $COMMIT"
+ echo
+ echo "Commit details:"
+ git log -1 --pretty=format:" Author: %an <%ae>%n Date: %ad%n Subject: %s" "$COMMIT"
+
+ plan:
+ name: Build publish plan
+ needs: validate
+ if: needs.validate.outputs.has_targets == 'true'
+ runs-on: ubuntu-latest
+ outputs:
+ targets: ${{ steps.mk.outputs.targets }}
+ non_rust_targets: ${{ steps.mk.outputs.non_rust_targets }}
+ count: ${{ steps.mk.outputs.count }}
+ go_sdk_version: ${{ steps.mk.outputs.go_sdk_version }}
+ has_python: ${{ steps.mk.outputs.has_python }}
+ has_rust_crates: ${{ steps.mk.outputs.has_rust_crates }}
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+
+ - name: Load publish config
+ id: cfg
+ run: |
+ if ! command -v yq &> /dev/null; then
+ YQ_VERSION="v4.47.1"
+ YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
+ wget -qO /tmp/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
+ echo "${YQ_CHECKSUM} /tmp/yq" | sha256sum -c - || exit 1
+ chmod +x /tmp/yq && sudo mv /tmp/yq /usr/local/bin/yq
+ fi
+ echo "components_b64=$(yq -o=json -I=0 '.components' .github/config/publish.yml | base64 -w0)" >> "$GITHUB_OUTPUT"
+
+ - name: Build matrix from inputs
+ id: mk
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const componentsB64 = '${{ steps.cfg.outputs.components_b64 }}';
+ const cfg = JSON.parse(Buffer.from(componentsB64, 'base64').toString('utf-8') || "{}");
+
+ const wants = [];
+ let goVersion = '';
+
+ // Parse Rust crates
+ ('${{ inputs.publish_crates }}').split(',').map(s => s.trim()).filter(Boolean).forEach(crate => {
+ if (['rust-sdk','rust-cli','rust-binary-protocol','rust-common'].includes(crate)) wants.push(crate);
+ else core.warning(`Unknown crate: ${crate}`);
+ });
+
+ // Parse Docker images
+ ('${{ inputs.publish_dockerhub }}').split(',').map(s => s.trim()).filter(Boolean).forEach(img => {
+ if (['rust-server','rust-mcp','rust-bench-dashboard','rust-connectors','web-ui'].includes(img)) wants.push(img);
+ else core.warning(`Unknown Docker image: ${img}`);
+ });
+
+ // Parse other SDKs
+ ('${{ inputs.publish_other }}').split(',').map(s => s.trim()).filter(Boolean).forEach(sdk => {
+ if (sdk.startsWith('go:')) {
+ goVersion = sdk.substring(3);
+ if (!/^\d+\.\d+\.\d+/.test(goVersion)) {
+ core.setFailed(`Invalid Go version format: ${goVersion} (expected: X.Y.Z)`);
+ } else {
+ wants.push('sdk-go');
+ }
+ } else if (['python','node','java','csharp'].includes(sdk)) {
+ wants.push(`sdk-${sdk}`);
+ } else {
+ core.warning(`Unknown SDK: ${sdk}`);
+ }
+ });
+
+ const toType = (entry) => ({
+ dockerhub: 'docker',
+ crates: 'rust',
+ pypi: 'python',
+ npm: 'node',
+ maven: 'java',
+ nuget: 'csharp',
+ none: 'go'
+ }[entry.registry] || 'unknown');
+
+ const targets = [];
+ const nonRustTargets = [];
+ const seen = new Set();
+ let hasRustCrates = false;
+
+ for (const key of wants) {
+ if (seen.has(key)) continue;
+ seen.add(key);
+
+ const entry = cfg[key];
+ if (!entry) { core.warning(`Component '${key}' not found in publish.yml`); continue; }
+
+ const target = {
+ key,
+ name: key,
+ type: toType(entry),
+ registry: entry.registry || '',
+ package: entry.package || '',
+ image: entry.image || '',
+ dockerfile: entry.dockerfile || '',
+ platforms: Array.isArray(entry.platforms) ? entry.platforms.join(',') : '',
+ tag_pattern: entry.tag_pattern || '',
+ version_file: entry.version_file || '',
+ version_regex: entry.version_regex || ''
+ };
+
+ targets.push(target);
+
+ // Separate Rust crates from other targets
+ if (target.type === 'rust') {
+ hasRustCrates = true;
+ // Rust crates are handled by the sequential job
+ } else {
+ nonRustTargets.push(target);
+ }
+ }
+
+ console.log(`Publishing ${targets.length} components:`);
+ targets.forEach(t => console.log(` - ${t.name} (${t.type}) -> ${t.registry || 'N/A'}`));
+ console.log(` (${nonRustTargets.length} non-Rust, ${targets.length - nonRustTargets.length} Rust crates)`);
+
+ // Output all targets for reference and tag creation
+ core.setOutput('targets', JSON.stringify(targets.length ? { include: targets } : { include: [{ key: 'noop', type: 'noop' }] }));
+
+ // Output only non-Rust targets for the parallel publish job
+ core.setOutput('non_rust_targets', JSON.stringify(nonRustTargets.length ? { include: nonRustTargets } : { include: [{ key: 'noop', type: 'noop' }] }));
+
+ core.setOutput('count', String(targets.length));
+ core.setOutput('go_sdk_version', goVersion);
+ core.setOutput('has_rust_crates', String(hasRustCrates));
+
+ // Check if Python SDK is in targets and extract version
+ const pythonTarget = targets.find(t => t.key === 'sdk-python');
+ if (pythonTarget) {
+ core.setOutput('has_python', 'true');
+ // Python version will be extracted in the publish job
+ } else {
+ core.setOutput('has_python', 'false');
+ }
+
+ check-tags:
+ name: Check existing tags
+ needs: [validate, plan]
+ if: needs.validate.outputs.has_targets == 'true' && fromJson(needs.plan.outputs.targets).include[0].key != 'noop'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "✅ Saved latest CI and scripts from master branch"
+ echo "Scripts:"
+ ls -la /tmp/latest-scripts/
+ echo ".github:"
+ ls -la /tmp/latest-github/
+
+ - name: Checkout at commit
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+ fetch-depth: 0
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "🔄 Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/
+ cp -r /tmp/latest-github/* .github/
+ chmod +x scripts/*.sh
+ echo "✅ Latest CI and scripts applied"
+
+ - name: Setup yq
+ run: |
+ YQ_VERSION="v4.47.1"
+ YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
+ sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
+ echo "${YQ_CHECKSUM} /usr/local/bin/yq" | sha256sum -c - || exit 1
+ sudo chmod +x /usr/local/bin/yq
+
+ # Checks, for every planned component, whether its release tag already
+ # exists in git, and writes a Markdown table + summary to the step summary.
+ # Existing tags are reported (they will be skipped at publish time); new
+ # tags are listed as "will create".
+ - name: Check for existing tags
+ run: |
+ set -euo pipefail
+
+ echo "## 🏷️ Tag Existence Check" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ TARGETS_JSON='${{ needs.plan.outputs.targets }}'
+ GO_SDK_VERSION='${{ needs.plan.outputs.go_sdk_version }}'
+
+ EXISTING_TAGS=()
+ NEW_TAGS=()
+
+ echo "| Component | Version | Tag | Status |" >> $GITHUB_STEP_SUMMARY
+ echo "|-----------|---------|-----|--------|" >> $GITHUB_STEP_SUMMARY
+
+ # NOTE: feed the loop via process substitution instead of a pipe.
+ # A piped `while` body runs in a subshell, so the appends to
+ # EXISTING_TAGS / NEW_TAGS would be lost when the loop ends and the
+ # summary below would always see empty arrays. This form also makes
+ # the in-loop `exit 1` terminate the whole script, not just a subshell.
+ while read -r row; do
+ _jq() { echo "$row" | base64 -d | jq -r "$1"; }
+
+ KEY=$(_jq '.key')
+ NAME=$(_jq '.name')
+ TAG_PATTERN=$(_jq '.tag_pattern')
+
+ # Skip components without tag patterns
+ if [ -z "$TAG_PATTERN" ] || [ "$TAG_PATTERN" = "null" ]; then
+ echo "Skipping $NAME - no tag pattern defined"
+ continue
+ fi
+
+ # Extract version (Go SDK versions come from the workflow input)
+ GO_FLAG=""
+ if [ "$KEY" = "sdk-go" ] && [ -n "$GO_SDK_VERSION" ]; then
+ GO_FLAG="--go-sdk-version $GO_SDK_VERSION"
+ fi
+
+ # Make script executable if needed
+ chmod +x scripts/extract-version.sh || true
+
+ VERSION=$(scripts/extract-version.sh "$KEY" $GO_FLAG 2>/dev/null || echo "ERROR")
+ TAG=$(scripts/extract-version.sh "$KEY" $GO_FLAG --tag 2>/dev/null || echo "ERROR")
+
+ if [ "$VERSION" = "ERROR" ] || [ "$TAG" = "ERROR" ]; then
+ echo "❌ Failed to extract version/tag for $NAME"
+ echo "| $NAME | ERROR | ERROR | ❌ Failed to extract |" >> $GITHUB_STEP_SUMMARY
+ exit 1
+ fi
+
+ # Check if tag exists
+ if git rev-parse "$TAG" >/dev/null 2>&1; then
+ EXISTING_TAGS+=("$TAG")
+ COMMIT_SHA=$(git rev-parse "$TAG" | head -c 8)
+ echo "⚠️ Tag exists: $TAG (points to $COMMIT_SHA)"
+ echo "| $NAME | $VERSION | $TAG | ⚠️ Exists at $COMMIT_SHA |" >> $GITHUB_STEP_SUMMARY
+ else
+ NEW_TAGS+=("$TAG")
+ echo "✅ Tag will be created: $TAG"
+ echo "| $NAME | $VERSION | $TAG | ✅ Will create |" >> $GITHUB_STEP_SUMMARY
+ fi
+ done < <(echo "$TARGETS_JSON" | jq -r '.include[] | select(.key!="noop") | @base64')
+
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ # Summary
+ if [ ${#EXISTING_TAGS[@]} -gt 0 ]; then
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "### ⚠️ Warning: Existing Tags Detected" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "The following tags already exist and will be skipped:" >> $GITHUB_STEP_SUMMARY
+ for tag in "${EXISTING_TAGS[@]}"; do
+ echo "- $tag" >> $GITHUB_STEP_SUMMARY
+ done
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ if [ "${{ inputs.dry_run }}" = "false" ]; then
+ echo "**These components will NOT be republished.** Tags are immutable in git." >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "If you need to republish:" >> $GITHUB_STEP_SUMMARY
+ echo "1. Delete the existing tag: \`git push --delete origin <tag>\`" >> $GITHUB_STEP_SUMMARY
+ echo "2. Bump the version in the source file" >> $GITHUB_STEP_SUMMARY
+ echo "3. Run the publish workflow again" >> $GITHUB_STEP_SUMMARY
+ fi
+ fi
+
+ if [ ${#NEW_TAGS[@]} -eq 0 ] && [ ${#EXISTING_TAGS[@]} -gt 0 ]; then
+ echo "### ℹ️ No New Tags to Create" >> $GITHUB_STEP_SUMMARY
+ echo "All specified components have already been tagged. Consider bumping versions if you need to publish new releases." >> $GITHUB_STEP_SUMMARY
+ elif [ ${#NEW_TAGS[@]} -gt 0 ]; then
+ echo "### ✅ Tags to be Created" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ for tag in "${NEW_TAGS[@]}"; do
+ echo "- $tag" >> $GITHUB_STEP_SUMMARY
+ done
+ fi
+
+ build-python-wheels:
+ name: Build Python wheels
+ needs: [validate, plan, check-tags]
+ if: |
+ needs.validate.outputs.has_targets == 'true' &&
+ needs.plan.outputs.has_python == 'true'
+ uses: ./.github/workflows/_build_python_wheels.yml
+ with:
+ upload_artifacts: true
+ use_latest_ci: ${{ inputs.use_latest_ci }}
+
+ # Sequential Rust crate publishing to handle dependencies properly
+ publish-rust-crates:
+ name: Publish Rust Crates
+ needs: [validate, plan, check-tags]
+ if: |
+ needs.validate.outputs.has_targets == 'true' &&
+ contains(inputs.publish_crates, 'rust-')
+ runs-on: ubuntu-latest
+ env:
+ CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+ DRY_RUN: ${{ inputs.dry_run }}
+ outputs:
+ status: ${{ steps.final-status.outputs.status }}
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "✅ Saved latest CI and scripts from master branch"
+
+ - name: Checkout at commit
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+ fetch-depth: 0
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "🔄 Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/
+ cp -r /tmp/latest-github/* .github/
+ chmod +x scripts/*.sh
+ echo "✅ Latest CI and scripts applied"
+
+ - name: Setup Rust with cache
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false
+ show-stats: false
+
+ - name: Extract versions
+ id: versions
+ run: |
+ # Extract version for each crate
+ chmod +x scripts/extract-version.sh
+
+ echo "common_version=$(scripts/extract-version.sh rust-common)" >> $GITHUB_OUTPUT
+ echo "protocol_version=$(scripts/extract-version.sh rust-binary-protocol)" >> $GITHUB_OUTPUT
+ echo "sdk_version=$(scripts/extract-version.sh rust-sdk)" >> $GITHUB_OUTPUT
+ echo "cli_version=$(scripts/extract-version.sh rust-cli)" >> $GITHUB_OUTPUT
+
+ # Step 1: Publish iggy_common first
+ - name: Publish iggy_common
+ if: contains(inputs.publish_crates, 'rust-common')
+ uses: ./.github/actions/rust/post-merge
+ with:
+ package: iggy_common
+ version: ${{ steps.versions.outputs.common_version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # Wait for crates.io to index (only in non-dry-run mode).
+ # NOTE: dry_run is a `boolean` workflow_dispatch input, so it must be
+ # compared against the boolean literal `false`. The previous comparison
+ # against the string 'false' never matched (GitHub coerces mismatched
+ # types to numbers, and 'false' -> NaN), so this step was always skipped.
+ - name: Wait for iggy_common to be available
+ if: |
+ contains(inputs.publish_crates, 'rust-common') &&
+ inputs.dry_run == false
+ run: |
+ echo "⏳ Waiting for iggy_common to be available on crates.io..."
+ FOUND=false
+ for i in {1..30}; do
+ if cargo search iggy_common --limit 1 | grep -q "^iggy_common = \"${{ steps.versions.outputs.common_version }}\""; then
+ echo "✅ iggy_common is now available"
+ FOUND=true
+ break
+ fi
+ echo "Waiting... (attempt $i/30)"
+ sleep 10
+ done
+ # Non-fatal: surface the timeout so a downstream failure is explicable.
+ if [ "$FOUND" != "true" ]; then
+ echo "⚠️ iggy_common ${{ steps.versions.outputs.common_version }} still not indexed after 30 attempts; dependent publishes may fail"
+ fi
+
+ # Step 2: Publish iggy_binary_protocol (depends on common)
+ - name: Publish iggy_binary_protocol
+ if: contains(inputs.publish_crates, 'rust-binary-protocol')
+ uses: ./.github/actions/rust/post-merge
+ with:
+ package: iggy_binary_protocol
+ version: ${{ steps.versions.outputs.protocol_version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # Wait for crates.io to index.
+ # NOTE: dry_run is a `boolean` workflow_dispatch input, so it must be
+ # compared against the boolean literal `false` (comparing to the string
+ # 'false' never matches and silently skipped this step).
+ - name: Wait for iggy_binary_protocol to be available
+ if: |
+ contains(inputs.publish_crates, 'rust-binary-protocol') &&
+ inputs.dry_run == false
+ run: |
+ echo "⏳ Waiting for iggy_binary_protocol to be available on crates.io..."
+ FOUND=false
+ for i in {1..30}; do
+ if cargo search iggy_binary_protocol --limit 1 | grep -q "^iggy_binary_protocol = \"${{ steps.versions.outputs.protocol_version }}\""; then
+ echo "✅ iggy_binary_protocol is now available"
+ FOUND=true
+ break
+ fi
+ echo "Waiting... (attempt $i/30)"
+ sleep 10
+ done
+ # Non-fatal: surface the timeout so a downstream failure is explicable.
+ if [ "$FOUND" != "true" ]; then
+ echo "⚠️ iggy_binary_protocol ${{ steps.versions.outputs.protocol_version }} still not indexed after 30 attempts; dependent publishes may fail"
+ fi
+
+ # Step 3: Publish iggy SDK (depends on common and protocol)
+ - name: Publish iggy SDK
+ if: contains(inputs.publish_crates, 'rust-sdk')
+ uses: ./.github/actions/rust/post-merge
+ with:
+ package: iggy
+ version: ${{ steps.versions.outputs.sdk_version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # Wait for crates.io to index.
+ # NOTE: dry_run is a `boolean` workflow_dispatch input, so it must be
+ # compared against the boolean literal `false` (comparing to the string
+ # 'false' never matches and silently skipped this step).
+ - name: Wait for iggy SDK to be available
+ if: |
+ contains(inputs.publish_crates, 'rust-sdk') &&
+ inputs.dry_run == false
+ run: |
+ echo "⏳ Waiting for iggy to be available on crates.io..."
+ FOUND=false
+ for i in {1..30}; do
+ if cargo search iggy --limit 1 | grep -q "^iggy = \"${{ steps.versions.outputs.sdk_version }}\""; then
+ echo "✅ iggy SDK is now available"
+ FOUND=true
+ break
+ fi
+ echo "Waiting... (attempt $i/30)"
+ sleep 10
+ done
+ # Non-fatal: surface the timeout so a downstream failure is explicable.
+ if [ "$FOUND" != "true" ]; then
+ echo "⚠️ iggy ${{ steps.versions.outputs.sdk_version }} still not indexed after 30 attempts; iggy-cli publish may fail"
+ fi
+
+ # Step 4: Publish iggy-cli (depends on SDK and protocol)
+ - name: Publish iggy-cli
+ if: contains(inputs.publish_crates, 'rust-cli')
+ uses: ./.github/actions/rust/post-merge
+ with:
+ package: iggy-cli
+ version: ${{ steps.versions.outputs.cli_version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ - name: Set final status output
+ id: final-status
+ if: always()
+ run: echo "status=${{ job.status }}" >> "$GITHUB_OUTPUT"
+
+ publish:
+ name: ${{ matrix.name }}
+ needs: [validate, plan, check-tags, build-python-wheels, publish-rust-crates]
+ if: |
+ always() &&
+ needs.validate.outputs.has_targets == 'true' &&
+ fromJson(needs.plan.outputs.non_rust_targets).include[0].key != 'noop' &&
+ (needs.build-python-wheels.result == 'success' || needs.build-python-wheels.result == 'skipped') &&
+ (needs.publish-rust-crates.result == 'success' || needs.publish-rust-crates.result == 'skipped')
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.plan.outputs.non_rust_targets) }}
+ env:
+ CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+ DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+ PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+ NEXUS_USER: ${{ secrets.NEXUS_USER }}
+ NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }}
+ NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
+ DRY_RUN: ${{ inputs.dry_run }}
+ outputs:
+ status: ${{ steps.status.outputs.status }}
+ version: ${{ steps.ver.outputs.version }}
+ tag: ${{ steps.ver.outputs.tag }}
+ steps:
+ - name: Save latest CI and scripts from master
+ if: inputs.use_latest_ci
+ run: |
+ # First checkout master to get latest CI and scripts
+ git clone --depth 1 --branch master https://github.com/${{ github.repository }}.git /tmp/repo-master
+ cp -r /tmp/repo-master/scripts /tmp/latest-scripts
+ cp -r /tmp/repo-master/.github /tmp/latest-github
+ echo "✅ Saved latest CI and scripts from master branch"
+ echo "Scripts:"
+ ls -la /tmp/latest-scripts/
+ echo ".github:"
+ ls -la /tmp/latest-github/
+
+ - name: Checkout at commit
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+ fetch-depth: 0
+
+ - name: Apply latest CI and scripts
+ if: inputs.use_latest_ci
+ run: |
+ echo "🔄 Applying latest CI and scripts from master branch..."
+ cp -r /tmp/latest-scripts/* scripts/
+ cp -r /tmp/latest-github/* .github/
+ chmod +x scripts/*.sh
+ echo "✅ Latest CI and scripts applied"
+
+ - name: Ensure version extractor is executable
+ run: |
+ test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
+
+ - name: Setup Rust toolchain (if needed)
+ if: matrix.type == 'rust' || matrix.type == 'docker' || matrix.type == 'python'
+ uses: ./.github/actions/utils/setup-rust-with-cache
+ with:
+ cache-targets: false
+ show-stats: false
+
+ - name: Debug matrix
+ run: echo '${{ toJson(matrix) }}'
+
+ - name: Extract version & tag
+ id: ver
+ shell: bash
+ run: |
+ set -euo pipefail
+ GO_FLAG=""
+ if [ "${{ matrix.key }}" = "sdk-go" ] && [ -n "${{ needs.plan.outputs.go_sdk_version }}" ]; then
+ GO_FLAG="--go-sdk-version ${{ needs.plan.outputs.go_sdk_version }}"
+ fi
+ VERSION=$(scripts/extract-version.sh "${{ matrix.key }}" $GO_FLAG)
+ # If a tag pattern exists for this component, ask the script for a tag as well
+ if [ -n "${{ matrix.tag_pattern }}" ] && [ "${{ matrix.tag_pattern }}" != "null" ]; then
+ TAG=$(scripts/extract-version.sh "${{ matrix.key }}" $GO_FLAG --tag)
+ else
+ TAG=""
+ fi
+ echo "version=$VERSION" >> "$GITHUB_OUTPUT"
+ echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+ echo "✅ Resolved ${{ matrix.key }} -> version=$VERSION tag=${TAG:-}"
+
+ # ─────────────────────────────────────────
+ # Docker Publishing
+ # ─────────────────────────────────────────
+ - name: Publish Docker image
+ if: matrix.type == 'docker'
+ uses: ./.github/actions/utils/docker-buildx
+ with:
+ task: publish
+ component: ${{ matrix.key }}
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # ─────────────────────────────────────────
+ # Python SDK Publishing
+ # ─────────────────────────────────────────
+ - name: Publish Python SDK
+ if: matrix.type == 'python'
+ uses: ./.github/actions/python-maturin/post-merge
+ with:
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+ wheels_artifact: python-wheels-all
+ wheels_path: dist
+
+ # ─────────────────────────────────────────
+ # Node SDK Publishing
+ # ─────────────────────────────────────────
+ - name: Publish Node SDK
+ if: matrix.type == 'node'
+ uses: ./.github/actions/node-npm/post-merge
+ with:
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # ─────────────────────────────────────────
+ # Java SDK Publishing
+ # ─────────────────────────────────────────
+ - name: Publish Java SDK
+ if: matrix.type == 'java'
+ uses: ./.github/actions/java-gradle/post-merge
+ with:
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # ─────────────────────────────────────────
+ # C# SDK Publishing
+ # ─────────────────────────────────────────
+ - name: Publish C# SDK
+ if: matrix.type == 'csharp'
+ uses: ./.github/actions/csharp-dotnet/post-merge
+ with:
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ # ─────────────────────────────────────────
+ # Go Module (Tag-only)
+ # ─────────────────────────────────────────
+ - name: Prepare Go tag
+ if: matrix.type == 'go'
+ uses: ./.github/actions/go/post-merge
+ with:
+ version: ${{ steps.ver.outputs.version }}
+ dry_run: ${{ inputs.dry_run }}
+
+ - name: Set status output
+ id: status
+ if: always()
+ run: echo "status=${{ job.status }}" >> "$GITHUB_OUTPUT"
+
+ create-tags:
+ name: Create Git tags
+ needs: [validate, plan, check-tags, build-python-wheels, publish-rust-crates, publish]
+ if: |
+ always() &&
+ needs.validate.outputs.has_targets == 'true' &&
+ inputs.dry_run == false &&
+ (needs.publish.result == 'success' || needs.publish.result == 'skipped') &&
+ (needs.publish-rust-crates.result == 'success' || needs.publish-rust-crates.result == 'skipped') &&
+ (needs.build-python-wheels.result == 'success' || needs.build-python-wheels.result == 'skipped')
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+ fetch-depth: 0
+
+ - name: Configure Git
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+
+ - name: Ensure version extractor is executable
+ run: |
+ test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
+
+ - name: Create and push tags (for tagged components)
+ shell: bash
+ run: |
+ set -euo pipefail
+ TARGETS_JSON='${{ needs.plan.outputs.targets }}'
+ GO_SDK_VERSION='${{ needs.plan.outputs.go_sdk_version }}'
+
+ echo "$TARGETS_JSON" | jq -r '.include[] | select(.key!="noop") | @base64' | while read -r row; do
+ _jq() { echo "$row" | base64 -d | jq -r "$1"; }
+
+ KEY=$(_jq '.key')
+ NAME=$(_jq '.name')
+ TAG_PATTERN=$(_jq '.tag_pattern')
+
+ # Only components that define tag_pattern will be tagged
+ if [ -z "$TAG_PATTERN" ] || [ "$TAG_PATTERN" = "null" ]; then
+ continue
+ fi
+
+ GO_FLAG=""
+ if [ "$KEY" = "sdk-go" ] && [ -n "$GO_SDK_VERSION" ]; then
+ GO_FLAG="--go-sdk-version $GO_SDK_VERSION"
+ fi
+
+ TAG=$(scripts/extract-version.sh "$KEY" $GO_FLAG --tag)
+
+ echo "Creating tag: $TAG for $NAME"
+
+ if git rev-parse "$TAG" >/dev/null 2>&1; then
+ echo " ⚠️ Tag $TAG already exists, skipping"
+ continue
+ fi
+
+ git tag -a "$TAG" "${{ needs.validate.outputs.commit }}" \
+ -m "Release $NAME ($TAG)
+ Component: $NAME
+ Tag: $TAG
+ Commit: ${{ needs.validate.outputs.commit }}
+ Released by: GitHub Actions
+ Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")"
+
+ git push origin "$TAG"
+ echo " ✅ Created and pushed tag: $TAG"
+ done
+
+ summary:
+ name: Publish Summary
+ needs: [validate, plan, check-tags, build-python-wheels, publish-rust-crates, publish, create-tags]
+ if: always() && needs.validate.outputs.has_targets == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.validate.outputs.commit }}
+
+ - name: Ensure version extractor is executable
+ run: |
+ test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
+
+ - name: Generate summary
+ run: |
+ {
+ echo "# 📦 Publish Summary"
+ echo
+ echo "## Configuration"
+ echo
+ echo "| Setting | Value |"
+ echo "|---------|-------|"
+ echo "| **Commit** | \`${{ needs.validate.outputs.commit }}\` |"
+ echo "| **Dry run** | \`${{ inputs.dry_run }}\` |"
+ echo "| **Total components** | ${{ needs.plan.outputs.count }} |"
+ echo
+
+ # Extract version information for all requested components
+ echo "## Component Versions"
+ echo
+ echo "| Component | Version | Tag | Registry | Status |"
+ echo "|-----------|---------|-----|----------|--------|"
+
+ # Parse the targets from plan job
+ TARGETS_JSON='${{ needs.plan.outputs.targets }}'
+ GO_SDK_VERSION='${{ needs.plan.outputs.go_sdk_version }}'
+
+ echo "$TARGETS_JSON" | jq -r '.include[] | select(.key!="noop") | @base64' | while read -r row; do
+ _jq() { echo "$row" | base64 -d | jq -r "$1"; }
+
+ KEY=$(_jq '.key')
+ NAME=$(_jq '.name')
+ REGISTRY=$(_jq '.registry')
+ TAG_PATTERN=$(_jq '.tag_pattern')
+
+ # Extract version using the script
+ GO_FLAG=""
+ if [ "$KEY" = "sdk-go" ] && [ -n "$GO_SDK_VERSION" ]; then
+ GO_FLAG="--go-sdk-version $GO_SDK_VERSION"
+ fi
+
+ VERSION=$(scripts/extract-version.sh "$KEY" $GO_FLAG 2>/dev/null || echo "N/A")
+
+ # Get tag if pattern exists
+ TAG=""
+ if [ -n "$TAG_PATTERN" ] && [ "$TAG_PATTERN" != "null" ]; then
+ TAG=$(scripts/extract-version.sh "$KEY" $GO_FLAG --tag 2>/dev/null || echo "N/A")
+ else
+ TAG="N/A"
+ fi
+
+ # Determine status emoji based on dry run
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ STATUS="🔍 Dry run"
+ else
+ STATUS="✅ Published"
+ fi
+
+ # Format registry display
+ case "$REGISTRY" in
+ crates) REGISTRY_DISPLAY="crates.io" ;;
+ dockerhub) REGISTRY_DISPLAY="Docker Hub" ;;
+ pypi) REGISTRY_DISPLAY="PyPI" ;;
+ npm) REGISTRY_DISPLAY="npm" ;;
+ maven) REGISTRY_DISPLAY="Maven" ;;
+ nuget) REGISTRY_DISPLAY="NuGet" ;;
+ none) REGISTRY_DISPLAY="Tag only" ;;
+ *) REGISTRY_DISPLAY="$REGISTRY" ;;
+ esac
+
+ echo "| $NAME | \`$VERSION\` | \`$TAG\` | $REGISTRY_DISPLAY | $STATUS |"
+ done
+
+ echo
+
+ if [ -n "${{ inputs.publish_crates }}" ]; then
+ echo "### 📦 Rust Crates Requested"
+ echo '```'
+ echo "${{ inputs.publish_crates }}"
+ echo '```'
+ fi
+ if [ -n "${{ inputs.publish_dockerhub }}" ]; then
+ echo "### 🐳 Docker Images Requested"
+ echo '```'
+ echo "${{ inputs.publish_dockerhub }}"
+ echo '```'
+ fi
+ if [ -n "${{ inputs.publish_other }}" ]; then
+ echo "### 📦 Other SDKs Requested"
+ echo '```'
+ echo "${{ inputs.publish_other }}"
+ echo '```'
+ fi
+ echo
+ echo "## Results"
+ echo
+
+ # Python wheels building status
+ if [ "${{ needs.plan.outputs.has_python }}" = "true" ]; then
+ echo "### Python Wheels Building"
+ case "${{ needs.build-python-wheels.result }}" in
+ success) echo "✅ **Python wheels built successfully for all platforms**" ;;
+ failure) echo "❌ **Python wheel building failed**" ;;
+ skipped) echo "⏭️ **Python wheel building was skipped**" ;;
+ esac
+ echo
+ fi
+
+ # Rust crates publishing status
+ if [ -n "${{ inputs.publish_crates }}" ]; then
+ echo "### Rust Crates Publishing (Sequential)"
+ case "${{ needs.publish-rust-crates.result }}" in
+ success) echo "✅ **Rust crates published successfully in dependency order**" ;;
+ failure) echo "❌ **Rust crates publishing failed - check logs for details**" ;;
+ skipped) echo "⏭️ **Rust crates publishing was skipped**" ;;
+ esac
+ echo
+ fi
+
+ # Other publishing status
+ echo "### Other Publishing"
+ case "${{ needs.publish.result }}" in
+ success) echo "✅ **Publishing completed successfully**" ;;
+ failure) echo "❌ **Publishing failed - check logs for details**" ;;
+ cancelled) echo "🚫 **Publishing was cancelled**" ;;
+ *) echo "⏭️ **Publishing was skipped**" ;;
+ esac
+ if [ "${{ inputs.dry_run }}" = "true" ]; then
+ echo
+ echo "**ℹ️ This was a dry run - no actual publishing occurred**"
+ else
+ case "${{ needs.create-tags.result }}" in
+ success) echo "✅ **Git tags created successfully**" ;;
+ failure) echo "⚠️ **Tag creation had issues**" ;;
+ skipped) echo "⏭️ **Tag creation was skipped (dry run or publish failed)**" ;;
+ esac
+ fi
+ echo
+ echo "---"
+ echo "*Workflow completed at $(date -u +"%Y-%m-%d %H:%M:%S UTC")*"
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ notify-failure:
+ name: Notify on failure
+ needs: [validate, plan, build-python-wheels, publish-rust-crates, publish, create-tags, summary]
+ if: failure() && inputs.dry_run == false
+ runs-on: ubuntu-latest
+ steps:
+ - name: Notify failure
+ run: |
+ echo "❌ Publishing workflow failed!"
+ echo "Check the workflow run for details: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
diff --git a/.github/workflows/publish_bench.yml b/.github/workflows/publish_bench.yml
deleted file mode 100644
index 3d7bae08f..000000000
--- a/.github/workflows/publish_bench.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_bench
-on:
- push:
- tags:
- - "iggy-bench-*"
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- CARGO_TERM_COLOR: always
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/iggy-bench-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^iggy-bench-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (iggy-bench-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy-bench version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p iggy-bench | cut -d@ -f2)
- echo "iggy_bench_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "iggy-bench-${{ steps.extract_version.outputs.iggy_bench_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- iggy_bench_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish bench on crates.io
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: publish
- run: |
- cargo login "${{ secrets.CARGO_REGISTRY_TOKEN }}"
- cargo publish -p iggy-bench
-
- finalize_bench:
- runs-on: ubuntu-latest
- needs:
- - publish
- if: always()
- steps:
- - uses: actions/checkout@v4
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "publish to crates.io"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_cli.yml b/.github/workflows/publish_cli.yml
deleted file mode 100644
index 8e23d5b29..000000000
--- a/.github/workflows/publish_cli.yml
+++ /dev/null
@@ -1,126 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_cli
-on:
- push:
- tags:
- - 'iggy-cli-*'
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- CARGO_TERM_COLOR: always
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/iggy-cli-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^iggy-cli-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (iggy-cli-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy-cli version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p iggy-cli | cut -d@ -f2)
- echo "iggy_cli_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "iggy-cli-${{ steps.extract_version.outputs.iggy_cli_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- iggy_cli_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish CLI on crates.io
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: publish
- run: |
- cargo login "${{ secrets.CARGO_REGISTRY_TOKEN }}"
- cargo publish -p iggy-cli
-
- github_release:
- uses: ./.github/workflows/release_cli.yml
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- with:
- tag_name: "iggy-cli-${{ needs.tag.outputs.iggy_cli_version }}"
-
- finalize_cli:
- runs-on: ubuntu-latest
- needs:
- - publish
- - github_release
- if: always()
- steps:
- - uses: actions/checkout@v4
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "publish to crates.io"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_connectors.yml b/.github/workflows/publish_connectors.yml
deleted file mode 100644
index d99e52f4c..000000000
--- a/.github/workflows/publish_connectors.yml
+++ /dev/null
@@ -1,285 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_connectors
-
-on:
- push:
- tags:
- - "connectors-*"
-
-env:
- DOCKERHUB_REGISTRY_NAME: apache/iggy-connect
- CRATE_NAME: iggy-connectors
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/connectors-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^connectors-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (connectors-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy-connectors version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p iggy-connectors | cut -d# -f2 | cut -d: -f2)
- echo "connectors_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "connectors-${{ steps.extract_version.outputs.connectors_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- connectors_version: ${{ steps.extract_version.outputs.connectors_version }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- release_and_publish:
- name: build release ${{ matrix.platform.os_name }}
- needs: tag
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - os_name: Linux-x86_64-musl
- os: ubuntu-latest
- target: x86_64-unknown-linux-musl
- bin:
- - iggy-connectors
- name: iggy-connectors-Linux-x86_64-musl.tar.gz
- cargo_command: cargo
- docker_arch: linux/amd64
- cross: false
-
- - os_name: Linux-aarch64-musl
- os: ubuntu-latest
- target: aarch64-unknown-linux-musl
- bin:
- - iggy-connectors
- name: iggy-connectors-Linux-aarch64-musl.tar.gz
- docker_arch: linux/arm64/v8
- cross: true
-
- toolchain:
- - stable
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "publish-connectors-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: contains(matrix.platform.name, 'musl')
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build iggy-connectors ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} --bin iggy-connectors
-
- - name: Build iggy-cli ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --no-default-features --target ${{ matrix.platform.target }} --bin iggy
-
- - name: Prepare artifacts
- run: |
- mkdir -p release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy-connectors release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy release_artifacts/
-
- - name: Print message
- run: echo "::notice ::Published ${{ needs.tag.outputs.connectors_version }} release artifacts on GitHub"
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: "arm64,arm"
- if: ${{ matrix.platform.cross }}
-
- - name: Set up Docker
- uses: docker/setup-docker-action@v4
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Docker meta
- id: meta
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
-
- - name: Build and push by digest
- id: build
- uses: docker/build-push-action@v6
- with:
- context: ./
- file: ./Dockerfile.connectors.ci
- platforms: ${{ matrix.platform.docker_arch }}
- labels: ${{ steps.meta.outputs.labels }}
- outputs: type=image,name=${{ env.DOCKERHUB_REGISTRY_NAME }},push-by-digest=true,name-canonical=true,push=true
- build-args: |
- IGGY_CONNECTORS_PATH=target/${{ matrix.platform.target }}/release/iggy-connectors
-
- - name: Export digest
- run: |
- mkdir -p /tmp/digests
- digest="${{ steps.build.outputs.digest }}"
- touch "/tmp/digests/${digest#sha256:}"
-
- - name: Upload digest
- uses: actions/upload-artifact@v4
- with:
- name: digests-${{ matrix.platform.os_name }}
- path: /tmp/digests/*
- if-no-files-found: error
- retention-days: 1
-
- merge_docker_manifest:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - tag
- steps:
- - name: Download digests
- uses: actions/download-artifact@v4
- with:
- pattern: "digests-*"
- merge-multiple: true
- path: /tmp/digests
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Docker meta (tag was created)
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- ${{ needs.tag.outputs.connectors_version }}
- latest
-
- - name: Docker meta (tag was not created)
- if: ${{ needs.tag.outputs.tag_created == '' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- edge
-
- - name: Create manifest list and push
- working-directory: /tmp/digests
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ env.DOCKERHUB_REGISTRY_NAME }}@sha256:%s ' *)
-
- - name: Inspect image
- run: |
- docker buildx imagetools inspect ${{ env.DOCKERHUB_REGISTRY_NAME }}:latest
-
- github_release:
- uses: ./.github/workflows/release_connectors.yml
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- with:
- tag_name: "connectors-${{ needs.tag.outputs.connectors_version }}"
-
- finalize_publish_connectors:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - merge_docker_manifest
- - github_release
- if: always()
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name != 'workflow_dispatch' }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "build and release to dockerhub"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_csharp_sdk.yml b/.github/workflows/publish_csharp_sdk.yml
deleted file mode 100644
index bc41a0cc0..000000000
--- a/.github/workflows/publish_csharp_sdk.yml
+++ /dev/null
@@ -1,112 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_csharp_sdk
-on:
- push:
- tags:
- - 'csharp-sdk-*'
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/csharp-sdk-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^csharp-sdk-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (csharp-sdk-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract csharp sdk version from csproj
- id: extract_version
- working-directory: foreign/csharp/Iggy_SDK
- run: |
- # Check if Version property exists in csproj
- if grep -q "" Iggy_SDK.csproj; then
- version=$(grep -oP '(?<=)[^<]*' Iggy_SDK.csproj)
- echo "csharp_sdk_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version csharp sdk csproj version is $version"
- else
- echo "::error ::No Version property found in csproj. Please add X.Y.Z to Iggy_SDK.csproj"
- exit 1
- fi
-
- - name: Check if version from csproj is the same as the tag
- id: check_git_tag
- run: |
- if [[ "csharp-sdk-${{ steps.extract_version.outputs.csharp_sdk_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in csproj"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from csproj"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- csharp_sdk_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish csharp SDK
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Setup .NET 8
- uses: actions/setup-dotnet@v3
- with:
- dotnet-version: '8.0.x'
- - name: Restore dependencies
- working-directory: foreign/csharp/Iggy_SDK
- run: dotnet restore
- - name: Pack nuget
- working-directory: foreign/csharp/Iggy_SDK
- run: |
- dotnet pack -c release --no-restore -o .
- - name: Publish nuget
- working-directory: foreign/csharp/Iggy_SDK
- run: dotnet nuget push *.nupkg -k "${{ secrets.NUGET_API_KEY }}" -s https://api.nuget.org/v3/index.json
-
diff --git a/.github/workflows/publish_java_sdk.yml b/.github/workflows/publish_java_sdk.yml
deleted file mode 100644
index 450fb01c3..000000000
--- a/.github/workflows/publish_java_sdk.yml
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_java_sdk
-on:
- push:
- tags:
- - "java-sdk-*"
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/java-sdk-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^java-sdk-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (java-sdk-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract java-sdk version from build.gradle.kts
- id: extract_version
- run: |
- version=$(foreign/java/gradlew -p foreign/java/java-sdk properties -q | grep "version:" | cut -d: -f2 | tr -d ' ')
- echo "java_sdk_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from gradle build file is $version"
-
- - name: Check if version from build.gradle is the same as the tag
- id: check_git_tag
- run: |
- if [[ "java-sdk-${{ steps.extract_version.outputs.java_sdk_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in build.gradle"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not matche the version from build.gradle"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- java_sdk_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish Java SDK to Maven Nexus
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
-
- - name: Setup Java
- uses: actions/setup-java@v4
- with:
- java-version: "17"
- distribution: "temurin"
-
- - name: Setup Gradle
- uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
-
- - name: Build
- run: foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest
-
- - name: Run tests
- run: foreign/java/dev-support/checks/build.sh test
-
- - name: Publish
- # To publish we use NEXUS_USER and NEXUS_PASSWORD as credentials
- env:
- NEXUS_USER: ${{ secrets.NEXUS_USER }}
- NEXUS_PASSWORD: ${{ secrets.NEXUS_PW }}
- run: foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest publish
diff --git a/.github/workflows/publish_java_sdk_snapshots.yml b/.github/workflows/publish_java_sdk_snapshots.yml
deleted file mode 100644
index 4e86a89b7..000000000
--- a/.github/workflows/publish_java_sdk_snapshots.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish java-sdk SNAPSHOTs
-
-on:
- schedule:
- - cron: "5 0 * * *"
-
-jobs:
- publish:
- if: github.repository == 'apache/iggy'
- name: Publish Snapshots
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v4
-
- - name: Setup Java
- uses: actions/setup-java@v4
- with:
- java-version: "17"
- distribution: "temurin"
-
- - name: Setup Gradle
- uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
-
- - name: Build
- run: foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest -x sign
-
- - name: Run tests
- run: foreign/java/dev-support/checks/build.sh test
-
- - id: extract_version
- name: Extract version
- shell: bash
- if: ${{ success() }}
- run: |
- VERSION=$(foreign/java/gradlew -p foreign/java/java-sdk properties -q | grep "version:")
- if [[ "$VERSION" == *"SNAPSHOT"* ]]; then
- echo "snapshot=SNAPSHOT" >> $GITHUB_OUTPUT
- fi
-
- - name: Publish Snapshot
- # To publish we use NEXUS_USER and NEXUS_PASSWORD as credentials
- env:
- NEXUS_USER: ${{ secrets.NEXUS_USER }}
- NEXUS_PASSWORD: ${{ secrets.NEXUS_PW }}
- if: steps.extract_version.outputs.snapshot == 'SNAPSHOT'
- run: foreign/java/dev-support/checks/build.sh build -x test -x checkstyleMain -x checkstyleTest -x sign publish
diff --git a/.github/workflows/publish_mcp.yml b/.github/workflows/publish_mcp.yml
deleted file mode 100644
index 6a6c72dab..000000000
--- a/.github/workflows/publish_mcp.yml
+++ /dev/null
@@ -1,285 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_mcp
-
-on:
- push:
- tags:
- - "mcp-*"
-
-env:
- DOCKERHUB_REGISTRY_NAME: apache/iggy-mcp
- CRATE_NAME: iggy-mcp
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/mcp-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^mcp-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (mcp-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy-mcp version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p iggy-mcp | cut -d# -f2 | cut -d: -f2)
- echo "mcp_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "mcp-${{ steps.extract_version.outputs.mcp_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- mcp_version: ${{ steps.extract_version.outputs.mcp_version }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- release_and_publish:
- name: build release ${{ matrix.platform.os_name }}
- needs: tag
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - os_name: Linux-x86_64-musl
- os: ubuntu-latest
- target: x86_64-unknown-linux-musl
- bin:
- - iggy-mcp
- name: iggy-mcp-Linux-x86_64-musl.tar.gz
- cargo_command: cargo
- docker_arch: linux/amd64
- cross: false
-
- - os_name: Linux-aarch64-musl
- os: ubuntu-latest
- target: aarch64-unknown-linux-musl
- bin:
- - iggy-mcp
- name: iggy-mcp-Linux-aarch64-musl.tar.gz
- docker_arch: linux/arm64/v8
- cross: true
-
- toolchain:
- - stable
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "publish-mcp-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: contains(matrix.platform.name, 'musl')
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build iggy-mcp ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} --bin iggy-mcp
-
- - name: Build iggy-cli ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --no-default-features --target ${{ matrix.platform.target }} --bin iggy
-
- - name: Prepare artifacts
- run: |
- mkdir -p release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy-mcp release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy release_artifacts/
-
- - name: Print message
- run: echo "::notice ::Published ${{ needs.tag.outputs.mcp }} release artifacts on GitHub"
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: "arm64,arm"
- if: ${{ matrix.platform.cross }}
-
- - name: Set up Docker
- uses: docker/setup-docker-action@v4
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Docker meta
- id: meta
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
-
- - name: Build and push by digest
- id: build
- uses: docker/build-push-action@v6
- with:
- context: ./
- file: ./Dockerfile.mcp.ci
- platforms: ${{ matrix.platform.docker_arch }}
- labels: ${{ steps.meta.outputs.labels }}
- outputs: type=image,name=${{ env.DOCKERHUB_REGISTRY_NAME }},push-by-digest=true,name-canonical=true,push=true
- build-args: |
- IGGY_MCP_PATH=target/${{ matrix.platform.target }}/release/iggy-mcp
-
- - name: Export digest
- run: |
- mkdir -p /tmp/digests
- digest="${{ steps.build.outputs.digest }}"
- touch "/tmp/digests/${digest#sha256:}"
-
- - name: Upload digest
- uses: actions/upload-artifact@v4
- with:
- name: digests-${{ matrix.platform.os_name }}
- path: /tmp/digests/*
- if-no-files-found: error
- retention-days: 1
-
- merge_docker_manifest:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - tag
- steps:
- - name: Download digests
- uses: actions/download-artifact@v4
- with:
- pattern: "digests-*"
- merge-multiple: true
- path: /tmp/digests
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Docker meta (tag was created)
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- ${{ needs.tag.outputs.mcp_version }}
- latest
-
- - name: Docker meta (tag was not created)
- if: ${{ needs.tag.outputs.tag_created == '' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- edge
-
- - name: Create manifest list and push
- working-directory: /tmp/digests
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ env.DOCKERHUB_REGISTRY_NAME }}@sha256:%s ' *)
-
- - name: Inspect image
- run: |
- docker buildx imagetools inspect ${{ env.DOCKERHUB_REGISTRY_NAME }}:latest
-
- github_release:
- uses: ./.github/workflows/release_mcp.yml
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- with:
- tag_name: "mcp-${{ needs.tag.outputs.mcp_version }}"
-
- finalize_publish_mcp:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - merge_docker_manifest
- - github_release
- if: always()
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name != 'workflow_dispatch' }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "build and release to dockerhub"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_node_sdk.yml b/.github/workflows/publish_node_sdk.yml
deleted file mode 100644
index 5a9171395..000000000
--- a/.github/workflows/publish_node_sdk.yml
+++ /dev/null
@@ -1,129 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_node_sdk
-on:
- push:
- tags:
- - 'node-sdk-*'
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/node-sdk-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^node-sdk-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (node-sdk-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract node sdk version from package.json
- id: extract_version
- run: |
- version=`grep -n '"version": ' package.json | sed -E 's/^.* "([^"]*)",/\1/' | tr -d '\n'`
- echo "node_sdk_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version node sdk package.json version is $version"
-
- - name: Check if version from package.json is the same as the tag
- id: check_git_tag
- run: |
- if [[ "node-sdk-${{ steps.extract_version.outputs.node_sdk_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in package.json"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not matche the version from package.json"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- node_sdk_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish SDK on npmjs
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- # Setup .npmrc file to publish to npm
- - uses: actions/setup-node@v4
- with:
- cache-dependency-path: foreign/node/package-lock.json
- node-version: 22
- cache: "npm"
- registry-url: 'https://registry.npmjs.org'
- - run: npm ci
- - run: npm run build
- - run: npm publish --provenance --access public
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
- # github_release:
- # uses: ./.github/workflows/release_sdk.yml
- # needs: tag
- # if: ${{ needs.tag.outputs.tag_created == 'true' }}
- # with:
- # tag_name: "${{ needs.tag.outputs.node_sdk_version }}"
-
- # finalize_sdk:
- # runs-on: ubuntu-latest
- # needs:
- # - publish
- # - github_release
- # if: always()
- # steps:
- # - uses: actions/checkout@v4
- # - name: Everything is fine
- # if: ${{ !(contains(needs.*.result, 'failure')) }}
- # run: exit 0
- # - name: Something went wrong
- # if: ${{ contains(needs.*.result, 'failure') }}
- # uses: JasonEtco/create-an-issue@v2
- # env:
- # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- # GITHUB_BOT_CONTEXT_STRING: "publish to crates.io"
- # with:
- # filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_rust_sdk.yml b/.github/workflows/publish_rust_sdk.yml
deleted file mode 100644
index 28efbccd7..000000000
--- a/.github/workflows/publish_rust_sdk.yml
+++ /dev/null
@@ -1,126 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_rust_sdk
-on:
- push:
- tags:
- - "iggy-*"
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- CARGO_TERM_COLOR: always
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/iggy-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^iggy-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (iggy-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p iggy | cut -d@ -f2)
- echo "iggy_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "iggy-${{ steps.extract_version.outputs.iggy_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not matche the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- iggy_version: ${{ steps.extract_tag.outputs.tag_name }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- publish:
- name: Publish SDK on crates.io
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: publish
- run: |
- cargo login "${{ secrets.CARGO_REGISTRY_TOKEN }}"
- cargo publish -p iggy
-
- github_release:
- uses: ./.github/workflows/release_sdk.yml
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- with:
- tag_name: "${{ needs.tag.outputs.iggy_version }}"
-
- finalize_sdk:
- runs-on: ubuntu-latest
- needs:
- - publish
- - github_release
- if: always()
- steps:
- - uses: actions/checkout@v4
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "publish to crates.io"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/publish_server.yml b/.github/workflows/publish_server.yml
deleted file mode 100644
index 0cf20b479..000000000
--- a/.github/workflows/publish_server.yml
+++ /dev/null
@@ -1,288 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: publish_server
-
-on:
- push:
- tags:
- - "server-*"
-
-env:
- DOCKERHUB_REGISTRY_NAME: apache/iggy
- CRATE_NAME: iggy
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- validate:
- if: startsWith(github.ref, 'refs/tags/server-')
- runs-on: ubuntu-latest
- steps:
- - name: Extract tag name
- id: extract
- run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
- - name: Validate tag format
- run: |
- TAG=${TAG}
- if [[ ! "$TAG" =~ ^server-([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$ ]]; then
- echo "Tag $TAG does not match strict semver format (server-X.Y.Z where 0 <= X,Y,Z <= 999)"
- exit 1
- fi
- echo "Valid tag: $TAG"
-
- tag:
- needs: validate
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Extract tag name
- id: extract_tag
- run: |
- tag=${GITHUB_REF#refs/tags/}
- echo "tag_name=$tag" >> "$GITHUB_OUTPUT"
- echo "::notice ::Tag that triggered the workflow: $tag"
-
- - name: Extract iggy-server version from Cargo.toml
- id: extract_version
- run: |
- version=$(cargo pkgid -p server | cut -d# -f2 | cut -d: -f2)
- echo "server_version=$version" >> "$GITHUB_OUTPUT"
- echo "::notice ::Version from Cargo.toml $version"
-
- - name: Check if version from Cargo.toml is the same as the tag
- id: check_git_tag
- run: |
- if [[ "server-${{ steps.extract_version.outputs.server_version }}" == "${{ steps.extract_tag.outputs.tag_name }}" ]];
- then
- echo "::notice ::Tag ${{ steps.extract_tag.outputs.tag_name }} matches the version in Cargo.toml"
- echo "tag_matches=true" >> "$GITHUB_OUTPUT"
- else
- echo "::warning ::Tag ${{ steps.extract_tag.outputs.tag_name }} does not match the version from Cargo.toml"
- echo "tag_matches=false" >> "$GITHUB_OUTPUT"
- fi
-
- outputs:
- server_version: ${{ steps.extract_version.outputs.server_version }}
- tag_created: ${{ steps.check_git_tag.outputs.tag_matches }}
-
- release_and_publish:
- name: build release ${{ matrix.platform.os_name }}
- needs: tag
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - os_name: Linux-x86_64-musl
- os: ubuntu-latest
- target: x86_64-unknown-linux-musl
- bin:
- - iggy-server
- - iggy
- name: iggy-Linux-x86_64-musl.tar.gz
- cargo_command: cargo
- docker_arch: linux/amd64
- cross: false
-
- - os_name: Linux-aarch64-musl
- os: ubuntu-latest
- target: aarch64-unknown-linux-musl
- bin:
- - iggy-server
- - iggy
- name: iggy-Linux-aarch64-musl.tar.gz
- docker_arch: linux/arm64/v8
- cross: true
-
- toolchain:
- - stable
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "publish-server-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: contains(matrix.platform.name, 'musl')
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build iggy-server ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} --bin iggy-server
-
- - name: Build iggy-cli ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --no-default-features --target ${{ matrix.platform.target }} --bin iggy
-
- - name: Prepare artifacts
- run: |
- mkdir -p release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy-server release_artifacts/
- cp target/${{ matrix.platform.target }}/release/iggy release_artifacts/
-
- - name: Print message
- run: echo "::notice ::Published ${{ needs.tag.outputs.server_version }} release artifacts on GitHub"
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
- with:
- platforms: "arm64,arm"
- if: ${{ matrix.platform.cross }}
-
- - name: Set up Docker
- uses: docker/setup-docker-action@v4
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Docker meta
- id: meta
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
-
- - name: Build and push by digest
- id: build
- uses: docker/build-push-action@v6
- with:
- context: ./
- file: ./Dockerfile.ci
- platforms: ${{ matrix.platform.docker_arch }}
- labels: ${{ steps.meta.outputs.labels }}
- outputs: type=image,name=${{ env.DOCKERHUB_REGISTRY_NAME }},push-by-digest=true,name-canonical=true,push=true
- build-args: |
- IGGY_CMD_PATH=target/${{ matrix.platform.target }}/release/iggy
- IGGY_SERVER_PATH=target/${{ matrix.platform.target }}/release/iggy-server
-
- - name: Export digest
- run: |
- mkdir -p /tmp/digests
- digest="${{ steps.build.outputs.digest }}"
- touch "/tmp/digests/${digest#sha256:}"
-
- - name: Upload digest
- uses: actions/upload-artifact@v4
- with:
- name: digests-${{ matrix.platform.os_name }}
- path: /tmp/digests/*
- if-no-files-found: error
- retention-days: 1
-
- merge_docker_manifest:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - tag
- steps:
- - name: Download digests
- uses: actions/download-artifact@v4
- with:
- pattern: "digests-*"
- merge-multiple: true
- path: /tmp/digests
-
- - name: Login to Docker Hub
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Docker meta (tag was created)
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- ${{ needs.tag.outputs.server_version }}
- latest
-
- - name: Docker meta (tag was not created)
- if: ${{ needs.tag.outputs.tag_created == '' }}
- uses: docker/metadata-action@v5
- with:
- images: ${{ env.DOCKERHUB_REGISTRY_NAME }}
- tags: |
- edge
-
- - name: Create manifest list and push
- working-directory: /tmp/digests
- run: |
- docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
- $(printf '${{ env.DOCKERHUB_REGISTRY_NAME }}@sha256:%s ' *)
-
- - name: Inspect image
- run: |
- docker buildx imagetools inspect ${{ env.DOCKERHUB_REGISTRY_NAME }}:latest
-
- github_release:
- uses: ./.github/workflows/release_server.yml
- needs: tag
- if: ${{ needs.tag.outputs.tag_created == 'true' }}
- with:
- tag_name: "server-${{ needs.tag.outputs.server_version }}"
-
- finalize_publish_server:
- runs-on: ubuntu-latest
- needs:
- - release_and_publish
- - merge_docker_manifest
- - github_release
- if: always()
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name != 'workflow_dispatch' }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "build and release to dockerhub"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/release_bench.yml b/.github/workflows/release_bench.yml
deleted file mode 100644
index c20bf2ad2..000000000
--- a/.github/workflows/release_bench.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_bench
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- build_bench:
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - target: x86_64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-bench
- file: iggy-bench-x86_64-unknown-linux-musl.tgz
- - target: aarch64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-bench
- file: iggy-bench-aarch64-unknown-linux-musl.tgz
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- executable: iggy-bench
- file: iggy-bench-x86_64-unknown-linux-gnu.tgz
- - target: x86_64-pc-windows-msvc
- os: windows-latest
- executable: iggy-bench.exe
- file: iggy-bench-x86_64-pc-windows-msvc.zip
- - target: aarch64-pc-windows-msvc
- os: windows-latest
- executable: iggy-bench.exe
- file: iggy-bench-aarch64-pc-windows-msvc.zip
- - target: x86_64-apple-darwin
- os: macos-latest
- executable: iggy-bench
- file: iggy-bench-x86_64-apple-darwin.zip
- - target: aarch64-apple-darwin
- os: macos-latest
- executable: iggy-bench
- file: iggy-bench-aarch64-apple-darwin.zip
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "release-bench-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: ${{ matrix.platform.target == 'x86_64-unknown-linux-musl' }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} -p iggy-bench
-
- - name: Collect ${{ matrix.platform.target }} executable
- run: |
- cp target/${{ matrix.platform.target }}/release/${{ matrix.platform.executable }} .
-
- - name: Create ${{ matrix.platform.file }} artifact
- run: |
- tar cvfz ${{ matrix.platform.file }} ${{ matrix.platform.executable }}
- if: ${{ matrix.platform.os == 'ubuntu-latest' }}
-
- - name: Create ${{ matrix.platform.file }} artifact
- uses: vimtor/action-zip@v1
- with:
- files: ${{ matrix.platform.executable }}
- dest: ${{ matrix.platform.file }}
- if: ${{ matrix.platform.os == 'windows-latest' || matrix.platform.os == 'macos-latest' }}
-
- - name: Upload ${{ matrix.platform.file }} artifact
- uses: actions/upload-artifact@v4
- with:
- name: artifacts-${{ matrix.platform.target }}
- path: ${{ matrix.platform.file }}
-
- - name: Print message
- run: echo "::notice ::Created binary for ${{ matrix.platform.target }}"
-
- outputs:
- version: ${{ needs.tag.outputs.version }}
diff --git a/.github/workflows/release_cli.yml b/.github/workflows/release_cli.yml
deleted file mode 100644
index 4c1a74894..000000000
--- a/.github/workflows/release_cli.yml
+++ /dev/null
@@ -1,161 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_cli
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- build_cli:
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - target: x86_64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy
- file: iggy-cli-x86_64-unknown-linux-musl.tgz
- - target: aarch64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy
- file: iggy-cli-aarch64-unknown-linux-musl.tgz
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- executable: iggy
- file: iggy-cli-x86_64-unknown-linux-gnu.tgz
- - target: x86_64-pc-windows-msvc
- os: windows-latest
- executable: iggy.exe
- file: iggy-cli-x86_64-pc-windows-msvc.zip
- - target: aarch64-pc-windows-msvc
- os: windows-latest
- executable: iggy.exe
- file: iggy-cli-aarch64-pc-windows-msvc.zip
- - target: x86_64-apple-darwin
- os: macos-latest
- executable: iggy
- file: iggy-cli-x86_64-apple-darwin.zip
- - target: aarch64-apple-darwin
- os: macos-latest
- executable: iggy
- file: iggy-cli-aarch64-apple-darwin.zip
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "release-cli-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: ${{ matrix.platform.target == 'x86_64-unknown-linux-musl' }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} -p iggy
-
- - name: Collect ${{ matrix.platform.target }} executable
- run: |
- cp target/${{ matrix.platform.target }}/release/${{ matrix.platform.executable }} .
-
- - name: Create ${{ matrix.platform.file }} artifact
- run: |
- tar cvfz ${{ matrix.platform.file }} ${{ matrix.platform.executable }}
- if: ${{ matrix.platform.os == 'ubuntu-latest' }}
-
- - name: Create ${{ matrix.platform.file }} artifact
- uses: vimtor/action-zip@v1
- with:
- files: ${{ matrix.platform.executable }}
- dest: ${{ matrix.platform.file }}
- if: ${{ matrix.platform.os == 'windows-latest' || matrix.platform.os == 'macos-latest' }}
-
- - name: Upload ${{ matrix.platform.file }} artifact
- uses: actions/upload-artifact@v4
- with:
- name: artifacts-${{ matrix.platform.target }}
- path: ${{ matrix.platform.file }}
-
- - name: Print message
- run: echo "::notice ::Created binary for ${{ matrix.platform.target }}"
-
- outputs:
- version: ${{ needs.tag.outputs.version }}
-
- release_cli:
- name: Create iggy-cli release
- runs-on: ubuntu-latest
- needs: build_cli
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Download all artifacts
- uses: actions/download-artifact@v4
-
- - name: List files
- run: find
-
- # Action disallowed
- # - name: Create Changelog
- # uses: orhun/git-cliff-action@v4
- # id: changelog
- # with:
- # config: cliff.toml
- # args: -vv --latest
- # env:
- # OUTPUT: CHANGELOG.md
- # GITHUB_REPO: ${{ github.repository }}
-
- - name: Create GitHub Release
- uses: softprops/action-gh-release@v2
- with:
- body: ${{ steps.changelog.outputs.content }}
- files: |
- artifacts-*/*.tgz
- artifacts-*/*.zip
- CHANGELOG.md
- tag_name: ${{ inputs.tag_name }}
- draft: false
- prerelease: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release_connectors.yml b/.github/workflows/release_connectors.yml
deleted file mode 100644
index 7e7f01c89..000000000
--- a/.github/workflows/release_connectors.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_connectors
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- build_connectors:
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - target: x86_64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-connectors
- file: iggy-connectors-x86_64-unknown-linux-musl.tgz
- - target: aarch64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-connectors
- file: iggy-connectors-aarch64-unknown-linux-musl.tgz
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- executable: iggy-connectors
- file: iggy-connectors-x86_64-unknown-linux-gnu.tgz
- - target: x86_64-pc-windows-msvc
- os: windows-latest
- executable: iggy-connectors.exe
- file: iggy-connectors-x86_64-pc-windows-msvc.zip
- - target: aarch64-pc-windows-msvc
- os: windows-latest
- executable: iggy-connectors.exe
- file: iggy-connectors-aarch64-pc-windows-msvc.zip
- - target: x86_64-apple-darwin
- os: macos-latest
- executable: iggy-connectors
- file: iggy-connectors-x86_64-apple-darwin.zip
- - target: aarch64-apple-darwin
- os: macos-latest
- executable: iggy-connectors
- file: iggy-connectors-aarch64-apple-darwin.zip
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "release-connectors-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: ${{ matrix.platform.target == 'x86_64-unknown-linux-musl' }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} -p iggy-connectors
-
- - name: Collect ${{ matrix.platform.target }} executable
- run: |
- cp target/${{ matrix.platform.target }}/release/${{ matrix.platform.executable }} .
-
- - name: Create ${{ matrix.platform.file }} artifact
- run: |
- tar cvfz ${{ matrix.platform.file }} ${{ matrix.platform.executable }}
- if: ${{ matrix.platform.os == 'ubuntu-latest' }}
-
- - name: Create ${{ matrix.platform.file }} artifact
- uses: vimtor/action-zip@v1
- with:
- files: ${{ matrix.platform.executable }}
- dest: ${{ matrix.platform.file }}
- if: ${{ matrix.platform.os == 'windows-latest' || matrix.platform.os == 'macos-latest' }}
-
- - name: Upload ${{ matrix.platform.file }} artifact
- uses: actions/upload-artifact@v4
- with:
- name: artifacts-${{ matrix.platform.target }}
- path: ${{ matrix.platform.file }}
-
- - name: Print message
- run: echo "::notice ::Created binary for ${{ matrix.platform.target }}"
-
- outputs:
- version: ${{ needs.tag.outputs.version }}
diff --git a/.github/workflows/release_mcp.yml b/.github/workflows/release_mcp.yml
deleted file mode 100644
index a1442ae1c..000000000
--- a/.github/workflows/release_mcp.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_mcp
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- build_mcp:
- runs-on: ${{ matrix.platform.os }}
- strategy:
- matrix:
- platform:
- - target: x86_64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-mcp
- file: iggy-mcp-x86_64-unknown-linux-musl.tgz
- - target: aarch64-unknown-linux-musl
- os: ubuntu-latest
- executable: iggy-mcp
- file: iggy-mcp-aarch64-unknown-linux-musl.tgz
- - target: x86_64-unknown-linux-gnu
- os: ubuntu-latest
- executable: iggy-mcp
- file: iggy-mcp-x86_64-unknown-linux-gnu.tgz
- - target: x86_64-pc-windows-msvc
- os: windows-latest
- executable: iggy-mcp.exe
- file: iggy-mcp-x86_64-pc-windows-msvc.zip
- - target: aarch64-pc-windows-msvc
- os: windows-latest
- executable: iggy-mcp.exe
- file: iggy-mcp-aarch64-pc-windows-msvc.zip
- - target: x86_64-apple-darwin
- os: macos-latest
- executable: iggy-mcp
- file: iggy-mcp-x86_64-apple-darwin.zip
- - target: aarch64-apple-darwin
- os: macos-latest
- executable: iggy-mcp
- file: iggy-mcp-aarch64-apple-darwin.zip
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "release-mcp-${{ matrix.platform.target }}"
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
- if: ${{ matrix.platform.target == 'x86_64-unknown-linux-musl' }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build ${{ matrix.platform.target }} release binary
- run: cross +stable build --verbose --release --target ${{ matrix.platform.target }} -p iggy-mcp
-
- - name: Collect ${{ matrix.platform.target }} executable
- run: |
- cp target/${{ matrix.platform.target }}/release/${{ matrix.platform.executable }} .
-
- - name: Create ${{ matrix.platform.file }} artifact
- run: |
- tar cvfz ${{ matrix.platform.file }} ${{ matrix.platform.executable }}
- if: ${{ matrix.platform.os == 'ubuntu-latest' }}
-
- - name: Create ${{ matrix.platform.file }} artifact
- uses: vimtor/action-zip@v1
- with:
- files: ${{ matrix.platform.executable }}
- dest: ${{ matrix.platform.file }}
- if: ${{ matrix.platform.os == 'windows-latest' || matrix.platform.os == 'macos-latest' }}
-
- - name: Upload ${{ matrix.platform.file }} artifact
- uses: actions/upload-artifact@v4
- with:
- name: artifacts-${{ matrix.platform.target }}
- path: ${{ matrix.platform.file }}
-
- - name: Print message
- run: echo "::notice ::Created binary for ${{ matrix.platform.target }}"
-
- outputs:
- version: ${{ needs.tag.outputs.version }}
diff --git a/.github/workflows/release_sdk.yml b/.github/workflows/release_sdk.yml
deleted file mode 100644
index cd2fed6a8..000000000
--- a/.github/workflows/release_sdk.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_sdk
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
-
-jobs:
- release_sdk:
- name: Release SDK
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- # Action disallowed
- # - name: Create Changelog
- # uses: orhun/git-cliff-action@v4
- # id: changelog
- # with:
- # config: cliff.toml
- # args: -vv --latest
- # env:
- # OUTPUT: CHANGELOG.md
- # GITHUB_REPO: ${{ github.repository }}
-
- - name: Create GitHub Release
- uses: softprops/action-gh-release@v2
- with:
- body: ${{ steps.changelog.outputs.content }}
- files: |
- CHANGELOG.md
- tag_name: ${{ inputs.tag_name }}
- draft: false
- prerelease: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- finalize_release:
- name: Finalize release
- runs-on: ubuntu-latest
- needs:
- - release_sdk
- if: always()
- steps:
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Some checks failed
- if: ${{ contains(needs.*.result, 'failure') }}
- run: exit 1
diff --git a/.github/workflows/release_server.yml b/.github/workflows/release_server.yml
deleted file mode 100644
index 0b6aff292..000000000
--- a/.github/workflows/release_server.yml
+++ /dev/null
@@ -1,124 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: release_server
-
-on:
- workflow_call:
- inputs:
- tag_name:
- description: "The name of the tag to be released"
- required: true
- type: string
-
-env:
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
-
-jobs:
- release_server:
- name: Build and release server binary
- runs-on: ubuntu-latest
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Install musl-tools on Linux
- run: sudo apt-get update --yes && sudo apt-get install --yes musl-tools
-
- - name: Prepare x86_64-unknown-linux-musl toolchain
- run: |
- rustup target add x86_64-unknown-linux-musl
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build iggy-server release binary for x86_64-unknown-linux-musl
- run: cross +stable build --verbose --target x86_64-unknown-linux-musl --release --bin iggy-server --target-dir target_x86
-
- - name: Prepare x86_64-unknown-linux-musl artifacts
- run: |
- mkdir -p all_artifacts/Linux-x86_64
- cp target_x86/x86_64-unknown-linux-musl/release/iggy-server all_artifacts/Linux-x86_64/
-
- - name: Prepare aarch64-unknown-linux-musl toolchain
- run: |
- rustup target add aarch64-unknown-linux-musl
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Build iggy-server release binary for aarch64-unknown-linux-musl
- run: cross +stable build --verbose --target aarch64-unknown-linux-musl --release --bin iggy-server --target-dir target_aarch64
-
- - name: Prepare aarch64-unknown-linux-musl artifacts
- run: |
- mkdir -p all_artifacts/Linux-aarch64
- cp target_aarch64/aarch64-unknown-linux-musl/release/iggy-server all_artifacts/Linux-aarch64/
-
- - name: Zip artifacts for each platform
- run: |
- mkdir zipped_artifacts
- for dir in all_artifacts/*; do
- if [ -d "$dir" ]; then
- zip -r "zipped_artifacts/$(basename $dir).zip" "$dir"
- fi
- done
-
- # Action disallowed
- # - name: Create Changelog
- # uses: orhun/git-cliff-action@v4
- # id: changelog
- # with:
- # config: cliff.toml
- # args: -vv --latest
- # env:
- # OUTPUT: CHANGELOG.md
- # GITHUB_REPO: ${{ github.repository }}
-
- - name: Create GitHub Release
- uses: softprops/action-gh-release@v2
- with:
- body: ${{ steps.changelog.outputs.content }}
- files: |
- zipped_artifacts/*
- CHANGELOG.md
- tag_name: ${{ inputs.tag_name }}
- draft: false
- prerelease: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- finalize_release:
- name: Finalize release
- runs-on: ubuntu-latest
- needs:
- - release_server
- if: always()
- steps:
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
-
- - name: Some checks failed
- if: ${{ contains(needs.*.result, 'failure') }}
- run: exit 1
diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml
deleted file mode 100644
index 63ad6f3f3..000000000
--- a/.github/workflows/security.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: security
-on:
- workflow_dispatch:
- schedule:
- - cron: '0 8 * * *' # At 8:00 AM UTC, which is 9:00 AM CET
-
-jobs:
- audit:
- name: Run cargo audit
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - uses: rustsec/audit-check@v2
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- clippy:
- name: Run cargo clippy
- runs-on: ubuntu-latest
- permissions:
- contents: read
- security-events: write
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Install Rust toolchain
- run: |
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install clippy-sarif sarif-fmt
- run: cargo install clippy-sarif sarif-fmt
-
- - name: Run cargo clippy and convert to SARIF
- run:
- cargo clippy
- --all-features --all-targets
- --message-format=json | clippy-sarif | tee rust-clippy-results.sarif | sarif-fmt
- continue-on-error: true
-
- - name: Upload analysis results to GitHub
- uses: github/codeql-action/upload-sarif@v3
- with:
- sarif_file: rust-clippy-results.sarif
- wait-for-processing: true
diff --git a/.github/workflows/test_daily.yml b/.github/workflows/test_daily.yml
deleted file mode 100644
index 9cc4c72a3..000000000
--- a/.github/workflows/test_daily.yml
+++ /dev/null
@@ -1,125 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: test_daily
-on:
- workflow_dispatch:
- schedule:
- - cron: '0 8 * * *' # At 8:00 AM UTC, which is 9:00 AM CET
-
-env:
- CRATE_NAME: iggy
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
- # Option needed for starting docker under cross to be able to lock memory
- # in order to be able to use keyring inside Docker container
- CROSS_CONTAINER_OPTS: "--cap-add ipc_lock"
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.ref }}
- cancel-in-progress: true
-
-jobs:
- build_and_test:
- name: 'build and test ${{ matrix.toolchain }} ${{ matrix.platform.os_name }}'
- runs-on: ${{ matrix.platform.os }}
- timeout-minutes: 120
- strategy:
- fail-fast: false
- matrix:
- platform:
-
- - os_name: Linux-x86_64-musl
- os: ubuntu-latest
- target: x86_64-unknown-linux-musl
- name: iggy-Linux-x86_64-musl.tar.gz
- cargo_command: cargo
- profile: release
- docker_arch: linux/amd64
- cross: false
-
- - os_name: Linux-aarch64-musl
- os: ubuntu-latest
- target: aarch64-unknown-linux-musl
- name: iggy-Linux-aarch64-musl.tar.gz
- docker_arch: linux/arm64/v8
- profile: release
- cross: true
-
- toolchain:
- - stable
- steps:
- - uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "test-daily-${{ matrix.platform.target }}"
-
- - name: Configure Git
- run: |
- git config --global user.email "jdoe@example.com"
- git config --global user.name "J. Doe"
-
- - name: Install musl-tools, gnome-keyring and keyutils on Linux
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes musl-tools gnome-keyring keyutils
- rm -f $HOME/.local/share/keyrings/*
- echo -n "test" | gnome-keyring-daemon --unlock
- if: contains(matrix.platform.name, 'musl')
-
- - name: Prepare Cross.toml
- run: |
- scripts/prepare-cross-toml.sh
- cat Cross.toml
- if: ${{ matrix.platform.cross }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build binary ${{ matrix.platform.target }}
- run: cross +stable build --features ci-qemu --verbose --target ${{ matrix.platform.target }} ${{ matrix.platform.profile == 'release' && '--release' || '' }}
-
- - name: Run tests ${{ matrix.platform.target }}
- run: cross +stable test --features ci-qemu --verbose --target ${{ matrix.platform.target }} ${{ matrix.platform.profile == 'release' && '--release' || '' }}
-
- finalize_nightly:
- runs-on: ubuntu-latest
- needs: build_and_test
- if: always()
- steps:
- - uses: actions/checkout@v4
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name != 'workflow_dispatch' }}
- uses: JasonEtco/create-an-issue@v2
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_BOT_CONTEXT_STRING: "nightly test suite"
- with:
- filename: .github/BOT_ISSUE_TEMPLATE.md
diff --git a/.github/workflows/test_nightly.yml b/.github/workflows/test_nightly.yml
deleted file mode 100644
index 11bbf2ce2..000000000
--- a/.github/workflows/test_nightly.yml
+++ /dev/null
@@ -1,121 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: test_nightly
-on:
- workflow_dispatch:
- schedule:
- - cron: '0 23 * * *' # At 11:00 PM UTC, which is 10:00 PM CET
-
-env:
- CRATE_NAME: iggy
- GITHUB_TOKEN: ${{ github.token }}
- RUST_BACKTRACE: 1
- CARGO_TERM_COLOR: always
- IGGY_CI_BUILD: true
- # Option needed for starting docker under cross to be able to lock memory
- # in order to be able to use keyring inside Docker container
- CROSS_CONTAINER_OPTS: "--cap-add ipc_lock"
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.ref }}
- cancel-in-progress: true
-
-jobs:
- build_and_test:
- name: 'build and test ${{ matrix.toolchain }} ${{ matrix.platform.os_name }}'
- runs-on: ${{ matrix.platform.os }}
- timeout-minutes: 120
- strategy:
- fail-fast: false
- matrix:
- platform:
-
- - os_name: Linux-x86_64-musl
- os: ubuntu-latest
- target: x86_64-unknown-linux-musl
- name: iggy-Linux-x86_64-musl.tar.gz
- cargo_command: cargo
- profile: release
- docker_arch: linux/amd64
- cross: false
-
- - os_name: Linux-aarch64-musl
- os: ubuntu-latest
- target: aarch64-unknown-linux-musl
- name: iggy-Linux-aarch64-musl.tar.gz
- docker_arch: linux/arm64/v8
- profile: release
- cross: true
-
- toolchain:
- - nightly
- - beta
- steps:
- - uses: actions/checkout@v4
-
- - name: Cache cargo & target directories
- uses: Swatinem/rust-cache@v2
- with:
- key: "test-nightly-${{ matrix.platform.target }}"
-
- - name: Configure Git
- run: |
- git config --global user.email "jdoe@example.com"
- git config --global user.name "J. Doe"
-
- - name: Install musl-tools, gnome-keyring and keyutils on Linux
- run: |
- sudo apt-get update --yes && sudo apt-get install --yes musl-tools gnome-keyring keyutils
- rm -f $HOME/.local/share/keyrings/*
- echo -n "test" | gnome-keyring-daemon --unlock
- if: contains(matrix.platform.name, 'musl')
-
- - name: Prepare Cross.toml
- run: |
- scripts/prepare-cross-toml.sh
- cat Cross.toml
- if: ${{ matrix.platform.cross }}
-
- - name: Prepare ${{ matrix.platform.target }} toolchain
- run: |
- rustup target add ${{ matrix.platform.target }}
- echo "Using Rust toolchain from rust-toolchain.toml: $(rustup show)"
-
- - name: Install cross
- uses: taiki-e/install-action@v2
- with:
- tool: cross
-
- - name: Build binary ${{ matrix.platform.target }}
- run: cross +stable build --features ci-qemu --verbose --target ${{ matrix.platform.target }} ${{ matrix.platform.profile == 'release' && '--release' || '' }}
-
- - name: Run tests ${{ matrix.platform.target }}
- run: cross +stable test --features ci-qemu --verbose --target ${{ matrix.platform.target }} ${{ matrix.platform.profile == 'release' && '--release' || '' }}
-
- finalize_nightly:
- runs-on: ubuntu-latest
- needs: build_and_test
- if: always()
- steps:
- - uses: actions/checkout@v4
- - name: Everything is fine
- if: ${{ !(contains(needs.*.result, 'failure')) }}
- run: exit 0
- - name: Something went wrong
- if: ${{ contains(needs.*.result, 'failure') && github.event_name != 'workflow_dispatch' }}
- run: exit 1
diff --git a/Cargo.lock b/Cargo.lock
index dc1ec2656..c26b8aa03 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -902,33 +902,6 @@ dependencies = [
"yew-router",
]
-[[package]]
-name = "bench-dashboard-server"
-version = "0.4.0"
-dependencies = [
- "actix-cors",
- "actix-files",
- "actix-web",
- "bench-dashboard-shared",
- "bench-report",
- "chrono",
- "clap",
- "dashmap",
- "file-operation",
- "notify",
- "octocrab",
- "serde",
- "serde_json",
- "tempfile",
- "thiserror 2.0.14",
- "tokio",
- "tracing",
- "tracing-subscriber",
- "uuid",
- "walkdir",
- "zip",
-]
-
[[package]]
name = "bench-dashboard-shared"
version = "0.1.0"
@@ -3847,6 +3820,33 @@ dependencies = [
"uuid",
]
+[[package]]
+name = "iggy-bench-dashboard-server"
+version = "0.4.0"
+dependencies = [
+ "actix-cors",
+ "actix-files",
+ "actix-web",
+ "bench-dashboard-shared",
+ "bench-report",
+ "chrono",
+ "clap",
+ "dashmap",
+ "file-operation",
+ "notify",
+ "octocrab",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "thiserror 2.0.14",
+ "tokio",
+ "tracing",
+ "tracing-subscriber",
+ "uuid",
+ "walkdir",
+ "zip",
+]
+
[[package]]
name = "iggy-cli"
version = "0.9.0"
diff --git a/Cargo.toml b/Cargo.toml
index b379d95d3..8ecf29b5d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,10 +15,6 @@
# specific language governing permissions and limitations
# under the License.
-[profile.release]
-lto = true
-codegen-units = 1
-
[workspace]
members = [
"bdd/rust",
@@ -49,6 +45,7 @@ exclude = ["foreign/python"]
resolver = "2"
[workspace.dependencies]
+aes-gcm = "0.10.3"
ahash = { version = "0.8.12", features = ["serde"] }
anyhow = "1.0.99"
async-broadcast = "0.7.2"
@@ -64,7 +61,13 @@ async_zip = { version = "0.0.18", features = [
] }
axum = "0.8.4"
axum-server = { version = "0.7.2", features = ["tls-rustls"] }
+base64 = "0.22.1"
bcrypt = "0.17.0"
+bench-dashboard-frontend = { path = "core/bench/dashboard/frontend" }
+bench-dashboard-server = { path = "core/bench/dashboard/server" }
+bench-dashboard-shared = { path = "core/bench/dashboard/shared" }
+bench-report = { path = "core/bench/report" }
+bench-runner = { path = "core/bench/runner" }
bincode = { version = "2.0.1", features = ["serde"] }
blake3 = "1.8.2"
bon = "3.7.0"
@@ -77,50 +80,78 @@ bytes = "1.10.1"
charming = "0.6.0"
chrono = { version = "0.4.41", features = ["serde"] }
clap = { version = "4.5.44", features = ["derive"] }
-config = { version = "0.15.13" }
+colored = "3.0.0"
comfy-table = "7.1.4"
+config = { version = "0.15.13" }
+console-subscriber = "0.4.1"
crc32fast = "1.5.0"
crossbeam = "0.8.4"
dashmap = "6.1.0"
+derive-new = "0.7.0"
derive_builder = "0.20.2"
derive_more = { version = "2.0.1", features = ["full"] }
-derive-new = "0.7.0"
dirs = "6.0.0"
dlopen2 = "0.8.0"
dotenvy = "0.15.7"
enum_dispatch = "0.3.13"
+env_logger = "0.11.8"
figlet-rs = "0.1.5"
flume = "0.11.1"
futures = "0.3.31"
futures-util = "0.3.31"
human-repr = "1.1.0"
humantime = "2.2.0"
+iggy = { path = "core/sdk", version = "0.7.0" }
+iggy_binary_protocol = { path = "core/binary_protocol", version = "0.7.0" }
+iggy_common = { path = "core/common", version = "0.7.0" }
+iggy_connector_sdk = { path = "core/connectors/sdk", version = "0.1.0" }
+integration = { path = "core/integration" }
keyring = { version = "3.6.3", features = ["sync-secret-service", "vendored"] }
+lazy_static = "1.5.0"
+log = "0.4.27"
+mimalloc = "0.1"
+mockall = "0.13.1"
nonzero_lit = "0.1.2"
once_cell = "1.21.3"
passterm = "=2.0.1"
-quinn = "0.11.8"
postcard = { version = "1.1.3", features = ["alloc"] }
+predicates = "3.1.3"
+quinn = "0.11.8"
rand = "0.9.2"
+regex = "1.11.1"
reqwest = { version = "0.12.22", default-features = false, features = [
"json",
"rustls-tls",
] }
reqwest-middleware = { version = "0.4.2", features = ["json"] }
reqwest-retry = "0.7.0"
+rust-s3 = { version = "0.36.0-beta.2", default-features = false, features = [
+ "tokio-rustls-tls",
+ "tags",
+] }
rustls = { version = "0.23.31", features = ["ring"] }
serde = { version = "1.0.219", features = ["derive", "rc"] }
serde_json = "1.0.142"
serde_with = { version = "3.14.0", features = ["base64", "macros"] }
serde_yml = "0.0.12"
serial_test = "3.2.0"
+server = { path = "core/server" }
simd-json = { version = "0.15.1", features = ["serde_impl"] }
+strum = { version = "0.27.2", features = ["derive"] }
+strum_macros = "0.27.2"
sysinfo = "0.37.0"
tempfile = "3.20.0"
+test-case = "3.3.1"
thiserror = "2.0.14"
tokio = { version = "1.47.1", features = ["full"] }
tokio-rustls = "0.26.2"
+tokio-util = { version = "0.7.16", features = ["compat"] }
toml = "0.9.5"
+tower-http = { version = "0.6.6", features = [
+ "add-extension",
+ "cors",
+ "trace",
+] }
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.19", default-features = false, features = [
@@ -128,6 +159,8 @@ tracing-subscriber = { version = "0.3.19", default-features = false, features =
"env-filter",
"ansi",
] }
+trait-variant = "0.1.2"
+twox-hash = { version = "2.1.1", features = ["xxhash32"] }
uuid = { version = "1.18.0", features = [
"v4",
"v7",
@@ -135,48 +168,9 @@ uuid = { version = "1.18.0", features = [
"serde",
"zerocopy",
] }
-rust-s3 = { version = "0.36.0-beta.2", default-features = false, features = [
- "tokio-rustls-tls",
- "tags",
-] }
-strum = { version = "0.27.2", features = ["derive"] }
-strum_macros = "0.27.2"
-aes-gcm = "0.10.3"
-base64 = "0.22.1"
-twox-hash = { version = "2.1.1", features = ["xxhash32"] }
-
-# Common dependencies across multiple packages
-colored = "3.0.0"
-env_logger = "0.11.8"
-lazy_static = "1.5.0"
-log = "0.4.27"
-mockall = "0.13.1"
-predicates = "3.1.3"
-regex = "1.11.1"
-test-case = "3.3.1"
-tokio-util = { version = "0.7.16", features = ["compat"] }
-tower-http = { version = "0.6.6", features = [
- "add-extension",
- "cors",
- "trace",
-] }
-trait-variant = "0.1.2"
webpki-roots = "1.0.2"
zip = "4.3.0"
-# Optional dependencies
-mimalloc = "0.1"
-console-subscriber = "0.4.1"
-
-# Path dependencies
-iggy_binary_protocol = { path = "core/binary_protocol", version = "0.7.0" }
-iggy_common = { path = "core/common", version = "0.7.0" }
-iggy_connector_sdk = { path = "core/connectors/sdk", version = "0.1.0" }
-iggy = { path = "core/sdk", version = "0.7.0" }
-server = { path = "core/server" }
-integration = { path = "core/integration" }
-bench-report = { path = "core/bench/report" }
-bench-runner = { path = "core/bench/runner" }
-bench-dashboard-frontend = { path = "core/bench/dashboard/frontend" }
-bench-dashboard-server = { path = "core/bench/dashboard/server" }
-bench-dashboard-shared = { path = "core/bench/dashboard/shared" }
+[profile.release]
+lto = true
+codegen-units = 1
diff --git a/Cross.toml b/Cross.toml
deleted file mode 100644
index ac93743e2..000000000
--- a/Cross.toml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# This script is used to generate Cross.toml file for user which executes
-# this script. This is needed since Cross.toml build.dockerfile.build-args
-# section requires statically defined Docker build arguments and parameters
-# like current UID or GID must be entered (cannot be generated or fetched
-# during cross execution time).
-
-[build.env]
-passthrough = ["IGGY_SYSTEM_PATH", "IGGY_CI_BUILD", "RUST_BACKTRACE=1"]
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 2e0dbc747..8b37a37f1 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -72,7 +72,6 @@ bcrypt: 0.17.0, "MIT",
bdd: 0.0.1, "Apache-2.0",
beef: 0.5.2, "Apache-2.0 OR MIT",
bench-dashboard-frontend: 0.4.1, "Apache-2.0",
-bench-dashboard-server: 0.4.0, "Apache-2.0",
bench-dashboard-shared: 0.1.0, "Apache-2.0",
bench-report: 0.2.2, "Apache-2.0",
bench-runner: 0.1.0, "Apache-2.0",
@@ -342,6 +341,7 @@ idna: 1.0.3, "Apache-2.0 OR MIT",
idna_adapter: 1.2.1, "Apache-2.0 OR MIT",
iggy: 0.7.0, "Apache-2.0",
iggy-bench: 0.2.4, "Apache-2.0",
+iggy-bench-dashboard-server: 0.4.0, "Apache-2.0",
iggy-cli: 0.9.0, "Apache-2.0",
iggy-connectors: 0.1.0, "Apache-2.0",
iggy-mcp: 0.1.0, "Apache-2.0",
diff --git a/Dockerfile.ci b/Dockerfile.ci
deleted file mode 100644
index 1d5c6ed8f..000000000
--- a/Dockerfile.ci
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM alpine:latest
-
-ARG IGGY_CMD_PATH
-RUN test -n "$IGGY_CMD_PATH" || (echo "IGGY_CMD_PATH not set" && false)
-
-ARG IGGY_SERVER_PATH
-RUN test -n "$IGGY_SERVER_PATH" || (echo "IGGY_SERVER_PATH not set" && false)
-
-WORKDIR /iggy
-
-COPY ./core/configs ./configs
-COPY ${IGGY_CMD_PATH} ./
-COPY ${IGGY_SERVER_PATH} ./
-
-RUN chmod +x ./iggy
-RUN chmod +x ./iggy-server
-
-RUN ln -sf /iggy/iggy /usr/bin/iggy
-
-CMD ["/iggy/iggy-server"]
diff --git a/Dockerfile.connectors.ci b/Dockerfile.connectors.ci
deleted file mode 100644
index b195c9141..000000000
--- a/Dockerfile.connectors.ci
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM alpine:latest
-
-ARG IGGY_CONNECTORS_PATH
-RUN test -n "$IGGY_CONNECTORS_PATH" || (echo "IGGY_CONNECTORS_PATH not set" && false)
-
-WORKDIR /iggy
-
-COPY ./core/connectors/runtime/config.toml ./config.toml
-COPY ${IGGY_CONNECTORS_PATH} ./
-
-RUN chmod +x ./iggy-connectors
-
-RUN ln -sf /iggy/iggy-connectors /usr/bin/iggy-connectors
-
-CMD ["/iggy/iggy-connectors"]
diff --git a/Dockerfile.cross b/Dockerfile.cross
deleted file mode 100644
index 3c7e0ee08..000000000
--- a/Dockerfile.cross
+++ /dev/null
@@ -1,42 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-ARG CROSS_BASE_IMAGE
-FROM $CROSS_BASE_IMAGE
-
-ARG USER
-ARG CROSS_CONTAINER_UID
-ARG CROSS_CONTAINER_GID
-
-USER root
-RUN apt-get update \
- && apt-get install gnome-keyring \
- --yes --no-install-recommends \
- && apt-get clean && rm -rf /var/lib/apt/lists/*
-
-# Copy the entrypoint script into the container
-COPY scripts/cross-docker-entrypoint.sh /usr/local/bin/
-
-# Make the entrypoint script executable
-RUN chmod +x /usr/local/bin/cross-docker-entrypoint.sh
-
-# Add user with specified UID and GID
-RUN groupadd -g $CROSS_CONTAINER_GID $USER
-RUN useradd -r -u $CROSS_CONTAINER_UID -g $CROSS_CONTAINER_GID -m $USER
-
-# Set the entry point
-ENTRYPOINT ["/usr/local/bin/cross-docker-entrypoint.sh"]
diff --git a/Dockerfile.cross.dockerignore b/Dockerfile.cross.dockerignore
deleted file mode 100644
index 251cad505..000000000
--- a/Dockerfile.cross.dockerignore
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-.config
-.github
-/assets
-/local_data
-/certs
-Dockerfile
-docker-compose.yml
-.dockerignore
-.git
-.gitignore
diff --git a/Dockerfile.debug b/Dockerfile.debug
deleted file mode 100644
index 412dda438..000000000
--- a/Dockerfile.debug
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Debug/test build for faster compilation during development and testing
-FROM rust:1.89.0-alpine3.22 AS builder
-RUN apk add musl-dev
-WORKDIR /build
-COPY . /build
-# Build in debug mode for faster compilation times during testing
-RUN cargo build --bin iggy-server
-
-FROM debian:bookworm-slim
-RUN apt-get update && apt-get install -y \
- ca-certificates \
- liblzma5 \
- && rm -rf /var/lib/apt/lists/*
-COPY ./core/configs ./configs
-COPY --from=builder /build/target/debug/iggy-server .
-
-CMD ["/iggy-server"]
diff --git a/Dockerfile.mcp.ci b/Dockerfile.mcp.ci
deleted file mode 100644
index bc73ebe02..000000000
--- a/Dockerfile.mcp.ci
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM alpine:latest
-
-ARG IGGY_MCP_PATH
-RUN test -n "$IGGY_MCP_PATH" || (echo "IGGY_MCP_PATH not set" && false)
-
-WORKDIR /iggy
-
-COPY ./core/ai/mcp/config.toml ./config.toml
-COPY ${IGGY_MCP_PATH} ./
-
-RUN chmod +x ./iggy-mcp
-
-RUN ln -sf /iggy/iggy-mcp /usr/bin/iggy-mcp
-
-CMD ["/iggy/iggy-mcp"]
diff --git a/README.md b/README.md
index e0f3a4f14..83b39bf57 100644
--- a/README.md
+++ b/README.md
@@ -175,7 +175,7 @@ The [Model Context Protocol](https://modelcontextprotocol.io) (MCP) is an open p
The official Apache Iggy images can be found in [Docker Hub](https://hub.docker.com/r/apache/iggy), simply type `docker pull apache/iggy` to pull the image.
-You can also find the images for all the different tooling such as Connectors, MCP Server etc. [here](https://hub.docker.com/u/apache?page=1&search=iggy).
+You can also find the images for all the different tooling such as Connectors, MCP Server etc. at [Docker Hub](https://hub.docker.com/u/apache?page=1&search=iggy).
Please note that the images tagged as `latest` are based on the official, stable releases, while the `edge` ones are updated directly from latest version of the `master` branch.
diff --git a/bdd/docker-compose.yml b/bdd/docker-compose.yml
index fb2a6c1a5..08313b928 100644
--- a/bdd/docker-compose.yml
+++ b/bdd/docker-compose.yml
@@ -17,83 +17,95 @@
services:
iggy-server:
- image: bdd-iggy-server:latest
+ platform: linux/amd64
build:
context: ..
- dockerfile: bdd/Dockerfile
+ dockerfile: core/server/Dockerfile
+ target: runtime-prebuilt
+ args:
+ PREBUILT_IGGY_SERVER: ${IGGY_SERVER_PATH:-target/debug/iggy-server}
+ PREBUILT_IGGY_CLI: ${IGGY_CLI_PATH:-target/debug/iggy}
+ LIBC: glibc
+ PROFILE: debug
+ command: [ "--fresh" ]
environment:
- RUST_LOG=info
- IGGY_SYSTEM_PATH=local_data
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:3000/stats"]
- interval: 5s
- timeout: 10s
- retries: 10
- start_period: 30s
volumes:
- iggy_data:/app/local_data
+ networks:
+ - iggy-bdd-network
rust-bdd:
build:
context: ..
dockerfile: bdd/rust/Dockerfile
depends_on:
- iggy-server:
- condition: service_healthy
+ - iggy-server
environment:
- IGGY_TCP_ADDRESS=iggy-server:8090
volumes:
- ./scenarios/basic_messaging.feature:/app/features/basic_messaging.feature
- command: ["cargo", "test", "-p", "bdd", "--features", "iggy-server-in-docker", "--features", "bdd"]
+ command: [ "cargo", "test", "-p", "bdd", "--features", "iggy-server-in-docker", "--features", "bdd" ]
+ networks:
+ - iggy-bdd-network
python-bdd:
build:
context: ..
dockerfile: bdd/python/Dockerfile
depends_on:
- iggy-server:
- condition: service_healthy
+ - iggy-server
environment:
- IGGY_TCP_ADDRESS=iggy-server:8090
volumes:
- ./scenarios/basic_messaging.feature:/app/features/basic_messaging.feature
working_dir: /app
- command: ["pytest", "tests/", "-v"]
+ command: [ "pytest", "tests/", "-v" ]
+ networks:
+ - iggy-bdd-network
go-bdd:
build:
context: ..
dockerfile: bdd/go/Dockerfile
depends_on:
- iggy-server:
- condition: service_healthy
+ - iggy-server
environment:
- IGGY_TCP_ADDRESS=iggy-server:8090
volumes:
- ./scenarios/basic_messaging.feature:/app/features/basic_messaging.feature
command: [ "go", "test", "-v", "./..." ]
+ networks:
+ - iggy-bdd-network
node-bdd:
build:
context: ..
dockerfile: bdd/node/Dockerfile
depends_on:
- iggy-server:
- condition: service_healthy
+ - iggy-server
environment:
- IGGY_TCP_ADDRESS=iggy-server:8090
- command: ["npm", "run", "test:bdd"]
+ command: [ "npm", "run", "test:bdd" ]
+ networks:
+ - iggy-bdd-network
csharp-bdd:
build:
context: ..
dockerfile: bdd/csharp/Dockerfile
depends_on:
- iggy-server:
- condition: service_healthy
+ - iggy-server
environment:
- IGGY_TCP_ADDRESS=iggy-server:8090
- command: ["dotnet", "test"]
-
+ command: [ "dotnet", "test" ]
+ networks:
+ - iggy-bdd-network
+
+networks:
+ iggy-bdd-network:
+ driver: bridge
+
volumes:
iggy_data:
diff --git a/bdd/python/Dockerfile b/bdd/python/Dockerfile
index 389f5205d..0241afdf6 100644
--- a/bdd/python/Dockerfile
+++ b/bdd/python/Dockerfile
@@ -15,12 +15,14 @@
# specific language governing permissions and limitations
# under the License.
-FROM rust:1.89-slim-bookworm
+# syntax=docker/dockerfile:1
+ARG RUST_VERSION=1.89
+FROM rust:${RUST_VERSION}-slim-trixie
RUN apt-get update && \
apt-get install -y \
- python3.11 \
- python3.11-dev \
+ python3 \
+ python3-dev \
python3-pip \
build-essential \
&& rm -rf /var/lib/apt/lists/*
@@ -39,7 +41,7 @@ WORKDIR /workspace/foreign/python
RUN pip3 install --no-cache-dir -r /workspace/bdd/python/requirements.txt
# Build and install the Iggy Python SDK
-RUN maturin build --release --out dist && \
+RUN maturin build --out dist && \
pip3 install dist/*.whl
# Set up BDD test directory
diff --git a/bdd/python/tests/conftest.py b/bdd/python/tests/conftest.py
index af3f2310a..8cf4a8f55 100644
--- a/bdd/python/tests/conftest.py
+++ b/bdd/python/tests/conftest.py
@@ -28,6 +28,7 @@
@dataclass
class GlobalContext:
"""Global test context similar to Rust implementation."""
+
client: Optional[object] = None # Will be IggyClient
server_addr: Optional[str] = None
last_stream_id: Optional[int] = None
diff --git a/bdd/rust/Dockerfile b/bdd/rust/Dockerfile
index dc4df8074..63119a37b 100644
--- a/bdd/rust/Dockerfile
+++ b/bdd/rust/Dockerfile
@@ -15,17 +15,19 @@
# specific language governing permissions and limitations
# under the License.
-FROM rust:1.89
+ARG RUST_VERSION=1.89
+FROM rust:${RUST_VERSION}
WORKDIR /app
-# Copy entire repository (needed for workspace)
+# Copy everything
COPY . .
# Create features directory for BDD feature files
RUN mkdir -p /app/features
-RUN cargo fetch
+# Build tests
+RUN cargo test --no-run -p bdd --features "iggy-server-in-docker bdd"
# Default command will be overridden by docker-compose
CMD ["cargo", "test", "-p", "bdd", "--features", "iggy-server-in-docker", "--features", "bdd"]
diff --git a/Dockerfile.ci.dockerignore b/core/ai/mcp/.dockerignore
similarity index 100%
rename from Dockerfile.ci.dockerignore
rename to core/ai/mcp/.dockerignore
diff --git a/core/ai/mcp/Dockerfile b/core/ai/mcp/Dockerfile
index 7adbeaed1..6f42a6106 100644
--- a/core/ai/mcp/Dockerfile
+++ b/core/ai/mcp/Dockerfile
@@ -1,31 +1,93 @@
# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
+# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
+# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
+# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-FROM rust:1.89 AS builder
-WORKDIR /build
-COPY . /build
-RUN cargo build --bin iggy-mcp --release
+ARG RUST_VERSION=1.89
+ARG ALPINE_VERSION=3.22
-FROM debian:bookworm-slim
-RUN apt-get update && apt-get install -y \
- ca-certificates \
- liblzma5 \
- && rm -rf /var/lib/apt/lists/*
-COPY ./core/ai/mcp/config.toml ./config.toml
-COPY --from=builder /build/target/release/iggy-mcp .
+FROM --platform=$BUILDPLATFORM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION}-alpine AS chef
+WORKDIR /app
+RUN apk add --no-cache musl-dev pkgconfig openssl-dev openssl-libs-static
-CMD ["/iggy-mcp"]
+FROM --platform=$BUILDPLATFORM chef AS planner
+COPY . .
+RUN cargo chef prepare --recipe-path recipe.json
+
+FROM --platform=$BUILDPLATFORM chef AS builder
+ARG PROFILE=release
+ARG TARGETPLATFORM
+ARG LIBC=musl
+
+RUN apk add --no-cache zig && \
+ cargo install cargo-zigbuild --locked && \
+ rustup target add \
+ x86_64-unknown-linux-musl \
+ aarch64-unknown-linux-musl \
+ x86_64-unknown-linux-gnu \
+ aarch64-unknown-linux-gnu
+
+COPY --from=planner /app/recipe.json recipe.json
+
+#
+# Cook dependencies
+#
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported platform/libc combination: $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild; \
+ else \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild --release; \
+ fi
+
+COPY . .
+
+#
+# Build
+#
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/app/target,id=cargo-target-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported platform/libc combination: $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-mcp && \
+ cp /app/target/${RUST_TARGET}/debug/iggy-mcp /app/iggy-mcp; \
+ else \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-mcp --release && \
+ cp /app/target/${RUST_TARGET}/release/iggy-mcp /app/iggy-mcp; \
+ fi
+
+#
+# Final runtime - Debian trixie Slim
+#
+FROM debian:trixie-slim AS runtime
+WORKDIR /app
+
+COPY --from=builder /app/iggy-mcp /usr/local/bin/iggy-mcp
+
+ENTRYPOINT ["iggy-mcp"]
diff --git a/core/bench/Cargo.toml b/core/bench/Cargo.toml
index db9a9bc8b..d1a342be6 100644
--- a/core/bench/Cargo.toml
+++ b/core/bench/Cargo.toml
@@ -25,6 +25,10 @@ homepage = "https://iggy.apache.org"
description = "Benchmarking CLI for Iggy message streaming platform"
keywords = ["iggy", "cli", "messaging", "streaming"]
readme = "../../README.md"
+
+[[bin]]
+name = "iggy-bench"
+path = "src/main.rs"
# Due to dependency to integration, which has a dependency to server, setting
# mimalloc on server is also setting it on bench.
@@ -54,10 +58,6 @@ tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true }
uuid = { workspace = true }
-[[bin]]
-name = "iggy-bench"
-path = "src/main.rs"
-
[lints.clippy]
enum_glob_use = "deny"
pedantic = "deny"
diff --git a/core/bench/dashboard/server/Cargo.toml b/core/bench/dashboard/server/Cargo.toml
index 964f92a6c..2743de711 100644
--- a/core/bench/dashboard/server/Cargo.toml
+++ b/core/bench/dashboard/server/Cargo.toml
@@ -16,7 +16,7 @@
# under the License.
[package]
-name = "bench-dashboard-server"
+name = "iggy-bench-dashboard-server"
license = "Apache-2.0"
version = "0.4.0"
edition = "2024"
@@ -43,7 +43,3 @@ tracing-subscriber = { workspace = true }
uuid = { workspace = true }
walkdir = "2.5.0"
zip = { workspace = true, features = ["deflate"] }
-
-[[bin]]
-name = "iggy-bench-dashboard-server"
-path = "src/main.rs"
diff --git a/core/bench/dashboard/server/Dockerfile b/core/bench/dashboard/server/Dockerfile
index 93306597e..6673f19b4 100644
--- a/core/bench/dashboard/server/Dockerfile
+++ b/core/bench/dashboard/server/Dockerfile
@@ -15,8 +15,12 @@
# specific language governing permissions and limitations
# under the License.
+ARG RUST_VERSION=1.89
+
# Build stage
-FROM rust:1.89-slim-bookworm AS builder
+FROM rust:${RUST_VERSION}-slim-trixie AS builder
+ARG IGGY_CI_BUILD
+ENV IGGY_CI_BUILD=${IGGY_CI_BUILD}
WORKDIR /usr/src/iggy-bench-dashboard
@@ -47,10 +51,10 @@ COPY . .
RUN cd core/bench/dashboard/frontend && trunk build --release
# Build the server with release profile
-RUN cargo build --release --package bench-dashboard-server
+RUN cargo build --release --bin iggy-bench-dashboard-server
# Runtime stage
-FROM debian:bookworm-slim
+FROM debian:trixie-slim
WORKDIR /app
diff --git a/core/bench/runner/Cargo.toml b/core/bench/runner/Cargo.toml
index d078c64db..389519737 100644
--- a/core/bench/runner/Cargo.toml
+++ b/core/bench/runner/Cargo.toml
@@ -21,6 +21,10 @@ license = "Apache-2.0"
version = "0.1.0"
edition = "2024"
+[[bin]]
+name = "iggy-bench-runner"
+path = "src/main.rs"
+
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
@@ -32,7 +36,3 @@ tempfile = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
-
-[[bin]]
-name = "iggy-bench-runner"
-path = "src/main.rs"
diff --git a/core/cli/Cargo.toml b/core/cli/Cargo.toml
index 78d1e59c3..90f1c3f7b 100644
--- a/core/cli/Cargo.toml
+++ b/core/cli/Cargo.toml
@@ -31,6 +31,10 @@ readme = "../../README.md"
pkg-url = "{ repo }/releases/download/{ name }-{ version }/{ name }-{ target }{ archive-suffix }"
bin-dir = "{ bin }{ binary-ext }"
+[[bin]]
+name = "iggy"
+path = "src/main.rs"
+
[features]
default = ["login-session"]
login-session = ["dep:keyring"]
@@ -50,7 +54,3 @@ tokio = { workspace = true }
tracing = { workspace = true }
tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true, default-features = false }
-
-[[bin]]
-name = "iggy"
-path = "src/main.rs"
diff --git a/Dockerfile.connectors.ci.dockerignore b/core/connectors/runtime/.dockerignore
similarity index 100%
rename from Dockerfile.connectors.ci.dockerignore
rename to core/connectors/runtime/.dockerignore
diff --git a/core/connectors/runtime/Dockerfile b/core/connectors/runtime/Dockerfile
index bf4ef2c30..67311ce20 100644
--- a/core/connectors/runtime/Dockerfile
+++ b/core/connectors/runtime/Dockerfile
@@ -1,31 +1,93 @@
# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
+# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
+# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
+# with the License. You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
+# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-FROM rust:1.89 AS builder
-WORKDIR /build
-COPY . /build
-RUN cargo build --bin iggy-connectors --release
+ARG RUST_VERSION=1.89
+ARG ALPINE_VERSION=3.22
-FROM debian:bookworm-slim
-RUN apt-get update && apt-get install -y \
- ca-certificates \
- liblzma5 \
- && rm -rf /var/lib/apt/lists/*
-COPY ./core/connectors/runtime/config.toml ./config.toml
-COPY --from=builder /build/target/release/iggy-connectors .
+FROM --platform=$BUILDPLATFORM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION}-alpine AS chef
+WORKDIR /app
+RUN apk add --no-cache musl-dev pkgconfig openssl-dev openssl-libs-static
-CMD ["/iggy-connectors"]
+FROM --platform=$BUILDPLATFORM chef AS planner
+COPY . .
+RUN cargo chef prepare --recipe-path recipe.json
+
+FROM --platform=$BUILDPLATFORM chef AS builder
+ARG PROFILE=release
+ARG TARGETPLATFORM
+ARG LIBC=musl
+
+RUN apk add --no-cache zig && \
+ cargo install cargo-zigbuild --locked && \
+ rustup target add \
+ x86_64-unknown-linux-musl \
+ aarch64-unknown-linux-musl \
+ x86_64-unknown-linux-gnu \
+ aarch64-unknown-linux-gnu
+
+COPY --from=planner /app/recipe.json recipe.json
+
+#
+# Cook dependencies
+#
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported platform/libc combination: $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild; \
+ else \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild --release; \
+ fi
+
+COPY . .
+
+#
+# Build
+#
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/app/target,id=cargo-target-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported platform/libc combination: $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-connectors && \
+ cp /app/target/${RUST_TARGET}/debug/iggy-connectors /app/iggy-connectors; \
+ else \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-connectors --release && \
+ cp /app/target/${RUST_TARGET}/release/iggy-connectors /app/iggy-connectors; \
+ fi
+
+#
+# Final runtime - Debian trixie Slim
+#
+FROM debian:trixie-slim AS runtime
+WORKDIR /app
+
+COPY --from=builder /app/iggy-connectors /usr/local/bin/iggy-connectors
+
+ENTRYPOINT ["iggy-connectors"]
diff --git a/core/connectors/sinks/quickwit_sink/Cargo.toml b/core/connectors/sinks/quickwit_sink/Cargo.toml
index 3537cbbd6..cdf402a8d 100644
--- a/core/connectors/sinks/quickwit_sink/Cargo.toml
+++ b/core/connectors/sinks/quickwit_sink/Cargo.toml
@@ -31,6 +31,9 @@ readme = "../../README.md"
[package.metadata.cargo-machete]
ignored = ["dashmap", "once_cell"]
+[lib]
+crate-type = ["cdylib", "lib"]
+
[dependencies]
async-trait = { workspace = true }
dashmap = { workspace = true }
@@ -41,6 +44,3 @@ serde = { workspace = true }
serde_yml = { workspace = true }
simd-json = { workspace = true }
tracing = { workspace = true }
-
-[lib]
-crate-type = ["cdylib", "lib"]
diff --git a/core/connectors/sinks/stdout_sink/Cargo.toml b/core/connectors/sinks/stdout_sink/Cargo.toml
index 5ad4fa402..2797e04ba 100644
--- a/core/connectors/sinks/stdout_sink/Cargo.toml
+++ b/core/connectors/sinks/stdout_sink/Cargo.toml
@@ -31,6 +31,9 @@ readme = "../../README.md"
[package.metadata.cargo-machete]
ignored = ["dashmap", "once_cell"]
+[lib]
+crate-type = ["cdylib", "lib"]
+
[dependencies]
async-trait = { workspace = true }
dashmap = { workspace = true }
@@ -39,6 +42,3 @@ once_cell = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
-
-[lib]
-crate-type = ["cdylib", "lib"]
diff --git a/core/connectors/sources/random_source/Cargo.toml b/core/connectors/sources/random_source/Cargo.toml
index 53d17933b..ad645601d 100644
--- a/core/connectors/sources/random_source/Cargo.toml
+++ b/core/connectors/sources/random_source/Cargo.toml
@@ -31,6 +31,9 @@ readme = "../../README.md"
[package.metadata.cargo-machete]
ignored = ["dashmap", "once_cell"]
+[lib]
+crate-type = ["cdylib", "lib"]
+
[dependencies]
async-trait = { workspace = true }
dashmap = { workspace = true }
@@ -43,6 +46,3 @@ simd-json = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
uuid = { workspace = true }
-
-[lib]
-crate-type = ["cdylib", "lib"]
diff --git a/core/integration/tests/cli/common/mod.rs b/core/integration/tests/cli/common/mod.rs
index 077c8aa47..3bb3743a0 100644
--- a/core/integration/tests/cli/common/mod.rs
+++ b/core/integration/tests/cli/common/mod.rs
@@ -145,6 +145,8 @@ impl IggyCmdTest {
let command_args = test_case.get_command();
// Set environment variables for the command
command.envs(command_args.get_env());
+ // Set a fixed terminal width for consistent help output formatting
+ command.env("COLUMNS", "500");
// Set server address for the command - it's randomized for each test
command.args(test_case.protocol(&self.server));
@@ -205,6 +207,8 @@ impl IggyCmdTest {
let command_args = test_case.get_command();
// Set environment variables for the command
command.envs(command_args.get_env());
+ // Set a fixed terminal width for consistent help output formatting
+ command.env("COLUMNS", "500");
// Print used environment variables and command with all arguments.
// By default, it will not be visible but once test is executed with
diff --git a/core/integration/tests/cli/topic/test_topic_create_command.rs b/core/integration/tests/cli/topic/test_topic_create_command.rs
index 0227d79cc..f4dc9ad56 100644
--- a/core/integration/tests/cli/topic/test_topic_create_command.rs
+++ b/core/integration/tests/cli/topic/test_topic_create_command.rs
@@ -350,14 +350,10 @@ Arguments:
[MESSAGE_EXPIRY]... Message expiry time in human-readable format like "unlimited" or "15days 2min 2s" [default: server_default]
Options:
- -t, --topic-id
- Topic ID to create
- -m, --max-topic-size
- Max topic size in human-readable format like "unlimited" or "15GB" [default: server_default]
- -r, --replication-factor
- Replication factor for the topic [default: 1]
- -h, --help
- Print help (see more with '--help')
+ -t, --topic-id Topic ID to create
+ -m, --max-topic-size Max topic size in human-readable format like "unlimited" or "15GB" [default: server_default]
+ -r, --replication-factor Replication factor for the topic [default: 1]
+ -h, --help Print help (see more with '--help')
"#,
),
))
diff --git a/core/integration/tests/cli/topic/test_topic_update_command.rs b/core/integration/tests/cli/topic/test_topic_update_command.rs
index d250f4045..ac402345d 100644
--- a/core/integration/tests/cli/topic/test_topic_update_command.rs
+++ b/core/integration/tests/cli/topic/test_topic_update_command.rs
@@ -457,12 +457,9 @@ Arguments:
[MESSAGE_EXPIRY]... New message expiry time in human-readable format like "unlimited" or "15days 2min 2s" [default: server_default]
Options:
- -m, --max-topic-size
- New max topic size in human-readable format like "unlimited" or "15GB" [default: server_default]
- -r, --replication-factor
- New replication factor for the topic [default: 1]
- -h, --help
- Print help (see more with '--help')
+ -m, --max-topic-size New max topic size in human-readable format like "unlimited" or "15GB" [default: server_default]
+ -r, --replication-factor New replication factor for the topic [default: 1]
+ -h, --help Print help (see more with '--help')
"#,
),
))
diff --git a/core/integration/tests/cli/user/test_user_create_command.rs b/core/integration/tests/cli/user/test_user_create_command.rs
index ecae4c474..3678185bd 100644
--- a/core/integration/tests/cli/user/test_user_create_command.rs
+++ b/core/integration/tests/cli/user/test_user_create_command.rs
@@ -381,14 +381,10 @@ Arguments:
Password
Options:
- -u, --user-status
- User status [default: active] [possible values: active, inactive]
- -g, --global-permissions
- Set global permissions for created user
- -s, --stream-permissions
- Set stream permissions for created user
- -h, --help
- Print help (see more with '--help')
+ -u, --user-status User status [default: active] [possible values: active, inactive]
+ -g, --global-permissions Set global permissions for created user
+ -s, --stream-permissions Set stream permissions for created user
+ -h, --help Print help (see more with '--help')
"#,
),
))
diff --git a/core/integration/tests/cli/user/test_user_permissions_command.rs b/core/integration/tests/cli/user/test_user_permissions_command.rs
index c4cf30cb7..247641ef3 100644
--- a/core/integration/tests/cli/user/test_user_permissions_command.rs
+++ b/core/integration/tests/cli/user/test_user_permissions_command.rs
@@ -346,14 +346,10 @@ Arguments:
Password
Options:
- -u, --user-status
- User status [default: active] [possible values: active, inactive]
- -g, --global-permissions
- Set global permissions for created user
- -s, --stream-permissions
- Set stream permissions for created user
- -h, --help
- Print help (see more with '--help')
+ -u, --user-status User status [default: active] [possible values: active, inactive]
+ -g, --global-permissions Set global permissions for created user
+ -s, --stream-permissions Set stream permissions for created user
+ -h, --help Print help (see more with '--help')
"#,
),
))
diff --git a/Dockerfile.mcp.ci.dockerignore b/core/server/.dockerignore
similarity index 100%
rename from Dockerfile.mcp.ci.dockerignore
rename to core/server/.dockerignore
diff --git a/core/server/Cargo.toml b/core/server/Cargo.toml
index 3a0cf94e9..24d2b3176 100644
--- a/core/server/Cargo.toml
+++ b/core/server/Cargo.toml
@@ -28,6 +28,10 @@ normal = ["tracing-appender"]
[package.metadata.cargo-machete]
ignored = ["rust-s3"]
+[[bin]]
+name = "iggy-server"
+path = "src/main.rs"
+
[features]
default = ["mimalloc"]
tokio-console = ["dep:console-subscriber", "tokio/tracing"]
@@ -125,7 +129,3 @@ vergen-git2 = { version = "1.0.7", features = [
[dev-dependencies]
mockall = { workspace = true }
serial_test = { workspace = true }
-
-[[bin]]
-name = "iggy-server"
-path = "src/main.rs"
diff --git a/core/server/Dockerfile b/core/server/Dockerfile
new file mode 100644
index 000000000..f43b82dee
--- /dev/null
+++ b/core/server/Dockerfile
@@ -0,0 +1,143 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+ARG RUST_VERSION=1.89
+ARG ALPINE_VERSION=3.22
+
+# ── from-source path (unchanged) ──────────────────────────────────────────────
+FROM --platform=$BUILDPLATFORM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION}-alpine AS chef
+WORKDIR /app
+RUN apk add --no-cache musl-dev pkgconfig openssl-dev openssl-libs-static
+
+FROM --platform=$BUILDPLATFORM chef AS planner
+COPY . .
+RUN cargo chef prepare --recipe-path recipe.json
+
+FROM --platform=$BUILDPLATFORM chef AS builder
+ARG PROFILE=release
+ARG TARGETPLATFORM
+ARG LIBC=musl
+
+RUN apk add --no-cache zig && \
+ cargo install cargo-zigbuild --locked && \
+ rustup target add \
+ x86_64-unknown-linux-musl \
+ aarch64-unknown-linux-musl \
+ x86_64-unknown-linux-gnu \
+ aarch64-unknown-linux-gnu
+
+COPY --from=planner /app/recipe.json recipe.json
+
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild; \
+ else \
+ cargo chef cook --recipe-path recipe.json --target ${RUST_TARGET} --zigbuild --release; \
+ fi
+
+COPY . .
+
+RUN --mount=type=cache,target=/usr/local/cargo/registry,id=cargo-registry-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/usr/local/cargo/git,id=cargo-git-${TARGETPLATFORM}-${LIBC} \
+ --mount=type=cache,target=/app/target,id=cargo-target-${TARGETPLATFORM}-${LIBC} \
+ case "$TARGETPLATFORM:$LIBC" in \
+ "linux/amd64:musl") RUST_TARGET="x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") RUST_TARGET="aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") RUST_TARGET="x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") RUST_TARGET="aarch64-unknown-linux-gnu" ;; \
+ *) echo "Unsupported $TARGETPLATFORM/$LIBC" && exit 1 ;; \
+ esac && \
+ if [ "$PROFILE" = "debug" ]; then \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-server --bin iggy && \
+ cp /app/target/${RUST_TARGET}/debug/iggy-server /app/iggy-server && \
+ cp /app/target/${RUST_TARGET}/debug/iggy /app/iggy; \
+ else \
+ cargo zigbuild --target ${RUST_TARGET} --bin iggy-server --bin iggy --release && \
+ cp /app/target/${RUST_TARGET}/release/iggy-server /app/iggy-server && \
+ cp /app/target/${RUST_TARGET}/release/iggy /app/iggy; \
+ fi
+
+# ── prebuilt path (FAST) ──────────────────────────────────────────────────────
+FROM debian:trixie-slim AS prebuilt
+WORKDIR /out
+ARG PREBUILT_IGGY_SERVER
+ARG PREBUILT_IGGY_CLI
+COPY ${PREBUILT_IGGY_SERVER} /out/iggy-server
+COPY ${PREBUILT_IGGY_CLI} /out/iggy
+RUN chmod +x /out/iggy-server /out/iggy
+
+# ── final images ──────────────────────────────────────────────────────────────
+FROM debian:trixie-slim AS runtime-prebuilt
+ARG TARGETPLATFORM
+ARG PREBUILT_IGGY_SERVER
+ARG PREBUILT_IGGY_CLI
+WORKDIR /app
+COPY --from=prebuilt /out/iggy-server /usr/local/bin/iggy-server
+COPY --from=prebuilt /out/iggy /usr/local/bin/iggy
+RUN echo "───────────────────────────────────────────────────────────────" && \
+ echo " IGGY SERVER BUILD SUMMARY " && \
+    echo "───────────────────────────────────────────────────────────────" && \
+ echo "Build Type: PREBUILT BINARIES" && \
+ echo "Platform: ${TARGETPLATFORM:-linux/amd64}" && \
+ echo "Source Path: ${PREBUILT_IGGY_SERVER:-not specified}" && \
+ echo "Binary Info:" && \
+ (command -v file >/dev/null 2>&1 && file /usr/local/bin/iggy-server | sed 's/^/ /' || \
+ echo " $(ldd /usr/local/bin/iggy-server 2>&1 | head -1)") && \
+ echo "Binary Size:" && \
+ ls -lh /usr/local/bin/iggy-server /usr/local/bin/iggy | awk '{print " " $9 ": " $5}' && \
+ echo "Build Date: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" && \
+ echo "Container Base: debian:trixie-slim" && \
+    echo "───────────────────────────────────────────────────────────────"
+ENTRYPOINT ["iggy-server"]
+
+FROM debian:trixie-slim AS runtime
+ARG TARGETPLATFORM
+ARG PROFILE=release
+ARG LIBC=musl
+WORKDIR /app
+COPY --from=builder /app/iggy-server /usr/local/bin/iggy-server
+COPY --from=builder /app/iggy /usr/local/bin/iggy
+RUN echo "───────────────────────────────────────────────────────────────" && \
+ echo " IGGY SERVER BUILD SUMMARY " && \
+    echo "───────────────────────────────────────────────────────────────" && \
+ echo "Build Type: FROM SOURCE" && \
+ echo "Platform: ${TARGETPLATFORM:-linux/amd64}" && \
+ echo "Profile: ${PROFILE}" && \
+ echo "Libc: ${LIBC}" && \
+ case "${TARGETPLATFORM:-linux/amd64}:${LIBC}" in \
+ "linux/amd64:musl") echo "Target: x86_64-unknown-linux-musl" ;; \
+ "linux/arm64:musl") echo "Target: aarch64-unknown-linux-musl" ;; \
+ "linux/amd64:glibc") echo "Target: x86_64-unknown-linux-gnu" ;; \
+ "linux/arm64:glibc") echo "Target: aarch64-unknown-linux-gnu" ;; \
+ *) echo "Target: unknown" ;; \
+ esac && \
+ echo "Binary Info:" && \
+ (command -v file >/dev/null 2>&1 && file /usr/local/bin/iggy-server | sed 's/^/ /' || \
+ echo " $(ldd /usr/local/bin/iggy-server 2>&1 | head -1)") && \
+ echo "Binary Size:" && \
+ ls -lh /usr/local/bin/iggy-server /usr/local/bin/iggy | awk '{print " " $9 ": " $5}' && \
+ echo "Build Date: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" && \
+    echo "───────────────────────────────────────────────────────────────"
+ENTRYPOINT ["iggy-server"]
diff --git a/core/tools/Cargo.toml b/core/tools/Cargo.toml
index be036b36a..55cc857e2 100644
--- a/core/tools/Cargo.toml
+++ b/core/tools/Cargo.toml
@@ -21,6 +21,10 @@ version = "0.1.0"
edition = "2024"
license = "Apache-2.0"
+[[bin]]
+name = "data-seeder-tool"
+path = "src/data-seeder/main.rs"
+
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true }
@@ -29,7 +33,3 @@ rand = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
-
-[[bin]]
-name = "data-seeder-tool"
-path = "src/data-seeder/main.rs"
diff --git a/docker-compose.yml b/docker-compose.yml
index 48da4ee0e..3e485c106 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -17,7 +17,9 @@
services:
iggy-server:
- build: .
+ build:
+ context: .
+ dockerfile: core/server/Dockerfile
container_name: iggy-server
restart: unless-stopped
networks:
diff --git a/examples/python/getting-started/consumer.py b/examples/python/getting-started/consumer.py
index efd7bcbdd..37cb51e69 100644
--- a/examples/python/getting-started/consumer.py
+++ b/examples/python/getting-started/consumer.py
@@ -15,15 +15,15 @@
# specific language governing permissions and limitations
# under the License.
-import asyncio
-import urllib.parse
-from loguru import logger
import argparse
-import urllib
+import asyncio
import typing
+import urllib
+import urllib.parse
from collections import namedtuple
-from apache_iggy import IggyClient, ReceiveMessage, PollingStrategy
+from apache_iggy import IggyClient, PollingStrategy, ReceiveMessage
+from loguru import logger
STREAM_NAME = "sample-stream"
TOPIC_NAME = "sample-topic"
diff --git a/examples/python/getting-started/producer.py b/examples/python/getting-started/producer.py
index be20db629..71ccb2232 100644
--- a/examples/python/getting-started/producer.py
+++ b/examples/python/getting-started/producer.py
@@ -15,15 +15,16 @@
# specific language governing permissions and limitations
# under the License.
-import asyncio
-import urllib.parse
-from loguru import logger
import argparse
-import urllib
+import asyncio
import typing
+import urllib
+import urllib.parse
from collections import namedtuple
-from apache_iggy import IggyClient, SendMessage as Message, StreamDetails, TopicDetails
+from apache_iggy import IggyClient, StreamDetails, TopicDetails
+from apache_iggy import SendMessage as Message
+from loguru import logger
STREAM_NAME = "sample-stream"
TOPIC_NAME = "sample-topic"
diff --git a/foreign/csharp/Iggy_SDK_Tests/MapperTests/BinaryMapper.cs b/foreign/csharp/Iggy_SDK_Tests/MapperTests/BinaryMapper.cs
index 2d523c4cb..51c1a9d7c 100644
--- a/foreign/csharp/Iggy_SDK_Tests/MapperTests/BinaryMapper.cs
+++ b/foreign/csharp/Iggy_SDK_Tests/MapperTests/BinaryMapper.cs
@@ -112,7 +112,7 @@ public void MapMessagesTMessage_NoHeaders_ReturnsValidMessageResponse()
Text = text
};
});
- //Assert
+ //Assert
Assert.NotEmpty(response.Messages);
Assert.Equal(2, response.Messages.Count);
// Assert.Equal(response.Messages[0].Id, guid);
@@ -397,4 +397,4 @@ public void MapStats_ReturnsValidStatsResponse()
Assert.Equal(stats.OsVersion, stats.OsVersion);
Assert.Equal(stats.KernelVersion, response.KernelVersion);
}
-}
\ No newline at end of file
+}
diff --git a/foreign/csharp/scripts/pack.sh b/foreign/csharp/scripts/pack.sh
index b2d87f80c..6510e6ac2 100755
--- a/foreign/csharp/scripts/pack.sh
+++ b/foreign/csharp/scripts/pack.sh
@@ -39,14 +39,14 @@ echo "Extracted version: $version"
echo "Executing after success scripts on branch $GITHUB_REF_NAME"
echo "Triggering Nuget package build"
-cd Iggy_SDK
-dotnet pack -c release /p:PackageVersion=$version --no-restore -o .
+cd Iggy_SDK || exit
+dotnet pack -c release /p:PackageVersion="$version" --no-restore -o .
echo "Uploading Iggy package to Nuget using branch $GITHUB_REF_NAME"
case "$GITHUB_REF_NAME" in
"master")
- dotnet nuget push *.nupkg -k $NUGET_API_KEY -s https://api.nuget.org/v3/index.json
+ dotnet nuget push ./*.nupkg -k "$NUGET_API_KEY" -s https://api.nuget.org/v3/index.json
echo "Published package succesfully!"
;;
*)
diff --git a/foreign/python/apache_iggy.pyi b/foreign/python/apache_iggy.pyi
index c7300a576..801f05960 100644
--- a/foreign/python/apache_iggy.pyi
+++ b/foreign/python/apache_iggy.pyi
@@ -32,70 +32,86 @@ class AutoCommit:
r"""
The auto-commit is disabled and the offset must be stored manually by the consumer.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommit.Disabled: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class Interval(AutoCommit):
r"""
The auto-commit is enabled and the offset is stored on the server after a certain interval.
"""
+
__match_args__ = ("_0",)
@property
def _0(self) -> datetime.timedelta: ...
- def __new__(cls, _0:datetime.timedelta) -> AutoCommit.Interval: ...
+ def __new__(cls, _0: datetime.timedelta) -> AutoCommit.Interval: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class IntervalOrWhen(AutoCommit):
r"""
The auto-commit is enabled and the offset is stored on the server after a certain interval or depending on the mode when consuming the messages.
"""
- __match_args__ = ("_0", "_1",)
+
+ __match_args__ = (
+ "_0",
+ "_1",
+ )
@property
def _0(self) -> datetime.timedelta: ...
@property
def _1(self) -> AutoCommitWhen: ...
- def __new__(cls, _0:datetime.timedelta, _1:AutoCommitWhen) -> AutoCommit.IntervalOrWhen: ...
+ def __new__(
+ cls, _0: datetime.timedelta, _1: AutoCommitWhen
+ ) -> AutoCommit.IntervalOrWhen: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class IntervalOrAfter(AutoCommit):
r"""
The auto-commit is enabled and the offset is stored on the server after a certain interval or depending on the mode after consuming the messages.
"""
- __match_args__ = ("_0", "_1",)
+
+ __match_args__ = (
+ "_0",
+ "_1",
+ )
@property
def _0(self) -> datetime.timedelta: ...
@property
def _1(self) -> AutoCommitAfter: ...
- def __new__(cls, _0:datetime.timedelta, _1:AutoCommitAfter) -> AutoCommit.IntervalOrAfter: ...
+ def __new__(
+ cls, _0: datetime.timedelta, _1: AutoCommitAfter
+ ) -> AutoCommit.IntervalOrAfter: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class When(AutoCommit):
r"""
The auto-commit is enabled and the offset is stored on the server depending on the mode when consuming the messages.
"""
+
__match_args__ = ("_0",)
@property
def _0(self) -> AutoCommitWhen: ...
- def __new__(cls, _0:AutoCommitWhen) -> AutoCommit.When: ...
+ def __new__(cls, _0: AutoCommitWhen) -> AutoCommit.When: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class After(AutoCommit):
r"""
The auto-commit is enabled and the offset is stored on the server depending on the mode after consuming the messages.
"""
+
__match_args__ = ("_0",)
@property
def _0(self) -> AutoCommitAfter: ...
- def __new__(cls, _0:AutoCommitAfter) -> AutoCommit.After: ...
+ def __new__(cls, _0: AutoCommitAfter) -> AutoCommit.After: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
...
class AutoCommitAfter:
@@ -106,31 +122,36 @@ class AutoCommitAfter:
r"""
The offset is stored on the server after all the messages are consumed.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommitAfter.ConsumingAllMessages: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class ConsumingEachMessage(AutoCommitAfter):
r"""
The offset is stored on the server after consuming each message.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommitAfter.ConsumingEachMessage: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class ConsumingEveryNthMessage(AutoCommitAfter):
r"""
The offset is stored on the server after consuming every Nth message.
"""
+
__match_args__ = ("_0",)
@property
def _0(self) -> builtins.int: ...
- def __new__(cls, _0:builtins.int) -> AutoCommitAfter.ConsumingEveryNthMessage: ...
+ def __new__(
+ cls, _0: builtins.int
+ ) -> AutoCommitAfter.ConsumingEveryNthMessage: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
...
class AutoCommitWhen:
@@ -141,40 +162,46 @@ class AutoCommitWhen:
r"""
The offset is stored on the server when the messages are received.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommitWhen.PollingMessages: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class ConsumingAllMessages(AutoCommitWhen):
r"""
The offset is stored on the server when all the messages are consumed.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommitWhen.ConsumingAllMessages: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class ConsumingEachMessage(AutoCommitWhen):
r"""
The offset is stored on the server when consuming each message.
"""
+
__match_args__ = ((),)
def __new__(cls) -> AutoCommitWhen.ConsumingEachMessage: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
class ConsumingEveryNthMessage(AutoCommitWhen):
r"""
The offset is stored on the server when consuming every Nth message.
"""
+
__match_args__ = ("_0",)
@property
def _0(self) -> builtins.int: ...
- def __new__(cls, _0:builtins.int) -> AutoCommitWhen.ConsumingEveryNthMessage: ...
+ def __new__(
+ cls, _0: builtins.int
+ ) -> AutoCommitWhen.ConsumingEveryNthMessage: ...
def __len__(self) -> builtins.int: ...
- def __getitem__(self, key:builtins.int) -> typing.Any: ...
-
+ def __getitem__(self, key: builtins.int) -> typing.Any: ...
+
...
class IggyClient:
@@ -183,79 +210,127 @@ class IggyClient:
It wraps the RustIggyClient and provides asynchronous functionality
through the contained runtime.
"""
- def __new__(cls, conn:typing.Optional[builtins.str]=None) -> IggyClient:
+ def __new__(cls, conn: typing.Optional[builtins.str] = None) -> IggyClient:
r"""
Constructs a new IggyClient from a TCP server address.
-
+
This initializes a new runtime for asynchronous operations.
Future versions might utilize asyncio for more Pythonic async.
"""
@classmethod
- def from_connection_string(cls, connection_string:builtins.str) -> IggyClient:
+ def from_connection_string(cls, connection_string: builtins.str) -> IggyClient:
r"""
Constructs a new IggyClient from a connection string.
-
+
Returns an error if the connection string provided is invalid.
"""
def ping(self) -> collections.abc.Awaitable[None]:
r"""
Sends a ping request to the server to check connectivity.
-
+
Returns `Ok(())` if the server responds successfully, or a `PyRuntimeError`
if the connection fails.
"""
- def login_user(self, username:builtins.str, password:builtins.str) -> collections.abc.Awaitable[None]:
+ def login_user(
+ self, username: builtins.str, password: builtins.str
+ ) -> collections.abc.Awaitable[None]:
r"""
Logs in the user with the given credentials.
-
+
Returns `Ok(())` on success, or a PyRuntimeError on failure.
"""
def connect(self) -> collections.abc.Awaitable[None]:
r"""
Connects the IggyClient to its service.
-
+
Returns Ok(()) on successful connection or a PyRuntimeError on failure.
"""
- def create_stream(self, name:builtins.str, stream_id:typing.Optional[builtins.int]=None) -> collections.abc.Awaitable[None]:
+ def create_stream(
+ self, name: builtins.str, stream_id: typing.Optional[builtins.int] = None
+ ) -> collections.abc.Awaitable[None]:
r"""
Creates a new stream with the provided ID and name.
-
+
Returns Ok(()) on successful stream creation or a PyRuntimeError on failure.
"""
- def get_stream(self, stream_id:builtins.str | builtins.int) -> collections.abc.Awaitable[typing.Optional[StreamDetails]]:
+ def get_stream(
+ self, stream_id: builtins.str | builtins.int
+ ) -> collections.abc.Awaitable[typing.Optional[StreamDetails]]:
r"""
Gets stream by id.
-
+
Returns Option of stream details or a PyRuntimeError on failure.
"""
- def create_topic(self, stream:builtins.str | builtins.int, name:builtins.str, partitions_count:builtins.int, compression_algorithm:typing.Optional[builtins.str]=None, topic_id:typing.Optional[builtins.int]=None, replication_factor:typing.Optional[builtins.int]=None) -> collections.abc.Awaitable[None]:
+ def create_topic(
+ self,
+ stream: builtins.str | builtins.int,
+ name: builtins.str,
+ partitions_count: builtins.int,
+ compression_algorithm: typing.Optional[builtins.str] = None,
+ topic_id: typing.Optional[builtins.int] = None,
+ replication_factor: typing.Optional[builtins.int] = None,
+ ) -> collections.abc.Awaitable[None]:
r"""
Creates a new topic with the given parameters.
-
+
Returns Ok(()) on successful topic creation or a PyRuntimeError on failure.
"""
- def get_topic(self, stream_id:builtins.str | builtins.int, topic_id:builtins.str | builtins.int) -> collections.abc.Awaitable[typing.Optional[TopicDetails]]:
+ def get_topic(
+ self,
+ stream_id: builtins.str | builtins.int,
+ topic_id: builtins.str | builtins.int,
+ ) -> collections.abc.Awaitable[typing.Optional[TopicDetails]]:
r"""
Gets topic by stream and id.
-
+
Returns Option of topic details or a PyRuntimeError on failure.
"""
- def send_messages(self, stream:builtins.str | builtins.int, topic:builtins.str | builtins.int, partitioning:builtins.int, messages:list[SendMessage]) -> collections.abc.Awaitable[None]:
+ def send_messages(
+ self,
+ stream: builtins.str | builtins.int,
+ topic: builtins.str | builtins.int,
+ partitioning: builtins.int,
+ messages: list[SendMessage],
+ ) -> collections.abc.Awaitable[None]:
r"""
Sends a list of messages to the specified topic.
-
+
Returns Ok(()) on successful sending or a PyRuntimeError on failure.
"""
- def poll_messages(self, stream:builtins.str | builtins.int, topic:builtins.str | builtins.int, partition_id:builtins.int, polling_strategy:PollingStrategy, count:builtins.int, auto_commit:builtins.bool) -> collections.abc.Awaitable[list[ReceiveMessage]]:
+ def poll_messages(
+ self,
+ stream: builtins.str | builtins.int,
+ topic: builtins.str | builtins.int,
+ partition_id: builtins.int,
+ polling_strategy: PollingStrategy,
+ count: builtins.int,
+ auto_commit: builtins.bool,
+ ) -> collections.abc.Awaitable[list[ReceiveMessage]]:
r"""
Polls for messages from the specified topic and partition.
-
+
Returns a list of received messages or a PyRuntimeError on failure.
"""
- def consumer_group(self, name:builtins.str, stream:builtins.str, topic:builtins.str, partition_id:typing.Optional[builtins.int]=None, polling_strategy:typing.Optional[PollingStrategy]=None, batch_length:typing.Optional[builtins.int]=None, auto_commit:typing.Optional[AutoCommit]=None, create_consumer_group_if_not_exists:builtins.bool=True, auto_join_consumer_group:builtins.bool=True, poll_interval:typing.Optional[datetime.timedelta]=None, polling_retry_interval:typing.Optional[datetime.timedelta]=None, init_retries:typing.Optional[builtins.int]=None, init_retry_interval:typing.Optional[datetime.timedelta]=None, allow_replay:builtins.bool=False) -> IggyConsumer:
+ def consumer_group(
+ self,
+ name: builtins.str,
+ stream: builtins.str,
+ topic: builtins.str,
+ partition_id: typing.Optional[builtins.int] = None,
+ polling_strategy: typing.Optional[PollingStrategy] = None,
+ batch_length: typing.Optional[builtins.int] = None,
+ auto_commit: typing.Optional[AutoCommit] = None,
+ create_consumer_group_if_not_exists: builtins.bool = True,
+ auto_join_consumer_group: builtins.bool = True,
+ poll_interval: typing.Optional[datetime.timedelta] = None,
+ polling_retry_interval: typing.Optional[datetime.timedelta] = None,
+ init_retries: typing.Optional[builtins.int] = None,
+ init_retry_interval: typing.Optional[datetime.timedelta] = None,
+ allow_replay: builtins.bool = False,
+ ) -> IggyConsumer:
r"""
Creates a new consumer group consumer.
-
+
Returns the consumer or a PyRuntimeError on failure.
"""
@@ -265,11 +340,15 @@ class IggyConsumer:
It wraps the RustIggyConsumer and provides asynchronous functionality
through the contained runtime.
"""
- def get_last_consumed_offset(self, partition_id:builtins.int) -> typing.Optional[builtins.int]:
+ def get_last_consumed_offset(
+ self, partition_id: builtins.int
+ ) -> typing.Optional[builtins.int]:
r"""
Get the last consumed offset or `None` if no offset has been consumed yet.
"""
- def get_last_stored_offset(self, partition_id:builtins.int) -> typing.Optional[builtins.int]:
+ def get_last_stored_offset(
+ self, partition_id: builtins.int
+ ) -> typing.Optional[builtins.int]:
r"""
Get the last stored offset or `None` if no offset has been stored yet.
"""
@@ -289,26 +368,36 @@ class IggyConsumer:
r"""
Gets the name of the topic this consumer group is configured for.
"""
- def store_offset(self, offset:builtins.int, partition_id:typing.Optional[builtins.int]) -> collections.abc.Awaitable[None]:
+ def store_offset(
+ self, offset: builtins.int, partition_id: typing.Optional[builtins.int]
+ ) -> collections.abc.Awaitable[None]:
r"""
Stores the provided offset for the provided partition id or if none is specified
uses the current partition id for the consumer group.
-
+
Returns `Ok(())` if the server responds successfully, or a `PyRuntimeError`
if the operation fails.
"""
- def delete_offset(self, partition_id:typing.Optional[builtins.int]) -> collections.abc.Awaitable[None]:
+ def delete_offset(
+ self, partition_id: typing.Optional[builtins.int]
+ ) -> collections.abc.Awaitable[None]:
r"""
Deletes the offset for the provided partition id or if none is specified
uses the current partition id for the consumer group.
-
+
Returns `Ok(())` if the server responds successfully, or a `PyRuntimeError`
if the operation fails.
"""
- def consume_messages(self, callback:collections.abc.Callable[[ReceiveMessage], collections.abc.Awaitable[None]], shutdown_event:typing.Optional[asyncio.Event]) -> collections.abc.Awaitable[None]:
+ def consume_messages(
+ self,
+ callback: collections.abc.Callable[
+ [ReceiveMessage], collections.abc.Awaitable[None]
+ ],
+ shutdown_event: typing.Optional[asyncio.Event],
+ ) -> collections.abc.Awaitable[None]:
r"""
Consumes messages continuously using a callback function and an optional `asyncio.Event` for signaling shutdown.
-
+
Returns an awaitable that completes when shutdown is signaled or a PyRuntimeError on failure.
"""
@@ -317,82 +406,82 @@ class PollingStrategy:
__match_args__ = ("value",)
@property
def value(self) -> builtins.int: ...
- def __new__(cls, value:builtins.int) -> PollingStrategy.Offset: ...
-
+ def __new__(cls, value: builtins.int) -> PollingStrategy.Offset: ...
+
class Timestamp(PollingStrategy):
__match_args__ = ("value",)
@property
def value(self) -> builtins.int: ...
- def __new__(cls, value:builtins.int) -> PollingStrategy.Timestamp: ...
-
+ def __new__(cls, value: builtins.int) -> PollingStrategy.Timestamp: ...
+
class First(PollingStrategy):
__match_args__ = ((),)
def __new__(cls) -> PollingStrategy.First: ...
-
+
class Last(PollingStrategy):
__match_args__ = ((),)
def __new__(cls) -> PollingStrategy.Last: ...
-
+
class Next(PollingStrategy):
__match_args__ = ((),)
def __new__(cls) -> PollingStrategy.Next: ...
-
+
...
class ReceiveMessage:
r"""
A Python class representing a received message.
-
+
This class wraps a Rust message, allowing for access to its payload and offset from Python.
"""
def payload(self) -> bytes:
r"""
Retrieves the payload of the received message.
-
+
The payload is returned as a Python bytes object.
"""
def offset(self) -> builtins.int:
r"""
Retrieves the offset of the received message.
-
+
The offset represents the position of the message within its topic.
"""
def timestamp(self) -> builtins.int:
r"""
Retrieves the timestamp of the received message.
-
+
The timestamp represents the time of the message within its topic.
"""
def id(self) -> builtins.int:
r"""
Retrieves the id of the received message.
-
+
The id represents unique identifier of the message within its topic.
"""
def checksum(self) -> builtins.int:
r"""
Retrieves the checksum of the received message.
-
+
The checksum represents the integrity of the message within its topic.
"""
def length(self) -> builtins.int:
r"""
Retrieves the length of the received message.
-
+
The length represents the length of the payload.
"""
class SendMessage:
r"""
A Python class representing a message to be sent.
-
+
This class wraps a Rust message meant for sending, facilitating
the creation of such messages from Python and their subsequent use in Rust.
"""
- def __new__(cls, data:builtins.str | bytes) -> SendMessage:
+ def __new__(cls, data: builtins.str | bytes) -> SendMessage:
r"""
Constructs a new `SendMessage` instance from a string.
-
+
This method allows for the creation of a `SendMessage` instance
directly from Python using the provided string data.
"""
@@ -416,4 +505,3 @@ class TopicDetails:
def messages_count(self) -> builtins.int: ...
@property
def partitions_count(self) -> builtins.int: ...
-
diff --git a/foreign/python/docker-compose.test.yml b/foreign/python/docker-compose.test.yml
index 490aa5c74..92ffec6da 100644
--- a/foreign/python/docker-compose.test.yml
+++ b/foreign/python/docker-compose.test.yml
@@ -21,8 +21,9 @@ services:
iggy-server:
build:
context: ../..
- # Use debug build for faster compilation during testing
- dockerfile: Dockerfile.debug
+ dockerfile: core/server/Dockerfile
+ args:
+ PROFILE: debug
container_name: iggy-server-python-test
networks:
- python-test-network
@@ -31,7 +32,7 @@ services:
- "8080:8080"
- "8090:8090"
healthcheck:
- test: ["CMD-SHELL", "timeout 5 bash -c ' IggyClient:
@@ -67,7 +68,7 @@ async def iggy_client() -> IggyClient:
def configure_asyncio():
"""Configure asyncio settings for tests."""
# Set event loop policy if needed
- if os.name == 'nt': # Windows
+ if os.name == "nt": # Windows
asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
@@ -75,13 +76,9 @@ def configure_asyncio():
def pytest_configure(config):
"""Configure pytest with custom markers."""
config.addinivalue_line(
- "markers",
- "integration: marks tests as integration tests (may be slow)"
- )
- config.addinivalue_line(
- "markers",
- "unit: marks tests as unit tests (fast)"
+ "markers", "integration: marks tests as integration tests (may be slow)"
)
+ config.addinivalue_line("markers", "unit: marks tests as unit tests (fast)")
def pytest_collection_modifyitems(config, items):
diff --git a/foreign/python/tests/test_iggy_sdk.py b/foreign/python/tests/test_iggy_sdk.py
index 4cde470a0..604cd6bc2 100644
--- a/foreign/python/tests/test_iggy_sdk.py
+++ b/foreign/python/tests/test_iggy_sdk.py
@@ -23,16 +23,15 @@
"""
import asyncio
-from datetime import timedelta
import uuid
+from datetime import timedelta
import pytest
+from apache_iggy import AutoCommit, IggyClient, PollingStrategy, ReceiveMessage
+from apache_iggy import SendMessage as Message
from .utils import get_server_config, wait_for_ping, wait_for_server
-from apache_iggy import IggyClient, PollingStrategy, AutoCommit, ReceiveMessage
-from apache_iggy import SendMessage as Message
-
class TestConnectivity:
"""Test basic connectivity and authentication."""
@@ -69,7 +68,9 @@ def unique_stream_name(self):
return f"test-stream-{uuid.uuid4().hex[:8]}"
@pytest.mark.asyncio
- async def test_create_and_get_stream(self, iggy_client: IggyClient, unique_stream_name):
+ async def test_create_and_get_stream(
+ self, iggy_client: IggyClient, unique_stream_name
+ ):
"""Test stream creation and retrieval."""
# Create stream
await iggy_client.create_stream(unique_stream_name)
@@ -102,24 +103,22 @@ def unique_names(self):
"""Generate unique stream and topic names."""
unique_id = uuid.uuid4().hex[:8]
return {
- 'stream': f"test-stream-{unique_id}",
- 'topic': f"test-topic-{unique_id}"
+ "stream": f"test-stream-{unique_id}",
+ "topic": f"test-topic-{unique_id}",
}
@pytest.mark.asyncio
async def test_create_and_get_topic(self, iggy_client: IggyClient, unique_names):
"""Test topic creation and retrieval."""
- stream_name = unique_names['stream']
- topic_name = unique_names['topic']
+ stream_name = unique_names["stream"]
+ topic_name = unique_names["topic"]
# Create stream first
await iggy_client.create_stream(stream_name)
# Create topic
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=2
+ stream=stream_name, name=topic_name, partitions_count=2
)
# Get topic by name
@@ -132,15 +131,13 @@ async def test_create_and_get_topic(self, iggy_client: IggyClient, unique_names)
@pytest.mark.asyncio
async def test_list_topics(self, iggy_client: IggyClient, unique_names):
"""Test listing topics in a stream."""
- stream_name = unique_names['stream']
- topic_name = unique_names['topic']
+ stream_name = unique_names["stream"]
+ topic_name = unique_names["topic"]
# Create stream and topic
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
# Get the topic we just created
@@ -159,28 +156,24 @@ def message_setup(self):
"""Setup unique names and test data for messaging tests."""
unique_id = uuid.uuid4().hex[:8]
return {
- 'stream': f"msg-stream-{unique_id}",
- 'topic': f"msg-topic-{unique_id}",
- 'partition_id': 1,
- 'messages': [
- f"Test message {i} - {unique_id}" for i in range(1, 4)
- ]
+ "stream": f"msg-stream-{unique_id}",
+ "topic": f"msg-topic-{unique_id}",
+ "partition_id": 1,
+ "messages": [f"Test message {i} - {unique_id}" for i in range(1, 4)],
}
@pytest.mark.asyncio
async def test_send_and_poll_messages(self, iggy_client: IggyClient, message_setup):
"""Test basic message sending and polling."""
- stream_name = message_setup['stream']
- topic_name = message_setup['topic']
- partition_id = message_setup['partition_id']
- test_messages = message_setup['messages']
+ stream_name = message_setup["stream"]
+ topic_name = message_setup["topic"]
+ partition_id = message_setup["partition_id"]
+ test_messages = message_setup["messages"]
# Setup stream and topic
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
# Send messages
@@ -189,7 +182,7 @@ async def test_send_and_poll_messages(self, iggy_client: IggyClient, message_set
stream=stream_name,
topic=topic_name,
partitioning=partition_id,
- messages=messages
+ messages=messages,
)
# Poll messages
@@ -199,7 +192,7 @@ async def test_send_and_poll_messages(self, iggy_client: IggyClient, message_set
partition_id=partition_id,
polling_strategy=PollingStrategy.First(),
count=10,
- auto_commit=True
+ auto_commit=True,
)
# Verify we got our messages
@@ -212,19 +205,19 @@ async def test_send_and_poll_messages(self, iggy_client: IggyClient, message_set
assert actual_payload == expected_msg
@pytest.mark.asyncio
- async def test_send_and_poll_messages_as_bytes(self, iggy_client: IggyClient, message_setup):
+ async def test_send_and_poll_messages_as_bytes(
+ self, iggy_client: IggyClient, message_setup
+ ):
"""Test basic message sending and polling with message payload as bytes."""
- stream_name = message_setup['stream']
- topic_name = message_setup['topic']
- partition_id = message_setup['partition_id']
- test_messages = message_setup['messages']
+ stream_name = message_setup["stream"]
+ topic_name = message_setup["topic"]
+ partition_id = message_setup["partition_id"]
+ test_messages = message_setup["messages"]
# Setup stream and topic
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
# Send messages
@@ -233,7 +226,7 @@ async def test_send_and_poll_messages_as_bytes(self, iggy_client: IggyClient, me
stream=stream_name,
topic=topic_name,
partitioning=partition_id,
- messages=messages
+ messages=messages,
)
# Poll messages
@@ -243,7 +236,7 @@ async def test_send_and_poll_messages_as_bytes(self, iggy_client: IggyClient, me
partition_id=partition_id,
polling_strategy=PollingStrategy.First(),
count=10,
- auto_commit=True
+ auto_commit=True,
)
# Verify we got our messages
@@ -258,16 +251,14 @@ async def test_send_and_poll_messages_as_bytes(self, iggy_client: IggyClient, me
@pytest.mark.asyncio
async def test_message_properties(self, iggy_client: IggyClient, message_setup):
"""Test access to message properties."""
- stream_name = message_setup['stream']
- topic_name = message_setup['topic']
- partition_id = message_setup['partition_id']
+ stream_name = message_setup["stream"]
+ topic_name = message_setup["topic"]
+ partition_id = message_setup["partition_id"]
# Setup
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
# Send a test message
@@ -277,7 +268,7 @@ async def test_message_properties(self, iggy_client: IggyClient, message_setup):
stream=stream_name,
topic=topic_name,
partitioning=partition_id,
- messages=[message]
+ messages=[message],
)
# Poll and verify properties
@@ -287,7 +278,7 @@ async def test_message_properties(self, iggy_client: IggyClient, message_setup):
partition_id=partition_id,
polling_strategy=PollingStrategy.Last(),
count=1,
- auto_commit=True
+ auto_commit=True,
)
assert len(polled_messages) >= 1
@@ -310,26 +301,24 @@ def polling_setup(self):
"""Setup for polling strategy tests."""
unique_id = uuid.uuid4().hex[:8]
return {
- 'stream': f"poll-stream-{unique_id}",
- 'topic': f"poll-topic-{unique_id}",
- 'partition_id': 1,
- 'messages': [f"Polling test {i} - {unique_id}" for i in range(5)]
+ "stream": f"poll-stream-{unique_id}",
+ "topic": f"poll-topic-{unique_id}",
+ "partition_id": 1,
+ "messages": [f"Polling test {i} - {unique_id}" for i in range(5)],
}
@pytest.mark.asyncio
async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
"""Test different polling strategies work correctly."""
- stream_name = polling_setup['stream']
- topic_name = polling_setup['topic']
- partition_id = polling_setup['partition_id']
- test_messages = polling_setup['messages']
+ stream_name = polling_setup["stream"]
+ topic_name = polling_setup["topic"]
+ partition_id = polling_setup["partition_id"]
+ test_messages = polling_setup["messages"]
# Setup
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
# Send test messages
@@ -338,7 +327,7 @@ async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
stream=stream_name,
topic=topic_name,
partitioning=partition_id,
- messages=messages
+ messages=messages,
)
# Test First strategy
@@ -348,7 +337,7 @@ async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
partition_id=partition_id,
polling_strategy=PollingStrategy.First(),
count=1,
- auto_commit=False
+ auto_commit=False,
)
assert len(first_messages) >= 1
@@ -359,7 +348,7 @@ async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
partition_id=partition_id,
polling_strategy=PollingStrategy.Last(),
count=1,
- auto_commit=False
+ auto_commit=False,
)
assert len(last_messages) >= 1
@@ -370,7 +359,7 @@ async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
partition_id=partition_id,
polling_strategy=PollingStrategy.Next(),
count=2,
- auto_commit=False
+ auto_commit=False,
)
assert len(next_messages) >= 1
@@ -380,9 +369,11 @@ async def test_polling_strategies(self, iggy_client: IggyClient, polling_setup):
stream=stream_name,
topic=topic_name,
partition_id=partition_id,
- polling_strategy=PollingStrategy.Offset(value=first_messages[0].offset()),
+ polling_strategy=PollingStrategy.Offset(
+ value=first_messages[0].offset()
+ ),
count=1,
- auto_commit=False
+ auto_commit=False,
)
assert len(offset_messages) >= 1
@@ -421,11 +412,10 @@ async def test_create_topic_in_nonexistent_stream(self, iggy_client: IggyClient)
with pytest.raises(RuntimeError):
await iggy_client.create_topic(
- stream=nonexistent_stream,
- name=topic_name,
- partitions_count=1
+ stream=nonexistent_stream, name=topic_name, partitions_count=1
)
+
class TestConsumerGroup:
"""Test consumer groups."""
@@ -434,27 +424,25 @@ def consumer_group_setup(self):
"""Setup for polling strategy tests."""
unique_id = uuid.uuid4().hex[:8]
return {
- 'consumer': f"consumer-group-consumer-{unique_id}",
- 'stream': f"consumer-group-stream-{unique_id}",
- 'topic': f"consumer-group-topic-{unique_id}",
- 'partition_id': 1,
- 'messages': [f"Consumer group test {i} - {unique_id}" for i in range(5)]
+ "consumer": f"consumer-group-consumer-{unique_id}",
+ "stream": f"consumer-group-stream-{unique_id}",
+ "topic": f"consumer-group-topic-{unique_id}",
+ "partition_id": 1,
+ "messages": [f"Consumer group test {i} - {unique_id}" for i in range(5)],
}
@pytest.mark.asyncio
async def test_meta(self, iggy_client: IggyClient, consumer_group_setup):
"""Test that meta information can be read about the consumer group."""
- consumer_name = consumer_group_setup['consumer']
- stream_name = consumer_group_setup['stream']
- topic_name = consumer_group_setup['topic']
- partition_id = consumer_group_setup['partition_id']
+ consumer_name = consumer_group_setup["consumer"]
+ stream_name = consumer_group_setup["stream"]
+ topic_name = consumer_group_setup["topic"]
+ partition_id = consumer_group_setup["partition_id"]
# Setup
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
consumer = iggy_client.consumer_group(
consumer_name,
@@ -475,22 +463,22 @@ async def test_meta(self, iggy_client: IggyClient, consumer_group_setup):
assert consumer.get_last_stored_offset(partition_id) is None
@pytest.mark.asyncio
- async def test_consume_messages(self, iggy_client: IggyClient, consumer_group_setup):
+ async def test_consume_messages(
+ self, iggy_client: IggyClient, consumer_group_setup
+ ):
"""Test that the consumer group can consume messages."""
- consumer_name = consumer_group_setup['consumer']
- stream_name = consumer_group_setup['stream']
- topic_name = consumer_group_setup['topic']
- partition_id = consumer_group_setup['partition_id']
- test_messages = consumer_group_setup['messages']
+ consumer_name = consumer_group_setup["consumer"]
+ stream_name = consumer_group_setup["stream"]
+ topic_name = consumer_group_setup["topic"]
+ partition_id = consumer_group_setup["partition_id"]
+ test_messages = consumer_group_setup["messages"]
# Setup
received_messages = []
shutdown_event = asyncio.Event()
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
consumer = iggy_client.consumer_group(
@@ -509,9 +497,13 @@ async def take(message: ReceiveMessage) -> None:
if len(received_messages) == 5:
shutdown_event.set()
-
async def send() -> None:
- await iggy_client.send_messages(stream_name, topic_name, partition_id, [Message(m) for m in test_messages])
+ await iggy_client.send_messages(
+ stream_name,
+ topic_name,
+ partition_id,
+ [Message(m) for m in test_messages],
+ )
await asyncio.gather(consumer.consume_messages(take, shutdown_event), send())
@@ -520,18 +512,16 @@ async def send() -> None:
@pytest.mark.asyncio
async def test_shutdown(self, iggy_client: IggyClient, consumer_group_setup):
"""Test that the consumer group can be signaled to shutdown."""
- consumer_name = consumer_group_setup['consumer']
- stream_name = consumer_group_setup['stream']
- topic_name = consumer_group_setup['topic']
- partition_id = consumer_group_setup['partition_id']
+ consumer_name = consumer_group_setup["consumer"]
+ stream_name = consumer_group_setup["stream"]
+ topic_name = consumer_group_setup["topic"]
+ partition_id = consumer_group_setup["partition_id"]
# Setup
shutdown_event = asyncio.Event()
await iggy_client.create_stream(stream_name)
await iggy_client.create_topic(
- stream=stream_name,
- name=topic_name,
- partitions_count=1
+ stream=stream_name, name=topic_name, partitions_count=1
)
consumer = iggy_client.consumer_group(
diff --git a/scripts/check-backwards-compat.sh b/scripts/check-backwards-compat.sh
new file mode 100755
index 000000000..7f74677d9
--- /dev/null
+++ b/scripts/check-backwards-compat.sh
@@ -0,0 +1,251 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.
+
+set -euo pipefail
+
+# -----------------------------
+# Config (overridable via args)
+# -----------------------------
+MASTER_REF="${MASTER_REF:-master}" # branch or commit for "baseline"
+PR_REF="${PR_REF:-HEAD}" # commit to test (assumes current checkout)
+HOST="${HOST:-127.0.0.1}"
+PORT="${PORT:-8090}"
+WAIT_SECS="${WAIT_SECS:-60}"
+BATCHES="${BATCHES:-50}"
+MSGS_PER_BATCH="${MSGS_PER_BATCH:-100}"
+KEEP_TMP="${KEEP_TMP:-false}"
+
+# -----------------------------
+# Helpers
+# -----------------------------
+info(){ printf "\n\033[1;36m➜ %s\033[0m\n" "$*"; }
+ok(){ printf "\033[0;32m✔ %s\033[0m\n" "$*"; }
+err(){ printf "\033[0;31m✖ %s\033[0m\n" "$*" >&2; }
+die(){ err "$*"; exit 1; }
+
+need() {
+ command -v "$1" >/dev/null 2>&1 || die "missing dependency: $1"
+}
+
+wait_for_port() {
+ local host="$1" port="$2" deadline=$((SECONDS + WAIT_SECS))
+ while (( SECONDS < deadline )); do
+ if command -v nc >/dev/null 2>&1; then
+ if nc -z "$host" "$port" 2>/dev/null; then return 0; fi
+ else
+ if (echo >"/dev/tcp/$host/$port") >/dev/null 2>&1; then return 0; fi
+ fi
+ sleep 1
+ done
+ return 1
+}
+
+stop_pid() {
+ local pid="$1" name="${2:-process}"
+ if kill -0 "$pid" 2>/dev/null; then
+ kill -TERM "$pid" || true
+ for _ in $(seq 1 15); do
+ kill -0 "$pid" 2>/dev/null || { ok "stopped $name (pid $pid)"; return 0; }
+ sleep 1
+ done
+ err "$name (pid $pid) still running; sending SIGKILL"
+ kill -KILL "$pid" || true
+ fi
+}
+
+print_logs_if_any() {
+ local dir="$1"
+ if compgen -G "$dir/local_data/logs/iggy*" > /dev/null; then
+ echo "---- $dir/local_data/logs ----"
+ cat "$dir"/local_data/logs/iggy* || true
+ echo "------------------------------"
+ else
+ echo "(no iggy logs found in $dir/local_data/logs)"
+ fi
+}
+
+# -----------------------------
+# Args
+# -----------------------------
+usage() {
+  cat <<EOF
+Usage: [MASTER_REF=<ref>] [PR_REF=<ref>] [HOST=<host>] [PORT=<port>] $0
+EOF
+}
+command -v nc >/dev/null 2>&1 || true # optional, we'll use it if present
+
+REPO_ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
+cd "$REPO_ROOT"
+
+# Free the port proactively (best-effort)
+pkill -f iggy-server >/dev/null 2>&1 || true
+
+TMP_ROOT="$(mktemp -d -t iggy-backcompat-XXXXXX)"
+MASTER_DIR="$TMP_ROOT/master"
+PR_DIR="$REPO_ROOT" # assume script is run from PR checkout
+MASTER_LOG="$TMP_ROOT/server-master.stdout.log"
+PR_LOG="$TMP_ROOT/server-pr.stdout.log"
+
+cleanup() {
+ # Stop any leftover iggy-server
+ pkill -f iggy-server >/dev/null 2>&1 || true
+ git worktree remove --force "$MASTER_DIR" >/dev/null 2>&1 || true
+ if [[ "$KEEP_TMP" != "true" ]]; then
+ rm -rf "$TMP_ROOT" || true
+ else
+ info "keeping temp dir: $TMP_ROOT"
+ fi
+}
+trap cleanup EXIT
+
+# -----------------------------
+# 1) Prepare master worktree
+# -----------------------------
+info "Preparing baseline worktree at '$MASTER_REF'"
+git fetch --all --tags --prune >/dev/null 2>&1 || true
+git worktree add --force "$MASTER_DIR" "$MASTER_REF"
+ok "worktree at $MASTER_DIR"
+
+# -----------------------------
+# 2) Build & run master server
+# -----------------------------
+pushd "$MASTER_DIR" >/dev/null
+
+info "Building iggy-server & benches (baseline: $MASTER_REF)"
+IGGY_CI_BUILD=true cargo build --bins
+ok "built baseline"
+
+info "Starting iggy-server (baseline)"
+set +e
+( nohup target/debug/iggy-server >"$MASTER_LOG" 2>&1 & echo $! > "$TMP_ROOT/master.pid" )
+set -e
+MASTER_PID="$(cat "$TMP_ROOT/master.pid")"
+ok "iggy-server started (pid $MASTER_PID), logs: $MASTER_LOG"
+
+info "Waiting for $HOST:$PORT to be ready (up to ${WAIT_SECS}s)"
+if ! wait_for_port "$HOST" "$PORT"; then
+ err "server did not become ready in ${WAIT_SECS}s"
+ print_logs_if_any "$MASTER_DIR"
+ [[ -f "$MASTER_LOG" ]] && tail -n 200 "$MASTER_LOG" || true
+ exit 1
+fi
+ok "server is ready"
+
+# Producer bench (baseline)
+info "Running producer bench on baseline"
+BENCH_CMD=( target/debug/iggy-bench --verbose --message-batches "$BATCHES" --messages-per-batch "$MSGS_PER_BATCH" pinned-producer tcp )
+if command -v timeout >/dev/null 2>&1; then timeout 60s "${BENCH_CMD[@]}"; else "${BENCH_CMD[@]}"; fi
+ok "producer bench done"
+
+# Consumer bench (baseline)
+info "Running consumer bench on baseline"
+BENCH_CMD=( target/debug/iggy-bench --verbose --message-batches "$BATCHES" --messages-per-batch "$MSGS_PER_BATCH" pinned-consumer tcp )
+if command -v timeout >/dev/null 2>&1; then timeout 60s "${BENCH_CMD[@]}"; else "${BENCH_CMD[@]}"; fi
+ok "consumer bench done (baseline)"
+
+# Stop baseline server
+info "Stopping baseline server"
+stop_pid "$MASTER_PID" "iggy-server(baseline)"
+print_logs_if_any "$MASTER_DIR"
+
+# Clean baseline logs (like CI step)
+if compgen -G "local_data/logs/iggy*" > /dev/null; then
+ rm -f local_data/logs/iggy* || true
+fi
+
+# Snapshot local_data/
+info "Snapshotting baseline local_data/"
+cp -a local_data "$TMP_ROOT/local_data"
+ok "snapshot stored at $TMP_ROOT/local_data"
+
+popd >/dev/null
+
+# -----------------------------
+# 3) Build PR & restore data
+# -----------------------------
+pushd "$PR_DIR" >/dev/null
+info "Ensuring PR ref is present: $PR_REF"
+git rev-parse --verify "$PR_REF^{commit}" >/dev/null 2>&1 || die "PR_REF '$PR_REF' not found"
+git checkout -q "$PR_REF"
+
+info "Building iggy-server & benches (PR: $PR_REF)"
+IGGY_CI_BUILD=true cargo build --bins
+ok "built PR"
+
+info "Restoring baseline local_data/ into PR workspace"
+rm -rf local_data
+cp -a "$TMP_ROOT/local_data" ./local_data
+ok "restored local_data/"
+
+# -----------------------------
+# 4) Run PR server & consumer bench
+# -----------------------------
+info "Starting iggy-server (PR)"
+set +e
+( nohup target/debug/iggy-server >"$PR_LOG" 2>&1 & echo $! > "$TMP_ROOT/pr.pid" )
+set -e
+PR_PID="$(cat "$TMP_ROOT/pr.pid")"
+ok "iggy-server (PR) started (pid $PR_PID), logs: $PR_LOG"
+
+info "Waiting for $HOST:$PORT to be ready (up to ${WAIT_SECS}s)"
+if ! wait_for_port "$HOST" "$PORT"; then
+ err "PR server did not become ready in ${WAIT_SECS}s"
+ print_logs_if_any "$PR_DIR"
+ [[ -f "$PR_LOG" ]] && tail -n 200 "$PR_LOG" || true
+ exit 1
+fi
+ok "PR server is ready"
+
+# Only consumer bench against PR
+info "Running consumer bench on PR (compat check)"
+BENCH_CMD=( target/debug/iggy-bench --verbose --message-batches "$BATCHES" --messages-per-batch "$MSGS_PER_BATCH" pinned-consumer tcp )
+if command -v timeout >/dev/null 2>&1; then timeout 60s "${BENCH_CMD[@]}"; else "${BENCH_CMD[@]}"; fi
+ok "consumer bench done (PR)"
+
+# Stop PR server
+info "Stopping PR server"
+stop_pid "$PR_PID" "iggy-server(PR)"
+print_logs_if_any "$PR_DIR"
+
+ok "backwards-compatibility check PASSED"
+popd >/dev/null
diff --git a/scripts/extract-version.sh b/scripts/extract-version.sh
new file mode 100755
index 000000000..46d5d2da7
--- /dev/null
+++ b/scripts/extract-version.sh
@@ -0,0 +1,233 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Extract version information for Iggy components
+#
+# This script reads version information from various file formats based on
+# the configuration in .github/config/publish.yml. It supports extracting
+# versions from Cargo.toml, package.json, pyproject.toml, and other formats.
+#
+# Usage:
+# ./extract-version.sh <component> [--tag] [--go-sdk-version <version>]
+#
+# Examples:
+# # Get version for Rust SDK
+# ./extract-version.sh rust-sdk # Output: 0.7.0
+#
+# # Get git tag for Rust SDK
+# ./extract-version.sh rust-sdk --tag # Output: iggy-0.7.0
+#
+# # Get version for Python SDK
+# ./extract-version.sh sdk-python # Output: 0.5.0
+#
+# # Get tag for Node SDK
+# ./extract-version.sh sdk-node --tag # Output: node-sdk-0.5.0
+#
+# # Get version for Go SDK (requires explicit version)
+# ./extract-version.sh sdk-go --go-sdk-version 1.2.3 # Output: 1.2.3
+#
+# The script uses the configuration from .github/config/publish.yml to determine:
+# - Where to find the version file (version_file)
+# - What regex pattern to use for extraction (version_regex)
+# - How to format the git tag (tag_pattern)
+# - Package name for Rust crates (package)
+
+set -euo pipefail
+
+# Check for required tools
+if ! command -v yq &> /dev/null; then
+ echo "Error: yq is required but not installed" >&2
+ exit 1
+fi
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
+CONFIG_FILE="$REPO_ROOT/.github/config/publish.yml"
+
+# Parse arguments
+COMPONENT="${1:-}"
+RETURN_TAG=false
+GO_SDK_VERSION=""
+
+shift || true
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --tag)
+ RETURN_TAG=true
+ shift
+ ;;
+ --go-sdk-version)
+ GO_SDK_VERSION="${2:-}"
+ shift 2 || shift
+ ;;
+ *)
+ echo "Unknown option: $1" >&2
+ exit 1
+ ;;
+ esac
+done
+
+if [[ -z "$COMPONENT" ]]; then
+ echo "Usage: $0 <component> [--tag] [--go-sdk-version <version>]" >&2
+ echo "" >&2
+ echo "Available components:" >&2
+ yq eval '.components | keys | .[]' "$CONFIG_FILE" | sed 's/^/ - /' >&2
+ exit 1
+fi
+
+# Check if component exists
+if ! yq eval ".components.\"$COMPONENT\"" "$CONFIG_FILE" | grep -q .; then
+ echo "Error: Unknown component '$COMPONENT'" >&2
+ echo "" >&2
+ echo "Available components:" >&2
+ yq eval '.components | keys | .[]' "$CONFIG_FILE" | sed 's/^/ - /' >&2
+ exit 1
+fi
+
+# Extract component configuration
+get_config() {
+ local key="$1"
+ yq eval ".components.\"$COMPONENT\".$key // \"\"" "$CONFIG_FILE"
+}
+
+# Generic regex-based extraction
+extract_version_with_regex() {
+ local file="$1"
+ local regex="$2"
+
+ if [[ ! -f "$REPO_ROOT/$file" ]]; then
+ echo "Error: File not found: $file" >&2
+ return 1
+ fi
+
+ # Special handling for XML files (C# .csproj)
+ if [[ "$file" == *.csproj ]] || [[ "$file" == *.xml ]]; then
+ # Extract version from XML tags like <PackageVersion>1.2.3</PackageVersion> or <Version>1.2.3</Version>
+ grep -E '<(PackageVersion|Version)>' "$REPO_ROOT/$file" | head -1 | sed -E 's/.*<[^>]+>([^<]+)<.*/\1/' | tr -d ' '
+ elif command -v perl &> /dev/null; then
+ # Use perl for more powerful regex support (supports multiline and lookarounds)
+ # Use m{} instead of // to avoid issues with slashes in regex
+ perl -0777 -ne "if (m{$regex}) { print \$1; exit; }" "$REPO_ROOT/$file"
+ else
+ # Fallback to grep -P if available
+ if echo | grep -qP "" 2>/dev/null; then
+ grep -Pzo "$regex" "$REPO_ROOT/$file" | grep -Pao '[0-9]+\.[0-9]+\.[0-9]+[^"]*' | head -1
+ else
+ # Basic fallback - may not work for all patterns
+ grep -E "$regex" "$REPO_ROOT/$file" | head -1 | sed -E "s/.*$regex.*/\1/"
+ fi
+ fi
+}
+
+# Extract version using cargo metadata (for Rust packages)
+extract_cargo_version() {
+ local package="$1"
+ local cargo_file="$2"
+
+ cd "$REPO_ROOT"
+
+ # Try cargo metadata first (most reliable)
+ if command -v cargo &> /dev/null && command -v jq &> /dev/null; then
+ version=$(cargo metadata --no-deps --format-version=1 2>/dev/null | \
+ jq -r --arg pkg "$package" '.packages[] | select(.name == $pkg) | .version' | \
+ head -1)
+
+ if [[ -n "$version" ]]; then
+ echo "$version"
+ return 0
+ fi
+ fi
+
+ # Fallback to direct Cargo.toml parsing using the regex from config
+ local version_regex
+ version_regex=$(get_config "version_regex")
+ if [[ -n "$version_regex" && -f "$REPO_ROOT/$cargo_file" ]]; then
+ extract_version_with_regex "$cargo_file" "$version_regex"
+ fi
+}
+
+# Main version extraction logic
+VERSION=""
+VERSION_FILE=$(get_config "version_file")
+VERSION_REGEX=$(get_config "version_regex")
+PACKAGE=$(get_config "package")
+
+# Special handling for Go SDK (version must be provided)
+if [[ "$COMPONENT" == "sdk-go" ]]; then
+ VERSION="$GO_SDK_VERSION"
+ if [[ -z "$VERSION" ]]; then
+ echo "Error: Go version must be provided with --go-sdk-version flag" >&2
+ exit 1
+ fi
+# For Rust components with cargo metadata support
+elif [[ "$COMPONENT" == rust-* ]] && [[ -n "$PACKAGE" ]]; then
+ # Use package name from config if available
+ VERSION=$(extract_cargo_version "$PACKAGE" "$VERSION_FILE")
+
+ # Fallback to regex-based extraction if cargo metadata failed
+ if [[ -z "$VERSION" ]] && [[ -n "$VERSION_FILE" ]] && [[ -n "$VERSION_REGEX" ]]; then
+ VERSION=$(extract_version_with_regex "$VERSION_FILE" "$VERSION_REGEX")
+ fi
+# Generic extraction using version_file and version_regex
+elif [[ -n "$VERSION_FILE" ]] && [[ -n "$VERSION_REGEX" ]]; then
+ VERSION=$(extract_version_with_regex "$VERSION_FILE" "$VERSION_REGEX")
+else
+ echo "Error: No version extraction method available for component '$COMPONENT'" >&2
+ exit 1
+fi
+
+# Validate version was found
+if [[ -z "$VERSION" ]]; then
+ echo "Error: Could not extract version for component '$COMPONENT'" >&2
+ if [[ -n "$VERSION_FILE" ]]; then
+ echo " Checked file: $VERSION_FILE" >&2
+ fi
+ if [[ -n "$VERSION_REGEX" ]]; then
+ echo " Using regex: $VERSION_REGEX" >&2
+ fi
+ exit 1
+fi
+
+# Return tag or version based on flag
+if [[ "$RETURN_TAG" == "true" ]]; then
+ TAG_PATTERN=$(get_config "tag_pattern")
+ if [[ -z "$TAG_PATTERN" ]]; then
+ echo "Error: No tag pattern defined for component '$COMPONENT'" >&2
+ exit 1
+ fi
+
+ # Replace the capture group in the pattern with the actual version
+ # The pattern has a capture group like "^iggy-([0-9]+\\.[0-9]+\\.[0-9]+...)$"
+ # We need to replace the (...) part with the actual version
+
+ # Extract the prefix (everything before the first capture group)
+ PREFIX=$(echo "$TAG_PATTERN" | sed -E 's/^(\^?)([^(]*)\(.*/\2/')
+
+ # Extract the suffix (everything after the capture group)
+ SUFFIX=$(echo "$TAG_PATTERN" | sed -E 's/.*\)[^)]*(\$?)$/\1/')
+
+ # Build the tag
+ TAG="${PREFIX}${VERSION}${SUFFIX}"
+
+ # Remove regex anchors if present
+ TAG=$(echo "$TAG" | sed 's/^\^//; s/\$$//')
+
+ echo "$TAG"
+else
+ echo "$VERSION"
+fi
\ No newline at end of file
diff --git a/scripts/run-bdd-tests.sh b/scripts/run-bdd-tests.sh
index fe2097262..45d334c9a 100755
--- a/scripts/run-bdd-tests.sh
+++ b/scripts/run-bdd-tests.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -16,75 +16,56 @@
# specific language governing permissions and limitations
# under the License.
-set -e
+set -Eeuo pipefail
SDK=${1:-"all"}
FEATURE=${2:-"scenarios/basic_messaging.feature"}
-echo "π§ͺ Running BDD tests for SDK: $SDK"
-echo "π Feature file: $FEATURE"
+export DOCKER_BUILDKIT=1 FEATURE
-# Change to BDD directory
cd "$(dirname "$0")/../bdd"
-case $SDK in
-"rust")
- echo "π¦ Running Rust BDD tests..."
- docker compose build --no-cache iggy-server rust-bdd
- docker compose up --abort-on-container-exit rust-bdd
- ;;
-"python")
- echo "π Running Python BDD tests..."
- docker compose build --no-cache iggy-server python-bdd
- docker compose up --abort-on-container-exit python-bdd
- ;;
-"go")
- echo "πΉ Running Go BDD tests..."
- docker compose build --no-cache iggy-server go-bdd
- docker compose up --abort-on-container-exit go-bdd
- ;;
-"node")
- echo "π’π Running node BDD tests..."
- docker compose build --no-cache iggy-server node-bdd
- docker compose up --abort-on-container-exit node-bdd
- ;;
-"csharp")
- echo "π· Running csharp BDD tests..."
- docker compose build --no-cache iggy-server csharp-bdd
- docker compose up --abort-on-container-exit csharp-bdd
- ;;
-"all")
- echo "π Running all SDK BDD tests..."
- echo "π¦ Starting with Rust tests..."
- #docker compose build --no-cache iggy-server rust-bdd python-bdd go-bdd node-bdd csharp-bdd
- docker compose up --abort-on-container-exit rust-bdd
- echo "π Now running Python tests..."
- docker compose up --abort-on-container-exit python-bdd
- echo "πΉ Now running Go tests..."
- docker compose up --abort-on-container-exit go-bdd
- echo "π’π Now running node BDD tests..."
- docker compose up --abort-on-container-exit node-bdd
- echo "π· Now running csharp BDD tests..."
- docker compose up --abort-on-container-exit csharp-bdd
- ;;
-"clean")
- echo "π§Ή Cleaning up Docker resources..."
- docker compose down -v
- docker compose rm -f
- ;;
-*)
- echo "β Unknown SDK: $SDK"
- echo "π Usage: $0 [rust|python|go|all|clean] [feature_file]"
- echo "π Examples:"
- echo " $0 rust # Run Rust tests only"
- echo " $0 python # Run Python tests only"
- echo " $0 go # Run Go tests only"
- echo " $0 node # Run Node.js tests only"
- echo " $0 csharp # Run csharp tests only"
- echo " $0 all # Run all SDK tests"
- echo " $0 clean # Clean up Docker resources"
- exit 1
- ;;
+log(){ printf "%b\n" "$*"; }
+
+cleanup(){
+ log "π§Ή cleaning up containers & volumesβ¦"
+ docker compose down -v --remove-orphans >/dev/null 2>&1 || true
+}
+trap cleanup EXIT INT TERM
+
+log "π§ͺ Running BDD tests for SDK: ${SDK}"
+log "π Feature file: ${FEATURE}"
+
+run_suite(){
+ local svc="$1" emoji="$2" label="$3"
+ log "${emoji} ${label}β¦"
+ set +e
+ docker compose up --build --abort-on-container-exit --exit-code-from "$svc" "$svc"
+ local code=$?
+ set -e
+ docker compose down -v --remove-orphans >/dev/null 2>&1 || true
+ return "$code"
+}
+
+case "$SDK" in
+ rust) run_suite rust-bdd "π¦" "Running Rust BDD tests" ;;
+ python) run_suite python-bdd "π" "Running Python BDD tests" ;;
+ go) run_suite go-bdd "πΉ" "Running Go BDD tests" ;;
+ node) run_suite node-bdd "π’π" "Running Node BDD tests" ;;
+ csharp) run_suite csharp-bdd "π·" "Running C# BDD tests" ;;
+ all)
+ run_suite rust-bdd "π¦" "Running Rust BDD tests" || exit $?
+ run_suite python-bdd "π" "Running Python BDD tests" || exit $?
+ run_suite go-bdd "πΉ" "Running Go BDD tests" || exit $?
+ run_suite node-bdd "π’π" "Running Node BDD tests" || exit $?
+ run_suite csharp-bdd "π·" "Running C# BDD tests" || exit $?
+ ;;
+ clean)
+ cleanup; exit 0 ;;
+ *)
+ log "β Unknown SDK: ${SDK}"
+ log "π Usage: $0 [rust|python|go|node|csharp|all|clean] [feature_file]"
+ exit 2 ;;
esac
-echo "✅ BDD tests completed for: $SDK"
+log "✅ BDD tests completed for: ${SDK}"
diff --git a/scripts/run-go-examples-from-readme.sh b/scripts/run-go-examples-from-readme.sh
index 1bbdd478e..aca887be0 100755
--- a/scripts/run-go-examples-from-readme.sh
+++ b/scripts/run-go-examples-from-readme.sh
@@ -86,20 +86,30 @@ test -d local_data && rm -fr local_data
test -e ${LOG_FILE} && rm ${LOG_FILE}
test -e ${PID_FILE} && rm ${PID_FILE}
-# Build binaries
-echo "Building binaries..."
+# Check if server binary exists
+SERVER_BIN=""
if [ -n "${TARGET}" ]; then
- cargo build --target "${TARGET}"
+ SERVER_BIN="target/${TARGET}/debug/iggy-server"
else
- cargo build
+ SERVER_BIN="target/debug/iggy-server"
fi
-# Run iggy server and let it run in the background
-if [ -n "${TARGET}" ]; then
- cargo run --target "${TARGET}" --bin iggy-server &>${LOG_FILE} &
-else
- cargo run --bin iggy-server &>${LOG_FILE} &
+if [ ! -f "${SERVER_BIN}" ]; then
+ echo "Error: Server binary not found at ${SERVER_BIN}"
+ echo "Please build the server binary before running this script:"
+ if [ -n "${TARGET}" ]; then
+ echo " cargo build --target ${TARGET} --bin iggy-server"
+ else
+ echo " cargo build --bin iggy-server"
+ fi
+ exit 1
fi
+
+echo "Using server binary at ${SERVER_BIN}"
+
+# Run iggy server using the prebuilt binary
+echo "Starting server from ${SERVER_BIN}..."
+${SERVER_BIN} &>${LOG_FILE} &
echo $! >${PID_FILE}
# Wait until "Iggy server has started" string is present inside iggy-server.log
diff --git a/scripts/run-rust-examples-from-readme.sh b/scripts/run-rust-examples-from-readme.sh
index ddd4bcec7..ed2e5617d 100755
--- a/scripts/run-rust-examples-from-readme.sh
+++ b/scripts/run-rust-examples-from-readme.sh
@@ -55,20 +55,51 @@ test -d local_data && rm -fr local_data
test -e ${LOG_FILE} && rm ${LOG_FILE}
test -e ${PID_FILE} && rm ${PID_FILE}
-# Build binaries
-echo "Building binaries..."
+# Check if server binary exists
+SERVER_BIN=""
if [ -n "${TARGET}" ]; then
- cargo build --target "${TARGET}"
+ SERVER_BIN="target/${TARGET}/debug/iggy-server"
else
- cargo build
+ SERVER_BIN="target/debug/iggy-server"
fi
-# Run iggy server and let it run in the background
+if [ ! -f "${SERVER_BIN}" ]; then
+ echo "Error: Server binary not found at ${SERVER_BIN}"
+ echo "Please build the server binary before running this script:"
+ if [ -n "${TARGET}" ]; then
+ echo " cargo build --target ${TARGET} --bin iggy-server"
+ else
+ echo " cargo build --bin iggy-server"
+ fi
+ exit 1
+fi
+
+echo "Using server binary at ${SERVER_BIN}"
+
+# Check that CLI and examples are built
+CLI_BIN=""
if [ -n "${TARGET}" ]; then
- cargo run --target "${TARGET}" --bin iggy-server &>${LOG_FILE} &
+ CLI_BIN="target/${TARGET}/debug/iggy"
else
- cargo run --bin iggy-server &>${LOG_FILE} &
+ CLI_BIN="target/debug/iggy"
fi
+
+if [ ! -f "${CLI_BIN}" ]; then
+ echo "Error: CLI binary not found at ${CLI_BIN}"
+ echo "Please build the CLI and examples before running this script:"
+ if [ -n "${TARGET}" ]; then
+ echo " cargo build --target ${TARGET} --bin iggy --examples"
+ else
+ echo " cargo build --bin iggy --examples"
+ fi
+ exit 1
+fi
+
+echo "Using CLI binary at ${CLI_BIN}"
+
+# Run iggy server using the prebuilt binary
+echo "Starting server from ${SERVER_BIN}..."
+${SERVER_BIN} &>${LOG_FILE} &
echo $! >${PID_FILE}
# Wait until "Iggy server has started" string is present inside iggy-server.log
diff --git a/scripts/sync-rust-version.sh b/scripts/sync-rust-version.sh
index 09ebe2e67..ccaa8012f 100755
--- a/scripts/sync-rust-version.sh
+++ b/scripts/sync-rust-version.sh
@@ -16,27 +16,131 @@
# specific language governing permissions and limitations
# under the License.
+set -euo pipefail
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+# Default mode
+MODE=""
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ --check)
+ MODE="check"
+ shift
+ ;;
+ --fix)
+ MODE="fix"
+ shift
+ ;;
+ --help|-h)
+ echo "Usage: $0 [--check|--fix]"
+ echo ""
+ echo "Sync Rust version from rust-toolchain.toml to all Dockerfiles"
+ echo ""
+ echo "Options:"
+ echo " --check Check if all Dockerfiles have the correct Rust version"
+ echo " --fix Update all Dockerfiles to use the correct Rust version"
+ echo " --help Show this help message"
+ exit 0
+ ;;
+ *)
+ echo -e "${RED}Error: Unknown option $1${NC}"
+ echo "Use --help for usage information"
+ exit 1
+ ;;
+ esac
+done
+
+# Require mode to be specified
+if [ -z "$MODE" ]; then
+ echo -e "${RED}Error: Please specify either --check or --fix${NC}"
+ echo "Use --help for usage information"
+ exit 1
+fi
+
+# Get the repository root (parent of scripts directory)
+REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+cd "$REPO_ROOT"
# Extract Rust version from rust-toolchain.toml
RUST_VERSION=$(grep 'channel' rust-toolchain.toml | sed 's/.*"\(.*\)".*/\1/')
if [ -z "$RUST_VERSION" ]; then
- echo "Error: Could not extract Rust version from rust-toolchain.toml"
+ echo -e "${RED}Error: Could not extract Rust version from rust-toolchain.toml${NC}"
exit 1
fi
# Strip trailing ".0" -> e.g., 1.89.0 -> 1.89 (no change if it doesn't end in .0)
-RUST_TAG=$(echo "$RUST_VERSION" | sed -E 's/^([0-9]+)\.([0-9]+)\.0$/\1.\2/')
+RUST_VERSION_SHORT=$(echo "$RUST_VERSION" | sed -E 's/^([0-9]+)\.([0-9]+)\.0$/\1.\2/')
+
+echo "Rust version from rust-toolchain.toml: ${GREEN}$RUST_VERSION${NC} (using ${GREEN}$RUST_VERSION_SHORT${NC} for Dockerfiles)"
+echo ""
+
+# Find all Dockerfiles
+DOCKERFILES=$(find . -name "Dockerfile*" -type f | grep -v node_modules | grep -v target | sort)
+
+# Track misaligned files
+MISALIGNED_FILES=()
+TOTAL_FILES=0
+FIXED_FILES=0
+
+for dockerfile in $DOCKERFILES; do
+ # Skip files without ARG RUST_VERSION
+ if ! grep -q "^ARG RUST_VERSION=" "$dockerfile" 2>/dev/null; then
+ continue
+ fi
+
+ TOTAL_FILES=$((TOTAL_FILES + 1))
-echo "Syncing Rust version $RUST_VERSION (using tag: $RUST_TAG) to Dockerfiles..."
+ # Get current version in the Dockerfile
+ CURRENT_VERSION=$(grep "^ARG RUST_VERSION=" "$dockerfile" | head -1 | sed 's/^ARG RUST_VERSION=//')
-# Update regular rust image (no suffix)
-# Matches things like: FROM rust:1.88, FROM rust:1.88.1, etc.
-sed -Ei "s|(FROM[[:space:]]+rust:)[0-9]+(\.[0-9]+){1,2}|\1$RUST_TAG|g" bdd/rust/Dockerfile
+ if [ "$MODE" = "check" ]; then
+ if [ "$CURRENT_VERSION" != "$RUST_VERSION_SHORT" ]; then
+ MISALIGNED_FILES+=("$dockerfile")
+ echo -e "${RED}β${NC} $dockerfile: ${RED}$CURRENT_VERSION${NC} (expected: ${GREEN}$RUST_VERSION_SHORT${NC})"
+ else
+ echo -e "${GREEN}β${NC} $dockerfile: $CURRENT_VERSION"
+ fi
+ elif [ "$MODE" = "fix" ]; then
+ if [ "$CURRENT_VERSION" != "$RUST_VERSION_SHORT" ]; then
+ # Update the ARG RUST_VERSION line
+ sed -i "s/^ARG RUST_VERSION=.*/ARG RUST_VERSION=$RUST_VERSION_SHORT/" "$dockerfile"
+ FIXED_FILES=$((FIXED_FILES + 1))
+ echo -e "${GREEN}Fixed${NC} $dockerfile: ${RED}$CURRENT_VERSION${NC} -> ${GREEN}$RUST_VERSION_SHORT${NC}"
+ else
+ echo -e "${GREEN}β${NC} $dockerfile: already correct ($RUST_VERSION_SHORT)"
+ fi
+ fi
+done
-# Update slim-bookworm image
-sed -Ei "s|(FROM[[:space:]]+rust:)[0-9]+(\.[0-9]+){1,2}-slim-bookworm|\1$RUST_TAG-slim-bookworm|g" core/bench/dashboard/server/Dockerfile
+echo ""
+echo "ββββββββββββββββββββββββββββββββββββββββββββββββ"
-echo "Updated Dockerfiles to use:"
-echo " - Regular image: rust:$RUST_TAG"
-echo " - Slim bookworm: rust:$RUST_TAG-slim-bookworm"
+if [ "$MODE" = "check" ]; then
+ if [ ${#MISALIGNED_FILES[@]} -eq 0 ]; then
+ echo -e "${GREEN}β All $TOTAL_FILES Dockerfiles are aligned with Rust version $RUST_VERSION_SHORT${NC}"
+ exit 0
+ else
+ echo -e "${RED}β Found ${#MISALIGNED_FILES[@]} misaligned Dockerfile(s) out of $TOTAL_FILES:${NC}"
+ for file in "${MISALIGNED_FILES[@]}"; do
+ echo -e " ${RED}β’ $file${NC}"
+ done
+ echo ""
+ echo -e "${YELLOW}Run '$0 --fix' to fix these files${NC}"
+ exit 1
+ fi
+elif [ "$MODE" = "fix" ]; then
+ if [ $FIXED_FILES -eq 0 ]; then
+ echo -e "${GREEN}β All $TOTAL_FILES Dockerfiles were already aligned with Rust version $RUST_VERSION_SHORT${NC}"
+ else
+ echo -e "${GREEN}β Fixed $FIXED_FILES out of $TOTAL_FILES Dockerfiles to use Rust version $RUST_VERSION_SHORT${NC}"
+ fi
+ exit 0
+fi
\ No newline at end of file
diff --git a/web/Dockerfile b/web/Dockerfile
index 846f397fc..e234b44fe 100644
--- a/web/Dockerfile
+++ b/web/Dockerfile
@@ -16,6 +16,8 @@
# under the License.
FROM node:lts-alpine AS base
+ARG IGGY_CI_BUILD
+ENV IGGY_CI_BUILD=${IGGY_CI_BUILD}
ENV NPM_CONFIG_LOGLEVEL=warn
ENV NPM_CONFIG_COLOR=false
WORKDIR /home/node/app
@@ -30,4 +32,4 @@ WORKDIR /home/node/app
COPY --chown=node:node --from=development /home/node/app/node_modules /home/node/app/node_modules
RUN npm run build
EXPOSE 3050
-CMD ["npm", "run", "preview"]
\ No newline at end of file
+CMD ["npm", "run", "preview"]