name: "S3 Spark Integration Tests" on: pull_request: paths: - 'weed/s3api/**' - 'test/s3/spark/**' - 'test/s3tables/testutil/**' - '.github/workflows/s3-spark-tests.yml' workflow_dispatch: concurrency: group: ${{ github.head_ref }}/s3-spark-tests cancel-in-progress: true permissions: contents: read jobs: s3-spark-issue-repro-tests: name: S3 Spark Issue Reproduction Tests runs-on: ubuntu-22.04 timeout-minutes: 45 steps: - name: Check out code uses: actions/checkout@v6 - name: Set up Go uses: actions/setup-go@v6 with: go-version-file: 'go.mod' id: go - name: Set up Docker uses: docker/setup-buildx-action@v3 - name: Install SeaweedFS run: | go install -buildvcs=false ./weed - name: Pre-pull Spark image run: docker pull apache/spark:3.5.8 - name: Run S3 Spark integration tests working-directory: test/s3/spark timeout-minutes: 35 run: | set -x set -o pipefail echo "=== System Information ===" uname -a free -h df -h echo "=== Starting S3 Spark Integration Tests ===" go test -v -timeout 30m . 2>&1 | tee test-output.log || { echo "S3 Spark integration tests failed" exit 1 } - name: Show test output on failure if: failure() working-directory: test/s3/spark run: | echo "=== Test Output ===" if [ -f test-output.log ]; then tail -200 test-output.log fi echo "=== Process information ===" ps aux | grep -E "(weed|test|docker|spark)" || true - name: Upload test logs on failure if: failure() uses: actions/upload-artifact@v6 with: name: s3-spark-test-logs path: test/s3/spark/test-output.log retention-days: 3