From 8c22780091cd12621f67c9d7c944c5b0e5a7c0e2 Mon Sep 17 00:00:00 2001
From: chrislu
Date: Sun, 23 Nov 2025 14:04:52 -0800
Subject: [PATCH] fix: restart SeaweedFS services before downloading files on
 test failure
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Problem: --abort-on-container-exit stops ALL containers when tests fail,
so SeaweedFS services are down when the file download step runs.

Solution:
1. Use continue-on-error: true to capture the test failure
2. Store the exit code in GITHUB_OUTPUT for later checking
3. Add a new step to restart SeaweedFS services if tests failed
4. Download step runs after services are back up
5. Final step checks the test exit code and fails the workflow

This ensures:
✅ Services keep running for file analysis
✅ Parquet files are accessible via the filer API
✅ Workflow still fails if tests failed
✅ All diagnostics can complete

Now we'll actually be able to download and examine the Parquet files!
---
 .github/workflows/spark-integration-tests.yml | 32 +++++++++++++++++--
 1 file changed, 30 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/spark-integration-tests.yml b/.github/workflows/spark-integration-tests.yml
index 509de0a10..f78ab6c8b 100644
--- a/.github/workflows/spark-integration-tests.yml
+++ b/.github/workflows/spark-integration-tests.yml
@@ -120,13 +120,34 @@ jobs:
 
     - name: Run Spark integration tests
       working-directory: test/java/spark
+      continue-on-error: true
+      id: test-run
       run: |
         echo "=== Running Spark Integration Tests ==="
         docker compose up --abort-on-container-exit --exit-code-from spark-tests spark-tests
-        echo "✓ Tests completed"
+        TEST_EXIT_CODE=$?
+        echo "exit_code=$TEST_EXIT_CODE" >> $GITHUB_OUTPUT
+        echo "Tests completed with exit code: $TEST_EXIT_CODE"
+        exit $TEST_EXIT_CODE
+
+    - name: Restart SeaweedFS services for file download
+      if: steps.test-run.outcome == 'failure'
+      working-directory: test/java/spark
+      run: |
+        echo "=== Restarting SeaweedFS services to access files ==="
+        docker compose up -d seaweedfs-master seaweedfs-volume seaweedfs-filer
+
+        echo "Waiting for filer to be ready..."
+        for i in {1..10}; do
+          if curl -f http://localhost:8888/ > /dev/null 2>&1; then
+            echo "✓ Filer is ready"
+            break
+          fi
+          sleep 2
+        done
 
     - name: Download and examine Parquet files
-      if: failure()
+      if: steps.test-run.outcome == 'failure'
       working-directory: test/java/spark
       run: |
         echo "=== Downloading Parquet files for analysis ==="
@@ -221,6 +242,13 @@ jobs:
         reporter: java-junit
         fail-on-error: true
 
+    - name: Check test results
+      if: steps.test-run.outcome == 'failure'
+      run: |
+        echo "❌ Tests failed with exit code: ${{ steps.test-run.outputs.exit_code }}"
+        echo "But file analysis was completed above."
+        exit 1
+
     # ========================================
     # SPARK EXAMPLE (HOST-BASED)
     # ========================================