---
# OpenAirframes daily release workflow.
#
# The scheduled run executes on the default branch and only fans out a
# workflow_dispatch of this same file on `main` and `develop`; the dispatched
# runs do the actual build-and-release work (the `trigger-releases` job is
# gated to schedule events, every other job is gated to non-schedule events).
name: OpenAirframes Daily Release

on:
  schedule:
    # 6:00am UTC every day - runs on default branch, triggers both
    - cron: "0 06 * * *"
  workflow_dispatch:
    inputs:
      date:
        description: 'Date to process (YYYY-MM-DD format, default: yesterday)'
        required: false
        type: string

permissions:
  contents: write  # needed to create/delete releases and tags
  actions: write   # needed to dispatch this workflow on other branches

jobs:
  # Schedule-only entry point: re-dispatch this workflow on each release branch.
  trigger-releases:
    runs-on: ubuntu-latest
    if: github.event_name == 'schedule'
    steps:
      - name: Trigger main branch release
        uses: actions/github-script@v7
        with:
          script: |
            await github.rest.actions.createWorkflowDispatch({
              owner: context.repo.owner,
              repo: context.repo.repo,
              workflow_id: 'openairframes-daily-release.yaml',
              ref: 'main'
            });
      - name: Trigger develop branch release
        uses: actions/github-script@v7
        with:
          script: |
            await github.rest.actions.createWorkflowDispatch({
              owner: context.repo.owner,
              repo: context.repo.repo,
              workflow_id: 'openairframes-daily-release.yaml',
              ref: 'develop'
            });

  # Build the FAA registry snapshot (CSV + source ZIP) as a short-lived artifact.
  build-faa:
    runs-on: ubuntu-24.04-arm
    if: github.event_name != 'schedule'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Run FAA release script
        run: |
          python src/create_daily_faa_release.py ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          ls -lah data/faa_releasable
          ls -lah data/openairframes
      - name: Upload FAA artifacts
        uses: actions/upload-artifact@v4
        with:
          name: faa-release
          path: |
            data/openairframes/openairframes_faa_*.csv
            data/faa_releasable/ReleasableAircraft_*.zip
          retention-days: 1

  # Download raw ADS-B traces once and publish them as a tar for the map jobs.
  adsb-extract:
    runs-on: ubuntu-24.04-arm
    if: github.event_name != 'schedule'
    outputs:
      manifest-exists: ${{ steps.check.outputs.exists }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Download and extract ADS-B data
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          python -m src.adsb.download_and_list_icaos ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          ls -lah data/output/
      - name: Check manifest exists
        id: check
        run: |
          if ls data/output/icao_manifest_*.txt 1>/dev/null 2>&1; then
            echo "exists=true" >> "$GITHUB_OUTPUT"
          else
            echo "exists=false" >> "$GITHUB_OUTPUT"
          fi
      - name: Create tar of extracted data
        run: |
          cd data/output
          tar -cf extracted_data.tar *-planes-readsb-prod-0.tar_0 icao_manifest_*.txt
          ls -lah extracted_data.tar
      - name: Upload extracted data
        uses: actions/upload-artifact@v4
        with:
          name: adsb-extracted
          path: data/output/extracted_data.tar
          retention-days: 1
          compression-level: 0  # Already compressed trace files

  # Map phase: process the ICAO manifest in 4 parallel chunks.
  # Skipped entirely when adsb-extract produced no manifest.
  adsb-map:
    runs-on: ubuntu-24.04-arm
    needs: adsb-extract
    if: github.event_name != 'schedule' && needs.adsb-extract.outputs.manifest-exists == 'true'
    strategy:
      fail-fast: false
      matrix:
        chunk: [0, 1, 2, 3]
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Download extracted data
        uses: actions/download-artifact@v4
        with:
          name: adsb-extracted
          path: data/output/
      - name: Extract tar
        run: |
          cd data/output
          tar -xf extracted_data.tar
          rm extracted_data.tar
          echo "=== Contents of data/output ==="
          ls -lah
          echo "=== Looking for manifest ==="
          cat icao_manifest_*.txt | head -20 || echo "No manifest found"
          echo "=== Looking for extracted dirs ==="
          ls -d *-planes-readsb-prod-0* 2>/dev/null || echo "No extracted dirs"
      - name: Process chunk ${{ matrix.chunk }}
        run: |
          python -m src.adsb.process_icao_chunk --chunk-id ${{ matrix.chunk }} --total-chunks 4 ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          mkdir -p data/output/adsb_chunks
          ls -lah data/output/adsb_chunks/ || echo "No chunks created"
      - name: Upload chunk artifacts
        uses: actions/upload-artifact@v4
        with:
          name: adsb-chunk-${{ matrix.chunk }}
          path: data/output/adsb_chunks/
          retention-days: 1

  # Reduce phase: merge all chunk artifacts into the final ADS-B CSV.
  # NOTE(review): if adsb-map is skipped (no manifest) this job is skipped
  # too, which in turn skips create-release's adsb download — confirm that
  # is the intended behaviour for manifest-less days.
  adsb-reduce:
    runs-on: ubuntu-24.04-arm
    needs: adsb-map
    if: github.event_name != 'schedule'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Download all chunk artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: adsb-chunk-*
          path: data/output/adsb_chunks/
          merge-multiple: true
      - name: Debug downloaded files
        run: |
          echo "=== Listing data/ ==="
          find data/ -type f 2>/dev/null | head -50 || echo "No files in data/"
          echo "=== Looking for parquet files ==="
          find . -name "*.parquet" 2>/dev/null | head -20 || echo "No parquet files found"
      - name: Combine chunks to CSV
        run: |
          mkdir -p data/output/adsb_chunks
          ls -lah data/output/adsb_chunks/ || echo "Directory empty or does not exist"
          python -m src.adsb.combine_chunks_to_csv --chunks-dir data/output/adsb_chunks ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          ls -lah data/openairframes/
      - name: Upload ADS-B artifacts
        uses: actions/upload-artifact@v4
        with:
          name: adsb-release
          path: data/openairframes/openairframes_adsb_*.csv.gz
          retention-days: 1

  # Build the community-contributions CSV (only needs pandas, not requirements.txt).
  build-community:
    runs-on: ubuntu-latest
    if: github.event_name != 'schedule'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pandas
      - name: Run Community release script
        run: |
          python -m src.contributions.create_daily_community_release
          ls -lah data/openairframes
      - name: Upload Community artifacts
        uses: actions/upload-artifact@v4
        with:
          name: community-release
          path: data/openairframes/openairframes_community_*.csv
          retention-days: 1

  # Build the ADS-B Exchange basic-ac-db JSON (stdlib-only script: no pip install step).
  build-adsbexchange-json:
    runs-on: ubuntu-latest
    if: github.event_name != 'schedule'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Run ADS-B Exchange JSON release script
        run: |
          python -m src.contributions.create_daily_adsbexchange_release ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          ls -lah data/openairframes
      - name: Upload ADS-B Exchange JSON artifact
        uses: actions/upload-artifact@v4
        with:
          name: adsbexchange-json
          path: data/openairframes/basic-ac-db_*.json.gz
          retention-days: 1

  # Build the Mictronics DB zip. Best-effort: the script may fail and the
  # upload tolerates missing output, so the release proceeds without it.
  build-mictronics-db:
    runs-on: ubuntu-latest
    if: github.event_name != 'schedule'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.14"
      - name: Run Mictronics DB release script
        continue-on-error: true
        # NOTE(review): module is spelled "microtonics" while everything else
        # says "mictronics" — looks like a typo, but confirm against the
        # actual filename in src/contributions/ before renaming.
        run: |
          python -m src.contributions.create_daily_microtonics_release ${{ inputs.date && format('--date {0}', inputs.date) || '' }}
          ls -lah data/openairframes
      - name: Upload Mictronics DB artifact
        uses: actions/upload-artifact@v4
        with:
          name: mictronics-db
          path: data/openairframes/mictronics-db_*.zip
          retention-days: 1
          if-no-files-found: ignore

  # Gather all artifacts, validate the required ones, and publish a tagged
  # GitHub release (replacing any same-day release for the branch).
  create-release:
    runs-on: ubuntu-latest
    needs: [build-faa, adsb-reduce, build-community, build-adsbexchange-json, build-mictronics-db]
    # !failure() && !cancelled() lets this run even when optional needs were
    # skipped, as long as nothing actually failed.
    if: github.event_name != 'schedule' && !failure() && !cancelled()
    steps:
      - name: Checkout for gh CLI
        uses: actions/checkout@v6
        with:
          sparse-checkout: |
            .github
          sparse-checkout-cone-mode: false
      - name: Download FAA artifacts
        uses: actions/download-artifact@v4
        with:
          name: faa-release
          path: artifacts/faa
      - name: Download ADS-B artifacts
        uses: actions/download-artifact@v4
        with:
          name: adsb-release
          path: artifacts/adsb
      - name: Download Community artifacts
        uses: actions/download-artifact@v4
        with:
          name: community-release
          path: artifacts/community
      - name: Download ADS-B Exchange JSON artifact
        uses: actions/download-artifact@v4
        with:
          name: adsbexchange-json
          path: artifacts/adsbexchange
      - name: Download Mictronics DB artifact
        uses: actions/download-artifact@v4
        continue-on-error: true  # artifact is optional (see build-mictronics-db)
        with:
          name: mictronics-db
          path: artifacts/mictronics
      - name: Debug artifact structure
        run: |
          echo "=== Full artifacts tree ==="
          find artifacts -type f 2>/dev/null || echo "No files found in artifacts"
          echo "=== FAA artifacts ==="
          find artifacts/faa -type f 2>/dev/null || echo "No files found in artifacts/faa"
          echo "=== ADS-B artifacts ==="
          find artifacts/adsb -type f 2>/dev/null || echo "No files found in artifacts/adsb"
          echo "=== Community artifacts ==="
          find artifacts/community -type f 2>/dev/null || echo "No files found in artifacts/community"
          echo "=== ADS-B Exchange JSON artifacts ==="
          find artifacts/adsbexchange -type f 2>/dev/null || echo "No files found in artifacts/adsbexchange"
          echo "=== Mictronics DB artifacts ==="
          find artifacts/mictronics -type f 2>/dev/null || echo "No files found in artifacts/mictronics"
      - name: Prepare release metadata
        id: meta
        run: |
          DATE=$(date -u +"%Y-%m-%d")
          BRANCH_NAME="${GITHUB_REF#refs/heads/}"
          BRANCH_SUFFIX=""
          if [ "$BRANCH_NAME" = "main" ]; then
            BRANCH_SUFFIX="-main"
          elif [ "$BRANCH_NAME" = "develop" ]; then
            BRANCH_SUFFIX="-develop"
          fi
          TAG="openairframes-${DATE}${BRANCH_SUFFIX}"
          # Find files from artifacts using find (handles nested structures)
          CSV_FILE_FAA=$(find artifacts/faa -name "openairframes_faa_*.csv" -type f 2>/dev/null | head -1)
          CSV_FILE_ADSB=$(find artifacts/adsb -name "openairframes_adsb_*.csv.gz" -type f 2>/dev/null | head -1)
          CSV_FILE_COMMUNITY=$(find artifacts/community -name "openairframes_community_*.csv" -type f 2>/dev/null | head -1)
          ZIP_FILE=$(find artifacts/faa -name "ReleasableAircraft_*.zip" -type f 2>/dev/null | head -1)
          JSON_FILE_ADSBX=$(find artifacts/adsbexchange -name "basic-ac-db_*.json.gz" -type f 2>/dev/null | head -1)
          ZIP_FILE_MICTRONICS=$(find artifacts/mictronics -name "mictronics-db_*.zip" -type f 2>/dev/null | head -1)
          # Validate required files exist
          MISSING_FILES=""
          if [ -z "$CSV_FILE_FAA" ] || [ ! -f "$CSV_FILE_FAA" ]; then
            MISSING_FILES="$MISSING_FILES FAA_CSV"
          fi
          if [ -z "$CSV_FILE_ADSB" ] || [ ! -f "$CSV_FILE_ADSB" ]; then
            MISSING_FILES="$MISSING_FILES ADSB_CSV"
          fi
          if [ -z "$ZIP_FILE" ] || [ ! -f "$ZIP_FILE" ]; then
            MISSING_FILES="$MISSING_FILES FAA_ZIP"
          fi
          if [ -z "$JSON_FILE_ADSBX" ] || [ ! -f "$JSON_FILE_ADSBX" ]; then
            MISSING_FILES="$MISSING_FILES ADSBX_JSON"
          fi
          # Optional files - warn but don't fail
          OPTIONAL_MISSING=""
          if [ -z "$ZIP_FILE_MICTRONICS" ] || [ ! -f "$ZIP_FILE_MICTRONICS" ]; then
            OPTIONAL_MISSING="$OPTIONAL_MISSING MICTRONICS_ZIP"
            ZIP_FILE_MICTRONICS=""
          fi
          if [ -n "$MISSING_FILES" ]; then
            echo "ERROR: Missing required release files:$MISSING_FILES"
            echo "FAA CSV: $CSV_FILE_FAA"
            echo "ADSB CSV: $CSV_FILE_ADSB"
            echo "ZIP: $ZIP_FILE"
            echo "ADSBX JSON: $JSON_FILE_ADSBX"
            echo "MICTRONICS ZIP: $ZIP_FILE_MICTRONICS"
            exit 1
          fi
          # Get basenames for display
          CSV_BASENAME_FAA=$(basename "$CSV_FILE_FAA")
          CSV_BASENAME_ADSB=$(basename "$CSV_FILE_ADSB")
          CSV_BASENAME_COMMUNITY=$(basename "$CSV_FILE_COMMUNITY" 2>/dev/null || echo "")
          ZIP_BASENAME=$(basename "$ZIP_FILE")
          JSON_BASENAME_ADSBX=$(basename "$JSON_FILE_ADSBX")
          ZIP_BASENAME_MICTRONICS=""
          if [ -n "$ZIP_FILE_MICTRONICS" ]; then
            ZIP_BASENAME_MICTRONICS=$(basename "$ZIP_FILE_MICTRONICS")
          fi
          if [ -n "$OPTIONAL_MISSING" ]; then
            echo "WARNING: Optional files missing:$OPTIONAL_MISSING (will continue without them)"
          fi
          echo "date=$DATE" >> "$GITHUB_OUTPUT"
          echo "tag=$TAG" >> "$GITHUB_OUTPUT"
          echo "csv_file_faa=$CSV_FILE_FAA" >> "$GITHUB_OUTPUT"
          echo "csv_basename_faa=$CSV_BASENAME_FAA" >> "$GITHUB_OUTPUT"
          echo "csv_file_adsb=$CSV_FILE_ADSB" >> "$GITHUB_OUTPUT"
          echo "csv_basename_adsb=$CSV_BASENAME_ADSB" >> "$GITHUB_OUTPUT"
          echo "csv_file_community=$CSV_FILE_COMMUNITY" >> "$GITHUB_OUTPUT"
          echo "csv_basename_community=$CSV_BASENAME_COMMUNITY" >> "$GITHUB_OUTPUT"
          echo "zip_file=$ZIP_FILE" >> "$GITHUB_OUTPUT"
          echo "zip_basename=$ZIP_BASENAME" >> "$GITHUB_OUTPUT"
          echo "json_file_adsbx=$JSON_FILE_ADSBX" >> "$GITHUB_OUTPUT"
          echo "json_basename_adsbx=$JSON_BASENAME_ADSBX" >> "$GITHUB_OUTPUT"
          echo "zip_file_mictronics=$ZIP_FILE_MICTRONICS" >> "$GITHUB_OUTPUT"
          echo "zip_basename_mictronics=$ZIP_BASENAME_MICTRONICS" >> "$GITHUB_OUTPUT"
          echo "name=OpenAirframes snapshot ($DATE)${BRANCH_SUFFIX}" >> "$GITHUB_OUTPUT"
          echo "Found files:"
          echo "  FAA CSV: $CSV_FILE_FAA"
          echo "  ADSB CSV: $CSV_FILE_ADSB"
          echo "  Community CSV: $CSV_FILE_COMMUNITY"
          echo "  ZIP: $ZIP_FILE"
          echo "  ADSBX JSON: $JSON_FILE_ADSBX"
          echo "  MICTRONICS ZIP: $ZIP_FILE_MICTRONICS"
      - name: Delete existing release if exists
        run: |
          echo "Attempting to delete release: ${{ steps.meta.outputs.tag }}"
          gh release delete "${{ steps.meta.outputs.tag }}" --yes --cleanup-tag || echo "No existing release to delete"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create GitHub Release and upload assets
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ steps.meta.outputs.tag }}
          name: ${{ steps.meta.outputs.name }}
          fail_on_unmatched_files: false
          body: |
            Automated daily snapshot generated at 06:00 UTC for ${{ steps.meta.outputs.date }}.
            Assets:
            - ${{ steps.meta.outputs.csv_basename_faa }}
            - ${{ steps.meta.outputs.csv_basename_adsb }}
            - ${{ steps.meta.outputs.csv_basename_community }}
            - ${{ steps.meta.outputs.zip_basename }}
            - ${{ steps.meta.outputs.json_basename_adsbx }}
            ${{ steps.meta.outputs.zip_basename_mictronics && format('- {0}', steps.meta.outputs.zip_basename_mictronics) || '' }}
          files: |
            ${{ steps.meta.outputs.csv_file_faa }}
            ${{ steps.meta.outputs.csv_file_adsb }}
            ${{ steps.meta.outputs.csv_file_community }}
            ${{ steps.meta.outputs.zip_file }}
            ${{ steps.meta.outputs.json_file_adsbx }}
            ${{ steps.meta.outputs.zip_file_mictronics }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}