Mirror of https://github.com/PlaneQuery/OpenAirframes.git (synced 2026-04-23 11:36:35 +02:00)

Commit: updates to .github/workflows/historical-adsb.yaml
@@ -3,52 +3,14 @@ name: Historical ADS-B Processing
 on:
   workflow_dispatch:
     inputs:
-      start_date:
-        description: 'Start date (YYYY-MM-DD, inclusive)'
+      date:
+        description: 'YYYY-MM-DD'
         required: true
         type: string
-      end_date:
-        description: 'End date (YYYY-MM-DD, exclusive)'
-        required: true
-        type: string
-      chunk_days:
-        description: 'Days per job chunk (default: 1)'
-        required: false
-        type: number
-        default: 1
 
 jobs:
-  generate-matrix:
-    runs-on: ubuntu-latest
-    outputs:
-      chunks: ${{ steps.generate.outputs.chunks }}
-      global_start: ${{ inputs.start_date }}
-      global_end: ${{ inputs.end_date }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.12'
-
-      - name: Generate date chunks
-        id: generate
-        env:
-          INPUT_START_DATE: ${{ inputs.start_date }}
-          INPUT_END_DATE: ${{ inputs.end_date }}
-          INPUT_CHUNK_DAYS: ${{ inputs.chunk_days }}
-        run: python src/adsb/historical_generate_matrix.py
-
   adsb-extract:
-    needs: generate-matrix
     runs-on: ubuntu-24.04-arm
-    strategy:
-      matrix:
-        chunk: ${{ fromJson(needs.generate-matrix.outputs.chunks) }}
-      max-parallel: 3
-      fail-fast: true
     steps:
      - name: Checkout
        uses: actions/checkout@v4
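
Note: the removed generate-matrix job is only visible here through its interface: it read INPUT_START_DATE, INPUT_END_DATE and INPUT_CHUNK_DAYS, and published a `chunks` JSON array that downstream jobs fanned out on via fromJson(), with each element carrying the start_date/end_date keys referenced by matrix.chunk. A minimal Python sketch of that contract (the body of src/adsb/historical_generate_matrix.py is not part of this diff, so the logic below is an assumption):

    # Sketch only: reconstructs the removed job's inputs/outputs contract,
    # not the real historical_generate_matrix.py.
    import json
    import os
    from datetime import date, timedelta

    start = date.fromisoformat(os.environ["INPUT_START_DATE"])  # inclusive
    end = date.fromisoformat(os.environ["INPUT_END_DATE"])      # exclusive
    step = timedelta(days=int(os.environ.get("INPUT_CHUNK_DAYS") or 1))

    chunks, cur = [], start
    while cur < end:
        nxt = min(cur + step, end)
        chunks.append({"start_date": cur.isoformat(), "end_date": nxt.isoformat()})
        cur = nxt

    # Step outputs are published by appending key=value to $GITHUB_OUTPUT.
    with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
        fh.write(f"chunks={json.dumps(chunks)}\n")
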
@@ -63,81 +25,30 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements.txt
 
-      - name: Free disk space
-        run: |
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/share/boost
-          df -h
-
-      - name: Download and extract ADS-B data
+      - name: Download and split ADS-B data
         env:
-          START_DATE: ${{ matrix.chunk.start_date }}
-          END_DATE: ${{ matrix.chunk.end_date }}
+          DATE: ${{ inputs.date }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          python -m src.adsb.download_and_list_icaos --start-date "$START_DATE" --end-date "$END_DATE"
-          ls -lah data/output/
+          python -m src.adsb.download_and_list_icaos --date "$DATE"
+          ls -lah data/output/adsb_archives/"$DATE" || true
 
-      - name: Create tar of extracted data and split into chunks
-        run: |
-          cd data/output
-          echo "=== Disk space before tar ==="
-          df -h .
-          echo "=== Files to tar ==="
-          ls -lah *-planes-readsb-prod-0.tar_0 icao_manifest_*.txt 2>/dev/null || echo "No files found"
-
-          # Create tar with explicit error checking
-          if ls *-planes-readsb-prod-0.tar_0 1>/dev/null 2>&1; then
-            tar -cvf extracted_data.tar *-planes-readsb-prod-0.tar_0 icao_manifest_*.txt
-            echo "=== Tar file created ==="
-            ls -lah extracted_data.tar
-            # Verify tar integrity
-            tar -tf extracted_data.tar > /dev/null && echo "Tar integrity check passed" || { echo "Tar integrity check FAILED"; exit 1; }
-
-            # Record tar size and checksum for verification after reassembly
-            echo "=== Recording tar metadata ==="
-            ORIGINAL_SIZE=$(stat --format=%s extracted_data.tar)
-            ORIGINAL_SHA=$(sha256sum extracted_data.tar | awk '{print $1}')
-            echo "Size: $ORIGINAL_SIZE"
-            echo "SHA256: $ORIGINAL_SHA"
-
-            # Split into 500MB chunks to avoid artifact upload issues
-            echo "=== Splitting tar into 500MB chunks ==="
-            mkdir -p tar_chunks
-            split -b 500M extracted_data.tar tar_chunks/extracted_data.tar.part_
-            rm extracted_data.tar
-
-            # Write metadata file (plain text so artifact upload won't skip it)
-            echo "$ORIGINAL_SHA extracted_data.tar" > tar_chunks/checksum.txt
-            echo "$ORIGINAL_SIZE" >> tar_chunks/checksum.txt
-
-            echo "=== Chunks created ==="
-            ls -lah tar_chunks/
-            echo "=== Checksum file ==="
-            cat tar_chunks/checksum.txt
-          else
-            echo "ERROR: No extracted directories found, cannot create tar"
-            exit 1
-          fi
-
-      - name: Upload extracted data chunks
+      - name: Upload archives
         uses: actions/upload-artifact@v4
         with:
-          name: adsb-extracted-${{ matrix.chunk.start_date }}-${{ matrix.chunk.end_date }}
-          path: data/output/tar_chunks/
+          name: adsb-archives-${{ inputs.date }}
+          path: data/output/adsb_archives/${{ inputs.date }}
           retention-days: 1
           compression-level: 0
-          if-no-files-found: warn
+          if-no-files-found: error
 
   adsb-map:
-    needs: [generate-matrix, adsb-extract]
+    needs: adsb-extract
     runs-on: ubuntu-24.04-arm
     strategy:
       fail-fast: true
       matrix:
-        chunk: ${{ fromJson(needs.generate-matrix.outputs.chunks) }}
-        icao_chunk: [0, 1, 2, 3]
+        part_id: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
     steps:
      - name: Checkout
        uses: actions/checkout@v4
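
Note: the deleted "Create tar of extracted data and split into chunks" step existed, per its own comment, to avoid artifact upload issues with one huge file: it tarred the extracted data, recorded the tar's SHA-256 and byte size, and split it into 500 MB parts plus a checksum.txt manifest. A Python sketch of that split-with-manifest pattern (an illustration of the removed bash, not project code):

    import hashlib
    import os

    CHUNK = 500 * 1024 * 1024  # 500 MB, matching `split -b 500M`

    def split_with_manifest(tar_path: str, out_dir: str) -> None:
        os.makedirs(out_dir, exist_ok=True)
        sha, size, part = hashlib.sha256(), 0, 0
        with open(tar_path, "rb") as src:
            while buf := src.read(CHUNK):
                sha.update(buf)
                size += len(buf)
                # Two-letter suffixes (aa, ab, ...) mirror split's default naming.
                suffix = chr(97 + part // 26) + chr(97 + part % 26)
                with open(f"{out_dir}/extracted_data.tar.part_{suffix}", "wb") as dst:
                    dst.write(buf)
                part += 1
        # checksum.txt layout the reassembly step parses: line 1 = sha, line 2 = size.
        with open(f"{out_dir}/checksum.txt", "w") as fh:
            fh.write(f"{sha.hexdigest()} extracted_data.tar\n{size}\n")
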
@@ -152,94 +63,29 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements.txt
 
-      - name: Free disk space
-        run: |
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/share/boost
-          df -h
-
-      - name: Download extracted data
+      - name: Download archives
         uses: actions/download-artifact@v4
         with:
-          name: adsb-extracted-${{ matrix.chunk.start_date }}-${{ matrix.chunk.end_date }}
-          path: data/output/tar_chunks/
+          name: adsb-archives-${{ inputs.date }}
+          path: data/output/adsb_archives/${{ inputs.date }}
 
-      - name: Reassemble and extract tar
-        id: extract
-        run: |
-          cd data/output
-          if [ -d tar_chunks ] && ls tar_chunks/extracted_data.tar.part_* 1>/dev/null 2>&1; then
-            echo "=== Chunk files info ==="
-            ls -lah tar_chunks/
-
-            cd tar_chunks
-
-            # Reassemble tar with explicit sorting
-            echo "=== Reassembling tar file ==="
-            ls -1 extracted_data.tar.part_?? | sort | while read part; do
-              echo "Appending $part..."
-              cat "$part" >> ../extracted_data.tar
-            done
-            cd ..
-
-            echo "=== Reassembled tar file info ==="
-            ls -lah extracted_data.tar
-
-            # Verify integrity
-            echo "=== Verifying reassembled tar ==="
-            if [ -f tar_chunks/checksum.txt ]; then
-              EXPECTED_SHA=$(head -1 tar_chunks/checksum.txt | awk '{print $1}')
-              EXPECTED_SIZE=$(sed -n '2p' tar_chunks/checksum.txt)
-              ACTUAL_SHA=$(sha256sum extracted_data.tar | awk '{print $1}')
-              ACTUAL_SIZE=$(stat --format=%s extracted_data.tar)
-              echo "Expected: SHA=$EXPECTED_SHA Size=$EXPECTED_SIZE"
-              echo "Actual: SHA=$ACTUAL_SHA Size=$ACTUAL_SIZE"
-              if [ "$EXPECTED_SHA" != "$ACTUAL_SHA" ] || [ "$EXPECTED_SIZE" != "$ACTUAL_SIZE" ]; then
-                echo "ERROR: Reassembled tar does not match original - data corrupted during transfer"
-                exit 1
-              fi
-              echo "Checksum and size verified"
-            else
-              echo "WARNING: No checksum file found, falling back to tar integrity check"
-              tar -tf extracted_data.tar > /dev/null || { echo "ERROR: Tar file is corrupted"; exit 1; }
-              echo "Tar integrity check passed"
-            fi
-
-            rm -rf tar_chunks
-
-            echo "=== Extracting ==="
-            tar -xf extracted_data.tar
-            rm extracted_data.tar
-            echo "has_data=true" >> "$GITHUB_OUTPUT"
-            echo "=== Contents of data/output ==="
-            ls -lah
-          else
-            echo "No tar chunks found"
-            echo "has_data=false" >> "$GITHUB_OUTPUT"
-          fi
-
-      - name: Process ICAO chunk
-        if: steps.extract.outputs.has_data == 'true'
+      - name: Process part
         env:
-          START_DATE: ${{ matrix.chunk.start_date }}
-          END_DATE: ${{ matrix.chunk.end_date }}
+          DATE: ${{ inputs.date }}
         run: |
-          python -m src.adsb.process_icao_chunk --chunk-id ${{ matrix.icao_chunk }} --total-chunks 4 --start-date "$START_DATE" --end-date "$END_DATE"
-          ls -lah data/output/adsb_chunks/ || echo "No chunks created"
+          python -m src.adsb.process_icao_chunk --part-id ${{ matrix.part_id }} --date "$DATE"
 
-      - name: Upload chunk artifacts
-        if: steps.extract.outputs.has_data == 'true'
+      - name: Upload compressed outputs
         uses: actions/upload-artifact@v4
         with:
-          name: adsb-map-${{ matrix.chunk.start_date }}-${{ matrix.chunk.end_date }}-chunk-${{ matrix.icao_chunk }}
-          path: data/output/adsb_chunks/
+          name: adsb-compressed-${{ inputs.date }}-part-${{ matrix.part_id }}
+          path: data/output/compressed/${{ inputs.date }}
           retention-days: 1
           compression-level: 0
-          if-no-files-found: ignore
+          if-no-files-found: error
 
   adsb-reduce:
-    needs: [generate-matrix, adsb-map]
+    needs: adsb-map
     runs-on: ubuntu-24.04-arm
     steps:
      - name: Checkout
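
Note: the counterpart "Reassemble and extract tar" step removed here concatenated the parts in sorted order, then refused to extract unless both the SHA-256 and the byte size matched what checksum.txt recorded, falling back to a plain `tar -tf` listing when the manifest was missing. The same verification, sketched in Python purely for illustration:

    import glob
    import hashlib
    import os

    def reassemble_and_verify(chunk_dir: str, out_tar: str) -> None:
        # Sorted order matters: parts must be appended aa, ab, ac, ...
        with open(out_tar, "wb") as dst:
            for part in sorted(glob.glob(f"{chunk_dir}/extracted_data.tar.part_*")):
                with open(part, "rb") as src:
                    while buf := src.read(1 << 20):
                        dst.write(buf)
        with open(f"{chunk_dir}/checksum.txt") as fh:
            expected_sha = fh.readline().split()[0]   # line 1: sha + name
            expected_size = int(fh.readline().strip())  # line 2: byte count
        sha = hashlib.sha256()
        with open(out_tar, "rb") as fh:
            while buf := fh.read(1 << 20):
                sha.update(buf)
        if sha.hexdigest() != expected_sha or os.path.getsize(out_tar) != expected_size:
            raise RuntimeError("reassembled tar does not match original - corrupted in transfer")
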
@@ -255,33 +101,23 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements.txt
 
-      - name: Download all chunk artifacts
+      - name: Download compressed outputs
         uses: actions/download-artifact@v4
         with:
-          pattern: adsb-map-*
-          path: data/output/adsb_chunks/
+          pattern: adsb-compressed-${{ inputs.date }}-part-*
+          path: outputs/compressed/${{ inputs.date }}
           merge-multiple: true
 
-      - name: Debug downloaded files
-        run: |
-          echo "=== Disk space before processing ==="
-          df -h
-          echo "=== Listing data/output/adsb_chunks/ ==="
-          find data/output/adsb_chunks/ -type f 2>/dev/null | wc -l
-          echo "=== Total parquet size ==="
-          du -sh data/output/adsb_chunks/ || echo "No chunks dir"
-
-      - name: Combine chunks to CSV
+      - name: Concatenate final outputs
         env:
-          START_DATE: ${{ needs.generate-matrix.outputs.global_start }}
-          END_DATE: ${{ needs.generate-matrix.outputs.global_end }}
+          DATE: ${{ inputs.date }}
         run: |
-          python -m src.adsb.combine_chunks_to_csv --chunks-dir data/output/adsb_chunks --start-date "$START_DATE" --end-date "$END_DATE" --skip-base --stream
-          ls -lah data/openairframes/
+          python src/adsb/concat_parquet_to_final.py --date "$DATE"
+          ls -lah outputs/ || true
 
-      - name: Upload final artifact
+      - name: Upload final artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: openairframes_adsb-${{ needs.generate-matrix.outputs.global_start }}-${{ needs.generate-matrix.outputs.global_end }}
-          path: data/openairframes/*.csv.gz
+          name: openairframes_adsb-${{ inputs.date }}
+          path: outputs/openairframes_adsb_${{ inputs.date }}_${{ inputs.date }}.*
          retention-days: 30
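
Note: after this change the pipeline is a fixed 16-way map/reduce keyed on a single date: each adsb-map job uploads an adsb-compressed-<date>-part-<N> artifact, and adsb-reduce pulls them all into one directory via `pattern:` plus `merge-multiple: true` before concatenating. The body of src/adsb/concat_parquet_to_final.py is not part of this diff; a sketch of the fan-in its name and paths imply, assuming pandas, parquet part files, and a gzipped CSV result (all three are assumptions):

    import argparse
    import glob

    import pandas as pd

    parser = argparse.ArgumentParser()
    parser.add_argument("--date", required=True)  # YYYY-MM-DD
    args = parser.parse_args()

    # merge-multiple: true drops every part artifact into this one directory.
    parts = sorted(glob.glob(f"outputs/compressed/{args.date}/**/*.parquet", recursive=True))
    df = pd.concat((pd.read_parquet(p) for p in parts), ignore_index=True)

    # Name must match the upload glob outputs/openairframes_adsb_<date>_<date>.*
    df.to_csv(f"outputs/openairframes_adsb_{args.date}_{args.date}.csv.gz", index=False)
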