#!/bin/bash
set -e

# Report the failing line and command for any unhandled error (pairs with set -e).
trap 'echo "[ERROR] Error in line $LINENO when executing: $BASH_COMMAND"' ERR

# On termination: announce, drop the SIGTERM trap so we do not recurse into it,
# then kill our whole process group (reaps the background loops started below)
# and exit cleanly.
trap 'echo tar1090.sh: exiting; trap - SIGTERM; kill -- -$(ps -o pgid= $$ | tr -d " ") || true; exit 0' SIGTERM SIGINT SIGHUP SIGQUIT
# Positional arguments:
#   $1 - runtime directory where chunks and chunks.json are written
#   $2 - dump1090 source directory providing aircraft.json / history_*.json
RUN_DIR=$1
SRC_DIR=$2

if ! [[ -d $RUN_DIR ]]; then
    echo "runtime directory (first argument: $RUN_DIR) is not a directory, fatal error!"
    exit 1
fi

if [[ -z $SRC_DIR ]]; then
    echo "source directory (2nd argument) not specified, fatal error!"
    exit 1
fi
# Fall back to built-in defaults when the environment (usually a systemd
# EnvironmentFile) does not provide the history tuning variables.
if [[ -z $HISTORY_SIZE || -z $INTERVAL || -z $CHUNK_SIZE ]]
then
    echo "Syntax: bash tar1090.sh <runtime directory> <dump1090 source directory>"
    echo "Missing some settings from environment variables, using defaults:"
    echo "history interval: 8 seconds"
    echo "history size: 450 entries"
    echo "chunk size: 60 entries"
    echo "really either use the file named default as a systemd environment file or export those variables yourself"
    echo "in other words: figure it out ;)"
    INTERVAL=8
    HISTORY_SIZE=450
    CHUNK_SIZE=60
fi

# UAT (978 MHz) support stays off unless a source URL is configured.
if [[ -z $URL_978 ]]; then
    ENABLE_978=no
fi
# Poll interval for the 978 source, in seconds.
if [[ -z $INT_978 ]]; then
    INT_978=1
fi
# Clamp the 978 poll interval to the supported range [1, 2] seconds.
if (( INT_978 > 2 )) || (( INT_978 < 1 )); then
    INT_978=1
fi

# gzip level for finished chunks must be 1..9; default to fastest.
# (An unset GZIP_LVL evaluates as 0 in arithmetic context and is caught here.)
if (( GZIP_LVL < 1 || GZIP_LVL > 9 )); then
    echo "gzip level unspecified, using level 1"
    GZIP_LVL=1
fi
# determine number of chunks
chunks=$(( HISTORY_SIZE / CHUNK_SIZE + 1 ))
# increase chunk size to get total history size as close as we can
CHUNK_SIZE=$(( CHUNK_SIZE - ( (CHUNK_SIZE - HISTORY_SIZE % CHUNK_SIZE) / chunks ) ))

# PTRACKS: hours of track history kept for the "all tracks" view.
if [[ -z $PTRACKS ]]; then
    PTRACKS=8
fi
# Chunks needed to cover PTRACKS hours, rounded up (awk supplies the ceiling
# since bash arithmetic truncates).
chunksAll=$(awk "function ceil(x){return int(x)+(x>int(x))} BEGIN {printf ceil($PTRACKS * 3600 / $INTERVAL / $CHUNK_SIZE)}")

# Never keep fewer "all" chunks than regular history chunks.
if (( chunksAll < chunks )); then
    chunksAll="$chunks"
fi
# newChunk FILE|refresh
#
# Register FILE as the newest history chunk: copy it into the runtime
# directory under a unique timestamped name, append it to both chunk lists,
# prune the lists to their configured lengths (deleting aged-out chunk
# files), and regenerate chunks.json. With the literal argument "refresh"
# only the pruning and the chunks.json rewrite are performed.
# Expects the current directory to be $RUN_DIR.
# Reads globals: chunks, chunksAll, ENABLE_978, RUN_DIR.
newChunk() {
    if [[ "$1" != "refresh" ]]; then
        # Millisecond timestamp keeps chunk names unique and sortable.
        curChunk="chunk_$(date +%s%3N).gz"
        echo "$curChunk" >> chunk_list
        echo "$curChunk" >> chunk_list_all
        cp "$1" "$curChunk"
    fi

    # Delete chunk files that have aged out of the "all" list.
    for ITEM in $(head -n-$chunksAll chunk_list_all); do
        rm -f "$RUN_DIR/$ITEM"
    done

    tail -n$chunksAll chunk_list_all > chunk_list_all.tmp
    mv chunk_list_all.tmp chunk_list_all

    tail -n$chunks chunk_list > chunk_list.tmp
    mv chunk_list.tmp chunk_list

    # construct chunks.json
    JSON='{'
    if [ -f pf.json ]; then
        JSON+=' "pf_data": "true",'
    fi
    if [[ "$ENABLE_978" == "yes" ]]; then
        JSON+=' "enable_uat": "true",'
    fi

    JSON+=' "chunks": [ '
    JSON+="$(while read -r LINE; do echo -n "\"$LINE\", "; done < chunk_list)"
    JSON+=' "current_large.gz", "current_small.gz" ],'

    JSON+=' "chunks_all": [ '
    JSON+="$(while read -r LINE; do echo -n "\"$LINE\", "; done < chunk_list_all)"
    JSON+=' "current_large.gz", "current_small.gz" ] }'

    echo "$JSON" > "$RUN_DIR/chunks.json"
}
# prune INFILE OUTFILE
#
# Reduce a dump1090-style aircraft.json to the compact form the history
# keeps: drop aircraft without a recent "seen" value (older than INTERVAL+2
# seconds), then collapse each aircraft object into a positional array:
# [hex, altitude, ground speed, track, lat, lon, seen_pos, type, flight, messages].
# "type" becomes "mlat"/"tisb" when the position came from those sources.
# Reads global: INTERVAL (interpolated into the jq program).
prune() {
    jq -c <"$1" >"$2" '
    .aircraft |= map(select(has("seen") and .seen < '$INTERVAL' + 2))
    | .aircraft[] |= [ .hex,
    (if .alt_baro != null then .alt_baro elif .altitude != null then .altitude else .alt_geom end),
    (if .gs != null then .gs else .tas end),
    .track, .lat, .lon, .seen_pos,
    (if .mlat != null and (.mlat | contains(["lat"])) then "mlat"
    elif .tisb != null and (.tisb | contains(["lat"])) then "tisb" else .type end),
    .flight, .messages]
    '
}
cd "$RUN_DIR"

# Start from a clean slate: drop leftovers from any previous run.
rm -f chunk_list chunk_list_all ./chunk_*.gz ./current_*.gz history_*.json latest_*.json || true

# Minimal valid (empty) history file; seeds the chunk rotation and the
# "current" files below.
echo "{ \"files\" : [ ] }" | gzip -1 > empty.gz
newChunk empty.gz

cp empty.gz current_small.gz
cp empty.gz current_large.gz

# integrate original dump1090-fa history on startup so we don't start blank
if [[ -f "$SRC_DIR/history_0.json" ]]; then
    for i in "$SRC_DIR"/history_*.json; do
        FILE=$(basename "$i")
        if prune "$i" "$FILE"; then
            # Trailing comma lets the files be concatenated into a JSON array.
            sed -i -e '$a,' "$FILE"
        fi
    done

    # Wrap all pruned files into one gzipped chunk; '$d' removes the final
    # comma line so the array stays valid JSON.
    if sed -e '1i{ "files" : [' -e '$a]}' -e '$d' history_*.json | gzip -1 > temp.gz; then
        newChunk temp.gz
    fi
    # cleanup
    rm -f history_*.json
fi
i=0

# Main history loop (backgrounded): every INTERVAL seconds snapshot
# aircraft.json, maintain current_small.gz / current_large.gz, and roll a
# finished chunk every CHUNK_SIZE snapshots. The interval sleep runs in the
# background and is reaped by the 'wait' at the bottom, so each iteration
# takes roughly INTERVAL seconds regardless of the work done.
while true; do
    cd "$RUN_DIR"
    if ! [[ -f chunks.json ]]; then
        echo "$RUN_DIR/chunks.json was corrupted or removed, fatal!"
        exit 1
    fi
    sleep $INTERVAL &

    if ! [[ -f empty.gz ]]; then
        echo "{ \"files\" : [ ] }" | gzip -1 > empty.gz
    fi

    # Millisecond timestamp naming this snapshot's files.
    date=$(date +%s%3N)

    # Retry until aircraft.json exists and prunes cleanly; print the warning
    # at most once per 10 seconds (next_error is in milliseconds).
    next_error=0
    error_printed=0
    while ! [[ -f "$SRC_DIR/aircraft.json" ]] || ! prune "$SRC_DIR/aircraft.json" "history_$date.json"; do
        now=$(date +%s%N | head -c-7)
        if (( now > next_error )); then
            if (( next_error != 0 )); then
                echo "No aircraft.json found in $SRC_DIR during the last 30 seconds! Try restarting dump1090 or reinstalling tar1090 if you switched dump1090 to readsb!"
                error_printed=1
            fi
            next_error=$(( now + 10000 ))
        fi
        sleep 2 & wait $!
    done

    if (( error_printed != 0 )); then
        echo "Found aircraft.json in $SRC_DIR, continuing operation as per usual!"
    fi

    sed -i -e '$a,' "history_$date.json"

    if [[ $ENABLE_978 == "yes" ]] && prune 978.json "history_978_$date.json"; then
        sed -i -e '$a,' "history_978_$date.json"
    fi

    # Every 6th snapshot: rebuild current_large.gz from the whole set of
    # history files and reset the cheap "latest" set feeding current_small.gz.
    if (( i % 6 == 5 )); then
        sed -e '1i{ "files" : [' -e '$a]}' -e '$d' history_*.json | gzip -1 > temp.gz
        mv temp.gz current_large.gz
        cp empty.gz current_small.gz
        rm -f latest_*.json
    else
        if [[ -f "history_$date.json" ]]; then
            ln -s "history_$date.json" "latest_$date.json"
        fi
        if [[ $ENABLE_978 == "yes" ]] && [[ -f "history_978_$date.json" ]]; then
            ln -s "history_978_$date.json" "latest_978_$date.json" || true
        fi
        # current_small.gz only carries snapshots since the last rebuild.
        sed -e '1i{ "files" : [' -e '$a]}' -e '$d' latest_*.json | gzip -1 > temp.gz
        mv temp.gz current_small.gz
    fi

    i=$(( i + 1 ))

    # Chunk full: compress at the configured level, rotate, start fresh.
    if (( i == CHUNK_SIZE )); then
        sed -e '1i{ "files" : [' -e '$a]}' -e '$d' history_*.json | gzip "-$GZIP_LVL" > temp.gz
        newChunk temp.gz
        cp empty.gz current_small.gz
        cp empty.gz current_large.gz
        i=0
        rm -f history_*.json latest_*.json
    fi

    # Reap the interval sleep so the loop ticks at INTERVAL seconds.
    wait
done &
# Build the command that fetches 978 (UAT) aircraft data. A URL of the form
# FILE:///path means copy a local file instead of fetching over HTTP.
if [[ $(echo "$URL_978" | head -c7) == "FILE://" ]]; then
    COMMAND_978="cp $(echo -n "$URL_978" | tail -c+8) 978.tmp"
else
    COMMAND_978="wget -T 5 -q -O 978.tmp $URL_978/data/aircraft.json $COMPRESS_978"
fi

if [[ $ENABLE_978 == "yes" ]]; then
    # Background poller: fetch 978 data every INT_978 seconds, tag it as UAT
    # data and atomically publish it as 978.json.
    while true
    do
        sleep $INT_978 &
        if cd "$RUN_DIR" && $COMMAND_978; then
            sed -i -e 's/"now" \?:/"uat_978":"true","now":/' 978.tmp
            mv 978.tmp 978.json
        fi
        wait
    done &
fi
# planefinder integration: when PF_URL is configured (and PF_ENABLE is not
# "0"), periodically fetch it, strip the receiver's own location from the
# response and publish the result as pf.json.
if [[ -n "$PF_URL" ]] && [[ "x$PF_ENABLE" != "x0" ]]; then
    sleep 10 & wait $!
    while true
    do
        sleep 10 & wait $!
        TMP="$RUN_DIR/tar1090-tmp.pf.json"
        # Note: '&>/dev/null' (not '& >/dev/null') so wget stays in the
        # foreground and its exit status drives the if.
        if cd "$RUN_DIR" && wget -T 5 -O "$TMP" "$PF_URL" &>/dev/null; then
            # Remove user_lat/user_lon (receiver location) before publishing.
            sed -i -e 's/"user_l[a-z]*":"[0-9,.,-]*",//g' "$TMP"
            mv "$TMP" pf.json
            # First successful fetch: regenerate chunks.json so it carries
            # the pf_data flag.
            if ! grep -qs -e pf_data chunks.json; then
                newChunk refresh
            fi
        else
            rm -f "$TMP"
            # Back off for two minutes after a failed fetch.
            sleep 120 & wait $!
        fi
        wait
    done &
fi

# Keep the script alive while the background loops run; their exit status is
# intentionally ignored (shutdown is handled by the signal trap above).
wait || true