From 3138b72b4bc46ed5da4cd30cb665e42e630e925a Mon Sep 17 00:00:00 2001
From: Matthias Wirth
Date: Thu, 8 Aug 2019 23:35:58 +0200
Subject: [PATCH] chunk name list

---
 html/early.js  |  6 +++--
 html/script.js |  4 ++--
 tar1090.sh     | 61 +++++++++++++++++++++++++++++--------------------------------
 3 files changed, 35 insertions(+), 36 deletions(-)

diff --git a/html/early.js b/html/early.js
index 8eb1839..de90b9e 100644
--- a/html/early.js
+++ b/html/early.js
@@ -5,6 +5,7 @@ var RefreshInterval = 1000;
 var enable_uat = false;
 var HistoryChunks = false;
 var nHistoryItems = 0;
+var chunkNames;
 var PositionHistoryBuffer = [];
 var receiverJson;
 var deferHistory = [];
@@ -34,7 +35,8 @@ $.when(get_receiver_defer).done(function(data){
 $.when(test_chunk_defer).done(function(data) {
     test_chunk_defer = null;
     HistoryChunks = true;
-    nHistoryItems = data.chunks;
+    chunkNames = data.chunks;
+    nHistoryItems = chunkNames.length;
     enable_uat = (data.enable_uat == "true");
     if (enable_uat)
         console.log("UAT/978 enabled!");
@@ -79,7 +81,7 @@ function get_history_item(i) {
     var request;
 
     if (HistoryChunks) {
-        request = $.ajax({ url: 'chunks/chunk_' + i + '.gz',
+        request = $.ajax({ url: 'chunks/' + chunkNames[i],
             timeout: nHistoryItems * 4000, // Allow 4s load time per history chunk
             dataType: 'json' });
diff --git a/html/script.js b/html/script.js
index 59ae278..516e2d5 100644
--- a/html/script.js
+++ b/html/script.js
@@ -535,7 +535,7 @@ function parse_history() {
 
         if (PositionHistoryBuffer.length > 0) {
                 // Sort history by timestamp
-                console.log("Sorting history");
+                console.log("Sorting history: " + PositionHistoryBuffer.length);
                 PositionHistoryBuffer.sort(function(x,y) { return (y.now - x.now); });
 
                 // Process history
@@ -1362,7 +1362,7 @@ function refreshTableInfo() {
                        classes += " other";
                }
 
-               if (tableplane.selected)
+               if (tableplane.selected && !SelectedAllPlanes)
                        classes += " selected";
 
                if (tableplane.squawk in SpecialSquawks) {
diff --git a/tar1090.sh b/tar1090.sh
index 03cff43..a9b1f37 100755
--- a/tar1090.sh
+++ b/tar1090.sh
@@ -10,17 +10,27 @@ source /etc/default/tar1090
 
 dir=/run/tar1090
 hist=$(($HISTORY_SIZE))
-chunks=$(( $hist/$CS ))
-partial=$(($hist%$CS))
-if [[ $partial != 0 ]]
-then actual_chunks=$(($chunks+2))
-else actual_chunks=$(($chunks+1))
-fi
+chunks=$(( $hist/$CS + 2 ))
+#increase chunk size to get history size as close as we can
+CS=$(( CS - ( (CS - hist % CS)/(chunks-1) ) ))
+list="$dir/list_of_chunks"
+new_chunk() {
+    cur_chunk="chunk_$(date +%s).gz"
+    echo $cur_chunk >> $list
+    for iterator in $(head -n-$chunks $list); do rm -f $dir/$iterator; done
+    tail -n$chunks $list > newlist
+    mv newlist $list
+    as_json="\"chunk_recent.gz\"$(for i in $(cat $list); do echo -n ", \"$i\""; done)"
+    sed -e "s/\"chunks\" : \[.*\]/\"chunks\" : [ $as_json ]/" $dir/chunks.json > $dir/chunks.tmp
+    echo "{ \"files\" : [ ] }" | gzip -1 > $cur_chunk
+    mv $dir/chunks.tmp $dir/chunks.json
+}
 
 
 while true
 do
     cd $dir
+    rm -f $list
     rm -f $dir/*.gz
     rm -f $dir/*.json
 
@@ -30,9 +40,9 @@ do
         continue
     fi
     if [[ $ENABLE_978 == "yes" ]]; then
-        sed -i -e "s?history\" : [0-9]*?chunks\" : $actual_chunks, \"enable_uat\" : \"true\"?" chunks.json
+        sed -i -e "s?history\" : [0-9]*?chunks\" : [], \"enable_uat\" : \"true\"?" chunks.json
     else
-        sed -i -e "s/history\" : [0-9]*/chunks\" : $actual_chunks/" chunks.json
+        sed -i -e "s/history\" : [0-9]*/chunks\" : []/" chunks.json
     fi
 
     # integrate original dump1090-fa history on startup so we don't start blank
@@ -42,18 +52,15 @@ do
             sed -i -e '$a,' $i
         done
         sed -e '1i{ "files" : [' -e '$a]}' -e '$d' *history_*.json | gzip -9 > temp.gz
-        mv temp.gz chunk_0.gz
+        new_chunk
+        mv temp.gz $cur_chunk
     fi
 
     # cleanup
     rm -f history_*.json
 
-
-
-    # start with chunk 1 instead of 0 to not overwrite original dump1090-fa history just in case
-    i=0
-    j=1
-
+    i=0
+    new_chunk
     sleep 2;
 
     while true
@@ -80,15 +87,17 @@ do
         if [[ $((i%6)) == 5 ]]
         then
             sed -e '1i{ "files" : [' -e '$a]}' -e '$d' *history_*.json | gzip -1 > temp.gz
-            mv temp.gz chunk_$j.gz
-            rm -f *latest_*.json chunk_$(($actual_chunks - 1)).gz
+            echo "{ \"files\" : [ ] }" | gzip -1 > rec_temp.gz
+            mv temp.gz $cur_chunk
+            mv rec_temp.gz chunk_recent.gz
+            rm -f *latest_*.json
         else
             cp history_$((i%$CS)).json latest_$((i%6)).json
             if [[ $ENABLE_978 == "yes" ]]; then
                 cp 978_history_$((i%$CS)).json 978_latest_$((i%6)).json
             fi
             sed -e '1i{ "files" : [' -e '$a]}' -e '$d' *latest_*.json | gzip -1 > temp.gz
-            mv temp.gz chunk_$(($actual_chunks - 1)).gz
+            mv temp.gz chunk_recent.gz
         fi
 
         i=$((i+1))
@@ -96,22 +105,10 @@ do
         if [[ $i == $CS ]]
         then
             sed -e '1i{ "files" : [' -e '$a]}' -e '$d' *history_*.json | gzip -9 > temp.gz
-            mv temp.gz chunk_$j.gz
+            mv temp.gz $cur_chunk
             i=0
-            j=$((j+1))
-            rm -f *history_*.json
-        fi
-        if [[ $j == $chunks ]] && [[ $i == $partial ]]
-        then
-            if [[ $i != 0 ]]; then
-                # only necessary if the last chunk is a partial one
-                sed -e '1i{ "files" : [' -e '$a]}' -e '$d' *history_*.json | gzip -9 > temp.gz
-                mv temp.gz chunk_$j.gz
-            fi
-            # reset counters and do cleanup
-            i=0
-            j=0
             rm -f *history_*.json
+            new_chunk
         fi
 
         wait
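
Note (reviewer illustration, not part of the commit): this patch replaces the numeric "chunks" count in chunks.json with an explicit array of file names, which new_chunk() rewrites on every rotation and early.js consumes directly. A minimal JavaScript sketch of that consumer side, with made-up chunk_<epoch>.gz names standing in for whatever new_chunk() happens to generate:

    // Hypothetical chunks.json payload after a couple of rotations; the
    // timestamped names are examples only -- new_chunk() derives them from
    // `date +%s`, and chunk_recent.gz is always listed first by as_json.
    var data = {
        "chunks": ["chunk_recent.gz", "chunk_1565303758.gz", "chunk_1565304358.gz"],
        "enable_uat": "false"
    };

    // What the patched early.js does with it: keep the name list, derive the
    // item count from its length, and request each chunk by name instead of
    // by numeric index (chunks/chunk_<i>.gz).
    var chunkNames = data.chunks;
    var nHistoryItems = chunkNames.length;
    for (var i = 0; i < nHistoryItems; i++) {
        console.log('chunks/' + chunkNames[i]);
    }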