make /?pTracks better

Matthias Wirth
2020-09-06 11:51:31 +02:00
parent 8c9d887617
commit 924b95ece3
5 changed files with 46 additions and 8 deletions

View File

@@ -35,7 +35,11 @@ Click the following URL and replace the IP address with address of your Raspberr
http://192.168.x.yy/tar1090
Check further down for keyboard shortcuts.
If you are curious about your coverage, try this URL:
http://192.168.x.yy/tar1090/?pTracks
Check further down for keyboard shortcuts.
## Update (same command as installation)
@@ -255,10 +259,12 @@ sudo wget -O /usr/local/share/tar1090/html/upintheair.json "http://www.heywhatst
```
- You should now have an outline of the theoretical range for aircraft at 40000 ft on your tar1090 map
- It might be interesting to compare it to http://192.168.x.yy/tar1090/?pTracks which by default will display the last 8 hours of traces.
## A separate instance with longer data retention for gauging range
If this seems too complicated or you don't want a 2nd instance, changing/adding PTRACKS=24 in the /etc/default/tar1090 configuration will also extend the history (for /?pTracks only); see the sketch below.
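A minimal sketch of that simpler route (PTRACKS and the /etc/default/tar1090 path come from the shipped defaults; the restart command assumes the standard tar1090 systemd service installed by the install script):
```
sudo nano /etc/default/tar1090          # add or change the line: PTRACKS=24
sudo systemctl restart tar1090          # restart so the new value is picked up
```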
```
sudo nano /etc/default/tar1090_instances
```

View File

@@ -12,12 +12,18 @@ URL_978="http://127.0.0.1/skyaware978"
# 1-9 are valid, lower lvl: less CPU usage, higher level: less network bandwidth used when loading the page
GZIP_LVL=3
# hours of tracks that /?pTracks will show
PTRACKS=8
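# 0 disables fetching planefinder data even if PF_URL is set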
PF_ENABLE=0
# no need to change the lines below
CHUNK_SIZE=60
INT_978=1

View File

@@ -24,6 +24,7 @@ let heatPoints = [];
let replay = false;
let rData = [];
let StaleReceiverCount = 0;
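// set to true when the page is opened with the ?pTracks query parameter (extended track history view)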
let pTracks = false;
let databaseFolder = "db2";
@@ -113,6 +114,11 @@ try {
let val;
if (val = parseInt(search.get('heatmap'), 10))
heatmap.max = val;
}
if (search.has('pTracks')) {
pTracks = true;
}
} catch (error) {
@@ -229,7 +235,7 @@ if (uuid != null) {
} else {
$.when(test_chunk_defer).done(function(data) {
HistoryChunks = true;
chunkNames = data.chunks;
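// the pTracks view loads the longer chunks_all list (PTRACKS hours) instead of the default history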
chunkNames = pTracks ? data.chunks_all : data.chunks;
nHistoryItems = chunkNames.length;
enable_uat = (data.enable_uat == "true");
enable_pf_data = (data.pf_data == "true");

View File

@@ -110,7 +110,6 @@ let lastActive = new Date().getTime();
let inactive = 0;
let firstFetchDone = false;
let overrideMapType = null;
let pTracks = false;
let shareLink = '';

View File

@@ -40,7 +40,7 @@ fi
if (( ${#GZIP_LVL} < 1 || ${#GZIP_LVL} > 9 ));
then
echo "gzip level unspecified, using level 3"
echo "gzip level unspecified, using level 1"
GZIP_LVL=3
fi
@@ -50,16 +50,32 @@ chunks=$(( hist/CHUNK_SIZE + 1 ))
#increase chunk size to get history size as close as we can
CHUNK_SIZE=$(( CHUNK_SIZE - ( (CHUNK_SIZE - hist % CHUNK_SIZE)/chunks ) ))
if [[ -z $PTRACKS ]]; then
PTRACKS=8
fi
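# chunks needed to cover PTRACKS hours: one snapshot every INTERVAL seconds, CHUNK_SIZE snapshots per chunk, rounded up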
chunksAll=$(awk "function ceil(x){return int(x)+(x>int(x))} BEGIN {printf ceil($PTRACKS * 3600 / $INTERVAL / $CHUNK_SIZE)}")
if (( chunksAll < chunks )); then
chunksAll=$chunks
fi
new_chunk() {
if [[ $1 != "refresh" ]]; then
cur_chunk="chunk_$(date +%s%N | head -c-7).gz"
echo "$cur_chunk" >> chunk_list
echo "$cur_chunk" >> chunk_list_all
cp "$1" "$cur_chunk"
fi
for iterator in $(head -n-$chunks chunk_list); do rm -f "$RUN_DIR/$iterator"; done
for iterator in $(head -n-$chunksAll chunk_list_all); do rm -f "$RUN_DIR/$iterator"; done
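# trim both lists to their retention windows: $chunksAll entries for the long list, $chunks for the short one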
tail -n$chunksAll chunk_list_all > chunk_list_all.tmp
mv chunk_list_all.tmp chunk_list_all
tail -n$chunks chunk_list > chunk_list.tmp
mv chunk_list.tmp chunk_list
# construct chunks.json
JSON='{'
@@ -68,6 +84,10 @@ new_chunk() {
JSON="$JSON"' "chunks": [ '
JSON="$JSON""$(while read -r i; do echo -n "\"$i\", "; done < chunk_list)"
JSON="$JSON"' "current_large.gz", "current_small.gz" ],'
JSON="$JSON"' "chunks_all": [ '
JSON="$JSON""$(while read -r i; do echo -n "\"$i\", "; done < chunk_list_all)"
JSON="$JSON"' "current_large.gz", "current_small.gz" ] }'
echo "$JSON" > "$RUN_DIR/chunks.json"
@@ -96,11 +116,12 @@ do
sleep 180
continue
fi
rm -f chunk_list ./chunk_*.gz ./current_*.gz history_*.json latest_*.json || true
rm -f chunk_list chunk_list_all ./chunk_*.gz ./current_*.gz history_*.json latest_*.json || true
cp empty.gz current_small.gz
cp empty.gz current_large.gz
touch chunk_list
touch chunk_list_all
# integrate original dump1090-fa history on startup so we don't start blank
if [[ -f "$SRC_DIR"/history_0.json ]]; then
@@ -207,7 +228,7 @@ fi
sleep 10
if [[ -n $PF_URL ]]; then
if [[ -n $PF_URL ]] && [[ "x$PF_ENABLE" != "x0" ]]; then
TMP="/tmp/tar1090-tmp.pf.json.$RANDOM$RANDOM"
while true
do