2019-05-28 20:49:58 +00:00
#!/bin/bash
# guard: this file is a scratchpad of notes/oneliners, not a runnable script;
# abort immediately if someone executes it by accident
echo not a script
exit 1
2022-03-20 06:15:57 +01:00
##
## add index.html banners
# for each folder containing an index.html, create a .prologue.html (if missing)
# with a link that opens the index
find -name index.html | sed -r 's/index.html$//' | while IFS= read -r dir; do f="$dir/.prologue.html"; [ -e "$f" ] || echo '<h1><a href="index.html">open index.html</a></h1>' >"$f"; done
2021-01-23 19:25:25 +01:00
##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)
# reads the temp-filenames (tnam) out of the up2k snapshot and removes them
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
2021-01-24 16:14:01 +01:00
# also remove the zero-byte placeholder files (final names) for unfinished uploads
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
2021-01-23 19:25:25 +01:00
2021-03-07 02:46:17 +01:00
##
## detect partial uploads based on file contents
## (in case of context loss or old copyparties)
# flags files whose last 1 MiB is all zeros except a trailing "e" marker byte
echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done
2024-01-27 18:52:08 +00:00
##
## sync pics/vids from phone
## (takes all files named (IMG|PXL|PANORAMA|Screenshot)_20231224_*)
cd /storage/emulated/0/DCIM/Camera
find -mindepth 1 -maxdepth 1 | sort | cut -c3- > ls
# NOTE: the target url must be built inside the loop -- $d (yyyymm) is the loop variable
urlbase=https://192.168.1.3:3923/rw/pics/Camera/; awk -F_ '!/^[A-Z][A-Za-z]{1,16}_[0-9]{8}[_-]/{next} {d=substr($2,1,6)} !t[d]++{print d}' ls | while read d; do grep -E "^[A-Z][A-Za-z]{1,16}_$d" ls | tr '\n' '\0' | xargs -0 python3 ~/dev/copyparty/bin/u2c.py -td $urlbase$d/ --; done
##
## convert symlinks to hardlinks (probably safe, no guarantees)
find -type l | while IFS= read -r lnk; do [ -h "$lnk" ] || { printf 'nonlink: %s\n' "$lnk"; continue; }; dst="$(readlink -f -- "$lnk")"; [ -e "$dst" ] || { printf '???\n%s\n%s\n' "$lnk" "$dst"; continue; }; printf 'relinking:\n  %s\n  %s\n' "$lnk" "$dst"; rm -- "$lnk"; ln -- "$dst" "$lnk"; done
##
## convert hardlinks to symlinks (maybe not as safe? use with caution)
# group files by inode; first occurrence is kept as the "real" file,
# later ones are replaced with symlinks pointing at it
e=; p=; find -printf '%i %p\n' | awk '{i=$1;sub(/[^ ]+ /,"")} !n[i]++{p[i]=$0;next} {printf "real %s\nlink %s\n",p[i],$0}' | while read cls p; do [ -e "$p" ] || e=1; p="$(realpath -- "$p")" || e=1; [ -e "$p" ] || e=1; [ $cls = real ] && { real="$p"; continue; }; [ $cls = link ] || e=1; [ "$p" ] || e=1; [ $e ] && { echo "ERROR $p"; break; }; printf '\033[36m%s \033[0m -> \033[35m%s\033[0m\n' "$p" "$real"; rm "$p"; ln -s "$real" "$p" || { echo LINK FAILED; break; }; done
2019-05-28 20:49:58 +00:00
##
## create a test payload
# 2 GiB of incompressible data (aes-ctr keystream over /dev/zero)
head -c $(( 2 * 1024 * 1024 * 1024 )) /dev/zero | openssl enc -aes-256-ctr -pass pass:hunter2 -nosalt > garbage.file
2019-05-26 16:30:19 +00:00
##
## testing multiple parallel uploads
## usage: para | tee log
2020-06-24 23:52:42 +00:00
# for each parallelism level s, do 4 rounds of s simultaneous uploads
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0; n<s; n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
2019-05-26 16:30:19 +00:00
##
## display average speed
## usage: avg logfile
# logfile format: a bare number starts a new parallelism group,
# lines containing " MiB" contribute their speed to the current group's average
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
2019-05-30 13:17:45 +00:00
2021-10-31 06:24:11 +01:00
##
## time between first and last upload
# run copyparty in no-write mode and capture its log for timing analysis
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
2022-07-15 02:39:32 +02:00
# parse the timestamps of "purl" (handshake) log lines into seconds-of-day
# (wrapping past midnight) and print last-minus-first plus both timestamps
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
2021-10-31 06:24:11 +01:00
2022-07-14 02:33:35 +02:00
# or if the client you're measuring dies for ~15sec every once in a while and you want to filter those out,
2022-07-15 02:39:32 +02:00
# same as above but subtracts any gap longer than 1 second from the total,
# printing each excluded gap as it is found
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !p{a=t;p=t;r=0;next} t-p>1{printf "%.3f += %.3f - %.3f (%.3f) # %.3f -> %.3f\n",r,p,a,p-a,p,t;r+=p-a;a=t} {p=t} END {print r+p-a}'
2022-07-14 02:33:35 +02:00
2021-10-31 06:24:11 +01:00
2022-09-15 01:18:19 +02:00
##
## find uploads blocked by slow i/o or maybe deadlocks
# pairs each POST log line with the following "writing" line from the same
# connection and prints the ones more than 10 seconds apart
awk '/^.\+. opened logfile/{print;next} {sub(/.$/,"")} !/^..36m[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3} /{next} !/0m(POST|writing) /{next} {c=0;p=$3} /0mPOST/{c=1} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");s=60*(60*$1+$2)+$3} c{t[p]=s;next} {d=s-t[p]} d>10{print $0 " # " d}'
2019-06-10 19:47:01 +00:00
##
## bad filenames
2021-09-15 21:53:30 +02:00
# folder names with unicode, invalid utf-8 bytes, urlencoding traps and quotes
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty' "'" 'uio&asd fgh')
2019-06-12 16:39:43 +00:00
# create all of the hostile test folders (expects the dirs array from above)
mkdir -p "${dirs[@]}"
2021-09-15 21:53:30 +02:00
# drop equally hostile filenames into each of the test folders
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty' "'" 'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
2021-07-30 19:28:14 +02:00
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
2019-06-10 19:47:01 +00:00
2022-10-13 20:24:45 +02:00
2019-07-10 01:15:35 +00:00
##
## upload mojibake
# filename made of raw non-utf8 bytes
fn=$(printf '\xba\xdc\xab.cab')
echo asdf > "$fn"
2020-06-24 23:52:42 +00:00
# upload the mojibake file into a urlencoded-invalid-byte folder
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
2019-07-10 01:15:35 +00:00
2019-06-26 18:26:18 +00:00
##
## test compression
2020-06-24 23:52:42 +00:00
# fetch with gzip accepted (old-IE user-agent) and compare md5 of the served,
# on-disk-gzipped, and locally-decompressed copies of ogv.js
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
2019-06-26 18:26:18 +00:00
2019-06-28 19:59:59 +00:00
##
## sha512(file) | base64
2019-07-02 00:14:55 +00:00
## usage: shab64 chunksize_mb filepath
2019-06-28 19:59:59 +00:00
2019-07-02 00:14:55 +00:00
# per-chunk sha512 of a file, truncated to 32 bytes and printed as
# 43-char base64url (the up2k chunk-hash format); one line per chunk
shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*1024*1024)); printf $(tail -c +$((v+1)) "$f" | head -c $((w-v)) | sha512sum | cut -c-64 | sed -r 's/ .*//;s/(..)/\\x\1/g') | base64 -w0 | cut -c-43 | tr '+/' '-_'; v=$w; [ $v -lt $sz ] || break; done; }
2019-06-28 19:59:59 +00:00
2021-03-21 17:15:47 +01:00
##
## poll url for performance issues
# every 0.1s: time a directory listing, summarize tag coverage, and print the
# wall-time color-coded (green <0.3s, yellow <0.8s, red above)
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
2021-11-20 00:20:34 +01:00
##
## track an up2k upload and print all chunks in file-order
# extracts the chunk-hash list from the file's handshake, then for each chunk
# prints the log line where it arrived plus the server's response line
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
2021-04-03 00:35:46 +02:00
##
## js oneliners
# get all up2k search result URLs
var t = [ ] ; var b = document.location.href.split( '#' ) [ 0] .slice( 0, -1) ; document.querySelectorAll( '#u2tab .prog a' ) .forEach( ( x) = > { t.push( b+encodeURI( x.getAttribute( "href" ) ) ) } ) ; console.log( t.join( "\n" ) ) ;
2021-09-17 01:36:06 +02:00
# debug md-editor line tracking
var s = mknod( 'style' ) ; s.innerHTML= '*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}' ; document.head.appendChild( s) ;
2021-06-01 01:16:40 +02:00
2022-03-20 06:15:57 +01:00
2021-06-01 01:16:40 +02:00
##
## bash oneliners
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
2021-06-01 02:53:54 +02:00
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
2021-06-14 03:05:50 +02:00
# unique stacks in a stackdump
# split the dump ($f) at each "#"-header, hash each stack body, and
# name the resulting file hash-header so uniq -cw24 groups duplicates
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done; find stacks/ | sort | uniq -cw24
2024-08-17 15:55:55 +00:00
# find unused css variables
# collects every --name definition and every var(--name) usage,
# then prints the defined-but-never-used names joined with |
cat browser.css | sed -r 's/(var\()/\n\1/g' | awk '{sub(/:/," ")} $1~/^--/{d[$1]=1} /var\(/{sub(/.*var\(/,"");sub(/\).*/,"");u[$1]=1} END{for (x in u) delete d[x]; for (x in d) print x}' | tr '\n' '|'
2021-04-03 00:35:46 +02:00
2021-03-20 01:00:57 +01:00
##
## sqlite3 stuff
# find dupe metadata keys
# self-join of mt on warks: rows sharing wark+key but different rowids
sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = mt2.k and mt1.rowid != mt2.rowid'
# partial reindex by deleting all tags for a list of files
2021-03-20 03:08:16 +00:00
# collect warks with duplicate keys ("+" disables the index on mt2.k)
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
2021-03-20 01:00:57 +01:00
# delete all tags for each wark listed in the warks file (triggers reindex)
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
2021-03-29 04:47:59 +02:00
# dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ | /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
2021-06-21 22:49:28 +00:00
# unschedule mtp scan for all files somewhere under "enc/"
# (t:mtp rows mark files still pending a metadata-parser pass)
sqlite3 -readonly up2k.db 'select substr(up.w,1,16) from up inner join mt on mt.w = substr(up.w,1,16) where rd like "enc/%" and +mt.k = "t:mtp"' > keys; awk '{printf "delete from mt where w = \"%s\" and +k = \"t:mtp\";\n", $0}' <keys | tee /dev/stderr | sqlite3 up2k.db
2021-06-22 22:21:39 +02:00
# compare metadata key "key" between two databases
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select w, v from mt where k = "key" order by w' > k2; ok=0; ng=0; while IFS='|' read w k2; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$(sqlite3 -readonly up2k.db.key-full "select * from up where substr(w,1,16) = '$w'" | sed -r 's/\|/ | /g')"; }; done < <(cat k2); echo "match $ok diff $ng"
# actually this is much better (joins in rd/fn so mismatches show the path)
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
2021-03-20 01:00:57 +01:00
2022-10-13 20:24:45 +02:00
##
## scanning for exceptions
cd /dev/shm
# last 30 days of python3 journal entries, timestamp + message only
journalctl -aS '720 hour ago' -t python3 -o with-unit --utc | cut -d\ -f2,6- > cpp.log
# show context around GeneratorExit warnings (newest first)
tac cpp.log | awk '/RuntimeError: generator ignored GeneratorExit/{n=1} n{n--;if(n==0)print} 1' | grep 'generator ignored GeneratorExit' -C7 | head -n 100
# drop known-benign tracebacks before paging through the rest
awk '/Exception ignored in: <generator object StreamZip.gen/{s=1;next} /could not create thumbnail/{s=3;next} s{s--;next} 1' <cpp.log | less -R
less-search:
>: | Exception| Traceback
2022-09-24 22:41:00 +02:00
##
## tracking bitflips
l=log.tmux-1662316902 # your logfile (tmux-capture or decompressed -lo)
# grab handshakes to a smaller logfile
tr -d '\r' <$l | awk '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} !d{next} /"purl": "/{t=substr($1,6);sub(/[^ ]+ /,"");sub(/ .\[34m[0-9]+ /," ");printf("%s %s %s %s\n",d,t,ip,$0)}' | while read d t ip f; do u=$(date +%s --date="${d}T${t}Z"); printf '%s\n' "$u $ip $f"; done > handshakes
# quick list of affected files
grep 'your chunk got corrupted somehow' -A1 $l | tr -d '\r' | grep -E '^[a-zA-Z0-9_-]{44}$' | sort | uniq | while IFS= read -r x; do grep -F "$x" handshakes | head -c 200; echo; done | sed -r 's/.*"name": "//' | sort | uniq -cw20
# find all cases of corrupt chunks and print their respective handshakes (if any),
# timestamps are when the corrupted chunk was received (and also the order they are displayed),
# first checksum is the expected value from the handshake, second is what got uploaded
awk <$l '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} /your chunk got corrupted somehow/{n=2;t=substr($1,6);next} !n{next} {n--;sub(/\r$/,"")} n{a=$0;next} {sub(/.\[0m,.*/,"");printf "%s %s %s %s\n",d,t,a,$0}' |
while read d t h1 h2; do printf '%s %s\n' $d $t; (
printf ' %s [%s]\n' $h1 "$(grep -F $h1 <handshakes | head -n 1)"
printf ' %s [%s]\n' $h2 "$(grep -F $h2 <handshakes | head -n 1)"
) | sed 's/, "sprs":.*//'; done | less -R
# notes; TODO clean up and put in the readme maybe --
# quickest way to drop the bad files (if a client generated bad hashes for the initial handshake) is shutting down copyparty and moving aside the unfinished file (both the .PARTIAL and the empty placeholder)
# BUT the clients will immediately re-handshake the upload with the same bitflipped hashes, so the uploaders have to refresh their browsers before you do that,
# so maybe just ask them to refresh and do nothing for 6 hours so the timeout kicks in, which deletes the placeholders/name-reservations and you can then manually delete the .PARTIALs at some point later
2022-10-13 20:24:45 +02:00
2021-03-21 05:57:24 +01:00
##
## media
# split track into test files ($e minutes total, $s seconds per file)
e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d; while true; do ffmpeg -hide_banner -ss $p -i 'nervous_testpilot - office.mp3' -c copy -t $s $d/$(printf %04d $n).mp3; n=$((n+1)); p=$((p+s)); [ $p -gt $e ] && break; done
# copyparty volume-config to run the sleep.py mtp plugin on those files:
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
2021-08-29 00:11:06 +02:00
# generate the sine meme
# sweep frequencies, rank by base64+gzip size of the rendered tone
for ((f=420; f<1200; f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
2023-04-29 09:31:53 +00:00
# better sine
# 8 kHz / 8-bit / mono, 1.1s of 400 Hz at low volume
sox -DnV -r8000 -b8 -c1 /dev/shm/a.wav synth 1.1 sin 400 vol 0.02
2021-09-01 22:35:27 +02:00
# play icon calibration pics (one brown canvas + orange circle per w/h combo)
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done
play compressed s3xmodit chiptunes
adds support for playing gz, xz, and zip-compressed tracker files
using the de-facto naming convention for compressed modules;
* mod: mdz, mdgz, mdxz
* s3m: s3z, s3gz, s3xz
* xm: xmz, xmgz, xmxz
* it: itz, itgz, itxz
2024-05-10 12:45:17 +00:00
# compress chiptune modules (gz / xz / zip variants, keeping mtimes)
mkdir gz; for f in *.*; do pigz -c11 -I100 <"$f" >gz/"$f"gz; touch -r "$f" gz/"$f"gz; done
mkdir xz; for f in *.*; do xz -cz9 <"$f" >xz/"$f"xz; touch -r "$f" xz/"$f"xz; done
mkdir z; for f in *.*; do 7z a -tzip -mx=9 -mm=lzma "z/${f}z" "$f" && touch -r "$f" z/"$f"z; done
2021-03-21 05:57:24 +01:00
2019-05-30 13:17:45 +00:00
##
## vscode
# replace variable name
# (^|[^\w])oldname([^\w]|$) => $1newname$2
2019-06-26 19:02:15 +00:00
# monitor linter progress
# (builds a comma-separated pid list of all electron processes)
htop -d 2 -p $( ps ax | awk '/electron[ ]/ {printf "%s%s", v, $1;v=","}' )
# prep debug env (vscode embedded terminal)
renice 20 -p $$
# cleanup after a busted shutdown
2019-07-02 23:36:16 +00:00
# kill leftover copyparty / multiprocessing workers after a busted shutdown
ps ax | awk '/python[23]? -m copyparty|python[ ]-c from multiproc/ {print $1}' | tee /dev/stderr | xargs kill
# last line of each function in a file
cat copyparty/httpcli.py | awk '/^[^a-zA-Z0-9]+def / {printf "%s\n%s\n\n", f, pl; f=$2} /[a-zA-Z0-9]/ {pl=$0}'
2019-07-10 01:15:35 +00:00
##
## meta
# create a folder with symlinks to big files
for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS= read -r x; do ln -s "$x" big/; done
2019-11-11 04:12:25 +01:00
2024-09-12 21:42:33 +00:00
# up2k worst-case testfiles: create 64 GiB (256 x 256 MiB) of sparse files; each file takes 1 MiB disk space; each 1 MiB chunk is globally unique
# NOTE(review): with bs=2, seek is in 2-byte units, so offsets land at o*2MiB -- confirm whether seek=$((o*1024*1024/2)) was intended
for f in {0..255}; do echo $f; truncate -s 256M $f; b1=$(printf '%02x' $f); for o in {0..255}; do b2=$(printf '%02x' $o); printf "\x$b1\x$b2" | dd of=$f bs=2 seek=$((o*1024*1024)) conv=notrunc 2>/dev/null; done; done
2024-10-11 19:48:44 +00:00
# create 6.06G file with 16 bytes of unique data at start+end of each 32M chunk
sz=6509559808; truncate -s $sz f; csz=33554432; sz=$((sz/16)); step=$((csz/16)); ofs=0; while [ $ofs -lt $sz ]; do dd if=/dev/urandom of=f bs=16 count=2 seek=$ofs conv=notrunc iflag=fullblock; [ $ofs = 0 ] && ofs=$((ofs+step-1)) || ofs=$((ofs+step)); done
2025-01-10 18:24:40 +00:00
# same but for chunksizes 16M (3.1G), 24M (4.1G), 48M (128.1G)
# (pick exactly one sz/csz pair, then run the last line)
sz=3321225472; csz=16777216;
sz=4394967296; csz=25165824;
sz=6509559808; csz=33554432;
sz=138438953472; csz=50331648;
f=csz-$csz; truncate -s $sz $f; sz=$((sz/16)); step=$((csz/16)); ofs=0; while [ $ofs -lt $sz ]; do dd if=/dev/urandom of=$f bs=16 count=2 seek=$ofs conv=notrunc iflag=fullblock; [ $ofs = 0 ] && ofs=$((ofs+step-1)) || ofs=$((ofs+step)); done
2024-09-12 21:42:33 +00:00
2019-11-11 04:12:25 +01:00
# py2 on osx
brew install python@2
pip install virtualenv
2020-05-15 00:52:57 +02:00
2021-03-29 20:08:32 +02:00
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)
devtools settings >> advanced >> enable browser chrome debugging + enable remote debugging
burger > developer >> browser toolbox (ctrl-alt-shift-i)
iframe btn topright >> chrome://devtools/content/debugger/index.html
dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1
2020-05-15 00:52:57 +02:00
2021-06-08 00:01:08 +02:00
# determine server version
2021-07-19 23:46:44 +02:00
# for every revision, diff the bundled web-js against the current one and print the diff size per rev
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
2021-11-14 19:28:44 +01:00
# download every released copyparty-sfx.py, naming each by tag + release title
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
2021-06-08 00:01:08 +02:00
2022-06-17 15:33:57 +02:00
# convert releasenotes to changelog
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | "▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ \n# \(.created_at) `\(.tag_name)` \(.name)\n\n\(.body)\n\n\n"' | sed -r 's/^# ([0-9]{4}-)([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})Z /# \1\2\3-\4\5 /' > changelog.md
2021-11-28 04:22:05 +01:00
# push to multiple git remotes
# (a single "all" remote with several push urls; fetch stays on github)
git config -l | grep '^remote'
git remote add all git@github.com:9001/copyparty.git
git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
git remote set-url --add --push all git@github.com:9001/copyparty.git
2021-04-03 00:35:46 +02:00
2022-03-20 06:15:57 +01:00
2020-05-15 00:52:57 +02:00
##
## http 206
# az = abcdefghijklmnopqrstuvwxyz
printf '%s\r\n' 'GET /az HTTP/1.1' 'Host: ocv.me' 'Range: bytes=5-10' '' | ncat ocv.me 80
# Content-Range: bytes 5-10/26
# Content-Length: 6
# fghijk
Range: bytes=0-1    "ab"   Content-Range: bytes 0-1/26
Range: bytes=24-24  "y"    Content-Range: bytes 24-24/26
Range: bytes=24-25  "yz"   Content-Range: bytes 24-25/26
Range: bytes=24-    "yz"   Content-Range: bytes 24-25/26
Range: bytes=25-29  "z"    Content-Range: bytes 25-25/26
Range: bytes=26-           Content-Range: bytes */26
HTTP/1.1 416 Requested Range Not Satisfiable
2020-05-17 00:33:34 +02:00
##
## md perf
var tsh = [ ] ;
function convert_markdown( md_text, dest_dom) {
2021-04-23 20:04:17 +02:00
tsh.push( Date.now( ) ) ;
2020-05-17 00:33:34 +02:00
while ( tsh.length > 10)
tsh.shift( ) ;
if ( tsh.length > 1) {
var end = tsh.slice( -2) ;
console.log( "render" , end.pop( ) - end.pop( ) , ( tsh[ tsh.length - 1] - tsh[ 0] ) / ( tsh.length - 1) ) ;
}
2020-06-24 23:53:23 +00:00
##
## tmpfiles.d meme
# mk: recreate /tmp/foo (owned by user ed) with one file inside
mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
# fast-forward the clock until systemd-tmpfiles reaps /tmp/foo
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
# same, but with the dir flock'd by another process
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
# same, but re-reading the dir each round to refresh atime
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"
2021-06-21 22:49:28 +00:00