Compare commits
583 Commits
583 commits, from 4087eaa3c6 (first listed) through a63418aaac (last listed).
.coveragerc (10 additions)
@@ -1,2 +1,12 @@
 [run]
 omit =
+concurrency = multiprocessing
+[report]
+exclude_lines =
+    pragma: no cover
+    if TYPE_CHECKING:
+    if typing.TYPE_CHECKING:
+    def __repr__
+    raise AssertionError
+    assert False
+    ^\s*pass\b
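For reference on how those `exclude_lines` regexes behave: coverage.py drops any source line matching one of them from reporting, and when the match is a block introducer (like `if TYPE_CHECKING:` or `def __repr__`) the whole block it opens is excluded. A small sketch with invented names:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:  # whole block excluded: matches "if TYPE_CHECKING:"
    from decimal import Decimal


class Wallet:
    def __init__(self, balance: int) -> None:
        self.balance = balance

    def __repr__(self) -> str:  # whole method excluded: matches "def __repr__"
        return f"Wallet(balance={self.balance})"

    def withdraw(self, amount: int) -> int:
        if amount > self.balance:
            raise AssertionError("insufficient funds")  # line excluded
        self.balance -= amount
        return self.balance
```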
.github/workflows/bundle_windows.yml (new file, 68 lines)
@@ -0,0 +1,68 @@
+name: Bundle Windows EXE
+
+
+on:
+  # Only trigger on release creation
+  release:
+    types:
+      - created
+  workflow_dispatch:
+    inputs:
+      ref_name:
+        description: Name to use for the release
+env:
+  target_tag: ${{ github.ref_name || github.event.inputs.ref_name }}
+  sha: ${{ github.sha || github.event.inputs.ref_name }}
+
+
+jobs:
+  build:
+
+    runs-on: windows-2022
+    permissions:
+      contents: write
+    strategy:
+      matrix:
+        python-version: ["3.12"]
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Get history and tags for SCM versioning to work
+        run: |
+          git fetch --prune --unshallow
+          git fetch --depth=1 origin +refs/tags/*:refs/tags/*
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+          pip install -e .
+          pip install cx_freeze
+
+      - name: Bundle with cx_Freeze
+        shell: bash
+        run: |
+          python setup_cxfreeze.py build_exe
+          pip install pip-licenses
+          pip-licenses --format=plain-vertical --with-license-file --no-license-path --output-file=lib_licenses.txt
+          python setup_cxfreeze.py finalize_cxfreeze
+          # Should only be one, but we don't know what it's named
+          mv ./dist/*.zip hippolyzer-windows-${{ env.target_tag }}.zip
+
+      - name: Upload the artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: hippolyzer-windows-${{ env.sha }}
+          path: ./hippolyzer-windows-${{ env.target_tag }}.zip
+
+      - uses: ncipollo/release-action@v1.10.0
+        if: github.event_name != 'workflow_dispatch'
+        with:
+          artifacts: hippolyzer-windows-${{ env.target_tag }}.zip
+          tag: ${{ env.target_tag }}
+          token: ${{ secrets.GITHUB_TOKEN }}
+          allowUpdates: true
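The build step drives a repo-local `setup_cxfreeze.py` (including a custom `finalize_cxfreeze` command) whose contents are not part of this diff. A minimal sketch of what a cx_Freeze script of that general shape could look like; the entry-point path, target name, and options here are assumptions for illustration, not the project's actual script:

```python
# Hypothetical sketch only; the real setup_cxfreeze.py is not shown in this diff.
from cx_Freeze import Executable, setup

setup(
    name="hippolyzer",
    options={
        "build_exe": {
            # Modules cx_Freeze's import scanner may miss (illustrative)
            "packages": ["hippolyzer"],
            # Ship the license dump produced by pip-licenses in the workflow
            "include_files": ["lib_licenses.txt"],
        },
    },
    executables=[
        # Entry-point script name is invented for illustration
        Executable("proxy_gui.py", base="Win32GUI", target_name="hippolyzer_gui.exe"),
    ],
)
```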
.github/workflows/pypi_publish.yml (16 changes)
@@ -6,6 +6,8 @@ on:
   release:
     types:
       - created
+  workflow_dispatch:
+
 
 # based on https://github.com/pypa/gh-action-pypi-publish
 
@@ -14,18 +16,22 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
+    - name: Get history and tags for SCM versioning to work
+      run: |
+        git fetch --prune --unshallow
+        git fetch --depth=1 origin +refs/tags/*:refs/tags/*
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.9
+        python-version: "3.12"
 
     - name: Install dependencies
       run: |
-        python -m pip install --upgrade pip setuptools wheel
+        python -m pip install --upgrade pip setuptools wheel build
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Build
       run: >-
-        python setup.py sdist bdist_wheel
+        python -m build
     # We do this, since failures on test.pypi aren't that bad
     - name: Publish to Test PyPI
       if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
@@ -34,6 +40,7 @@ jobs:
       user: __token__
       password: ${{ secrets.TEST_PYPI_API_TOKEN }}
       repository_url: https://test.pypi.org/legacy/
+      attestations: false
 
     - name: Publish to PyPI
       if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
@@ -41,3 +48,4 @@ jobs:
     with:
       user: __token__
       password: ${{ secrets.PYPI_API_TOKEN }}
+      attestations: false
.github/workflows/pytest.yml (46 changes)
@@ -1,6 +1,12 @@
 name: Run Python Tests
 
-on: [push]
+on:
+  push:
+    paths-ignore:
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - '*.md'
 
 jobs:
   build:
@@ -8,20 +14,44 @@
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ["3.12", "3.13"]
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
+    - name: Get history and tags for SCM versioning to work
+      run: |
+        git fetch --prune --unshallow
+        git fetch --depth=1 origin +refs/tags/*:refs/tags/*
     - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
 
    - name: Install dependencies
      run: |
-        python -m pip install --upgrade pip
-        pip install flake8 pytest
-        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-    - name: Test with pytest
+        python -m pip install --upgrade pip wheel
+        pip install -r requirements.txt
+        pip install -r requirements-test.txt
+        sudo apt-get install libopenjp2-7
+        pip install -e .
+    - name: Run Flake8
      run: |
-        pytest
+        flake8 .
+    - name: Test with pytest
+      # Tests are intentionally covered to detect broken tests.
+      run: |
+        pytest --cov=./hippolyzer --cov=./tests --cov-report=xml
+
+    # Keep this in a workflow without any other secrets in it.
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v1
+      with:
+        token: ${{ secrets.CODECOV_TOKEN }}
+        files: ./coverage.xml
+        directory: ./coverage/reports/
+        flags: unittests
+        env_vars: OS,PYTHON
+        name: codecov-umbrella
+        fail_ci_if_error: false
+        path_to_write_report: ./coverage/codecov_report.txt
+        verbose: false
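On the new inline comment that "Tests are intentionally covered": passing `--cov=./tests` alongside `--cov=./hippolyzer` means a test that is never collected or never executes shows up as uncovered lines in the report. A toy illustration of the failure mode this guards against (file and function names are invented):

```python
# tests/test_math.py -- illustrative file name
def test_addition():
    # Collected and executed by pytest: these lines report as covered.
    assert 1 + 1 == 2


def tset_subtraction():
    # Typo'd "test_" prefix: pytest never collects this, so under
    # --cov=./tests its body reports as uncovered, flagging the broken test.
    assert 2 - 1 == 1
```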
.gitignore (1 addition)
@@ -1,6 +1,7 @@
 #use glob syntax
 syntax: glob
 
 __pycache__
 *.pyc
 build/*
+*.egg-info
README.md (102 changes)
@@ -1,6 +1,8 @@
 # Hippolyzer
 
-[Hippolyzer](http://wiki.secondlife.com/wiki/Hippo) is a fork of Linden Lab's abandoned
+[](https://codecov.io/gh/SaladDais/Hippolyzer)
+
+[Hippolyzer](http://wiki.secondlife.com/wiki/Hippo) is a revival of Linden Lab's
 [PyOGP library](http://wiki.secondlife.com/wiki/PyOGP)
 targeting modern Python 3, with a focus on debugging issues in Second Life-compatible
 servers and clients. There is a secondary focus on mocking up new features without requiring a
@@ -22,7 +24,10 @@ with low-level SL details. See the [Local Animation addon example](https://githu
 
 
 ## Setup
-* Python 3.8 or above is **required**. If you're unable to upgrade your system Python package due to
+
+### From Source
+
+* Python 3.12 or above is **required**. If you're unable to upgrade your system Python package due to
 being on a stable distro, you can use [pyenv](https://github.com/pyenv/pyenv) to create
 a self-contained Python install with the appropriate version.
 * [Create a clean Python 3 virtualenv](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment)
@@ -32,14 +37,18 @@ with low-level SL details. See the [Local Animation addon example](https://githu
 * * Under Windows it's `<virtualenv_dir>\Scripts\activate.bat`
 * Run `pip install hippolyzer`, or run `pip install -e .` in a cloned repo to install an editable version
 
+### Binary Windows Builds
+
+Binary Windows builds are available on the [Releases page](https://github.com/SaladDais/Hippolyzer/releases/).
+I don't extensively test these, building from source is recommended.
+
 ## Proxy
 
 A proxy is provided with both a CLI and Qt-based interface. The proxy application wraps a
 custom SOCKS 5 UDP proxy, as well as an HTTP proxy based on [mitmproxy](https://mitmproxy.org/).
 
 Multiple clients are supported at a time, and UDP messages may be injected in either
-direction. The proxy UI was inspired by the Message Log and Message Builder as present in
-the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
+direction.
 
 ### Proxy Setup
 
@@ -52,15 +61,30 @@ the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
 On Linux that would be `~/.firestorm_x64/` if you're using Firestorm.
 * * Certificate validation can be disabled entirely through viewer debug setting `NoVerifySSLCert`,
 but is not recommended.
 
+#### Windows
+
+Windows viewers have broken SOCKS 5 proxy support. To work around that, you need to use a wrapper EXE that
+can make the viewer to correctly talk to Hippolyzer. Follow the instructions on https://github.com/SaladDais/WinHippoAutoProxy
+to start the viewer and run it through Hippolyzer.
+
+The proxy should _not_ be configured through the viewer's own preferences panel, it won't work correctly.
+
+#### OS X & Linux
+
+SOCKS 5 works correctly on these platforms, so you can just configure it through the
+`preferences -> network -> proxy settings` panel:
+
 * Start the viewer and configure it to use `127.0.0.1:9061` as a SOCKS proxy and `127.0.0.1:9062` as
 an HTTP proxy. You **must** select the option in the viewer to use the HTTP proxy for all HTTP
 traffic, or logins will fail.
 * Optionally, If you want to reduce HTTP proxy lag you can have asset requests bypass the HTTP proxy by setting
-the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm` or
-`setx /m "no_proxy" "asset-cdn.glb.agni.lindenlab.com"` on Windows.
+the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm`.
 * Log in!
 
 
+Or, if you're on Linux, you can instead use [LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy)
+to launch your viewer, which will configure everything for you. Note that connections from the in-viewer browser will
+likely _not_ be run through Hippolyzer when using LinHippoAutoProxy.
+
 ### Filtering
 
@@ -85,11 +109,14 @@ agent's session, you can do `(Meta.AgentID == None || Meta.AgentID == "d929385f-
 Vectors can also be compared. This will get any ObjectUpdate variant that occurs within a certain range:
 `(*ObjectUpdate*.ObjectData.*Data.Position > (110, 50, 100) && *ObjectUpdate*.ObjectData.*Data.Position < (115, 55, 105))`
 
+If you want to compare against an enum or a flag class defined in `templates.py`, you can just specify its name:
+`ViewerEffect.Effect.Type == ViewerEffectType.EFFECT_BEAM`
+
 ### Logging
 
 Decoded messages are displayed in the log pane, clicking one will show the request and
 response for HTTP messages, and a human-friendly form for UDP messages. Some messages and
-fields have [special packers defined](https://github.com/SaladDais/Hippolyzer/blob/master/hippolyzer/lib/proxy/templates.py)
+fields have [special packers defined](https://github.com/SaladDais/Hippolyzer/blob/master/hippolyzer/lib/base/templates.py)
 that will give a more human-readable form of enum or binary fields, with the original form beside or below it.
 
 For example, an `AgentUpdate` message may show up in the log pane like:
@@ -200,7 +227,7 @@ OUT ObjectAdd
 ```
 
 The repeat spinner at the bottom of the window lets you send a message multiple times.
-an `i` variable is put into the eval context and can be used to vary messages accros repeats.
+an `i` variable is put into the eval context and can be used to vary messages across repeats.
 With repeat set to two:
 
 ```
@@ -287,14 +314,26 @@ If you are a viewer developer, please put them in a viewer.
 apply the mesh to the local mesh target. It works on attachments too. Useful for testing rigs before a
 final, real upload.
 
+## REPL
+
+A quick and dirty REPL is also included for when you want to do ad-hoc introspection of proxy state.
+It can be launched at any time by typing `/524 spawn_repl` in chat.
+
+
+
+The REPL is fully async aware and allows awaiting events without blocking:
+
+```python
+>>> from hippolyzer.lib.client.object_manager import ObjectUpdateType
+>>> evt = await session.objects.events.wait_for((ObjectUpdateType.UPDATE,), timeout=2.0)
+>>> evt.updated
+{'Position'}
+```
+
 ## Potential Changes
 
-* Make package-able for PyPI
-* GitHub action to build binary packages and pull together licenses bundle
 * AISv3 wrapper?
 * Higher level wrappers for common things? I don't really need these, so only if people want to write them.
 * Highlight matched portion of message in log view, if applicable
 * * Remember deep filters and return a map of them, have message formatter return text ranges?
 * Move things out of `templates.py`, right now most binary serialization stuff lives there
 because it's more convenient for me to hot-reload.
 * Ability to add menus?
@@ -303,10 +342,23 @@ If you are a viewer developer, please put them in a viewer.
 
 [LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.en.html). If you have a good reason why, I might dual license.
 
-This package [includes portions of the Second Life(TM) Viewer Artwork](https://github.com/SaladDais/Hippolyzer/tree/master/hippolyzer/lib/proxy/data),
+This package [includes portions of the Second Life(TM) Viewer Artwork](https://github.com/SaladDais/Hippolyzer/tree/master/hippolyzer/lib/base/data),
 Copyright (C) 2008 Linden Research, Inc. The viewer artwork is licensed under the Creative Commons
 Attribution-Share Alike 3.0 License.
 
+## Contributing
+
+Ensure that any patches are clean with no unnecessary whitespace or formatting changes, and that you
+add new tests for any added functionality.
+
+## Philosophy
+
+With a few notable exceptions, Hippolyzer focuses mainly on decomposition of data, and doesn't
+provide many high-level abstractions for interpreting or manipulating that data. It's careful
+to only do lossless transforms on data that are just prettier representations of the data sent
+over the wire. Hippolyzer's goal is to help people understand how Second Life actually works,
+automatically employing abstractions that hide how SL works is counter to that goal.
+
 ## For Client Developers
 
 This section is mostly useful if you're developing a new SL-compatible client from scratch. Clients based
@@ -320,18 +372,20 @@ UDP proxy and an HTTP proxy.
 To have your client's traffic proxied through Hippolyzer the general flow is:
 
 * Open a TCP connection to Hippolyzer's SOCKS 5 proxy port
-* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS
+* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS TCP
 connections to SL sessions
 * Send a UDP associate command without authentication
 * The proxy will respond with a host / port pair that UDP messages may be sent through
-* At this point you will no longer need to use the TCP connection, but it must be kept
+* At this point you will no longer need to use the TCP connection, but it must be kept
 alive until you want to break the UDP association
 * Whenever you send a UDP packet to a remote host, you'll need to instead send it to the host / port
 from the UDP associate response. A SOCKS 5 header must be prepended to the data indicating the ultimate destination
 of the packet
 * Any received UDP packets will also have a SOCKS 5 header indicating the real source IP and address
 * * When in doubt, check `socks_proxy.py`, `packets.py` and the SOCKS 5 RFC for more info on how to deal with SOCKS.
-* All HTTP requests must be sent through the Hippolyzer's HTTP proxy port.
+* * <https://github.com/SaladDais/WinHippoAutoProxy/blob/master/winhippoautoproxy/socks5udphooker.cpp> is a simple
+example that wraps around `recvfrom()` and `sendto()` and could be used as a starting point.
+* All HTTP requests must be sent through the Hippolyzer's HTTP proxy port.
 * * You may not need to do any extra plumbing to get this to work if your chosen HTTP client
 respects the `HTTP_PROXY` environment variable.
 * All HTTPS connections will be encrypted with the proxy's TLS key. You'll need to either add it to whatever
@@ -340,11 +394,21 @@ To have your client's traffic proxied through Hippolyzer the general flow is:
 * The proxy needs to use content sniffing to figure out which requests are login requests,
 so make sure your request would pass `MITMProxyEventManager._is_login_request()`
 
+#### Do I have to do all that?
+
+You might be able to automate some of it on Linux by using
+[LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy). If you're on Windows or MacOS the
+above is your only option.
+
 ### Should I use this library to make an SL client in Python?
 
-No. If you just want to write a client in Python, you should instead look at using
+Probably not. If you just want to write a client in Python, you should instead look at using
 [libremetaverse](https://github.com/cinderblocks/libremetaverse/) via pythonnet.
-I removed the client-related code inherited from PyOGP because libremetaverse's was simply better.
+I removed the client-related code inherited from PyOGP because libremetaverse's was simply better
+for general use.
 
 <https://github.com/CasperTech/node-metaverse/> also looks like a good, modern wrapper if you
 prefer TypeScript.
+
+There is, however, a very low-level `HippoClient` class provided for testing, but it's unlikely
+to be what you want for writing a general-purpose bot.
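The "For Client Developers" flow in the README hunks above maps directly onto RFC 1928. A minimal Python sketch of the handshake and the per-packet UDP header; the proxy ports come from the README, while the destination host/port and payload are illustrative values:

```python
import socket
import struct

# One TCP connection per logical session: SOCKS5 greeting offering "no auth".
tcp = socket.create_connection(("127.0.0.1", 9061))
tcp.sendall(b"\x05\x01\x00")              # VER=5, 1 method, 0x00 = no authentication
assert tcp.recv(2) == b"\x05\x00"

# UDP ASSOCIATE (CMD=0x03); a client address/port of all zeroes means "unspecified".
tcp.sendall(b"\x05\x03\x00\x01" + socket.inet_aton("0.0.0.0") + struct.pack("!H", 0))
resp = tcp.recv(10)                       # VER REP RSV ATYP BND.ADDR(4) BND.PORT(2)
relay = (socket.inet_ntoa(resp[4:8]), struct.unpack("!H", resp[8:10])[0])

def socks5_wrap(payload: bytes, dest_ip: str, dest_port: int) -> bytes:
    """Prepend the SOCKS5 UDP request header giving the packet's real destination."""
    return (b"\x00\x00"                   # RSV
            b"\x00"                       # FRAG (no fragmentation)
            b"\x01"                       # ATYP = IPv4
            + socket.inet_aton(dest_ip)
            + struct.pack("!H", dest_port)
            + payload)

udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
udp.sendto(socks5_wrap(b"example payload", "198.51.100.7", 13000), relay)
# Keep `tcp` open for the session's lifetime, or the UDP association is torn down.
```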
@@ -9,7 +9,7 @@ from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
-class PropertyHelloWorldAddon(BaseAddon):
+class AddonStateHelloWorldAddon(BaseAddon):
     # How to say hello, value shared across sessions and will be the same
     # regardless of which session is active when accessed.
     # "hello_greeting" is added to session_manager.addon_ctx's dict and will survive reloads
@@ -28,7 +28,11 @@ class PropertyHelloWorldAddon(BaseAddon):
         # Shared across sessions and will die if the addon is reloaded
         self.hello_punctuation = "!"
 
-    @handle_command(greeting=Parameter(str, sep=None))
+    @handle_command(
+        # Use the longer-form `Parameter()` for declaring this because
+        # this field should be greedy and take the rest of the message (no separator.)
+        greeting=Parameter(str, sep=None),
+    )
     async def set_hello_greeting(self, _session: Session, _region: ProxiedRegion, greeting: str):
         """Set the person to say hello to"""
         self.hello_greeting = greeting
@@ -38,7 +42,10 @@ class PropertyHelloWorldAddon(BaseAddon):
         """Set the person to say hello to"""
         self.hello_person = person
 
-    @handle_command(punctuation=Parameter(str, sep=None))
+    @handle_command(
+        # Punctuation should have no whitespace, so using a simple parameter is OK.
+        punctuation=str,
+    )
     async def set_hello_punctuation(self, _session: Session, _region: ProxiedRegion, punctuation: str):
         """Set the punctuation to use for saying hello"""
         self.hello_punctuation = punctuation
@@ -47,8 +54,8 @@ class PropertyHelloWorldAddon(BaseAddon):
     async def say_hello(self, _session: Session, _region: ProxiedRegion):
         """Say hello using the configured hello variables"""
         # These aren't instance properties, they can be accessed via the class as well.
-        hello_person = PropertyHelloWorldAddon.hello_person
+        hello_person = AddonStateHelloWorldAddon.hello_person
         send_chat(f"{self.hello_greeting} {hello_person}{self.hello_punctuation}")
 
 
-addons = [PropertyHelloWorldAddon()]
+addons = [AddonStateHelloWorldAddon()]
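For context on the decorator changes above: `handle_command` keyword specs become chat-command parameters, and `Parameter(str, sep=None)` makes a parameter greedy while a bare `str` stops at the first whitespace. A hedged sketch using only the API shown in this diff; the command names are invented:

```python
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.commands import handle_command, Parameter
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class ParamDemoAddon(BaseAddon):
    @handle_command(nickname=str)
    async def set_nickname(self, _session: Session, _region: ProxiedRegion, nickname: str):
        """Simple parameter: parsing stops at the first whitespace"""
        print(f"nickname={nickname!r}")

    @handle_command(motto=Parameter(str, sep=None))
    async def set_motto(self, _session: Session, _region: ProxiedRegion, motto: str):
        """Greedy parameter: consumes the rest of the chat message, spaces and all"""
        print(f"motto={motto!r}")


addons = [ParamDemoAddon()]
```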
addon_examples/anim_mangler.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""
+Example anim mangler addon, to be used with local anim addon.
+
+You can edit this live to apply various transforms to local anims,
+as well as any uploaded anims. Any changes will be reflected in currently
+playing local anims.
+
+This example modifies any position keys of an animation's mHipRight joint.
+"""
+from hippolyzer.lib.base.llanim import Animation
+from hippolyzer.lib.proxy.addons import AddonManager
+
+import local_anim
+AddonManager.hot_reload(local_anim, require_addons_loaded=True)
+
+
+def offset_right_hip(anim: Animation):
+    hip_joint = anim.joints.get("mHipRight")
+    if hip_joint:
+        for pos_frame in hip_joint.pos_keyframes:
+            pos_frame.pos.Z *= 2.5
+            pos_frame.pos.X *= 5.0
+    return anim
+
+
+class ExampleAnimManglerAddon(local_anim.BaseAnimManglerAddon):
+    ANIM_MANGLERS = [
+        offset_right_hip,
+    ]
+
+
+addons = [ExampleAnimManglerAddon()]
addon_examples/anim_tracker.py (new file, 125 lines)
@@ -0,0 +1,125 @@
+"""
+Debugger for detecting when animations within an object get started or stopped
+
+Useful for tracking down animation sequence-related bugs within your LSL scripts,
+or debugging automatic animation stopping behavior in the viewer.
+
+If an animation unexpectedly stops and nobody requested it be stopped, it's a potential viewer bug (or priority issue).
+If an animation unexpectedly stops and the viewer requested it be stopped, it's also a potential viewer bug.
+If an animation unexpectedly stops and only the server requested it be stopped, it's a potential script / server bug.
+"""
+
+from typing import *
+
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
+from hippolyzer.lib.base.objects import Object
+from hippolyzer.lib.base.templates import AssetType
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.proxy.commands import handle_command
+from hippolyzer.lib.proxy.addon_utils import show_message
+
+
+class AnimTrackerAddon(BaseAddon):
+    should_track_anims: bool = SessionProperty(False)
+    anims_lookup: Dict[UUID, str] = SessionProperty(dict)
+    last_tracker_anims: Set[UUID] = SessionProperty(set)
+
+    def _format_anim_diffs(self, started_anims: Set[UUID], stopped_anims: Set[UUID]):
+        added_strs = [f"+{self.anims_lookup[x]!r}" for x in started_anims]
+        removed_strs = [f"-{self.anims_lookup[x]!r}" for x in stopped_anims]
+
+        return ", ".join(removed_strs + added_strs)
+
+    @handle_command()
+    async def track_anims(self, session: Session, region: ProxiedRegion):
+        """Track when animations within this object get started or stopped"""
+        if self.should_track_anims:
+            self.last_tracker_anims.clear()
+            self.anims_lookup.clear()
+
+        selected = region.objects.lookup_localid(session.selected.object_local)
+        if not selected:
+            return
+
+        self.should_track_anims = True
+
+        object_items = await region.objects.request_object_inv(selected)
+
+        anims: Dict[UUID, str] = {}
+        for item in object_items:
+            if item.type != AssetType.ANIMATION:
+                continue
+            anims[item.true_asset_id] = item.name
+
+        self.anims_lookup = anims
+
+    @handle_command()
+    async def stop_tracking_anims(self, _session: Session, _region: ProxiedRegion):
+        """Stop reporting differences"""
+        if self.should_track_anims:
+            self.should_track_anims = False
+            self.last_tracker_anims.clear()
+            self.anims_lookup.clear()
+
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
+        if not self.should_track_anims:
+            return
+
+        if message.name != "AgentAnimation" or message.direction != Direction.OUT:
+            # AgentAnimation is the message the viewer uses to request manually starting or stopping animations.
+            # We don't care about other messages, we're just interested in distinguishing cases where the viewer
+            # specifically requested something vs something being done by the server on its own.
+            return
+        av = session.objects.lookup_avatar(session.agent_id)
+        if not av or not av.Object:
+            print("Somehow didn't know about our own av object?")
+            return
+
+        current_anims = set([x for x in av.Object.Animations if x in self.anims_lookup])
+        started_anims: Set[UUID] = set()
+        stopped_anims: Set[UUID] = set()
+
+        for block in message["AnimationList"]:
+            anim_id = block["AnimID"]
+            if anim_id not in self.anims_lookup:
+                continue
+
+            start_anim = block["StartAnim"]
+            already_started = anim_id in current_anims
+            if start_anim == already_started:
+                # No change
+                continue
+
+            if start_anim:
+                started_anims.add(anim_id)
+            else:
+                stopped_anims.add(anim_id)
+
+        if started_anims or stopped_anims:
+            show_message("Viewer Requested Anims: " + self._format_anim_diffs(started_anims, stopped_anims))
+
+    def handle_object_updated(self, session: Session, region: ProxiedRegion,
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
+        if not self.should_track_anims:
+            return
+        if obj.FullID != session.agent_id:
+            return
+        if "Animations" not in updated_props:
+            return
+
+        current_anims = set([x for x in obj.Animations if x in self.anims_lookup])
+        started_anims = current_anims - self.last_tracker_anims
+        stopped_anims = self.last_tracker_anims - current_anims
+
+        self.last_tracker_anims.clear()
+        self.last_tracker_anims.update(current_anims)
+
+        if started_anims or stopped_anims:
+            show_message("Anim Diffs: " + self._format_anim_diffs(started_anims, stopped_anims))
+
+
+addons = [AnimTrackerAddon()]
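The tracker's core is plain set arithmetic over animation IDs: anything in the current set but missing from the previous snapshot just started, and the reverse just stopped. The same logic in isolation:

```python
last_anims = {"walk", "breathe"}
current_anims = {"run", "breathe"}

started_anims = current_anims - last_anims   # {"run"}
stopped_anims = last_anims - current_anims   # {"walk"}
assert started_anims == {"run"} and stopped_anims == {"walk"}
```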
@@ -4,11 +4,11 @@ All buttons make you go backwards.
 Except for backward, which makes you go left.
 """
 
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.templates import AgentControlFlags
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import AgentControlFlags
 
 
 NUDGE_MASK = sum(x for x in AgentControlFlags if "NUDGE" in x.name)
@@ -19,7 +19,7 @@ BACK_MASK = (AgentControlFlags.AT_NEG | AgentControlFlags.NUDGE_AT_NEG)
 
 
 class BackwardsAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name == "AgentUpdate":
             agent_data_block = message["AgentData"][0]
             flags: AgentControlFlags = agent_data_block.deserialize_var("ControlFlags")
@@ -11,7 +11,7 @@ import secrets
 
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
@@ -41,7 +41,7 @@ class BezosifyAddon(BaseAddon):
         # random value to XOR all CRCs with
         self.bezos_crc_xor = secrets.randbits(32)
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name == "ObjectUpdateCached":
             for block in message["ObjectData"]:
                 # Cached only really has a CRC, this will force the cache miss.
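The CRC trick in this hunk works because XOR with one fixed random mask is deterministic and reversible: every object maps to the same wrong CRC for the whole session, so the viewer reliably cache-misses without the addon needing to remember anything. In isolation:

```python
import secrets

crc_xor = secrets.randbits(32) | 1  # force nonzero so the CRC always changes
real_crc = 0xDEADBEEF
spoofed_crc = real_crc ^ crc_xor

assert spoofed_crc != real_crc            # guaranteed cache miss
assert spoofed_crc ^ crc_xor == real_crc  # XOR is an involution: fully reversible
```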
@@ -11,21 +11,20 @@ import enum
 import os.path
 from typing import *
 
-from PySide2 import QtCore, QtGui, QtWidgets
+from PySide6 import QtCore, QtGui, QtWidgets
 
 from hippolyzer.lib.base.datatypes import Vector3
-from hippolyzer.lib.base.message.message import Block
+from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.objects import Object
 from hippolyzer.lib.base.ui_helpers import loadUi
+from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
-from hippolyzer.lib.proxy.packets import Direction
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope
-from hippolyzer.lib.proxy.templates import PCode
 
 
 def _is_color_blueish(color: bytes) -> bool:
@@ -81,7 +80,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
             raise
 
     def _highlight_object(self, session: Session, obj: Object):
-        session.main_region.circuit.send_message(ProxiedMessage(
+        session.main_region.circuit.send(Message(
             "ForceObjectSelect",
             Block("Header", ResetList=False),
             Block("Data", LocalID=obj.LocalID),
@@ -89,7 +88,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
         ))
 
     def _teleport_to_object(self, session: Session, obj: Object):
-        session.main_region.circuit.send_message(ProxiedMessage(
+        session.main_region.circuit.send(Message(
             "TeleportLocationRequest",
             Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
             Block(
@@ -115,7 +114,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
         region.objects.request_missing_objects()
 
     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if self.blueish_model is None:
             return
@@ -1,12 +1,12 @@
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
-def handle_lludp_message(session: Session, region: ProxiedRegion, message: ProxiedMessage):
+def handle_lludp_message(session: Session, region: ProxiedRegion, message: Message):
     # addon_ctx will persist across addon reloads, use for storing data that
     # needs to survive across calls to this function
-    ctx = session.addon_ctx
+    ctx = session.addon_ctx[__name__]
     if message.name == "ChatFromViewer":
         chat = message["ChatData"]["Message"]
         if chat == "COUNT":
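The `addon_ctx[__name__]` change above moves this addon's state from one dict shared by every addon to a per-module namespace, so two addons can both store a `count` key without clobbering each other. A sketch of why that matters, modeling `addon_ctx` as a dict of per-module dicts (an assumption for illustration):

```python
from collections import defaultdict

addon_ctx = defaultdict(dict)  # stand-in for session.addon_ctx

# Each addon module indexes by its own __name__, so keys can't collide.
addon_ctx["addon_examples.chat_counter"]["count"] = 5
addon_ctx["addon_examples.other_addon"]["count"] = 99
assert addon_ctx["addon_examples.chat_counter"]["count"] == 5
```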
addon_examples/create_shape.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+"""
+Demonstrates item creation as well as bodypart / clothing upload
+"""
+
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.templates import WearableType, Permissions
+from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
+from hippolyzer.lib.proxy.addon_utils import BaseAddon
+from hippolyzer.lib.proxy.commands import handle_command
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session
+
+
+class ShapeCreatorAddon(BaseAddon):
+    @handle_command()
+    async def create_shape(self, session: Session, region: ProxiedRegion):
+        """Make a shape with pre-set parameters and place it in the body parts folder"""
+
+        wearable = Wearable.make_default(WearableType.SHAPE)
+        # Max out the jaw jut param
+        jaw_param = VISUAL_PARAMS.by_name("Jaw Jut")
+        wearable.parameters[jaw_param.id] = jaw_param.value_max
+        wearable.name = "Cool Shape"
+
+        # A unique transaction ID is needed to tie the item creation to the following asset upload.
+        transaction_id = UUID.random()
+        item = await session.inventory.create_item(
+            UUID.ZERO,  # This will place it in the default folder for the type
+            name=wearable.name,
+            type=wearable.wearable_type.asset_type,
+            inv_type=wearable.wearable_type.asset_type.inventory_type,
+            wearable_type=wearable.wearable_type,
+            next_mask=Permissions.MOVE | Permissions.MODIFY | Permissions.COPY | Permissions.TRANSFER,
+            transaction_id=transaction_id,
+        )
+        print(f"Created {item!r}")
+        await region.xfer_manager.upload_asset(
+            wearable.wearable_type.asset_type,
+            wearable.to_str(),
+            transaction_id=transaction_id,
+        )
+
+
+addons = [ShapeCreatorAddon()]
@@ -10,13 +10,13 @@ message with a greeting.
 """
 
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
 class CustomMetaExampleAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if not message.name.startswith("ChatFrom"):
             return
 
@@ -4,8 +4,13 @@ Helper for making deformer anims. This could have a GUI I guess.
 import dataclasses
 from typing import *
 
+import numpy as np
+import transformations
+
 from hippolyzer.lib.base.datatypes import Vector3, Quaternion, UUID
 from hippolyzer.lib.base.llanim import Joint, Animation, PosKeyframe, RotKeyframe
+from hippolyzer.lib.base.mesh import MeshAsset, SegmentHeaderDict, SkinSegmentDict, LLMeshSerializer
+from hippolyzer.lib.base.serialization import BufferWriter
 from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon, SessionProperty
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.commands import handle_command, Parameter
@@ -45,6 +50,58 @@ def build_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
     return anim.to_bytes()
 
 
+def build_mesh_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
+    skin_seg = SkinSegmentDict(
+        joint_names=[],
+        bind_shape_matrix=identity_mat4(),
+        inverse_bind_matrix=[],
+        alt_inverse_bind_matrix=[],
+        pelvis_offset=0.0,
+        lock_scale_if_joint_position=False
+    )
+    for joint_name, joint in joints.items():
+        # We can only represent joint translations, ignore this joint if it doesn't have any.
+        if not joint.pos:
+            continue
+        skin_seg['joint_names'].append(joint_name)
+        # Inverse bind matrix isn't actually used, so we can just give it a placeholder value of the
+        # identity mat4. This might break things in weird ways because the matrix isn't actually sensible.
+        skin_seg['inverse_bind_matrix'].append(identity_mat4())
+        # Create a flattened mat4 that only has a translation component of our joint pos
+        # The viewer ignores any other component of these matrices so no point putting shear
+        # or perspective or whatever :)
+        joint_mat4 = pos_to_mat4(joint.pos)
+        # Ask the viewer to override this joint's usual parent-relative position with our matrix
+        skin_seg['alt_inverse_bind_matrix'].append(joint_mat4)
+
+    # Make a dummy mesh and shove our skin segment onto it. None of the tris are rigged, so the
+    # viewer will freak out and refuse to display the tri, only the joint translations will be used.
+    # Supposedly a mesh with a `skin` segment but no weights on the material should just result in an
+    # effectively unrigged material, but that's not the case. Oh well.
+    mesh = MeshAsset.make_triangle()
+    mesh.header['skin'] = SegmentHeaderDict(offset=0, size=0)
+    mesh.segments['skin'] = skin_seg
+
+    writer = BufferWriter("!")
+    writer.write(LLMeshSerializer(), mesh)
+    return writer.copy_buffer()
+
+
+def identity_mat4() -> List[float]:
+    """
+    Return an "Identity" mat4
+
+    Effectively represents a transform of no rot, no translation, no shear, no perspective
+    and scaling by 1.0 on every axis.
+    """
+    return list(np.identity(4).flatten('F'))
+
+
+def pos_to_mat4(pos: Vector3) -> List[float]:
+    """Convert a position Vector3 to a Translation Mat4"""
+    return list(transformations.compose_matrix(translate=tuple(pos)).flatten('F'))
+
+
 class DeformerAddon(BaseAddon):
     deform_joints: Dict[str, DeformerJoint] = SessionProperty(dict)
 
@@ -95,7 +152,7 @@ class DeformerAddon(BaseAddon):
         local_anim.LocalAnimAddon.apply_local_anim(session, region, "deformer_addon", anim_data)
 
     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # An object in-world can also tell the client how to deform itself via
         # RLV-style commands.
 
@@ -103,9 +160,9 @@ class DeformerAddon(BaseAddon):
         if param != "force":
             return
 
-        if cmd == "stop_deforming":
+        if behaviour == "stop_deforming":
             self.deform_joints.clear()
-        elif cmd == "deform_joints":
+        elif behaviour == "deform_joints":
             self.deform_joints.clear()
             for joint_data in options:
                 joint_split = joint_data.split("|")
@@ -118,5 +175,41 @@ class DeformerAddon(BaseAddon):
             self._reapply_deformer(session, region)
             return True
 
+    @handle_command()
+    async def save_deformer_as_mesh(self, _session: Session, _region: ProxiedRegion):
+        """
+        Export the deformer as a crafted rigged mesh rather than an animation
+
+        Mesh deformers have the advantage that they don't cause your joints to "stick"
+        like animations do when using animations with pos keyframes.
+        """
+        filename = await AddonManager.UI.save_file(filter_str="LL Mesh (*.llmesh)")
+        if not filename:
+            return
+        with open(filename, "wb") as f:
+            f.write(build_mesh_deformer(self.deform_joints))
+
+    @handle_command()
+    async def upload_deformer_as_mesh(self, _session: Session, region: ProxiedRegion):
+        """Same as save_deformer_as_mesh, but uploads the mesh directly to SL."""
+
+        mesh_bytes = build_mesh_deformer(self.deform_joints)
+        try:
+            # Send off mesh to calculate upload cost
+            upload_token = await region.asset_uploader.initiate_mesh_upload("deformer", mesh_bytes)
+        except Exception as e:
+            show_message(e)
+            raise
+
+        if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
+            return
+
+        # Do the actual upload
+        try:
+            await region.asset_uploader.complete_upload(upload_token)
+        except Exception as e:
+            show_message(e)
+            raise
+
+
 addons = [DeformerAddon()]
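On the `flatten('F')` calls in `identity_mat4()` and `pos_to_mat4()` above: flattening in column-major ('F') order emits the matrix column by column, so a mat4's translation column lands at indices 12-14 of the resulting list. A quick check with the same `transformations` library the addon imports:

```python
import transformations

flat = transformations.compose_matrix(translate=(1.0, 2.0, 3.0)).flatten('F')

# Column-major order: the fourth (translation) column comes last.
assert list(flat[:4]) == [1.0, 0.0, 0.0, 0.0]  # first basis column, unrotated
assert list(flat[12:15]) == [1.0, 2.0, 3.0]    # translation x, y, z
assert flat[15] == 1.0                         # homogeneous w
```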
158
addon_examples/demo_autoattacher.py
Normal file
158
addon_examples/demo_autoattacher.py
Normal file
@@ -0,0 +1,158 @@
|
||||
"""
|
||||
Detect receipt of a marketplace order for a demo, and auto-attach the most appropriate object
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
from typing import List, Tuple, Dict, Optional, Sequence
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.message.message import Message, Block
|
||||
from hippolyzer.lib.base.templates import InventoryType, Permissions, FolderType
|
||||
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session
|
||||
|
||||
|
||||
MARKETPLACE_TRANSACTION_ID = UUID('ffffffff-ffff-ffff-ffff-ffffffffffff')
|
||||
|
||||
|
||||
class DemoAutoAttacher(BaseAddon):
|
||||
def handle_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
|
||||
if event["message"] != "BulkUpdateInventory":
|
||||
return
|
||||
# Check that this update even possibly came from the marketplace
|
||||
if event["body"]["AgentData"][0]["TransactionID"] != MARKETPLACE_TRANSACTION_ID:
|
||||
return
|
||||
# Make sure that the transaction targeted our real received items folder
|
||||
folders = event["body"]["FolderData"]
|
||||
received_folder = folders[0]
|
||||
if received_folder["Name"] != "Received Items":
|
||||
return
|
||||
skel = session.login_data['inventory-skeleton']
|
||||
actual_received = [x for x in skel if x['type_default'] == FolderType.INBOX]
|
||||
assert actual_received
|
||||
if UUID(actual_received[0]['folder_id']) != received_folder["FolderID"]:
|
||||
show_message(f"Strange received folder ID spoofing? {folders!r}")
|
||||
return
|
||||
|
||||
if not re.match(r".*\bdemo\b.*", folders[1]["Name"], flags=re.I):
|
||||
return
|
||||
# Alright, so we have a demo... thing from the marketplace. What now?
|
||||
items = event["body"]["ItemData"]
|
||||
object_items = [x for x in items if x["InvType"] == InventoryType.OBJECT]
|
||||
if not object_items:
|
||||
return
|
||||
self._schedule_task(self._attach_best_object(session, region, object_items))
|
||||
|
||||
async def _attach_best_object(self, session: Session, region: ProxiedRegion, object_items: List[Dict]):
|
||||
own_body_type = await self._guess_own_body(session, region)
|
||||
show_message(f"Trying to find demo for {own_body_type}")
|
||||
guess_patterns = self.BODY_CLOTHING_PATTERNS.get(own_body_type)
|
||||
to_attach = []
|
||||
if own_body_type and guess_patterns:
|
||||
matching_items = self._get_matching_items(object_items, guess_patterns)
|
||||
if matching_items:
|
||||
# Only take the first one
|
||||
to_attach.append(matching_items[0])
|
||||
if not to_attach:
|
||||
# Don't know what body's being used or couldn't figure out what item
|
||||
# would work best with our body. Just attach the first object in the folder.
|
||||
to_attach.append(object_items[0])
|
||||
|
||||
# Also attach whatever HUDs, maybe we need them.
|
||||
for hud in self._get_matching_items(object_items, ("hud",)):
|
||||
if hud not in to_attach:
|
||||
to_attach.append(hud)
|
||||
|
||||
region.circuit.send(Message(
|
||||
'RezMultipleAttachmentsFromInv',
|
||||
Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
|
||||
Block('HeaderData', CompoundMsgID=UUID.random(), TotalObjects=len(to_attach), FirstDetachAll=0),
|
||||
*[Block(
|
||||
'ObjectData',
|
||||
ItemID=o["ItemID"],
|
||||
OwnerID=session.agent_id,
|
||||
# 128 = "add", uses whatever attachmentpt was defined on the object
|
||||
AttachmentPt=128,
|
||||
ItemFlags_=(),
|
||||
GroupMask_=(),
|
||||
EveryoneMask_=(),
|
||||
NextOwnerMask_=(Permissions.COPY | Permissions.MOVE),
|
||||
Name=o["Name"],
|
||||
Description=o["Description"],
|
||||
) for o in to_attach]
|
||||
))
|
||||
|
||||
def _get_matching_items(self, items: List[dict], patterns: Sequence[str]):
|
||||
# Loop over patterns to search for our body type, in order of preference
|
||||
matched = []
|
||||
for guess_pattern in patterns:
|
||||
# Check each item for that pattern
|
||||
for item in items:
|
||||
if re.match(rf".*\b{guess_pattern}\b.*", item["Name"], re.I):
|
||||
matched.append(item)
|
||||
return matched
|
||||
|
||||
# We scan the agent's attached objects to guess what kind of body they use
|
||||
BODY_PREFIXES = {
|
||||
"-Belleza- Jake ": "jake",
|
||||
"-Belleza- Freya ": "freya",
|
||||
"-Belleza- Isis ": "isis",
|
||||
"-Belleza- Venus ": "venus",
|
||||
"[Signature] Gianni Body": "gianni",
|
||||
"[Signature] Geralt Body": "geralt",
|
||||
"Maitreya Mesh Body - Lara": "maitreya",
|
||||
"Slink Physique Hourglass Petite": "hg_petite",
|
||||
"Slink Physique Mesh Body Hourglass": "hourglass",
|
||||
"Slink Physique Original Petite": "phys_petite",
|
||||
"Slink Physique Mesh Body Original": "physique",
|
||||
"[BODY] Legacy (f)": "legacy_f",
|
||||
"[BODY] Legacy (m)": "legacy_m",
|
||||
"[Signature] Alice Body": "sig_alice",
|
||||
"Slink Physique MALE Mesh Body": "slink_male",
|
||||
"AESTHETIC - [Mesh Body]": "aesthetic",
|
||||
}
|
||||
|
||||
# Different bodies' clothes have different naming conventions according to different merchants.
|
||||
# These are common naming patterns we use to choose objects to attach, in order of preference.
|
||||
BODY_CLOTHING_PATTERNS: Dict[str, Tuple[str, ...]] = {
|
||||
"jake": ("jake", "belleza"),
|
||||
"freya": ("freya", "belleza"),
|
||||
"isis": ("isis", "belleza"),
|
||||
"venus": ("venus", "belleza"),
|
||||
"gianni": ("gianni", "signature", "sig"),
|
||||
"geralt": ("geralt", "signature", "sig"),
|
||||
"hg_petite": ("hourglass petite", "hg petite", "hourglass", "hg", "slink"),
|
||||
"hourglass": ("hourglass", "hg", "slink"),
|
||||
"phys_petite": ("physique petite", "phys petite", "physique", "phys", "slink"),
|
||||
"physique": ("physique", "phys", "slink"),
|
||||
"legacy_f": ("legacy",),
|
||||
"legacy_m": ("legacy",),
|
||||
"sig_alice": ("alice", "signature"),
|
||||
"slink_male": ("physique", "slink"),
|
||||
"aesthetic": ("aesthetic",),
|
||||
}
|
||||
|
||||
async def _guess_own_body(self, session: Session, region: ProxiedRegion) -> Optional[str]:
|
||||
agent_obj = region.objects.lookup_fullid(session.agent_id)
|
||||
if not agent_obj:
|
||||
return None
|
||||
# We probably won't know the names for all of our attachments, request them.
|
||||
# Could be obviated by looking at the COF, not worth it for this.
|
||||
try:
|
||||
await asyncio.wait(region.objects.request_object_properties(agent_obj.Children), timeout=0.5)
|
||||
except asyncio.TimeoutError:
|
||||
# We expect that we just won't ever receive some property requests, that's fine
|
||||
pass
|
||||
|
||||
for prefix, body_type in self.BODY_PREFIXES.items():
|
||||
for obj in agent_obj.Children:
|
||||
if not obj.Name:
|
||||
continue
|
||||
if obj.Name.startswith(prefix):
|
||||
return body_type
|
||||
return None
|
||||
|
||||
|
||||
addons = [DemoAutoAttacher()]
|
||||
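A note on _get_matching_items: because the outer loop is over patterns rather than items, matches come back in pattern-preference order, not inventory order. A standalone sketch of that behavior (the item names here are invented for illustration):

import re


def get_matching_items(items, patterns):
    # Same shape as DemoAutoAttacher._get_matching_items: the outer loop is
    # over patterns, so results come back in pattern-preference order.
    matched = []
    for guess_pattern in patterns:
        for item in items:
            if re.match(rf".*\b{guess_pattern}\b.*", item["Name"], re.I):
                matched.append(item)
    return matched


items = [
    {"Name": "Cool Jacket (Slink)"},
    {"Name": "Cool Jacket (Hourglass)"},
]
# "hourglass" is tried before the catch-all "slink", so the Hourglass variant
# wins even though it's listed second. Prints "Cool Jacket (Hourglass)".
print(get_matching_items(items, ("hourglass", "hg", "slink"))[0]["Name"])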
@@ -16,8 +16,8 @@ import random
 from hippolyzer.lib.base.message.msgtypes import PacketLayout
 from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import Direction
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session

@@ -28,7 +28,7 @@ class PacketMutationAddon(BaseAddon):
     def __init__(self):
         self.serializer = UDPMessageSerializer()

-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         # Only inbound messages, don't fiddle with the sim.
         if message.direction != Direction.IN:
             return
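As a sketch of what a mutation step for an addon like this might do with that serializer: round-trip the message through its wire form and flip a few body bits. The serialize()/deserialize() names are assumptions about UDPMessageSerializer's interface, and PHL_NAME is assumed to mark where the packet header ends:

import random

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.msgtypes import PacketLayout
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer


def mutate_message(serializer: UDPMessageSerializer, message: Message) -> Message:
    # Round-trip through the wire format so we can corrupt raw bytes.
    # serialize() / deserialize() are assumed names for the serializer's API.
    data = bytearray(serializer.serialize(message))
    # Only flip bits past the assumed header boundary so the flags and
    # packet ID survive; a corrupted body may still fail to re-parse,
    # which is rather the point of fuzzing.
    for _ in range(3):
        idx = random.randrange(PacketLayout.PHL_NAME, len(data))
        data[idx] ^= 1 << random.randrange(8)
    return serializer.deserialize(bytes(data))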
119  addon_examples/get_task_inventory_cap.py  Normal file
@@ -0,0 +1,119 @@
"""
|
||||
Loading task inventory doesn't actually need to be slow.
|
||||
|
||||
By using a cap instead of the slow xfer path and sending the LLSD inventory
|
||||
model we get 15x speedups even when mocking things behind the scenes by using
|
||||
a hacked up version of xfer. See turbo_object_inventory.py
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
|
||||
import asgiref.wsgi
|
||||
from typing import *
|
||||
|
||||
from flask import Flask, Response, request
|
||||
|
||||
from hippolyzer.lib.base import llsd
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.inventory import InventoryModel, InventoryObject
|
||||
from hippolyzer.lib.base.message.message import Message, Block
|
||||
from hippolyzer.lib.base.templates import XferFilePath, AssetType
|
||||
from hippolyzer.lib.proxy import addon_ctx
|
||||
from hippolyzer.lib.proxy.webapp_cap_addon import WebAppCapAddon
|
||||
|
||||
app = Flask("GetTaskInventoryCapApp")
|
||||
|
||||
|
||||
@app.route('/', methods=["GET"])
|
||||
async def get_task_inventory():
|
||||
# Should always have the current region, the cap handler is bound to one.
|
||||
# Just need to pull it from the `addon_ctx` module's global.
|
||||
region = addon_ctx.region.get()
|
||||
session = addon_ctx.session.get()
|
||||
obj_id = UUID(request.args["task_id"])
|
||||
obj = region.objects.lookup_fullid(obj_id)
|
||||
if not obj:
|
||||
return Response(f"Couldn't find {obj_id}", status=404, mimetype="text/plain")
|
||||
request_msg = Message(
|
||||
'RequestTaskInventory',
|
||||
Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
|
||||
Block('InventoryData', LocalID=obj.LocalID),
|
||||
)
|
||||
# Keep around a dict of chunks we saw previously in case we have to restart
|
||||
# an Xfer due to missing chunks. We don't expect chunks to change across Xfers
|
||||
# so this can be used to recover from dropped SendXferPackets in subsequent attempts
|
||||
existing_chunks: Dict[int, bytes] = {}
|
||||
for _ in range(3):
|
||||
# Any previous requests will have triggered a delete of the inventory file
|
||||
# by marking it complete on the server-side. Re-send our RequestTaskInventory
|
||||
# To make sure there's a fresh copy.
|
||||
region.circuit.send(request_msg.take())
|
||||
inv_message = await region.message_handler.wait_for(
|
||||
('ReplyTaskInventory',),
|
||||
predicate=lambda x: x["InventoryData"]["TaskID"] == obj.FullID,
|
||||
timeout=5.0,
|
||||
)
|
||||
# No task inventory, send the reply as-is
|
||||
file_name = inv_message["InventoryData"]["Filename"]
|
||||
if not file_name:
|
||||
# The "Contents" folder always has to be there, if we don't put it here
|
||||
# then the viewer will have to lie about it being there itself.
|
||||
return Response(
|
||||
llsd.format_xml({
|
||||
"inventory": [
|
||||
InventoryObject(
|
||||
name="Contents",
|
||||
parent_id=UUID.ZERO,
|
||||
type=AssetType.CATEGORY,
|
||||
obj_id=obj_id
|
||||
).to_llsd()
|
||||
],
|
||||
"inv_serial": inv_message["InventoryData"]["Serial"],
|
||||
}),
|
||||
headers={"Content-Type": "application/llsd+xml"},
|
||||
status=200,
|
||||
)
|
||||
|
||||
last_serial = request.args.get("last_serial", None)
|
||||
if last_serial:
|
||||
last_serial = int(last_serial)
|
||||
if inv_message["InventoryData"]["Serial"] == last_serial:
|
||||
# Nothing has changed since the version of the inventory they say they have, say so.
|
||||
return Response("", status=304)
|
||||
|
||||
xfer = region.xfer_manager.request(
|
||||
file_name=file_name,
|
||||
file_path=XferFilePath.CACHE,
|
||||
turbo=True,
|
||||
)
|
||||
xfer.chunks.update(existing_chunks)
|
||||
try:
|
||||
await xfer
|
||||
except asyncio.TimeoutError:
|
||||
# We likely failed the request due to missing chunks, store
|
||||
# the chunks that we _did_ get for the next attempt.
|
||||
existing_chunks.update(xfer.chunks)
|
||||
continue
|
||||
|
||||
inv_model = InventoryModel.from_str(xfer.reassemble_chunks().decode("utf8"))
|
||||
|
||||
return Response(
|
||||
llsd.format_xml({
|
||||
"inventory": inv_model.to_llsd(),
|
||||
"inv_serial": inv_message["InventoryData"]["Serial"],
|
||||
}),
|
||||
headers={"Content-Type": "application/llsd+xml"},
|
||||
)
|
||||
raise asyncio.TimeoutError("Failed to get inventory after 3 tries")
|
||||
|
||||
|
||||
class GetTaskInventoryCapExampleAddon(WebAppCapAddon):
|
||||
# A cap URL with this name will be tied to each region when
|
||||
# the sim is first connected to. The URL will be returned to the
|
||||
# viewer in the Seed if the viewer requests it by name.
|
||||
CAP_NAME = "GetTaskInventoryExample"
|
||||
# Any asgi app should be fine.
|
||||
APP = asgiref.wsgi.WsgiToAsgi(app)
|
||||
|
||||
|
||||
addons = [GetTaskInventoryCapExampleAddon()]
|
||||
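A sketch of what a consumer of this cap could look like from another addon. The aiohttp dependency and llsd.parse_xml() (as the inverse of the format_xml() used above) are assumptions here; the cap URL lookup through region.cap_urls mirrors how other addons in this set resolve caps by name:

import aiohttp

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.region import ProxiedRegion


async def fetch_task_inventory(region: ProxiedRegion, task_id: UUID, last_serial=None):
    # The proxy-provided cap is registered under its CAP_NAME like any sim cap
    base_url = region.cap_urls["GetTaskInventoryExample"]
    params = {"task_id": str(task_id)}
    if last_serial is not None:
        params["last_serial"] = str(last_serial)
    async with aiohttp.ClientSession() as http:
        async with http.get(base_url, params=params) as resp:
            if resp.status == 304:
                return None  # the serial we already have is still current
            resp.raise_for_status()
            # parse_xml() is an assumed inverse of format_xml() above
            return llsd.parse_xml(await resp.read())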
@@ -9,23 +9,24 @@ class GreetingAddon(BaseAddon):
     @handle_command()
     async def greetings(self, session: Session, region: ProxiedRegion):
         """Greet everyone around you"""
-        agent_obj = region.objects.lookup_fullid(session.agent_id)
-        if not agent_obj:
+        our_avatar = region.objects.lookup_avatar(session.agent_id)
+        if not our_avatar:
             show_message("Don't have an agent object?")

-        # Note that this will only have avatars closeish to your camera. The sim sends
-        # KillObjects for avatars that get too far away.
-        other_agents = [o for o in region.objects.all_avatars if o.FullID != agent_obj.FullID]
+        # Look this up in the session object store since we may be next
+        # to a region border.
+        other_avatars = [o for o in session.objects.all_avatars if o.FullID != our_avatar.FullID]

-        if not other_agents:
-            show_message("No other agents?")
+        if not other_avatars:
+            show_message("No other avatars?")

-        for other_agent in other_agents:
-            dist = Vector3.dist(agent_obj.Position, other_agent.Position)
+        for other_avatar in other_avatars:
+            dist = Vector3.dist(our_avatar.GlobalPosition, other_avatar.GlobalPosition)
             if dist >= 19.0:
                 continue
-            nv = other_agent.NameValue.to_dict()
-            send_chat(f"Greetings, {nv['FirstName']} {nv['LastName']}!")
+            if other_avatar.PreferredName is None:
+                continue
+            send_chat(f"Greetings, {other_avatar.PreferredName}!")


 addons = [GreetingAddon()]
@@ -2,11 +2,11 @@
 Drop outgoing packets that might leak what you're looking at, similar to Firestorm
 """

-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import Direction
+from hippolyzer.lib.base.templates import ViewerEffectType
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import ViewerEffectType


 BLOCKED_EFFECTS = (
@@ -17,7 +17,7 @@ BLOCKED_EFFECTS = (
 )


-def handle_lludp_message(_session: Session, region: ProxiedRegion, msg: ProxiedMessage):
+def handle_lludp_message(_session: Session, region: ProxiedRegion, msg: Message):
     if msg.name == "ViewerEffect" and msg.direction == Direction.OUT:
         new_blocks = [b for b in msg["Effect"] if b["Type"] not in BLOCKED_EFFECTS]
         if new_blocks:
@@ -13,10 +13,10 @@ from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.llanim import Animation
 from hippolyzer.lib.proxy.addon_utils import AssetAliasTracker, BaseAddon, GlobalProperty
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.vfs import STATIC_VFS
+from hippolyzer.lib.base.vfs import STATIC_VFS


 JOINT_REPLS = {
@@ -53,7 +53,7 @@ class HorrorAnimatorAddon(BaseAddon):
         # We've reloaded, so make sure assets get new aliases
         self.horror_anim_tracker.invalidate_aliases()

-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         tracker = self.horror_anim_tracker

         if message.name == "AvatarAnimation":
@@ -105,7 +105,7 @@ class HorrorAnimatorAddon(BaseAddon):
             # send the response back immediately
             block = STATIC_VFS[orig_anim_id]
             anim_data = STATIC_VFS.read_block(block)
-            flow.response = mitmproxy.http.HTTPResponse.make(
+            flow.response = mitmproxy.http.Response.make(
                 200,
                 _mutate_anim_bytes(anim_data),
                 {
50  addon_examples/leap_example.py  Normal file
@@ -0,0 +1,50 @@
"""
|
||||
Example of how to control a viewer over LEAP
|
||||
|
||||
Must launch the viewer with `outleap-agent` LEAP script.
|
||||
See https://github.com/SaladDais/outleap/ for more info on LEAP / outleap.
|
||||
"""
|
||||
|
||||
import outleap
|
||||
from outleap.scripts.inspector import LEAPInspectorGUI
|
||||
|
||||
from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon, show_message
|
||||
from hippolyzer.lib.proxy.commands import handle_command
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session, SessionManager
|
||||
|
||||
|
||||
# Path found using `outleap-inspector`
|
||||
FPS_PATH = outleap.UIPath("/main_view/menu_stack/status_bar_container/status/time_and_media_bg/FPSText")
|
||||
|
||||
|
||||
class LEAPExampleAddon(BaseAddon):
|
||||
async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
|
||||
# You can do things as soon as the LEAP client connects, like if you want to automate
|
||||
# login or whatever.
|
||||
viewer_control_api = outleap.LLViewerControlAPI(leap_client)
|
||||
# Ask for a config value and print it in the viewer logs
|
||||
print(await viewer_control_api.get("Global", "StatsPilotFile"))
|
||||
|
||||
@handle_command()
|
||||
async def show_ui_inspector(self, session: Session, _region: ProxiedRegion):
|
||||
"""Spawn a GUI for inspecting the UI state"""
|
||||
if not session.leap_client:
|
||||
show_message("No LEAP client connected?")
|
||||
return
|
||||
LEAPInspectorGUI(session.leap_client).show()
|
||||
|
||||
@handle_command()
|
||||
async def say_fps(self, session: Session, _region: ProxiedRegion):
|
||||
"""Say your current FPS in chat"""
|
||||
if not session.leap_client:
|
||||
show_message("No LEAP client connected?")
|
||||
return
|
||||
|
||||
window_api = outleap.LLWindowAPI(session.leap_client)
|
||||
fps = (await window_api.get_info(path=FPS_PATH))['value']
|
||||
|
||||
send_chat(f"LEAP says I'm running at {fps} FPS!")
|
||||
|
||||
|
||||
addons = [LEAPExampleAddon()]
|
||||
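The say_fps pattern generalizes to any element you can find with outleap-inspector. A sketch of an extra command that could be added to LEAPExampleAddon, using only the guards and APIs shown above; the UI path you pass must be one you discovered for your own viewer skin:

    @handle_command(path=str)
    async def say_ui_value(self, session: Session, _region: ProxiedRegion, path: str):
        """Say the value of any UI element found with outleap-inspector"""
        if not session.leap_client:
            show_message("No LEAP client connected?")
            return
        window_api = outleap.LLWindowAPI(session.leap_client)
        info = await window_api.get_info(path=outleap.UIPath(path))
        send_chat(f"{path} = {info.get('value')!r}")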
@@ -5,43 +5,58 @@ Local animations
 assuming you loaded something.anim
 /524 start_local_anim something
 /524 stop_local_anim something
+/524 save_local_anim something

 If you want to trigger the animation from an object to simulate llStartAnimation():
 llOwnerSay("@start_local_anim:something=force");
+
+Also includes a concept of "anim manglers" similar to the "mesh manglers" of the
+local mesh addon. This is useful if you want to test making procedural changes
+to animations before uploading them. The manglers will be applied to any uploaded
+animations as well.
+
+May also be useful if you need to make ad-hoc changes to a bunch of animations on
+bulk upload, like changing priority or removing a joint.
 """

 import asyncio
-import os
+import logging
 import pathlib
+from abc import abstractmethod
 from typing import *

+from hippolyzer.lib.base import serialization as se
 from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.message.message import Block
+from hippolyzer.lib.base.helpers import get_mtime
+from hippolyzer.lib.base.llanim import Animation
+from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.message.msgtypes import PacketFlags
+from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.addons import AddonManager
-from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, GlobalProperty, show_message
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
 from hippolyzer.lib.proxy.region import ProxiedRegion
-from hippolyzer.lib.proxy.sessions import Session
-
-
-def _get_mtime(path: str):
-    try:
-        return os.stat(path).st_mtime
-    except:
-        return None
+from hippolyzer.lib.proxy.sessions import Session, SessionManager


 class LocalAnimAddon(BaseAddon):
     # name -> path, only for anims actually from files
     local_anim_paths: Dict[str, str] = SessionProperty(dict)
+    # name -> anim bytes
+    local_anim_bytes: Dict[str, bytes] = SessionProperty(dict)
     # name -> mtime or None. Only for anims from files.
     local_anim_mtimes: Dict[str, Optional[float]] = SessionProperty(dict)
     # name -> current asset ID (changes each play)
     local_anim_playing_ids: Dict[str, UUID] = SessionProperty(dict)
+    anim_manglers: List[Callable[[Animation], Animation]] = GlobalProperty(list)
+
+    def handle_init(self, session_manager: SessionManager):
+        self.remangle_local_anims(session_manager)

     def handle_session_init(self, session: Session):
+        # Reload anims and reload any manglers if we have any
         self._schedule_task(self._try_reload_anims(session))

     @handle_command()
@@ -67,11 +82,23 @@
         """Stop a named local animation"""
         self.apply_local_anim(session, region, anim_name, new_data=None)

+    @handle_command(anim_name=str)
+    async def save_local_anim(self, _session: Session, _region: ProxiedRegion, anim_name: str):
+        """Save a named local anim to disk"""
+        anim_bytes = self.local_anim_bytes.get(anim_name)
+        if not anim_bytes:
+            return
+        filename = await AddonManager.UI.save_file(filter_str="SL Anim (*.anim)", default_suffix="anim")
+        if not filename:
+            return
+        with open(filename, "wb") as f:
+            f.write(anim_bytes)
+
     async def _try_reload_anims(self, session: Session):
         while True:
             region = session.main_region
             if not region:
-                await asyncio.sleep(2.0)
+                await asyncio.sleep(1.0)
                 continue

             # Loop over local anims we loaded
@@ -80,19 +107,22 @@
                 if not anim_id:
                     continue
                 # is playing right now, check if there's a newer version
-                self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
-            await asyncio.sleep(2.0)
+                try:
+                    self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
+                except Exception:
+                    logging.exception("Exploded while replaying animation")
+            await asyncio.sleep(1.0)

     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # We only handle commands
         if param != "force":
             return

-        if cmd == "stop_local_anim":
+        if behaviour == "stop_local_anim":
             self.apply_local_anim(session, region, options[0], new_data=None)
             return True
-        elif cmd == "start_local_anim":
+        elif behaviour == "start_local_anim":
             self.apply_local_anim_from_file(session, region, options[0])
             return True

@@ -101,13 +131,14 @@
                          anim_name: str, new_data: Optional[bytes] = None):
         asset_repo: HTTPAssetRepo = session.session_manager.asset_repo
         next_id: Optional[UUID] = None
-        new_msg = ProxiedMessage(
+        new_msg = Message(
            "AgentAnimation",
            Block(
                "AgentData",
                AgentID=session.agent_id,
                SessionID=session.id,
            ),
+           flags=PacketFlags.RELIABLE,
        )

         # Stop any old version of the anim that might be playing first
@@ -128,11 +159,13 @@
                 StartAnim=True,
             ))
             cls.local_anim_playing_ids[anim_name] = next_id
+            cls.local_anim_bytes[anim_name] = new_data
         else:
             # No data means just stop the anim
             cls.local_anim_playing_ids.pop(anim_name, None)
+            cls.local_anim_bytes.pop(anim_name, None)

-        region.circuit.send_message(new_msg)
+        region.circuit.send(new_msg)
         print(f"Changing {anim_name} to {next_id}")

     @classmethod
@@ -142,11 +175,10 @@
         anim_data = None
         if anim_path:
             old_mtime = cls.local_anim_mtimes.get(anim_name)
-            mtime = _get_mtime(anim_path)
+            mtime = get_mtime(anim_path)
             if only_if_changed and old_mtime == mtime:
                 return

-            cls.local_anim_mtimes[anim_name] = mtime
             # file might not even exist anymore if mtime is `None`,
             # anim will automatically stop if that happens.
             if mtime:
@@ -157,9 +189,95 @@

             with open(anim_path, "rb") as f:
                 anim_data = f.read()
+            anim_data = cls._mangle_anim(anim_data)
+            cls.local_anim_mtimes[anim_name] = mtime
         else:
             print(f"Unknown anim {anim_name!r}")
         cls.apply_local_anim(session, region, anim_name, new_data=anim_data)
+
+    @classmethod
+    def _mangle_anim(cls, anim_data: bytes) -> bytes:
+        if not cls.anim_manglers:
+            return anim_data
+        reader = se.BufferReader("<", anim_data)
+        spec = se.Dataclass(Animation)
+        anim = reader.read(spec)
+        for mangler in cls.anim_manglers:
+            anim = mangler(anim)
+        writer = se.BufferWriter("<")
+        writer.write(spec, anim)
+        return writer.copy_buffer()
+
+    @classmethod
+    def remangle_local_anims(cls, session_manager: SessionManager):
+        # Anim manglers are global, so we need to re-mangle anims for all sessions
+        for session in session_manager.sessions:
+            # Push the context of this session onto the stack so we can access
+            # session-scoped properties
+            with addon_ctx.push(new_session=session, new_region=session.main_region):
+                cls.local_anim_mtimes.clear()
+
+    def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
+        if flow.name == "NewFileAgentInventoryUploader":
+            # Don't bother looking at this if we have no manglers
+            if not self.anim_manglers:
+                return
+            # This is kind of a crappy match but these magic bytes shouldn't match anything that SL
+            # allows as an upload type but animations.
+            if not flow.request.content or not flow.request.content.startswith(b"\x01\x00\x00\x00"):
+                return
+
+            # Replace the uploaded anim with the mangled version
+            flow.request.content = self._mangle_anim(flow.request.content)
+            show_message("Mangled upload request")
+
+
+class BaseAnimManglerAddon(BaseAddon):
+    """Base class for addons that mangle uploaded or file-based local animations"""
+    ANIM_MANGLERS: List[Callable[[Animation], Animation]]
+
+    def handle_init(self, session_manager: SessionManager):
+        # Add our manglers into the list
+        LocalAnimAddon.anim_manglers.extend(self.ANIM_MANGLERS)
+        LocalAnimAddon.remangle_local_anims(session_manager)
+
+    def handle_unload(self, session_manager: SessionManager):
+        # Clean up our manglers before we go away
+        mangler_list = LocalAnimAddon.anim_manglers
+        for mangler in self.ANIM_MANGLERS:
+            if mangler in mangler_list:
+                mangler_list.remove(mangler)
+        LocalAnimAddon.remangle_local_anims(session_manager)
+
+
+class BaseAnimHelperAddon(BaseAddon):
+    """
+    Base class for local creation of procedural animations
+
+    Animation generated by build_anim() gets applied to all active sessions
+    """
+    ANIM_NAME: str
+
+    def handle_session_init(self, session: Session):
+        self._reapply_anim(session, session.main_region)
+
+    def handle_session_closed(self, session: Session):
+        LocalAnimAddon.apply_local_anim(session, session.main_region, self.ANIM_NAME, None)
+
+    def handle_unload(self, session_manager: SessionManager):
+        for session in session_manager.sessions:
+            # TODO: Nasty. Since we need to access session-local attrs we need to set the
+            # context even though we also explicitly pass session and region.
+            # Need to rethink the LocalAnimAddon API.
+            with addon_ctx.push(session, session.main_region):
+                LocalAnimAddon.apply_local_anim(session, session.main_region, self.ANIM_NAME, None)
+
+    @abstractmethod
+    def build_anim(self) -> Animation:
+        pass
+
+    def _reapply_anim(self, session: Session, region: ProxiedRegion):
+        LocalAnimAddon.apply_local_anim(session, region, self.ANIM_NAME, self.build_anim().to_bytes())
+
+
 addons = [LocalAnimAddon()]
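A minimal sketch of a BaseAnimManglerAddon subclass in the spirit of the mesh mangler example further down, covering the "changing priority or removing a joint" use case the docstring mentions. The base_priority attribute and the joints mapping are assumptions about the Animation dataclass, and the import/hot-reload pattern is borrowed from the mesh example:

from typing import Callable

from hippolyzer.lib.base.llanim import Animation
from hippolyzer.lib.proxy.addons import AddonManager

import local_anim
AddonManager.hot_reload(local_anim, require_addons_loaded=True)


def _stomp_priority(anim: Animation) -> Animation:
    # base_priority is an assumed Animation field (SL anims carry one)
    anim.base_priority = 6
    return anim


def _drop_joint(joint_name: str) -> Callable[[Animation], Animation]:
    def _mangler(anim: Animation) -> Animation:
        # joints is assumed to be a mapping keyed by joint name
        anim.joints.pop(joint_name, None)
        return anim
    return _mangler


class ExampleAnimManglerAddon(local_anim.BaseAnimManglerAddon):
    ANIM_MANGLERS = [
        _stomp_priority,
        _drop_joint("mSkull"),
    ]


addons = [ExampleAnimManglerAddon()]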
@@ -23,23 +23,22 @@ import ctypes
 import secrets
 from typing import *

-import mitmproxy
-from mitmproxy.http import HTTPFlow
+import mitmproxy.http

 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import *
 from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset
 from hippolyzer.lib.base import serialization as se
 from hippolyzer.lib.base.objects import Object
+from hippolyzer.lib.base.templates import ExtraParamType
 from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon, GlobalProperty, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.templates import ExtraParamType


 def _modify_crc(crc_tweak, crc_val):
@@ -82,17 +81,16 @@ class MeshUploadInterceptingAddon(BaseAddon):

     @handle_command()
     async def set_local_mesh_target(self, session: Session, region: ProxiedRegion):
-        """Set the currently selected object as the target for local mesh"""
-        parent_object = region.objects.lookup_localid(session.selected.object_local)
-        if not parent_object:
+        """Set the currently selected objects as the target for local mesh"""
+        selected_links = [region.objects.lookup_localid(l_id) for l_id in session.selected.object_locals]
+        selected_links = [o for o in selected_links if o is not None]
+        if not selected_links:
             show_message("Nothing selected")
             return
-        linkset_objects = [parent_object] + parent_object.Children

         old_locals = self.local_mesh_target_locals
         self.local_mesh_target_locals = [
             x.LocalID
-            for x in linkset_objects
+            for x in selected_links
             if ExtraParamType.MESH in x.ExtraParams
         ]

@@ -126,7 +124,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         region.objects.request_objects(old_locals)
         show_message(f"Cleared target {old_locals}")

-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         # Replace any mesh asset IDs in tracked objects with our local assets
         if not self.local_mesh_target_locals:
             return
@@ -202,7 +200,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         self.local_mesh_mapping = {x["mesh_name"]: x["mesh"] for x in instances}

         # Fake a response, we don't want to actually send off the request.
-        flow.response = mitmproxy.http.HTTPResponse.make(
+        flow.response = mitmproxy.http.Response.make(
             200,
             b"",
             {
@@ -232,7 +230,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         show_message("Mangled upload request")

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if obj.LocalID not in self.local_mesh_target_locals:
             return
         if "Name" not in updated_props or obj.Name is None:
@@ -281,4 +279,23 @@ class MeshUploadInterceptingAddon(BaseAddon):
         cls._replace_local_mesh(session.main_region, asset_repo, mesh_list)


+class BaseMeshManglerAddon(BaseAddon):
+    """Base class for addons that mangle uploaded or local mesh"""
+    MESH_MANGLERS: List[Callable[[MeshAsset], MeshAsset]]
+
+    def handle_init(self, session_manager: SessionManager):
+        # Add our manglers into the list
+        MeshUploadInterceptingAddon.mesh_manglers.extend(self.MESH_MANGLERS)
+        # Tell the local mesh plugin that the mangler list changed, and to re-apply
+        MeshUploadInterceptingAddon.remangle_local_mesh(session_manager)
+
+    def handle_unload(self, session_manager: SessionManager):
+        # Clean up our manglers before we go away
+        mangler_list = MeshUploadInterceptingAddon.mesh_manglers
+        for mangler in self.MESH_MANGLERS:
+            if mangler in mangler_list:
+                mangler_list.remove(mangler)
+        MeshUploadInterceptingAddon.remangle_local_mesh(session_manager)
+
+
 addons = [MeshUploadInterceptingAddon()]
@@ -8,28 +8,16 @@ applied to the mesh before upload.
 I personally use manglers to strip bounding box materials you need
 to add to give a mesh an arbitrary center of rotation / scaling.
 """

+from hippolyzer.lib.base.helpers import reorient_coord
 from hippolyzer.lib.base.mesh import MeshAsset
 from hippolyzer.lib.proxy.addons import AddonManager
-from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.sessions import SessionManager

 import local_mesh
 AddonManager.hot_reload(local_mesh, require_addons_loaded=True)


-def _reorient_coord(coord, orientation):
-    coords = []
-    for axis in orientation:
-        axis_idx = abs(axis) - 1
-        coords.append(coord[axis_idx] if axis >= 0 else 1.0 - coord[axis_idx])
-    if coord.__class__ in (list, tuple):
-        return coord.__class__(coords)
-    return coord.__class__(*coords)
-
-
-def _reorient_coord_list(coord_list, orientation):
-    return [_reorient_coord(x, orientation) for x in coord_list]
+def _reorient_coord_list(coord_list, orientation, min_val: int | float = 0):
+    return [reorient_coord(x, orientation, min_val) for x in coord_list]


 def reorient_mesh(orientation):
@@ -37,37 +25,23 @@ def reorient_mesh(orientation):
     # X=1, Y=2, Z=3
     def _reorienter(mesh: MeshAsset):
         for material in mesh.iter_lod_materials():
             if "Position" not in material:
                 # Must be a NoGeometry LOD
                 continue
             # We don't need to use positions_(to/from)_domain here since we're just naively
             # flipping the axes around.
             material["Position"] = _reorient_coord_list(material["Position"], orientation)
             # Are you even supposed to do this to the normals?
-            material["Normal"] = _reorient_coord_list(material["Normal"], orientation)
+            material["Normal"] = _reorient_coord_list(material["Normal"], orientation, min_val=-1)
         return mesh
     return _reorienter


-OUR_MANGLERS = [
-    # Negate the X and Y axes on any mesh we upload or create temp
-    reorient_mesh((-1, -2, 3)),
-]
+class ExampleMeshManglerAddon(local_mesh.BaseMeshManglerAddon):
+    MESH_MANGLERS = [
+        # Negate the X and Y axes on any mesh we upload or create temp
+        reorient_mesh((-1, -2, 3)),
+    ]


-class MeshManglerExampleAddon(BaseAddon):
-    def handle_init(self, session_manager: SessionManager):
-        # Add our manglers into the list
-        local_mesh_addon = local_mesh.MeshUploadInterceptingAddon
-        local_mesh_addon.mesh_manglers.extend(OUR_MANGLERS)
-        # Tell the local mesh plugin that the mangler list changed, and to re-apply
-        local_mesh_addon.remangle_local_mesh(session_manager)
-
-    def handle_unload(self, session_manager: SessionManager):
-        # Clean up our manglers before we go away
-        local_mesh_addon = local_mesh.MeshUploadInterceptingAddon
-        mangler_list = local_mesh_addon.mesh_manglers
-        for mangler in OUR_MANGLERS:
-            if mangler in mangler_list:
-                mangler_list.remove(mangler)
-        local_mesh_addon.remangle_local_mesh(session_manager)
-
-
-addons = [MeshManglerExampleAddon()]
+addons = [ExampleMeshManglerAddon()]
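For reference, the orientation tuples use the encoding from reorient_mesh's comment: X=1, Y=2, Z=3, with a negative index flipping that axis. A tiny sketch that spells a tuple out, mirroring the old _reorient_coord helper that moved into hippolyzer.lib.base.helpers:

def describe_orientation(orientation):
    # Decode the (signed, 1-indexed) axis tuple used by reorient_mesh
    names = {1: "X", 2: "Y", 3: "Z"}
    return ", ".join(
        f"{'-' if axis < 0 else ''}{names[abs(axis)]}"
        for axis in orientation
    )


print(describe_orientation((-1, -2, 3)))  # -X, -Y, Z: mirror across both horizontal axes
print(describe_orientation((2, 1, 3)))    # Y, X, Z: swap the X and Y axes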
244  addon_examples/message_mirror.py  Normal file
@@ -0,0 +1,244 @@
"""
|
||||
Message Mirror
|
||||
|
||||
Re-routes messages through the circuit of another agent running through this proxy,
|
||||
rewriting the messages to use the credentials tied to that circuit.
|
||||
|
||||
Useful if you need to quickly QA authorization checks on a message handler or script.
|
||||
Or if you want to chat as two people at once. Whatever.
|
||||
Also shows some advanced ways of managing / rerouting Messages and HTTP flows.
|
||||
|
||||
Fiddle with the values of `SEND_NORMALLY` and `MIRROR` to change how and which
|
||||
messages get moved to other circuits.
|
||||
|
||||
Usage: /524 mirror_to <mirror_agent_uuid>
|
||||
To Disable: /524 mirror_to
|
||||
"""
|
||||
|
||||
import weakref
|
||||
from typing import Optional
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.message.message import Message
|
||||
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
|
||||
from hippolyzer.lib.base.network.transport import Direction
|
||||
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, show_message
|
||||
from hippolyzer.lib.proxy.commands import handle_command, Parameter, parse_bool
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.caps import CapData, CapType
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session, SessionManager
|
||||
|
||||
# Things that make no sense to mirror, or will make everything explode if mirrored.
|
||||
SEND_NORMALLY = {
|
||||
'StartPingCheck', 'CompletePingCheck', 'PacketAck', 'SimulatorViewerTimeMessage', 'SimStats',
|
||||
'SoundTrigger', 'EventQueueGet', 'GetMesh', 'GetMesh2', 'ParcelDwellRequest', 'ViewerEffect', 'ViewerStats',
|
||||
'ParcelAccessListRequest', 'FirestormBridge', 'AvatarRenderInfo', 'ParcelPropertiesRequest', 'GetObjectCost',
|
||||
'RequestMultipleObjects', 'GetObjectPhysicsData', 'GetExperienceInfo', 'RequestTaskInventory', 'AgentRequestSit',
|
||||
'MuteListRequest', 'UpdateMuteListEntry', 'RemoveMuteListEntry', 'RequestImage',
|
||||
'AgentThrottle', 'UseCircuitCode', 'AgentWearablesRequest', 'AvatarPickerRequest', 'CloseCircuit',
|
||||
'CompleteAgentMovement', 'RegionHandshakeReply', 'LogoutRequest', 'ParcelPropertiesRequest',
|
||||
'ParcelPropertiesRequestByID', 'MapBlockRequest', 'MapLayerRequest', 'MapItemRequest', 'MapNameRequest',
|
||||
'ParcelAccessListRequest', 'AvatarPropertiesRequest', 'DirFindQuery',
|
||||
'SetAlwaysRun', 'GetDisplayNames', 'ViewerMetrics', 'AgentResume', 'AgentPause',
|
||||
'ViewerAsset', 'GetTexture', 'UUIDNameRequest', 'AgentUpdate', 'AgentAnimation'
|
||||
# Would just be confusing for everyone
|
||||
'ImprovedInstantMessage',
|
||||
# Xfer system isn't authed to begin with, and duping Xfers can lead to premature file deletion. Skip.
|
||||
'RequestXfer', 'ConfirmXferPacket', 'AbortXfer', 'SendXferPacket',
|
||||
}
|
||||
|
||||
# Messages that _must_ be sent normally, but are worth mirroring onto the target session to see how
|
||||
# they would respond
|
||||
MIRROR = {
|
||||
'RequestObjectPropertiesFamily', 'ObjectSelect', 'RequestObjectProperties', 'TransferRequest',
|
||||
'RequestMultipleObjects', 'RequestTaskInventory', 'FetchInventory2', 'ScriptDialogReply',
|
||||
'ObjectDeselect', 'GenericMessage', 'ChatFromViewer'
|
||||
}
|
||||
|
||||
for msg_name in DEFAULT_TEMPLATE_DICT.message_templates.keys():
|
||||
# There are a lot of these.
|
||||
if msg_name.startswith("Group") and msg_name.endswith("Request"):
|
||||
MIRROR.add(msg_name)
|
||||
|
||||
|
||||
class MessageMirrorAddon(BaseAddon):
|
||||
mirror_target_agent: Optional[UUID] = SessionProperty(None)
|
||||
mirror_use_target_session: bool = SessionProperty(True)
|
||||
mirror_use_target_agent: bool = SessionProperty(True)
|
||||
|
||||
@handle_command(target_agent=Parameter(UUID, optional=True))
|
||||
async def mirror_to(self, session: Session, _region, target_agent: Optional[UUID] = None):
|
||||
"""
|
||||
Send this session's outbound messages over another proxied agent's circuit
|
||||
"""
|
||||
if target_agent:
|
||||
if target_agent == session.agent_id:
|
||||
show_message("Can't mirror our own session")
|
||||
target_agent = None
|
||||
elif not any(s.agent_id == target_agent for s in session.session_manager.sessions):
|
||||
show_message(f"No active proxied session for agent {target_agent}")
|
||||
target_agent = None
|
||||
|
||||
self.mirror_target_agent = target_agent
|
||||
if target_agent:
|
||||
show_message(f"Mirroring to {target_agent}")
|
||||
else:
|
||||
show_message("Message mirroring disabled")
|
||||
|
||||
@handle_command(enabled=parse_bool)
|
||||
async def set_mirror_use_target_session(self, _session, _region, enabled):
|
||||
"""Replace the original session ID with the target session's ID when mirroring"""
|
||||
self.mirror_use_target_session = enabled
|
||||
|
||||
@handle_command(enabled=parse_bool)
|
||||
async def set_mirror_use_target_agent(self, _session, _region, enabled):
|
||||
"""Replace the original agent ID with the target agent's ID when mirroring"""
|
||||
self.mirror_use_target_agent = enabled
|
||||
|
||||
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
|
||||
if message.direction != Direction.OUT:
|
||||
return
|
||||
|
||||
if not self.mirror_target_agent:
|
||||
return
|
||||
|
||||
if message.name in SEND_NORMALLY:
|
||||
return
|
||||
|
||||
target_session = None
|
||||
for poss_session in session.session_manager.sessions:
|
||||
if poss_session.agent_id == self.mirror_target_agent:
|
||||
target_session = poss_session
|
||||
|
||||
if not target_session:
|
||||
print("Couldn't find target session?")
|
||||
return
|
||||
|
||||
target_region = None
|
||||
for poss_region in target_session.regions:
|
||||
if poss_region.circuit_addr == region.circuit_addr:
|
||||
target_region = poss_region
|
||||
|
||||
if not target_region:
|
||||
print("Couldn't find equivalent target region?")
|
||||
return
|
||||
|
||||
# Send the message normally first if we're mirroring
|
||||
if message.name in MIRROR:
|
||||
region.circuit.send(message)
|
||||
|
||||
# We're going to send the message on a new circuit, we need to take
|
||||
# it so we get a new packet ID and clean ACKs
|
||||
message = message.take()
|
||||
|
||||
self._lludp_fixups(target_session, message)
|
||||
target_region.circuit.send(message)
|
||||
return True
|
||||
|
||||
def _lludp_fixups(self, target_session: Session, message: Message):
|
||||
if "AgentData" in message:
|
||||
agent_block = message["AgentData"][0]
|
||||
if "AgentID" in agent_block and self.mirror_use_target_agent:
|
||||
agent_block["AgentID"] = target_session.agent_id
|
||||
if "SessionID" in agent_block and self.mirror_use_target_session:
|
||||
agent_block["SessionID"] = target_session.id
|
||||
|
||||
if message.name == "TransferRequest":
|
||||
transfer_block = message["TransferInfo"][0]
|
||||
# This is a duplicated message so we need to give it a new ID
|
||||
transfer_block["TransferID"] = UUID.random()
|
||||
params = transfer_block.deserialize_var("Params")
|
||||
# This kind of Transfer might not even use agent credentials
|
||||
if self.mirror_use_target_agent and hasattr(params, 'AgentID'):
|
||||
params.AgentID = target_session.agent_id
|
||||
if self.mirror_use_target_session and hasattr(params, 'SessionID'):
|
||||
params.SessionID = target_session.id
|
||||
transfer_block.serialize_var("Params", params)
|
||||
|
||||
def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
|
||||
# Already mirrored, ignore.
|
||||
if flow.is_replay:
|
||||
return
|
||||
|
||||
cap_data = flow.cap_data
|
||||
if not cap_data:
|
||||
return
|
||||
if cap_data.cap_name in SEND_NORMALLY:
|
||||
return
|
||||
|
||||
if cap_data.asset_server_cap:
|
||||
return
|
||||
# Likely doesn't have an exact equivalent in the target session, this is a temporary
|
||||
# cap like an uploader URL or a stats URL.
|
||||
if cap_data.type == CapType.TEMPORARY:
|
||||
return
|
||||
|
||||
session: Optional[Session] = cap_data.session and cap_data.session()
|
||||
if not session:
|
||||
return
|
||||
|
||||
region: Optional[ProxiedRegion] = cap_data.region and cap_data.region()
|
||||
if not region:
|
||||
return
|
||||
|
||||
# Session-scoped, so we need to know if we have a session before checking
|
||||
if not self.mirror_target_agent:
|
||||
return
|
||||
|
||||
target_session: Optional[Session] = None
|
||||
for poss_session in session.session_manager.sessions:
|
||||
if poss_session.agent_id == self.mirror_target_agent:
|
||||
target_session = poss_session
|
||||
if not target_session:
|
||||
return
|
||||
|
||||
caps_source = target_session
|
||||
target_region: Optional[ProxiedRegion] = None
|
||||
if region:
|
||||
target_region = None
|
||||
for poss_region in target_session.regions:
|
||||
if poss_region.circuit_addr == region.circuit_addr:
|
||||
target_region = poss_region
|
||||
|
||||
if not target_region:
|
||||
print("No region in cap?")
|
||||
return
|
||||
caps_source = target_region
|
||||
|
||||
new_base_url = caps_source.cap_urls.get(cap_data.cap_name)
|
||||
if not new_base_url:
|
||||
print("No equiv cap?")
|
||||
return
|
||||
|
||||
if cap_data.cap_name in MIRROR:
|
||||
flow = flow.copy()
|
||||
|
||||
# Have the cap data reflect the new URL we're pointing at
|
||||
flow.metadata["cap_data"] = CapData(
|
||||
cap_name=cap_data.cap_name,
|
||||
region=weakref.ref(target_region) if target_region else None,
|
||||
session=weakref.ref(target_session),
|
||||
base_url=new_base_url,
|
||||
)
|
||||
|
||||
# Tack any params onto the new base URL for the cap
|
||||
new_url = new_base_url + flow.request.url[len(cap_data.base_url):]
|
||||
flow.request.url = new_url
|
||||
|
||||
if cap_data.cap_name in MIRROR:
|
||||
self._replay_flow(flow, session.session_manager)
|
||||
|
||||
def _replay_flow(self, flow: HippoHTTPFlow, session_manager: SessionManager):
|
||||
# Work around mitmproxy bug, changing the URL updates the Host header, which may
|
||||
# cause it to drop the port even when it shouldn't have. Fix the host header.
|
||||
if flow.request.port not in (80, 443) and ":" not in flow.request.host_header:
|
||||
flow.request.host_header = f"{flow.request.host}:{flow.request.port}"
|
||||
# Should get repopulated when it goes back through the MITM addon
|
||||
flow.metadata.pop("cap_data_ser", None)
|
||||
flow.metadata.pop("cap_data", None)
|
||||
proxy_queue = session_manager.flow_context.to_proxy_queue
|
||||
proxy_queue.put_nowait(("replay", None, flow.get_state()))
|
||||
|
||||
|
||||
addons = [MessageMirrorAddon()]
|
||||
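Since the docstring invites fiddling with SEND_NORMALLY and MIRROR, a sketch of doing that from a separate addon file, assuming this module is importable as message_mirror the same way the mesh mangler example imports local_mesh:

from hippolyzer.lib.proxy.addons import AddonManager

import message_mirror
AddonManager.hot_reload(message_mirror, require_addons_loaded=True)

# Hypothetical tweaks: never reroute teleports for this experiment...
message_mirror.SEND_NORMALLY.add('TeleportLocationRequest')
# ...but do mirror legacy inventory fetches, to watch how the sim treats
# them under the other agent's credentials.
message_mirror.MIRROR.add('FetchInventoryDescendents')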
49  addon_examples/mock_proxy_cap.py  Normal file
@@ -0,0 +1,49 @@
"""
|
||||
Example of proxy-provided caps
|
||||
|
||||
Useful for mocking out a cap that isn't actually implemented by the server
|
||||
while developing the viewer-side pieces of it.
|
||||
|
||||
Implements a cap that accepts an `obj_id` UUID query parameter and returns
|
||||
the name of the object.
|
||||
"""
|
||||
import asyncio
|
||||
import asgiref.wsgi
|
||||
|
||||
from flask import Flask, Response, request
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.proxy import addon_ctx
|
||||
from hippolyzer.lib.proxy.webapp_cap_addon import WebAppCapAddon
|
||||
|
||||
app = Flask("GetObjectNameCapApp")
|
||||
|
||||
|
||||
@app.route('/')
|
||||
async def get_object_name():
|
||||
# Should always have the current region, the cap handler is bound to one.
|
||||
# Just need to pull it from the `addon_ctx` module's global.
|
||||
obj_mgr = addon_ctx.region.get().objects
|
||||
obj_id = UUID(request.args['obj_id'])
|
||||
obj = obj_mgr.lookup_fullid(obj_id)
|
||||
if not obj:
|
||||
return Response(f"Couldn't find {obj_id!r}", status=404, mimetype="text/plain")
|
||||
|
||||
try:
|
||||
await asyncio.wait_for(obj_mgr.request_object_properties(obj)[0], 1.0)
|
||||
except asyncio.TimeoutError:
|
||||
return Response(f"Timed out requesting {obj_id!r}'s properties", status=500, mimetype="text/plain")
|
||||
|
||||
return Response(obj.Name, mimetype="text/plain")
|
||||
|
||||
|
||||
class MockProxyCapExampleAddon(WebAppCapAddon):
|
||||
# A cap URL with this name will be tied to each region when
|
||||
# the sim is first connected to. The URL will be returned to the
|
||||
# viewer in the Seed if the viewer requests it by name.
|
||||
CAP_NAME = "GetObjectNameExample"
|
||||
# Any asgi app should be fine.
|
||||
APP = asgiref.wsgi.WsgiToAsgi(app)
|
||||
|
||||
|
||||
addons = [MockProxyCapExampleAddon()]
|
||||
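Exercising the mock cap doesn't need any viewer changes; any HTTP client pointed at the granted URL works. A sketch along the same lines as the task inventory example above, again assuming aiohttp is available:

import aiohttp

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.region import ProxiedRegion


async def fetch_object_name(region: ProxiedRegion, obj_id: UUID) -> str:
    # Proxy-provided caps get registered under CAP_NAME like any sim cap
    cap_url = region.cap_urls["GetObjectNameExample"]
    async with aiohttp.ClientSession() as http:
        async with http.get(cap_url, params={"obj_id": str(obj_id)}) as resp:
            resp.raise_for_status()
            return await resp.text()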
@@ -27,16 +27,32 @@ from mitmproxy.http import HTTPFlow
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.jp2_utils import BufferedJp2k
 from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher
+from hippolyzer.lib.base.templates import TextureEntryCollection
 from hippolyzer.lib.proxy.addon_utils import AssetAliasTracker, BaseAddon, GlobalProperty, AddonProcess
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.templates import TextureEntry


 glymur.set_option('lib.num_threads', 4)

+# These should never be replaced, they're only used as aliases to tell the viewer
+# it should fetch the relevant texture from the appearance service
+BAKES_ON_MESH_TEXTURE_IDS = {UUID(x) for x in (
+    "5a9f4a74-30f2-821c-b88d-70499d3e7183",
+    "ae2de45c-d252-50b8-5c6e-19f39ce79317",
+    "24daea5f-0539-cfcf-047f-fbc40b2786ba",
+    "52cc6bb6-2ee5-e632-d3ad-50197b1dcb8a",
+    "43529ce8-7faa-ad92-165a-bc4078371687",
+    "09aac1fb-6bce-0bee-7d44-caac6dbb6c63",
+    "ff62763f-d60a-9855-890b-0c96f8f8cd98",
+    "8e915e25-31d1-cc95-ae08-d58a47488251",
+    "9742065b-19b5-297c-858a-29711d539043",
+    "03642e83-2bd1-4eb9-34b4-4c47ed586d2d",
+    "edd51b77-fc10-ce7a-4b3d-011dfc349e4f",
+)}
+

 def _modify_crc(crc_tweak: int, crc_val: int):
     return ctypes.c_uint32(crc_val ^ crc_tweak).value
@@ -82,7 +98,7 @@ class MonochromeAddon(BaseAddon):
         # Tell queue consumers to shut down
         self.mono_addon_shutdown_signal.set()

-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         tracker = self.mono_tracker
         if message.name == "ObjectUpdateCached":
             for block in message["ObjectData"]:
@@ -132,11 +148,13 @@ class MonochromeAddon(BaseAddon):
             message["RegionInfo"][field_name] = tracker.get_alias_uuid(val)

     @staticmethod
-    def _make_te_monochrome(tracker: AssetAliasTracker, parsed_te: TextureEntry):
+    def _make_te_monochrome(tracker: AssetAliasTracker, parsed_te: TextureEntryCollection):
         # Need a deepcopy because TEs are owned by the ObjectManager
         # and we don't want to change the canonical view.
         parsed_te = copy.deepcopy(parsed_te)
         for k, v in parsed_te.Textures.items():
+            if v in BAKES_ON_MESH_TEXTURE_IDS:
+                continue
             # Replace textures with their alias to bust the viewer cache
             parsed_te.Textures[k] = tracker.get_alias_uuid(v)
         for k, v in parsed_te.Color.items():
@@ -166,6 +184,8 @@ class MonochromeAddon(BaseAddon):
         orig_texture_id = self.mono_tracker.get_orig_uuid(UUID(texture_id))
         if not orig_texture_id:
             return
+        if orig_texture_id in BAKES_ON_MESH_TEXTURE_IDS:
+            return

         # The request was for a fake texture ID we created, rewrite the request to
         # request the real asset and mark the flow for modification once we receive
111  addon_examples/object_management_validator.py  Normal file
@@ -0,0 +1,111 @@
"""
|
||||
Check object manager state against region ViewerObject cache
|
||||
|
||||
Can't look at every object we've tracked and every object in VOCache
|
||||
and report mismatches due to weird VOCache cache eviction criteria and certain
|
||||
cacheable objects not being added to the VOCache.
|
||||
|
||||
Off the top of my head, animesh objects get explicit KillObjects at extreme
|
||||
view distances same as avatars, but will still be present in the cache even
|
||||
though they will not be in gObjectList.
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.objects import normalize_object_update_compressed_data
|
||||
from hippolyzer.lib.base.templates import ObjectUpdateFlags, PCode
|
||||
from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
|
||||
from hippolyzer.lib.base.message.message import Message
|
||||
from hippolyzer.lib.proxy.addons import AddonManager
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import SessionManager, Session
|
||||
from hippolyzer.lib.proxy.vocache import is_valid_vocache_dir, RegionViewerObjectCacheChain
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ObjectManagementValidator(BaseAddon):
|
||||
base_cache_path: Optional[str] = GlobalProperty(None)
|
||||
orig_auto_request: Optional[bool] = GlobalProperty(None)
|
||||
|
||||
def handle_init(self, session_manager: SessionManager):
|
||||
if self.orig_auto_request is None:
|
||||
self.orig_auto_request = session_manager.settings.ALLOW_AUTO_REQUEST_OBJECTS
|
||||
session_manager.settings.ALLOW_AUTO_REQUEST_OBJECTS = False
|
||||
|
||||
async def _choose_cache_path():
|
||||
while not self.base_cache_path:
|
||||
cache_dir = await AddonManager.UI.open_dir("Choose the base cache directory")
|
||||
if not cache_dir:
|
||||
return
|
||||
if not is_valid_vocache_dir(cache_dir):
|
||||
continue
|
||||
self.base_cache_path = cache_dir
|
||||
|
||||
if not self.base_cache_path:
|
||||
self._schedule_task(_choose_cache_path(), session_scoped=False)
|
||||
|
||||
def handle_unload(self, session_manager: SessionManager):
|
||||
session_manager.settings.ALLOW_AUTO_REQUEST_OBJECTS = self.orig_auto_request
|
||||
|
||||
def handle_session_init(self, session: Session):
|
||||
# Use only the specified cache path for the vocache
|
||||
session.cache_dir = self.base_cache_path
|
||||
|
||||
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
|
||||
if message.name != "DisableSimulator":
|
||||
return
|
||||
# Send it off to the client without handling it normally,
|
||||
# we need to defer region teardown in the proxy
|
||||
region.circuit.send(message)
|
||||
self._schedule_task(self._check_cache_before_region_teardown(region))
|
||||
return True
|
||||
|
||||
async def _check_cache_before_region_teardown(self, region: ProxiedRegion):
|
||||
await asyncio.sleep(0.5)
|
||||
print("Ok, checking cache differences")
|
||||
try:
|
||||
# Index will have been rewritten, so re-read it.
|
||||
region_cache_chain = RegionViewerObjectCacheChain.for_region(
|
||||
handle=region.handle,
|
||||
cache_id=region.cache_id,
|
||||
cache_dir=self.base_cache_path
|
||||
)
|
||||
if not region_cache_chain.region_caches:
|
||||
print(f"no caches for {region!r}?")
|
||||
return
|
||||
all_full_ids = set()
|
||||
for obj in region.objects.all_objects:
|
||||
cacheable = True
|
||||
orig_obj = obj
|
||||
# Walk along the ancestry checking for things that would make the tree non-cacheable
|
||||
while obj is not None:
|
||||
if obj.UpdateFlags & ObjectUpdateFlags.TEMPORARY_ON_REZ:
|
||||
cacheable = False
|
||||
if obj.PCode == PCode.AVATAR:
|
||||
cacheable = False
|
||||
obj = obj.Parent
|
||||
if cacheable:
|
||||
all_full_ids.add(orig_obj.FullID)
|
||||
|
||||
for key in all_full_ids:
|
||||
obj = region.objects.lookup_fullid(key)
|
||||
cached_data = region_cache_chain.lookup_object_data(obj.LocalID, obj.CRC)
|
||||
if not cached_data:
|
||||
continue
|
||||
orig_dict = obj.to_dict()
|
||||
parsed_data = normalize_object_update_compressed_data(cached_data)
|
||||
updated = obj.update_properties(parsed_data)
|
||||
# Can't compare this yet
|
||||
updated -= {"TextureEntry"}
|
||||
if updated:
|
||||
print(key)
|
||||
for attr in updated:
|
||||
print("\t", attr, orig_dict[attr], parsed_data[attr])
|
||||
finally:
|
||||
# Ok to teardown region in the proxy now
|
||||
region.mark_dead()
|
||||
|
||||
|
||||
addons = [ObjectManagementValidator()]
|
||||
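The same comparison the teardown check runs can be pointed at a single object when chasing one mismatch. A sketch reusing only calls that appear above:

from typing import Optional, Set

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.objects import normalize_object_update_compressed_data
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.vocache import RegionViewerObjectCacheChain


def diff_one_object(region: ProxiedRegion, chain: RegionViewerObjectCacheChain,
                    full_id: UUID) -> Optional[Set[str]]:
    obj = region.objects.lookup_fullid(full_id)
    if not obj:
        return None
    cached_data = chain.lookup_object_data(obj.LocalID, obj.CRC)
    if not cached_data:
        return None  # CRC miss: no comparable cache entry for this object
    parsed = normalize_object_update_compressed_data(cached_data)
    # update_properties() returns the set of property names that changed
    return obj.update_properties(parsed) - {"TextureEntry"}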
@@ -10,17 +10,18 @@ before you start tracking can help too.
 from typing import *

 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.objects import Object
+from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import PCode


 class ObjectUpdateBlameAddon(BaseAddon):
     update_blame_counter: Counter[UUID] = SessionProperty(Counter)
-    track_update_blame: bool = SessionProperty(False)
+    should_track_update_blame: bool = SessionProperty(False)

     @handle_command()
     async def precache_objects(self, _session: Session, region: ProxiedRegion):
@@ -38,11 +39,11 @@ class ObjectUpdateBlameAddon(BaseAddon):

     @handle_command()
     async def track_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = True
+        self.should_track_update_blame = True

     @handle_command()
     async def untrack_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = False
+        self.should_track_update_blame = False

     @handle_command()
     async def clear_update_blame(self, _session: Session, _region: ProxiedRegion):
@@ -57,8 +58,8 @@ class ObjectUpdateBlameAddon(BaseAddon):
             print(f"{obj_id} ({name!r}): {count}")

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
-        if not self.track_update_blame:
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
+        if not self.should_track_update_blame:
             return
         if region != session.main_region:
             return
21 addon_examples/packet_stats.py Normal file
@@ -0,0 +1,21 @@
import collections

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class PacketStatsAddon(BaseAddon):
    packet_stats: collections.Counter = GlobalProperty(collections.Counter)

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        self.packet_stats[message.name] += 1

    @handle_command()
    async def print_packet_stats(self, _session: Session, _region: ProxiedRegion):
        print(self.packet_stats.most_common(10))


addons = [PacketStatsAddon()]
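Commands registered with `@handle_command()` are driven from the viewer over local chat on channel 524, the same convention the uploader and tail-animation examples below document, so with this addon loaded you would type:

    /524 print_packet_stats

in local chat to print the ten most common message names seen so far.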
@@ -3,16 +3,15 @@ Do the money dance whenever someone in the sim pays you directly
"""

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import MoneyTransactionType, ChatType
from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import MoneyTransactionType, PCode, ChatType


class PaydayAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.name != "MoneyBalanceReply":
            return
        transaction_block = message["TransactionInfo"][0]
@@ -28,8 +27,8 @@ class PaydayAddon(BaseAddon):
            return

        # Check if they're likely to be in the sim
        sender_obj = region.objects.lookup_fullid(sender)
        if not sender_obj or sender_obj.PCode != PCode.AVATAR:
        sender_obj = region.objects.lookup_avatar(sender)
        if not sender_obj:
            return

        amount = transaction_block['Amount']
@@ -38,7 +37,7 @@ class PaydayAddon(BaseAddon):
            chat_type=ChatType.SHOUT,
        )
        # Do the traditional money dance.
        session.main_region.circuit.send_message(ProxiedMessage(
        session.main_region.circuit.send(Message(
            "AgentAnimation",
            Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
            Block("AnimationList", AnimID=UUID("928cae18-e31d-76fd-9cc9-2f55160ff818"), StartAnim=True),

160 addon_examples/pixel_artist.py Normal file
@@ -0,0 +1,160 @@
"""
Import a small image (like a nintendo sprite) and create it out of cube prims

Inefficient and doesn't even do line fill, expect it to take `width * height`
prims for whatever image you import!
"""

import asyncio
import struct
from typing import *

from PySide6.QtGui import QImage

from hippolyzer.lib.base.datatypes import UUID, Vector3, Quaternion
from hippolyzer.lib.base.helpers import to_chunks
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import ObjectUpdateFlags, PCode, MCode, MultipleObjectUpdateFlags, \
    TextureEntryCollection, JUST_CREATED_FLAGS
from hippolyzer.lib.client.object_manager import ObjectEvent, ObjectUpdateType
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


PRIM_SCALE = 0.2


class PixelArtistAddon(BaseAddon):
    @handle_command()
    async def import_pixel_art(self, session: Session, region: ProxiedRegion):
        """
        Import a small image (like a nintendo sprite) and create it out of cube prims
        """
        filename = await AddonManager.UI.open_file(
            "Open an image",
            filter_str="Images (*.png *.jpg *.jpeg *.bmp)",
        )
        if not filename:
            return
        img = QImage()
        with open(filename, "rb") as f:
            img.loadFromData(f.read(), format=None)
        img = img.convertToFormat(QImage.Format_RGBA8888)
        height = img.height()
        width = img.width()
        pixels: List[Optional[bytes]] = []
        needed_prims = 0
        for y in range(height):
            for x in range(width):
                color: int = img.pixel(x, y)
                # This will be ARGB, SL wants RGBA
                alpha = (color & 0xFF000000) >> 24
                color = color & 0x00FFFFFF
                if alpha > 20:
                    # Repack as RGBA, the bytes format we use for colors
                    pixels.append(struct.pack("!I", (color << 8) | alpha))
                    needed_prims += 1
                else:
                    # Pretty transparent, skip it
                    pixels.append(None)

        if not await AddonManager.UI.confirm("Confirm prim use", f"This will take {needed_prims} prims"):
            return

        agent_obj = region.objects.lookup_fullid(session.agent_id)
        agent_pos = agent_obj.RegionPosition

        created_prims = []
        # Watch for any newly created prims, this is basically what the viewer does to find
        # prims that it just created with the build tool.
        with session.objects.events.subscribe_async(
            (ObjectUpdateType.UPDATE,),
            predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated
        ) as get_events:
            # Create a pool of prims to use for building the pixel art
            for _ in range(needed_prims):
                # TODO: Can't get land group atm, just tries to rez with the user's active group
                group_id = session.active_group
                region.circuit.send(Message(
                    'ObjectAdd',
                    Block('AgentData', AgentID=session.agent_id, SessionID=session.id, GroupID=group_id),
                    Block(
                        'ObjectData',
                        PCode=PCode.PRIMITIVE,
                        Material=MCode.WOOD,
                        AddFlags=ObjectUpdateFlags.CREATE_SELECTED,
                        PathCurve=16,
                        ProfileCurve=1,
                        PathScaleX=100,
                        PathScaleY=100,
                        BypassRaycast=1,
                        RayStart=agent_obj.RegionPosition + Vector3(0, 0, 2),
                        RayEnd=agent_obj.RegionPosition + Vector3(0, 0, 2),
                        RayTargetID=UUID(),
                        RayEndIsIntersection=0,
                        Scale=Vector3(PRIM_SCALE, PRIM_SCALE, PRIM_SCALE),
                        Rotation=Quaternion(0.0, 0.0, 0.0, 1.0),
                        fill_missing=True,
                    ),
                ))
                # Don't spam a ton of creates at once
                await asyncio.sleep(0.02)

            # Read any creation events that queued up while we were creating the objects
            # so we can figure out the newly-created objects' IDs
            for _ in range(needed_prims):
                evt: ObjectEvent = await asyncio.wait_for(get_events(), 1.0)
                created_prims.append(evt.object)

        # Drawing origin starts at the top left, should be positioned just above the
        # avatar on Z and centered on Y.
        top_left = Vector3(0, (width * PRIM_SCALE) * -0.5, (height * PRIM_SCALE) + 2.0) + agent_pos
        positioning_blocks = []
        prim_idx = 0
        for i, pixel_color in enumerate(pixels):
            # Transparent, skip
            if pixel_color is None:
                continue
            x = i % width
            y = i // width
            obj = created_prims[prim_idx]
            # Set a blank texture on all faces
            te = TextureEntryCollection()
            te.Textures[None] = UUID('5748decc-f629-461c-9a36-a35a221fe21f')
            # Set the prim color to the color from the pixel
            te.Color[None] = pixel_color
            # Set the prim texture and color
            region.circuit.send(Message(
                'ObjectImage',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                Block('ObjectData', ObjectLocalID=obj.LocalID, MediaURL=b'', TextureEntry_=te),
                direction=Direction.OUT,
            ))
            # Save the repositioning data for later since it uses a different message,
            # but it can be set in batches.
            positioning_blocks.append(Block(
                'ObjectData',
                ObjectLocalID=obj.LocalID,
                Type=MultipleObjectUpdateFlags.POSITION,
                Data_={'POSITION': top_left + Vector3(0, x * PRIM_SCALE, y * -PRIM_SCALE)},
            ))
            await asyncio.sleep(0.01)
            # We actually used a prim for this, so increment the index
            prim_idx += 1

        # Move the "pixels" to their correct position in chunks
        for chunk in to_chunks(positioning_blocks, 25):
            region.circuit.send(Message(
                'MultipleObjectUpdate',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                *chunk,
                direction=Direction.OUT,
            ))
            await asyncio.sleep(0.01)


addons = [PixelArtistAddon()]
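The ARGB-to-RGBA repacking in `import_pixel_art` is easy to get wrong, so here is a quick worked example (plain Python, no Hippolyzer imports needed) of what the bit twiddling does to one half-transparent red pixel:

    import struct

    color = 0x80FF0000                    # QImage.pixel(): 0xAARRGGBB
    alpha = (color & 0xFF000000) >> 24    # 0x80
    rgb = color & 0x00FFFFFF              # 0xFF0000
    packed = struct.pack("!I", (rgb << 8) | alpha)
    assert packed == b"\xff\x00\x00\x80"  # big-endian RGBA bytes, the format SL's color fields want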
111 addon_examples/puppetry_example.py Normal file
@@ -0,0 +1,111 @@
"""
Control a puppetry-enabled viewer and make your neck spin like crazy

It currently requires a custom rebased Firestorm with the puppetry branch applied,
plus patches to make startup LEAP scripts be treated as puppetry modules.
Basically, you probably don't want to use this yet. But hey, Puppetry is still only
on the beta grid anyway.
"""
import asyncio
import enum
import logging
import math
from typing import *

import outleap

from hippolyzer.lib.base.datatypes import Quaternion
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.sessions import Session

LOG = logging.getLogger(__name__)


class BodyPartMask(enum.IntFlag):
    """Which joints to send the viewer as part of the "move" puppetry command"""
    HEAD = 1 << 0
    FACE = 1 << 1
    LHAND = 1 << 2
    RHAND = 1 << 3
    FINGERS = 1 << 4


def register_puppetry_command(func: Callable[[dict], Awaitable[None]]):
    """Register a method as handling inbound puppetry commands from the viewer"""
    func._puppetry_command = True
    return func


class PuppetryExampleAddon(BaseAddon):
    server_skeleton: Dict[str, Dict[str, Any]] = SessionProperty(dict)
    camera_num: int = SessionProperty(0)
    parts_active: BodyPartMask = SessionProperty(lambda: BodyPartMask(0x1F))
    puppetry_api: Optional[outleap.LLPuppetryAPI] = SessionProperty(None)
    leap_client: Optional[outleap.LEAPClient] = SessionProperty(None)

    def handle_session_init(self, session: Session):
        if not session.leap_client:
            return
        self.puppetry_api = outleap.LLPuppetryAPI(session.leap_client)
        self.leap_client = session.leap_client
        self._schedule_task(self._serve())
        self._schedule_task(self._exorcist(session))

    @register_puppetry_command
    async def enable_parts(self, args: dict):
        if (new_mask := args.get("parts_mask")) is not None:
            self.parts_active = BodyPartMask(new_mask)

    @register_puppetry_command
    async def set_camera(self, args: dict):
        if (camera_num := args.get("camera_num")) is not None:
            self.camera_num = camera_num

    @register_puppetry_command
    async def stop(self, _args: dict):
        LOG.info("Viewer asked us to stop puppetry")

    @register_puppetry_command
    async def log(self, _args: dict):
        # Intentionally ignored, we don't care about things the viewer
        # asked us to log
        pass

    @register_puppetry_command
    async def set_skeleton(self, args: dict):
        # Don't really care what the viewer thinks its view of the skeleton is.
        # Just store it.
        self.server_skeleton = args

    async def _serve(self):
        """Handle inbound puppetry commands from the viewer in a loop"""
        async with self.leap_client.listen_scoped("puppetry.controller") as listener:
            while True:
                msg = await listener.get()
                cmd = msg["command"]
                handler = getattr(self, cmd, None)
                if handler is None or not hasattr(handler, "_puppetry_command"):
                    LOG.warning(f"Unknown puppetry command {cmd!r}: {msg!r}")
                    continue
                await handler(msg.get("args", {}))

    async def _exorcist(self, session):
        """Do the Linda Blair thing with your neck"""
        spin_rad = 0.0
        while True:
            await asyncio.sleep(0.05)
            if not session.main_region:
                continue
            # Wrap spin_rad around if necessary
            while spin_rad > math.pi:
                spin_rad -= math.pi * 2

            # LEAP wants rot as a quaternion with just the imaginary parts.
            neck_rot = Quaternion.from_euler(0, 0, spin_rad).data(3)
            self.puppetry_api.move({
                "mNeck": {"no_constraint": True, "local_rot": neck_rot},
            })
            spin_rad += math.pi / 25


addons = [PuppetryExampleAddon()]
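For reference, the default `parts_active` mask of `0x1F` above is just all five `BodyPartMask` flags OR'd together, and a viewer-side `enable_parts` command can narrow it. A minimal illustration (hypothetical values, plain Python using the enum defined in the file):

    mask = BodyPartMask.HEAD | BodyPartMask.FACE   # 0b00011 == 3
    assert BodyPartMask(0x1F) == (mask | BodyPartMask.LHAND
                                  | BodyPartMask.RHAND | BodyPartMask.FINGERS)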
151 addon_examples/recapitator.py Normal file
@@ -0,0 +1,151 @@
"""
Recapitator addon, merges a base head shape into body shapes.

Only works if both the base shapes and the shapes you need to edit are modify.

Useful if you switch heads a lot. Most heads come with a base shape you
have to start from if you don't want the head to look like garbage. If you
have an existing shape for your body, you have to write down all the values
of the base shape's head sliders and edit them onto your body shapes.

This addon does basically the same thing by intercepting shape uploads. After
enabling recapitation, you save the base head shape once. Then the next time you
edit and save a body shape, it will be saved with the head sliders from your base
shape.
"""
import logging
from typing import *

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import AssetType, WearableType
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, AssetAliasTracker, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager


# Get all VisualParam IDs that belong to head sliders
HEAD_EDIT_GROUPS = ("shape_head", "shape_eyes", "shape_ears", "shape_nose", "shape_mouth", "shape_chin")
HEAD_PARAM_IDS = [v.id for v in VISUAL_PARAMS if v.edit_group in HEAD_EDIT_GROUPS]


class RecapitatorAddon(BaseAddon):
    transaction_remappings: AssetAliasTracker = SessionProperty(AssetAliasTracker)
    recapitating: bool = SessionProperty(bool)
    recapitation_mappings: Dict[int, float] = SessionProperty(dict)

    @handle_command()
    async def enable_recapitation(self, _session: Session, _region: ProxiedRegion):
        """Apply base head shape when saving subsequent shapes"""
        self.recapitating = True
        self.recapitation_mappings.clear()
        show_message("Recapitation enabled, wear the base shape containing the head parameters and save it.")

    @handle_command()
    async def disable_recapitation(self, _session: Session, _region: ProxiedRegion):
        self.recapitating = False
        show_message("Recapitation disabled")

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if not self.recapitating:
            return
        if message.direction != Direction.OUT:
            return
        if message.name != "AssetUploadRequest":
            return
        if message["AssetBlock"]["Type"] != AssetType.BODYPART:
            return

        # Pending asset upload for a bodypart asset. Take the message and request
        # it from the client ourself so we can see what it wants to upload
        new_message = message.take()
        self._schedule_task(self._proxy_bodypart_upload(session, region, new_message))
        return True

    async def _proxy_bodypart_upload(self, session: Session, region: ProxiedRegion, message: Message):
        asset_block = message["AssetBlock"]
        # Asset will already be in the viewer's VFS as the expected asset ID, calculate it.
        asset_id = session.transaction_to_assetid(asset_block["TransactionID"])
        success = False
        try:
            # Xfer the asset from the viewer if it wasn't small enough to fit in AssetData
            if asset_block["AssetData"]:
                asset_data = asset_block["AssetData"]
            else:
                xfer = await region.xfer_manager.request(
                    vfile_id=asset_id,
                    vfile_type=AssetType.BODYPART,
                    direction=Direction.IN,
                )
                asset_data = xfer.reassemble_chunks()

            wearable = Wearable.from_bytes(asset_data)
            # If they're uploading a shape, process it.
            if wearable.wearable_type == WearableType.SHAPE:
                if self.recapitation_mappings:
                    # Copy our previously saved head params over
                    for key, value in self.recapitation_mappings.items():
                        wearable.parameters[key] = value
                    # Upload the changed version
                    asset_data = wearable.to_bytes()
                    show_message("Recapitated shape")
                else:
                    # Don't have a recapitation mapping yet, use this shape as the base.
                    for param_id in HEAD_PARAM_IDS:
                        self.recapitation_mappings[param_id] = wearable.parameters[param_id]
                    show_message("Got base parameters for recapitation, head parameters will be copied")

            # Upload it ourselves with a new transaction ID that can be traced back to
            # the original. This is important because otherwise the viewer will use its
            # own cached version of the shape, under the assumption it wasn't modified
            # during upload.
            new_transaction_id = self.transaction_remappings.get_alias_uuid(
                asset_block["TransactionID"]
            )
            await region.xfer_manager.upload_asset(
                asset_type=AssetType.BODYPART,
                data=asset_data,
                transaction_id=new_transaction_id,
            )
            success = True
        except:
            logging.exception("Exception while recapitating")
        # Tell the viewer about the status of its original upload
        region.circuit.send(Message(
            "AssetUploadComplete",
            Block("AssetBlock", UUID=asset_id, Type=asset_block["Type"], Success=success),
            direction=Direction.IN,
        ))

    def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
        # Skip requests that aren't related to patching an existing item
        if flow.cap_data.cap_name != "InventoryAPIv3":
            return
        if flow.request.method != "PATCH":
            return
        if "/item/" not in flow.request.url:
            return

        parsed = llsd.parse_xml(flow.request.content)
        if parsed.get("type") != "bodypart":
            return
        # `hash_id` being present means we're updating the item to point to a newly
        # uploaded asset. It's actually a transaction ID.
        transaction_id: Optional[UUID] = parsed.get("hash_id")
        if not transaction_id:
            return
        # We have an original transaction ID, do we need to remap it to an alias ID?
        orig_id = self.transaction_remappings.get_alias_uuid(transaction_id, create=False)
        if not orig_id:
            return

        parsed["hash_id"] = orig_id
        flow.request.content = llsd.format_xml(parsed)


addons = [RecapitatorAddon()]
@@ -1,12 +1,12 @@
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class REPLExampleAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.name == "ChatFromViewer":
            chat_msg = message["ChatData"]["Message"]
            if not chat_msg:

53 addon_examples/rlv_at_home.py Normal file
@@ -0,0 +1,53 @@
"""
You don't need RLV, we have RLV at home.

RLV at home:
"""

from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ChatType
from hippolyzer.lib.proxy.addon_utils import BaseAddon, send_chat
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


def send_rlv_chat(channel: int, message: str):
    # Always respond as normal chat on the requested channel.
    send_chat(channel=channel, message=message, chat_type=ChatType.NORMAL)


class RLVAtHomeAddon(BaseAddon):
    """
    Addon for pretending to be an RLV-enabled viewer

    Useful if you want only a specific subset of RLV and don't want everything RLV normally allows,
    or want to override some RLV builtins.
    """
    def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
                           behaviour: str, options: List[str], param: str) -> bool | None:
        # print(behaviour, options, param)
        if behaviour == "clear":
            return True
        elif behaviour in ("versionnum", "versionnew", "version"):
            # People tend to just check that this returned anything at all. Just say we're 2.0.0 for all of these.
            send_rlv_chat(int(param), "2.0.0")
            return True
        elif behaviour == "getinv":
            # Pretend we don't have anything
            send_rlv_chat(int(param), "")
            return True
        elif behaviour == "sit":
            # Sure, we can sit on stuff, whatever.
            region.circuit.send(Message(
                'AgentRequestSit',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                Block('TargetObject', TargetID=UUID(options[0]), Offset=(0, 0, 0)),
            ))
            return True
        return None


addons = [RLVAtHomeAddon()]
@@ -15,8 +15,8 @@ from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.packets import ProxiedUDPPacket
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager, Session

@@ -28,11 +28,12 @@ class SerializationSanityChecker(BaseAddon):
        self.serializer = UDPMessageSerializer()
        self.deserializer = UDPMessageDeserializer()

    def handle_proxied_packet(self, session_manager: SessionManager, packet: ProxiedUDPPacket,
                              session: Optional[Session], region: Optional[ProxiedRegion],
                              message: Optional[ProxiedMessage]):
    def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,
                              session: Optional[Session], region: Optional[ProxiedRegion]):
        # Well this doesn't even parse as a message, can't do anything about it.
        if message is None:
        try:
            message = self.deserializer.deserialize(packet.data)
        except:
            LOG.error(f"Received unparseable message from {packet.src_addr!r}: {packet.data!r}")
            return
        try:
@@ -63,7 +64,7 @@ class SerializationSanityChecker(BaseAddon):
        except:
            LOG.exception(f"Exception during message validation:\n{message!r}")

    def _roundtrip_var_serializers(self, message: ProxiedMessage):
    def _roundtrip_var_serializers(self, message: Message):
        for block in itertools.chain(*message.blocks.values()):
            for var_name in block.vars.keys():
                orig_val = block[var_name]

@@ -1,18 +1,23 @@
"""Block potentially bad things"""
from hippolyzer.lib.base.templates import IMDialogType, XferFilePath
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.packets import Direction
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import IMDialogType

SUSPICIOUS_PACKETS = {"RequestXfer", "TransferRequest", "UUIDNameRequest",
                      "UUIDGroupNameRequest", "OpenCircuit"}
SUSPICIOUS_PACKETS = {
    "TransferRequest",
    "UUIDNameRequest",
    "UUIDGroupNameRequest",
    "OpenCircuit",
    "AddCircuitCode",
}
REGULAR_IM_DIALOGS = (IMDialogType.TYPING_START, IMDialogType.TYPING_STOP, IMDialogType.NOTHING_SPECIAL)


class ShieldAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.direction != Direction.IN:
            return
        if message.name in SUSPICIOUS_PACKETS:
@@ -29,6 +34,13 @@ class ShieldAddon(BaseAddon):
        else:
            expected_id = from_agent ^ session.agent_id
            msg_block["ID"] = expected_id
        if message.name == "RequestXfer":
            xfer_block = message["XferID"][0]
            # Don't allow Xfers for files, only assets
            if xfer_block["FilePath"] != XferFilePath.NONE or xfer_block["Filename"]:
                show_message(f"Blocked suspicious {message.name} packet")
                region.circuit.drop_message(message)
                return True


addons = [ShieldAddon()]
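The `from_agent ^ session.agent_id` line above leans on how direct-IM session IDs are derived: the viewer XORs the two participants' agent UUIDs, so either side can reconstruct the same ID and the shield can rewrite a spoofed one back to the expected value. A rough illustration using the library's XOR-capable UUID type (hypothetical IDs, and assuming its `^` behaves like a plain 128-bit XOR, as the shield code itself relies on):

    from hippolyzer.lib.base.datatypes import UUID

    me = UUID("11111111-1111-1111-1111-111111111111")
    them = UUID("22222222-2222-2222-2222-222222222222")
    im_session = me ^ them
    # XOR is symmetric, so both parties compute the same session ID,
    # and XORing again with one agent ID recovers the other.
    assert im_session ^ me == them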
22 addon_examples/simulate_packet_loss.py Normal file
@@ -0,0 +1,22 @@
import random

from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class SimulatePacketLossAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        # Messing with these may kill your circuit
        if message.name in {"PacketAck", "StartPingCheck", "CompletePingCheck", "UseCircuitCode",
                            "CompleteAgentMovement", "AgentMovementComplete"}:
            return
        # Simulate 30% packet loss
        if random.random() > 0.7:
            # Do nothing, drop this packet on the floor
            return True
        return


addons = [SimulatePacketLossAddon()]
@@ -1,6 +1,6 @@
import itertools

from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session

@@ -12,8 +12,8 @@ def _to_spongecase(val):
    return "".join(itertools.chain(*spongecased))


def handle_lludp_message(session: Session, _region: ProxiedRegion, message: ProxiedMessage):
    ctx = session.addon_ctx
def handle_lludp_message(session: Session, _region: ProxiedRegion, message: Message):
    ctx = session.addon_ctx[__name__]
    ctx.setdefault("spongecase", False)
    if message.name == "ChatFromViewer":
        chat = message["ChatData"]["Message"]

55 addon_examples/tail_anim.py Normal file
@@ -0,0 +1,55 @@
"""
Tail animation generator

Demonstrates programmatic generation of local motions using BaseAnimHelperAddon

You can use this to create an animation with a script, fiddle with it until it
looks right, then finally save it with /524 save_local_anim <ANIM_NAME>.

The built animation is automatically applied to all active sessions when loaded,
and is re-generated whenever the script is edited. Unloading the script stops
the animations.
"""

from hippolyzer.lib.base.anim_utils import shift_keyframes, smooth_rot
from hippolyzer.lib.base.datatypes import Quaternion
from hippolyzer.lib.base.llanim import Animation, Joint
from hippolyzer.lib.proxy.addons import AddonManager

import local_anim
AddonManager.hot_reload(local_anim, require_addons_loaded=True)


class TailAnimator(local_anim.BaseAnimHelperAddon):
    # Should be unique
    ANIM_NAME = "tail_anim"

    def build_anim(self) -> Animation:
        anim = Animation(
            base_priority=5,
            duration=5.0,
            loop_out_point=5.0,
            loop=True,
        )
        # Iterate along tail joints 1 through 6
        for joint_num in range(1, 7):
            # Give joints further along the tail a wider range of motion
            start_rot = Quaternion.from_euler(0.2, -0.3, 0.15 * joint_num)
            end_rot = Quaternion.from_euler(-0.2, -0.3, -0.15 * joint_num)
            rot_keyframes = [
                # Tween between start_rot and end_rot, using smooth interpolation.
                # SL's keyframes only allow linear interpolation, which doesn't look great
                # for natural motions. `smooth_rot()` gets around that by generating
                # smooth inter frames for SL to linearly interpolate between.
                *smooth_rot(start_rot, end_rot, inter_frames=10, time=0.0, duration=2.5),
                *smooth_rot(end_rot, start_rot, inter_frames=10, time=2.5, duration=2.5),
            ]
            anim.joints[f"mTail{joint_num}"] = Joint(
                priority=5,
                # Each joint's frames should be ahead of the previous joint's by 2 frames
                rot_keyframes=shift_keyframes(rot_keyframes, joint_num * 2),
            )
        return anim


addons = [TailAnimator()]
@@ -3,14 +3,9 @@ Example of how to request a Transfer
"""
from typing import *

from hippolyzer.lib.base.legacy_inv import InventoryModel, InventoryItem
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import (
from hippolyzer.lib.base.inventory import InventoryModel, InventoryItem
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import (
    AssetType,
    EstateAssetType,
    TransferRequestParamsSimEstate,
@@ -18,6 +13,10 @@ from hippolyzer.lib.proxy.templates import (
    TransferSourceType,
    XferFilePath,
)
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class TransferExampleAddon(BaseAddon):
@@ -36,19 +35,19 @@ class TransferExampleAddon(BaseAddon):
    async def get_first_script(self, session: Session, region: ProxiedRegion):
        """Get the contents of the first script in the selected object"""
        # Ask for the object inventory so we can find a script
        region.circuit.send_message(ProxiedMessage(
        region.circuit.send(Message(
            'RequestTaskInventory',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block('InventoryData', LocalID=session.selected.object_local),
        ))
        inv_message = await region.message_handler.wait_for('ReplyTaskInventory', timeout=5.0)
        inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)

        # Xfer the inventory file and look for a script
        xfer = await region.xfer_manager.request(
            file_name=inv_message["InventoryData"]["Filename"], file_path=XferFilePath.CACHE)
        inv_model = InventoryModel.from_bytes(xfer.reassemble_chunks())
        first_script: Optional[InventoryItem] = None
        for item in inv_model.items.values():
        for item in inv_model.all_items:
            if item.type == "lsltext":
                first_script = item
        if not first_script:

105 addon_examples/turbo_object_inventory.py Normal file
@@ -0,0 +1,105 @@
"""
Speed up outbound object inventory listing requests by around 20x, at the
cost of potentially failing some requests due to dropped packets.

Useful for builders working on objects with very large inventories that
change very often.

Object inventory transfers use the Xfer system. Xfers have their own,
terrible reliability system that probably pre-dates LLUDP reliability.
Each packet has to be ACKed before the far end will send the next packet.
Each packet can be around 1200 bytes and will fit 1.5 inventory items' worth of data.

Let's say your sim ping is 100 ms. Because each packet needs to be ACKed
before the next will be sent, it'll take around `num_items * 100 / 1.5`
milliseconds before you receive the full inventory list of an object.
That means for an object with 300 items, it'll take about 20 seconds
to download the full inventory, and those downloads are triggered
every time the inventory is changed.

By faking ACKs for packets we haven't received yet, we can trick the server
into sending us packets much faster than it would otherwise. The only problem
is that if an inbound SendXferPacket gets lost after we faked an ACK for it,
we have no way to re-request it. The Xfer will just fail. The viewer will also
drop any out-of-order Xfer packets, so packet re-ordering is a problem.

To deal with that, the proxy attempts its own Xfers using all the chunks
from the previous attempts before sending a final, reconstructed Xfer
to the viewer.
"""

import asyncio
from typing import *

from hippolyzer.lib.base.templates import XferFilePath
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.base.xfer_manager import Xfer


class TurboObjectInventoryAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.direction != Direction.OUT:
            return
        if message.name != "RequestTaskInventory":
            return

        self._schedule_task(self._proxy_task_inventory_request(region, message.take()))
        return True

    async def _proxy_task_inventory_request(
            self,
            region: ProxiedRegion,
            request_msg: Message
    ):
        # Keep around a dict of chunks we saw previously in case we have to restart
        # an Xfer due to missing chunks. We don't expect chunks to change across Xfers,
        # so this can be used to recover from dropped SendXferPackets in subsequent attempts.
        existing_chunks: Dict[int, bytes] = {}
        for i in range(3):
            # Any previous requests will have triggered a delete of the inventory file
            # by marking it complete on the server side. Re-send our RequestTaskInventory
            # to make sure there's a fresh copy.
            region.circuit.send(request_msg.take())
            inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)
            # No task inventory, send the reply as-is
            file_name = inv_message["InventoryData"]["Filename"]
            if not file_name:
                region.circuit.send(inv_message)
                return

            xfer = region.xfer_manager.request(
                file_name=file_name,
                file_path=XferFilePath.CACHE,
                turbo=True,
            )
            xfer.chunks.update(existing_chunks)
            try:
                await xfer
            except asyncio.TimeoutError:
                # We likely failed the request due to missing chunks, store
                # the chunks that we _did_ get for the next attempt.
                existing_chunks.update(xfer.chunks)
                continue

            # Send the original ReplyTaskInventory to the viewer so it knows the file is ready
            region.circuit.send(inv_message)
            proxied_xfer = Xfer(data=xfer.reassemble_chunks())

            # Wait for the viewer to request the inventory file
            await region.xfer_manager.serve_inbound_xfer_request(
                xfer=proxied_xfer,
                request_predicate=lambda x: x["XferID"]["Filename"] == file_name,
                # indra's XferManager throttles confirms, so even local transfers will be
                # slow if we wait for confirmation.
                wait_for_confirm=False,
            )
            return
        raise asyncio.TimeoutError("Failed to get inventory after 3 tries")


addons = [TurboObjectInventoryAddon()]
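The docstring's latency estimate is worth sanity-checking: with one ACK round trip per ~1200-byte packet and roughly 1.5 items per packet, transfer time scales linearly with item count. A quick back-of-the-envelope in plain Python:

    def inventory_fetch_ms(num_items: int, ping_ms: float = 100.0,
                           items_per_packet: float = 1.5) -> float:
        # One full round trip per packet, packets hold ~1.5 items each
        return (num_items / items_per_packet) * ping_ms

    print(inventory_fetch_ms(300))  # 20000.0 ms, i.e. the ~20 seconds quoted above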
@@ -2,24 +2,19 @@
Example of how to upload assets, assumes assets are already encoded
in the appropriate format.

/524 upload <asset type>
/524 upload_asset <asset type>
"""
import pprint
from pathlib import Path
from typing import *

import aiohttp

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.mesh import LLMeshSerializer
from hippolyzer.lib.base.serialization import BufferReader
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import ais_item_to_inventory_data, show_message, BaseAddon
from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon
from hippolyzer.lib.proxy.commands import handle_command, Parameter
from hippolyzer.lib.proxy.packets import Direction
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import AssetType


class UploaderAddon(BaseAddon):
@@ -30,7 +25,6 @@ class UploaderAddon(BaseAddon):
    async def upload_asset(self, _session: Session, region: ProxiedRegion,
                           asset_type: AssetType, flags: Optional[int] = None):
        """Upload a raw asset with optional flags"""
        inv_type = asset_type.inventory_type
        file = await AddonManager.UI.open_file()
        if not file:
            return
@@ -43,67 +37,32 @@ class UploaderAddon(BaseAddon):
        with open(file, "rb") as f:
            file_body = f.read()

        params = {
            "asset_type": asset_type.human_name,
            "description": "(No Description)",
            "everyone_mask": 0,
            "group_mask": 0,
            "folder_id": UUID(),  # Puts it in the default folder, I guess. Undocumented.
            "inventory_type": inv_type.human_name,
            "name": name,
            "next_owner_mask": 581632,
        }
        if flags is not None:
            params['flags'] = flags
        try:
            if asset_type == AssetType.MESH:
                # Kicking off a mesh upload works a little differently internally.
                # Half-parse the mesh so that we can figure out how many faces it has
                reader = BufferReader("!", file_body)
                mesh = reader.read(LLMeshSerializer(parse_segment_contents=False))
                upload_token = await region.asset_uploader.initiate_mesh_upload(
                    name, mesh, flags=flags
                )
            else:
                upload_token = await region.asset_uploader.initiate_asset_upload(
                    name, asset_type, file_body, flags=flags,
                )
        except Exception as e:
            show_message(e)
            raise

        caps = region.caps_client
        async with aiohttp.ClientSession() as sess:
            async with caps.post('NewFileAgentInventory', llsd=params, session=sess) as resp:
                parsed = await resp.read_llsd()
                if "uploader" not in parsed:
                    show_message(f"Upload error!: {parsed!r}")
                    return
                print("Got upload URL, uploading...")
        if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
            return

                async with caps.post(parsed["uploader"], data=file_body, session=sess) as resp:
                    upload_parsed = await resp.read_llsd()

                if "new_inventory_item" not in upload_parsed:
                    show_message(f"Got weird upload resp: {pprint.pformat(upload_parsed)}")
                    return

                await self._force_inv_update(region, upload_parsed['new_inventory_item'])

    @handle_command(item_id=UUID)
    async def force_inv_update(self, _session: Session, region: ProxiedRegion, item_id: UUID):
        """Force an inventory update for a given item id"""
        await self._force_inv_update(region, item_id)

    async def _force_inv_update(self, region: ProxiedRegion, item_id: UUID):
        session = region.session()
        ais_req_data = {
            "items": [
                {
                    "owner_id": session.agent_id,
                    "item_id": item_id,
                }
            ]
        }
        async with region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
            ais_item = (await resp.read_llsd())["items"][0]

        message = ProxiedMessage(
            "UpdateCreateInventoryItem",
            Block(
                "AgentData",
                AgentID=session.agent_id,
                SimApproved=1,
                TransactionID=UUID.random(),
            ),
            ais_item_to_inventory_data(ais_item),
            direction=Direction.IN
        )
        region.circuit.send_message(message)
        # Do the actual upload
        try:
            await region.asset_uploader.complete_upload(upload_token)
        except Exception as e:
            show_message(e)
            raise


addons = [UploaderAddon()]
@@ -1,28 +1,28 @@
"""
Example of how to request an Xfer
"""
from hippolyzer.lib.base.legacy_inv import InventoryModel
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel
from hippolyzer.lib.base.templates import XferFilePath, AssetType, InventoryType, WearableType
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import XferFilePath


class XferExampleAddon(BaseAddon):
    @handle_command()
    async def get_mute_list(self, session: Session, region: ProxiedRegion):
        """Fetch the current user's mute list"""
        region.circuit.send_message(ProxiedMessage(
        region.circuit.send(Message(
            'MuteListRequest',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block("MuteData", MuteCRC=0),
        ))

        # Wait for any MuteListUpdate, dropping it before it reaches the viewer
        update_msg = await region.message_handler.wait_for('MuteListUpdate', timeout=5.0)
        update_msg = await region.message_handler.wait_for(('MuteListUpdate',), timeout=5.0)
        mute_file_name = update_msg["MuteData"]["Filename"]
        if not mute_file_name:
            show_message("Nobody muted?")
@@ -35,14 +35,14 @@ class XferExampleAddon(BaseAddon):
    @handle_command()
    async def get_task_inventory(self, session: Session, region: ProxiedRegion):
        """Get the inventory of the currently selected object"""
        region.circuit.send_message(ProxiedMessage(
        region.circuit.send(Message(
            'RequestTaskInventory',
            # If no session is passed in we'll use the active session when the coro was created
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block('InventoryData', LocalID=session.selected.object_local),
        ))

        inv_message = await region.message_handler.wait_for('ReplyTaskInventory', timeout=5.0)
        inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)

        # Xfer doesn't need to be immediately awaited, multiple signals can be waited on.
        xfer = region.xfer_manager.request(
@@ -57,8 +57,64 @@ class XferExampleAddon(BaseAddon):
        await xfer

        inv_model = InventoryModel.from_bytes(xfer.reassemble_chunks())
        item_names = [item.name for item in inv_model.items.values()]
        item_names = [item.name for item in inv_model.all_items]
        show_message(item_names)

    @handle_command()
    async def eyes_for_you(self, session: Session, region: ProxiedRegion):
        """Upload an eye bodypart and create an item for it"""
        asset_data = f"""LLWearable version 22
New Eyes

\tpermissions 0
\t{{
\t\tbase_mask\t7fffffff
\t\towner_mask\t7fffffff
\t\tgroup_mask\t00000000
\t\teveryone_mask\t00000000
\t\tnext_owner_mask\t00082000
\t\tcreator_id\t{session.agent_id}
\t\towner_id\t{session.agent_id}
\t\tlast_owner_id\t00000000-0000-0000-0000-000000000000
\t\tgroup_id\t00000000-0000-0000-0000-000000000000
\t}}
\tsale_info\t0
\t{{
\t\tsale_type\tnot
\t\tsale_price\t10
\t}}
type 3
parameters 2
98 0
99 0
textures 1
3 89556747-24cb-43ed-920b-47caed15465f
"""
        # If we want to create an item containing the asset we need to know the transaction id
        # used to create the asset.
        transaction_id = UUID.random()
        await region.xfer_manager.upload_asset(
            AssetType.BODYPART,
            data=asset_data,
            transaction_id=transaction_id
        )
        region.circuit.send(Message(
            'CreateInventoryItem',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block(
                'InventoryBlock',
                CallbackID=0,
                # Null folder ID will put it in the default folder for the type
                FolderID=UUID(),
                TransactionID=transaction_id,
                NextOwnerMask=0x7fFFffFF,
                Type=AssetType.BODYPART,
                InvType=InventoryType.WEARABLE,
                WearableType=WearableType.EYES,
                Name='Eyes For You',
                Description=b''
            ),
        ))


addons = [XferExampleAddon()]
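One API change worth noting in this diff: `message_handler.wait_for` now takes a tuple of message names rather than a bare string, which (assuming the obvious semantics of matching whichever listed name arrives first) lets a single await cover several possible replies. A hypothetical sketch:

    # Hypothetical: accept either of two mute-list replies, whichever arrives
    # first. UseCachedMuteList is the reply the sim sends when the viewer's
    # cached copy is already current.
    msg = await region.message_handler.wait_for(
        ('MuteListUpdate', 'UseCachedMuteList'), timeout=5.0)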
53 client_examples/hello_client.py Normal file
@@ -0,0 +1,53 @@
"""
A simple client that just says hello to people
"""

import asyncio
import pprint
from contextlib import aclosing
import os

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import ChatType, ChatSourceType
from hippolyzer.lib.client.hippo_client import HippoClient


async def amain():
    client = HippoClient()

    async def _respond_to_chat(message: Message):
        if message["ChatData"]["SourceID"] == client.session.agent_id:
            return
        if message["ChatData"]["SourceType"] != ChatSourceType.AGENT:
            return
        if "hello" not in message["ChatData"]["Message"].lower():
            return
        await client.send_chat(f'Hello {message["ChatData"]["FromName"]}!', chat_type=ChatType.SHOUT)

    async with aclosing(client):
        await client.login(
            username=os.environ["HIPPO_USERNAME"],
            password=os.environ["HIPPO_PASSWORD"],
            start_location=os.environ.get("HIPPO_START_LOCATION", "last"),
        )
        print("I'm here")

        # Wait until we have details about parcels and print them
        await client.main_region.parcel_manager.parcels_downloaded.wait()
        pprint.pprint(client.main_region.parcel_manager.parcels)

        await client.send_chat("Hello World!", chat_type=ChatType.SHOUT)
        client.session.message_handler.subscribe("ChatFromSimulator", _respond_to_chat)
        # Example of how to work with caps
        async with client.main_caps_client.get("SimulatorFeatures") as features_resp:
            print("Features:", await features_resp.read_llsd())

        while True:
            try:
                await asyncio.sleep(0.001)
            except (KeyboardInterrupt, asyncio.CancelledError):
                await client.send_chat("Goodbye World!", chat_type=ChatType.SHOUT)
                return

if __name__ == "__main__":
    asyncio.run(amain())
14 codecov.yml Normal file
@@ -0,0 +1,14 @@
coverage:
  precision: 1
  round: down
  range: "50...80"
  status:
    project:
      default:
        # Do not fail commits if the code coverage drops.
        target: 0%
        threshold: 100%
        base: auto
    patch:
      default:
        only_pulls: true
@@ -191,7 +191,7 @@
      </size>
     </property>
     <property name="styleSheet">
      <string notr="true">color: rgb(80, 0, 0)</string>
      <string notr="true"/>
     </property>
     <property name="tabChangesFocus">
      <bool>true</bool>

@@ -1,43 +1,15 @@
|
||||
import collections
|
||||
import codecs
|
||||
import copy
|
||||
import enum
|
||||
import fnmatch
|
||||
import io
|
||||
import logging
|
||||
import pickle
|
||||
import queue
|
||||
import re
|
||||
import typing
|
||||
import weakref
|
||||
|
||||
from defusedxml import minidom
|
||||
from PySide2 import QtCore, QtGui
|
||||
from PySide6 import QtCore, QtGui
|
||||
|
||||
from hippolyzer.lib.base import llsd
|
||||
from hippolyzer.lib.base.datatypes import *
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
|
||||
import hippolyzer.lib.base.serialization as se
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.sessions import Session, BaseMessageLogger
|
||||
|
||||
from .message_filter import compile_filter, BaseFilterNode, MessageFilterNode, MetaFieldSpecifier
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.message_logger import FilteringMessageLogger
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def bytes_unescape(val: bytes) -> bytes:
|
||||
# Only in CPython. bytes -> bytes with escape decoding.
|
||||
# https://stackoverflow.com/a/23151714
|
||||
return codecs.escape_decode(val)[0] # type: ignore
|
||||
|
||||
|
||||
def bytes_escape(val: bytes) -> bytes:
|
||||
# Try to keep newlines as-is
|
||||
return re.sub(rb"(?<!\\)\\n", b"\n", codecs.escape_encode(val)[0]) # type: ignore
|
||||
|
||||
|
||||
class MessageLogHeader(enum.IntEnum):
|
||||
Host = 0
|
||||
Type = enum.auto()
|
||||
@@ -46,582 +18,23 @@ class MessageLogHeader(enum.IntEnum):
|
||||
Summary = enum.auto()
|
||||
|
||||
|
||||
class AbstractMessageLogEntry:
    region: typing.Optional[ProxiedRegion]
    session: typing.Optional[Session]
    name: str
    type: str

    __slots__ = ["_region", "_session", "_region_name", "_agent_id", "_summary", "meta"]

    def __init__(self, region, session):
        if region and not isinstance(region, weakref.ReferenceType):
            region = weakref.ref(region)
        if session and not isinstance(session, weakref.ReferenceType):
            session = weakref.ref(session)

        self._region: typing.Optional[weakref.ReferenceType] = region
        self._session: typing.Optional[weakref.ReferenceType] = session
        self._region_name = None
        self._agent_id = None
        self._summary = None
        if self.region:
            self._region_name = self.region.name
        if self.session:
            self._agent_id = self.session.agent_id

        agent_obj = None
        if self.region is not None:
            agent_obj = self.region.objects.lookup_fullid(self.agent_id)
        self.meta = {
            "RegionName": self.region_name,
            "AgentID": self.agent_id,
            "SessionID": self.session.id if self.session else None,
            "AgentLocal": agent_obj.LocalID if agent_obj is not None else None,
            "Method": self.method,
            "Type": self.type,
            "SelectedLocal": self._current_selected_local(),
            "SelectedFull": self._current_selected_full(),
        }

    def freeze(self):
        pass

    def cache_summary(self):
        self._summary = self.summary

    def _current_selected_local(self):
        if self.session:
            return self.session.selected.object_local
        return None

    def _current_selected_full(self):
        selected_local = self._current_selected_local()
        if selected_local is None or self.region is None:
            return None
        obj = self.region.objects.lookup_localid(selected_local)
        return obj and obj.FullID

    def _get_meta(self, name: str):
        # Slight difference in semantics. Filters are meant to return the same
        # thing no matter when they're run, so SelectedLocal and friends resolve
        # to the selected items _at the time the message was logged_. To handle
        # the case where we want to match on the selected object at the time the
        # filter is evaluated, we resolve these here.
        if name == "CurrentSelectedLocal":
            return self._current_selected_local()
        elif name == "CurrentSelectedFull":
            return self._current_selected_full()
        return self.meta.get(name)

    @property
    def region(self) -> typing.Optional[ProxiedRegion]:
        if self._region:
            return self._region()
        return None

    @property
    def session(self) -> typing.Optional[Session]:
        if self._session:
            return self._session()
        return None

    @property
    def region_name(self) -> str:
        region = self.region
        if region:
            self._region_name = region.name
            return self._region_name
        # Region may die after a message is logged, need to keep this around.
        if self._region_name:
            return self._region_name

        return ""

    @property
    def agent_id(self) -> typing.Optional[UUID]:
        if self._agent_id:
            return self._agent_id

        session = self.session
        if session:
            self._agent_id = session.agent_id
            return self._agent_id
        return None

    @property
    def host(self) -> str:
        region_name = self.region_name
        if not region_name:
            return ""
        session_str = ""
        agent_id = self.agent_id
        if agent_id:
            session_str = f" ({agent_id})"
        return region_name + session_str

    def request(self, beautify=False, replacements=None):
        return None

    def response(self, beautify=False):
        return None

    def _packet_root_matches(self, pattern):
        if fnmatch.fnmatchcase(self.name, pattern):
            return True
        if fnmatch.fnmatchcase(self.type, pattern):
            return True
        return False

    def _val_matches(self, operator, val, expected):
        if isinstance(expected, MetaFieldSpecifier):
            expected = self._get_meta(str(expected))
            if not isinstance(expected, (int, float, bytes, str, type(None), tuple)):
                if callable(expected):
                    expected = expected()
                else:
                    expected = str(expected)
        elif expected is not None:
            # Unbox the expected value
            expected = expected.value
        if not isinstance(val, (int, float, bytes, str, type(None), tuple, TupleCoord)):
            val = str(val)

        if not operator:
            return bool(val)
        elif operator == "==":
            return val == expected
        elif operator == "!=":
            return val != expected
        elif operator == "^=":
            if val is None:
                return False
            return val.startswith(expected)
        elif operator == "$=":
            if val is None:
                return False
            return val.endswith(expected)
        elif operator == "~=":
            if val is None:
                return False
            return expected in val
        elif operator == "<":
            return val < expected
        elif operator == "<=":
            return val <= expected
        elif operator == ">":
            return val > expected
        elif operator == ">=":
            return val >= expected
        else:
            raise ValueError(f"Unexpected operator {operator!r}")

    def _base_matches(self, matcher: "MessageFilterNode") -> typing.Optional[bool]:
        if len(matcher.selector) == 1:
            # Comparison operators would make no sense here
            if matcher.value or matcher.operator:
                return False
            return self._packet_root_matches(matcher.selector[0])
        if len(matcher.selector) == 2 and matcher.selector[0] == "Meta":
            return self._val_matches(matcher.operator, self._get_meta(matcher.selector[1]), matcher.value)
        return None

    def matches(self, matcher: "MessageFilterNode"):
        return self._base_matches(matcher) or False

    @property
    def seq(self):
        return ""

    @property
    def method(self):
        return ""

    @property
    def summary(self):
        return ""

    @staticmethod
    def _format_llsd(parsed):
        xmlified = llsd.format_pretty_xml(parsed)
        # dedent <key> by 1 for easier visual scanning
        xmlified = re.sub(rb" <key>", b"<key>", xmlified)
        return xmlified.decode("utf8", errors="replace")

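
# Illustrative sketch (not from the changeset) of why _region/_session are
# held as weakrefs above: a log entry must not keep a dead Session or region
# alive just because it is still visible in the log. DummyRegion is a
# hypothetical stand-in.
def _example_weakref_behavior():
    class DummyRegion:
        name = "Test Region"

    region = DummyRegion()
    ref = weakref.ref(region)
    assert ref() is region  # live: dereferences to the region
    del region
    assert ref() is None    # dead: the region property degrades to None
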
class LLUDPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["_message", "_name", "_direction", "_frozen_message", "_seq", "_deserializer"]

    def __init__(self, message: ProxiedMessage, region, session):
        self._message: ProxiedMessage = message
        self._deserializer = None
        self._name = message.name
        self._direction = message.direction
        self._frozen_message: typing.Optional[bytes] = None
        self._seq = message.packet_id
        super().__init__(region, session)

    _MESSAGE_META_ATTRS = {
        "Injected", "Dropped", "Extra", "Resent", "Zerocoded", "Acks", "Reliable",
    }

    def _get_meta(self, name: str):
        # These may change between when the message is logged and when we
        # actually filter on it, since logging happens before addons.
        msg = self.message
        if name in self._MESSAGE_META_ATTRS:
            return getattr(msg, name.lower(), None)
        msg_meta = getattr(msg, "meta", None)
        if msg_meta is not None:
            if name in msg_meta:
                return msg_meta[name]
        return super()._get_meta(name)

    @property
    def message(self):
        if self._message:
            return self._message
        elif self._frozen_message:
            message = pickle.loads(self._frozen_message)
            message.deserializer = self._deserializer
            return message
        else:
            raise ValueError("Didn't have a fresh or frozen message somehow")

    def freeze(self):
        self.message.invalidate_caches()
        # These are expensive to keep around. pickle them and un-pickle on
        # an as-needed basis.
        self._deserializer = self.message.deserializer
        self.message.deserializer = None
        self._frozen_message = pickle.dumps(self._message, protocol=pickle.HIGHEST_PROTOCOL)
        self._message = None

    @property
    def type(self):
        return "LLUDP"

    @property
    def name(self):
        if self._message:
            self._name = self._message.name
        return self._name

    @property
    def method(self):
        if self._message:
            self._direction = self._message.direction
        return self._direction.name if self._direction is not None else ""

    def request(self, beautify=False, replacements=None):
        return self.message.to_human_string(replacements, beautify)

    def matches(self, matcher):
        base_matched = self._base_matches(matcher)
        if base_matched is not None:
            return base_matched

        if not self._packet_root_matches(matcher.selector[0]):
            return False

        message = self.message

        selector_len = len(matcher.selector)
        # name, block_name, var_name(, subfield_name)?
        if selector_len not in (3, 4):
            return False
        for block_name in message.blocks:
            if not fnmatch.fnmatchcase(block_name, matcher.selector[1]):
                continue
            for block in message[block_name]:
                for var_name in block.vars.keys():
                    if not fnmatch.fnmatchcase(var_name, matcher.selector[2]):
                        continue
                    if selector_len == 3:
                        if matcher.value is None:
                            return True
                        if self._val_matches(matcher.operator, block[var_name], matcher.value):
                            return True
                    elif selector_len == 4:
                        try:
                            deserialized = block.deserialize_var(var_name)
                        except KeyError:
                            continue
                        # Discard the tag if this is a tagged union, we only want the value
                        if isinstance(deserialized, TaggedUnion):
                            deserialized = deserialized.value
                        if not isinstance(deserialized, dict):
                            return False
                        for key in deserialized.keys():
                            if fnmatch.fnmatchcase(str(key), matcher.selector[3]):
                                if matcher.value is None:
                                    return True
                                if self._val_matches(matcher.operator, deserialized[key], matcher.value):
                                    return True

        return False

    @property
    def summary(self):
        if self._summary is None:
            self._summary = self.message.to_summary()[:500]
        return self._summary

    @property
    def seq(self):
        if self._message:
            self._seq = self._message.packet_id
        return self._seq

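
# Minimal usage sketch (not from the changeset) of the filter plumbing the
# matches() methods above implement; the filter string is hypothetical but
# follows the selector/operator form (Name.Block.Var ~= value) these
# comparisons support.
def _example_filter_usage(entry: AbstractMessageLogEntry) -> bool:
    filter_node = compile_filter('ChatFromViewer.ChatData.Message ~= "hello"')
    return filter_node.match(entry)
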
class EQMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["event"]

    def __init__(self, event, region, session):
        super().__init__(region, session)
        self.event = event

    @property
    def type(self):
        return "EQ"

    def request(self, beautify=False, replacements=None):
        return self._format_llsd(self.event["body"])

    @property
    def name(self):
        return self.event["message"]

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        self._summary = ""
        self._summary = llsd.format_notation(self.event["body"]).decode("utf8")[:500]
        return self._summary

class HTTPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["flow"]

    def __init__(self, flow: HippoHTTPFlow):
        self.flow: HippoHTTPFlow = flow
        cap_data = self.flow.cap_data
        region = cap_data and cap_data.region
        session = cap_data and cap_data.session

        super().__init__(region, session)
        # This was a request the proxy made through itself
        self.meta["Injected"] = flow.request_injected

    @property
    def type(self):
        return "HTTP"

    @property
    def name(self):
        cap_data = self.flow.cap_data
        name = cap_data and cap_data.cap_name
        if name:
            return name
        return self.flow.request.url

    @property
    def method(self):
        return self.flow.request.method

    def _format_http_message(self, want_request, beautify):
        message = self.flow.request if want_request else self.flow.response
        method = self.flow.request.method
        buf = io.StringIO()
        cap_data = self.flow.cap_data
        cap_name = cap_data and cap_data.cap_name
        base_url = cap_name and cap_data.base_url
        temporary_cap = cap_data and cap_data.type == CapType.TEMPORARY
        beautify_url = (beautify and base_url and cap_name and
                        not temporary_cap and self.session and want_request)
        if want_request:
            buf.write(message.method)
            buf.write(" ")
            if beautify_url:
                buf.write(f"[[{cap_name}]]{message.url[len(base_url):]}")
            else:
                buf.write(message.url)
            buf.write(" ")
            buf.write(message.http_version)
        else:
            buf.write(message.http_version)
            buf.write(" ")
            buf.write(str(message.status_code))
            buf.write(" ")
            buf.write(message.reason)
        buf.write("\r\n")
        if beautify_url:
            buf.write("# ")
            buf.write(message.url)
            buf.write("\r\n")

        headers = copy.deepcopy(message.headers)
        for key in tuple(headers.keys()):
            if key.lower().startswith("x-hippo-"):
                LOG.warning(f"Internal header {key!r} leaked out?")
                # If this header actually came from somewhere untrusted, we can't
                # include it. It may change the meaning of the message when replayed.
                headers[f"X-Untrusted-{key}"] = headers[key]
                headers.pop(key)
        beautified = None
        if beautify and message.content:
            try:
                serializer = se.HTTP_SERIALIZERS.get(cap_name)
                if serializer:
                    if want_request:
                        beautified = serializer.deserialize_req_body(method, message.content)
                    else:
                        beautified = serializer.deserialize_resp_body(method, message.content)

                    if beautified is se.UNSERIALIZABLE:
                        beautified = None
                    else:
                        beautified = self._format_llsd(beautified)
                        headers["X-Hippo-Beautify"] = "1"

                if not beautified:
                    content_type = self._guess_content_type(message)
                    if content_type.startswith("application/llsd"):
                        beautified = self._format_llsd(llsd.parse(message.content))
                    elif any(content_type.startswith(x) for x in ("application/xml", "text/xml")):
                        beautified = minidom.parseString(message.content).toprettyxml(indent=" ")
                        # kill blank lines. will break cdata sections. meh.
                        beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
                        beautified = re.sub(r'<([\w]+)>\s*</\1>', r'<\1></\1>',
                                            beautified, flags=re.MULTILINE)
            except Exception:
                LOG.exception("Failed to beautify message")

        message_body = beautified or message.content
        if isinstance(message_body, bytes):
            try:
                decoded = message.text
                # Valid in many codecs, but unprintable.
                if "\x00" in decoded:
                    raise ValueError("Embedded null")
                message_body = decoded
            except (UnicodeError, ValueError):
                # non-printable characters, return the escaped version.
                headers["X-Hippo-Escaped-Body"] = "1"
                message_body = bytes_escape(message_body).decode("utf8")

        buf.write(bytes(headers).decode("utf8", errors="replace"))
        buf.write("\r\n")

        buf.write(message_body)
        return buf.getvalue()

    def request(self, beautify=False, replacements=None):
        return self._format_http_message(want_request=True, beautify=beautify)

    def response(self, beautify=False):
        return self._format_http_message(want_request=False, beautify=beautify)

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        msg = self.flow.response
        self._summary = f"{msg.status_code}: "
        if not msg.content:
            return self._summary
        if len(msg.content) > 1000000:
            self._summary += "[too large...]"
            return self._summary
        content_type = self._guess_content_type(msg)
        if content_type.startswith("application/llsd"):
            notation = llsd.format_notation(llsd.parse(msg.content))
            self._summary += notation.decode("utf8")[:500]
        return self._summary

    def _guess_content_type(self, message):
        content_type = message.headers.get("Content-Type", "")
        if not message.content or content_type.startswith("application/llsd"):
            return content_type
        # Sometimes gets sent with `text/plain` or `text/html`. Cool.
        if message.content.startswith(rb'<?xml version="1.0" ?><llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<?xml '):
            return "application/xml"
        return content_type

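
# Illustration (not from the changeset) of the header-vs-payload sniffing in
# _guess_content_type above: LLSD bodies sometimes arrive labeled text/html,
# so the payload wins over the header. FakeMessage is a hypothetical stand-in
# with the only two attributes the sniffer reads.
def _example_content_sniff():
    class FakeMessage:
        headers = {"Content-Type": "text/html"}
        content = b'<?xml version="1.0" ?><llsd><map /></llsd>'

    assert HTTPMessageLogEntry._guess_content_type(None, FakeMessage()) == "application/llsd+xml"
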
class MessageLogModel(QtCore.QAbstractTableModel, BaseMessageLogger):
    def __init__(self, parent=None):
class MessageLogModel(QtCore.QAbstractTableModel, FilteringMessageLogger):
    def __init__(self, parent=None, maxlen=2000):
        QtCore.QAbstractTableModel.__init__(self, parent)
        BaseMessageLogger.__init__(self)
        self._raw_entries = collections.deque(maxlen=2000)
        self._queued_entries = queue.Queue()
        self._filtered_entries = []
        self._paused = False
        self.filter: typing.Optional[BaseFilterNode] = None
        FilteringMessageLogger.__init__(self, maxlen=maxlen)

    def setFilter(self, filter_str: str):
        self.filter = compile_filter(filter_str)
    def _begin_insert(self, insert_idx: int):
        self.beginInsertRows(QtCore.QModelIndex(), insert_idx, insert_idx)

    def _end_insert(self):
        self.endInsertRows()

    def _begin_reset(self):
        self.beginResetModel()
        # Keep any entries that've aged out of the raw entries list that
        # match the new filter
        self._filtered_entries = [
            m for m in self._filtered_entries if
            m not in self._raw_entries and self.filter.match(m)
        ]
        self._filtered_entries.extend((m for m in self._raw_entries if self.filter.match(m)))

    def _end_reset(self):
        self.endResetModel()

    def setPaused(self, paused: bool):
        self._paused = paused

    def log_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
        if self._paused:
            return
        self.queueLogEntry(LLUDPMessageLogEntry(message, region, session))

    def log_http_response(self, flow: HippoHTTPFlow):
        if self._paused:
            return
        # These are huge, let's not log them for now.
        if flow.cap_data and flow.cap_data.asset_server_cap:
            return
        self.queueLogEntry(HTTPMessageLogEntry(flow))

    def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
        if self._paused:
            return
        self.queueLogEntry(EQMessageLogEntry(event, region, session))

    def appendQueuedEntries(self):
        while not self._queued_entries.empty():
            entry: AbstractMessageLogEntry = self._queued_entries.get(block=False)
            # Paused, throw it away.
            if self._paused:
                continue
            self._raw_entries.append(entry)
            try:
                if self.filter.match(entry):
                    next_idx = len(self._filtered_entries)
                    self.beginInsertRows(QtCore.QModelIndex(), next_idx, next_idx)
                    self._filtered_entries.append(entry)
                    self.endInsertRows()

                entry.cache_summary()
                # In the common case we don't need to keep around the serialization
                # caches anymore. If the filter changes, the caches will be repopulated
                # as necessary.
                entry.freeze()
            except Exception:
                LOG.exception("Failed to filter queued message")

    def queueLogEntry(self, entry: AbstractMessageLogEntry):
        self._queued_entries.put(entry, block=False)

    def rowCount(self, parent=None, *args, **kwargs):
        return len(self._filtered_entries)

@@ -656,14 +69,6 @@ class MessageLogModel(QtCore.QAbstractTableModel, BaseMessageLogger):
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            return MessageLogHeader(col).name

    def clear(self):
        self.beginResetModel()
        self._filtered_entries.clear()
        while not self._queued_entries.empty():
            self._queued_entries.get(block=False)
        self._raw_entries.clear()
        self.endResetModel()

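
# Sketch (not from the changeset) of the Qt bookkeeping contract the model
# above satisfies: every row appended to the backing list must be bracketed
# by begin/endInsertRows so attached views stay consistent. AppendOnlyModel
# is a hypothetical minimal subclass for illustration.
class AppendOnlyModel(QtCore.QAbstractTableModel):
    def __init__(self):
        super().__init__()
        self._rows = []

    def append_row(self, row):
        idx = len(self._rows)
        self.beginInsertRows(QtCore.QModelIndex(), idx, idx)
        self._rows.append(row)
        self.endInsertRows()
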

class RegionListModel(QtCore.QAbstractListModel):
    def __init__(self, parent, session_manager):

@@ -7,25 +7,28 @@ import sys
import time
from typing import Optional

import mitmproxy.ctx
import mitmproxy.exceptions
import outleap

from hippolyzer.lib.base import llsd
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.ca_utils import setup_ca
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager, Session
from hippolyzer.lib.proxy.settings import ProxySettings

LOG = logging.getLogger(__name__)


class SelectionManagerAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        selected = session.selected
        if message.name == "ObjectSelect":
            # ObjectDeselect intentionally ignored to deal with messages that
@@ -42,7 +45,7 @@ class SelectionManagerAddon(BaseAddon):
                    LOG.debug(f"Don't know about selected {local_id}, requesting object")
                    needed_objects.add(local_id)

            if needed_objects:
            if needed_objects and session.session_manager.settings.ALLOW_AUTO_REQUEST_OBJECTS:
                region.objects.request_objects(needed_objects)
        # ParcelDwellRequests are sent whenever "about land" is opened. This gives us a
        # decent mechanism for selecting parcels.
@@ -74,6 +77,15 @@ class SelectionManagerAddon(BaseAddon):
            selected.task_item = parsed["item-id"]


class AgentUpdaterAddon(BaseAddon):
    def handle_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
        if event['message'] != 'AgentGroupDataUpdate':
            return
        session.groups.clear()
        for group in event['body']['GroupData']:
            session.groups.add(group['GroupID'])

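
# Hypothetical addon sketch (not part of this changeset) following the same
# hook pattern as the addons above; ChatFromSimulator's ChatData block is
# standard LLUDP protocol.
class ChatLoggerAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.name == "ChatFromSimulator":
            LOG.info("Chat: %r", message["ChatData"][0]["Message"])
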

class REPLAddon(BaseAddon):
    @handle_command()
    async def spawn_repl(self, session: Session, region: ProxiedRegion):
@@ -82,61 +94,69 @@ class REPLAddon(BaseAddon):
        AddonManager.spawn_repl()


def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext):
def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext, ssl_insecure=False):
    mitm_loop = asyncio.new_event_loop()
    asyncio.set_event_loop(mitm_loop)
    mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
    mitmproxy_master.start_server()
    gc.freeze()
    mitm_loop.run_forever()

    async def mitmproxy_loop():
        mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context, ssl_insecure=ssl_insecure)
        gc.freeze()
        await mitmproxy_master.run()

    asyncio.run(mitmproxy_loop())


def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional[list] = None,
                session_manager=None, proxy_host=None):
def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
                extra_addon_paths: Optional[list] = None, proxy_host=None, ssl_insecure=False):
    extra_addons = extra_addons or []
    extra_addon_paths = extra_addon_paths or []
    extra_addons.append(SelectionManagerAddon())
    extra_addons.append(REPLAddon())
    extra_addons.append(AgentUpdaterAddon())

    root_log = logging.getLogger()
    root_log.addHandler(logging.StreamHandler())
    root_log.setLevel(logging.INFO)
    logging.basicConfig()

    loop = asyncio.get_event_loop()
    loop = asyncio.get_event_loop_policy().get_event_loop()

    udp_proxy_port = int(os.environ.get("HIPPO_UDP_PORT", 9061))
    http_proxy_port = int(os.environ.get("HIPPO_HTTP_PORT", 9062))
    udp_proxy_port = session_manager.settings.SOCKS_PROXY_PORT
    http_proxy_port = session_manager.settings.HTTP_PROXY_PORT
    leap_port = session_manager.settings.LEAP_PORT
    if proxy_host is None:
        proxy_host = os.environ.get("HIPPO_BIND_HOST", "127.0.0.1")
        proxy_host = session_manager.settings.PROXY_BIND_ADDR

    session_manager = session_manager or SessionManager()
    flow_context = session_manager.flow_context
    session_manager.name_cache.load_viewer_caches()

    # TODO: argparse
    if len(sys.argv) == 3:
        if sys.argv[1] == "--setup-ca":
            try:
                mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
            except mitmproxy.exceptions.ServerException:
                # Proxy already running, create the master so we don't try to bind to a port
                mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
            mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
            setup_ca(sys.argv[2], mitmproxy_master)
            return sys.exit(0)

    http_proc = multiprocessing.Process(
        target=run_http_proxy_process,
        args=(proxy_host, http_proxy_port, flow_context),
        args=(proxy_host, http_proxy_port, flow_context, ssl_insecure),
        daemon=True,
    )
    http_proc.start()
    # These need to be set for mitmproxy's ASGIApp serving code to work.
    mitmproxy.ctx.master = None
    mitmproxy.ctx.log = logging.getLogger("mitmproxy log")

    server = SLSOCKS5Server(session_manager)
    coro = asyncio.start_server(server.handle_connection, proxy_host, udp_proxy_port)
    async_server = loop.run_until_complete(coro)

    leap_server = outleap.LEAPBridgeServer(session_manager.leap_client_connected)
    coro = asyncio.start_server(leap_server.handle_connection, proxy_host, leap_port)
    async_leap_server = loop.run_until_complete(coro)

    event_manager = MITMProxyEventManager(session_manager, flow_context)
    loop.create_task(event_manager.pump_proxy_events())
    loop.create_task(event_manager.run())

    addon_paths = sys.argv[1:]
    addon_paths.extend(extra_addon_paths)
@@ -144,6 +164,7 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional

    # Everything in memory at this point should stay
    gc.freeze()
    gc.set_threshold(5000, 50, 10)

    # Serve requests until Ctrl+C is pressed
    print(f"SOCKS and HTTP proxies running on {proxy_host}")
@@ -160,6 +181,8 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional
    # Close the server
    print("Closing SOCKS server")
    async_server.close()
    print("Shutting down LEAP server")
    async_leap_server.close()
    print("Shutting down addons")
    AddonManager.shutdown()
    print("Waiting for SOCKS server to close")
@@ -178,10 +201,15 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional

def _windows_timeout_killer(pid: int):
    time.sleep(2.0)
    print(f"Killing hanging event loop")
    print("Killing hanging event loop")
    os.kill(pid, 9)


def main():
    multiprocessing.set_start_method("spawn")
    start_proxy()
    start_proxy(SessionManager(ProxySettings()))


if __name__ == "__main__":
    multiprocessing.freeze_support()
    main()

@@ -1,5 +1,6 @@
import asyncio
import base64
import dataclasses
import email
import functools
import html
@@ -8,7 +9,6 @@ import json
import logging
import pathlib
import multiprocessing
import os
import re
import signal
import socket
@@ -17,51 +17,53 @@ import urllib.parse
from typing import *

import multidict
from qasync import QEventLoop
from PySide2 import QtCore, QtWidgets, QtGui
from qasync import QEventLoop, asyncSlot
from PySide6 import QtCore, QtWidgets, QtGui

from hippolyzer.apps.model import (
    AbstractMessageLogEntry,
    LLUDPMessageLogEntry,
    MessageLogModel,
    MessageLogHeader,
    RegionListModel,
    bytes_unescape,
    bytes_escape,
)
from hippolyzer.apps.model import MessageLogModel, MessageLogHeader, RegionListModel
from hippolyzer.apps.proxy import start_proxy
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename, create_logged_task
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_formatting import (
    HumanMessageSerializer,
    VerbatimHumanVal,
    subfield_eval,
    SpannedString,
)
from hippolyzer.lib.base.message.msgtypes import MsgType
from hippolyzer.lib.base.message.template_dict import TemplateDictionary
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base.settings import SettingDescriptor
from hippolyzer.lib.base.ui_helpers import loadUi
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
from hippolyzer.lib.client.state import BaseClientSessionManager
from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
from hippolyzer.lib.proxy.caps_client import CapsClient
from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.packets import Direction
from hippolyzer.lib.proxy.message import ProxiedMessage, VerbatimHumanVal, proxy_eval
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
    import_log_entries, export_log_entries
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
from hippolyzer.lib.proxy.settings import ProxySettings
from hippolyzer.lib.proxy.templates import CAP_TEMPLATES

LOG = logging.getLogger(__name__)

BASE_PATH = os.path.dirname(os.path.abspath(__file__))
MAIN_WINDOW_UI_PATH = os.path.join(BASE_PATH, "proxy_mainwindow.ui")
MESSAGE_BUILDER_UI_PATH = os.path.join(BASE_PATH, "message_builder.ui")
ADDON_DIALOG_UI_PATH = os.path.join(BASE_PATH, "addon_dialog.ui")
FILTER_DIALOG_UI_PATH = os.path.join(BASE_PATH, "filter_dialog.ui")
MAIN_WINDOW_UI_PATH = get_resource_filename("apps/proxy_mainwindow.ui")
MESSAGE_BUILDER_UI_PATH = get_resource_filename("apps/message_builder.ui")
ADDON_DIALOG_UI_PATH = get_resource_filename("apps/addon_dialog.ui")
FILTER_DIALOG_UI_PATH = get_resource_filename("apps/filter_dialog.ui")


def show_error_message(error_msg, parent=None):
    error_dialog = QtWidgets.QErrorMessage(parent=parent)
    # No obvious way to set this to plaintext, yuck...
    error_dialog.showMessage(html.escape(error_msg))
    error_dialog.exec_()
    error_dialog.exec()
    error_dialog.raise_()

@@ -69,11 +71,12 @@ class GUISessionManager(SessionManager, QtCore.QObject):
    regionAdded = QtCore.Signal(ProxiedRegion)
    regionRemoved = QtCore.Signal(ProxiedRegion)

    def __init__(self, model):
        SessionManager.__init__(self)
    def __init__(self, settings):
        BaseClientSessionManager.__init__(self)
        SessionManager.__init__(self, settings)
        QtCore.QObject.__init__(self)
        self.all_regions = []
        self.message_logger = model
        self.message_logger = WrappingMessageLogger()

    def checkRegions(self):
        new_regions = itertools.chain(*[s.regions for s in self.sessions])
@@ -88,13 +91,13 @@ class GUISessionManager(SessionManager, QtCore.QObject):
        self.all_regions = new_regions


class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
    def __init__(self, parent):
class GUIInteractionManager(BaseInteractionManager):
    def __init__(self, parent: QtWidgets.QWidget):
        BaseInteractionManager.__init__(self)
        QtCore.QObject.__init__(self, parent=parent)
        self._parent = parent

    def main_window_handle(self) -> Any:
        return self.parent()
        return self._parent

    def _dialog_async_exec(self, dialog: QtWidgets.QDialog):
        future = asyncio.Future()
@@ -102,12 +105,16 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
        dialog.open()
        return future

    async def _file_dialog(self, caption: str, directory: str, filter_str: str, mode: QtWidgets.QFileDialog.FileMode) \
            -> Tuple[bool, QtWidgets.QFileDialog]:
        dialog = QtWidgets.QFileDialog(self.parent(), caption=caption, directory=directory, filter=filter_str)
    async def _file_dialog(
            self, caption: str, directory: str, filter_str: str, mode: QtWidgets.QFileDialog.FileMode,
            default_suffix: str = '',
    ) -> Tuple[bool, QtWidgets.QFileDialog]:
        dialog = QtWidgets.QFileDialog(self._parent, caption=caption, directory=directory, filter=filter_str)
        dialog.setFileMode(mode)
        if mode == QtWidgets.QFileDialog.FileMode.AnyFile:
            dialog.setAcceptMode(QtWidgets.QFileDialog.AcceptMode.AcceptSave)
            if default_suffix:
                dialog.setDefaultSuffix(default_suffix)
        res = await self._dialog_async_exec(dialog)
        return res, dialog

@@ -135,14 +142,46 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
            return None
        return dialog.selectedFiles()[0]

    async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '') -> Optional[str]:
    async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '',
                        default_suffix: str = '') -> Optional[str]:
        res, dialog = await self._file_dialog(
            caption, directory, filter_str, QtWidgets.QFileDialog.FileMode.AnyFile
            caption, directory, filter_str, QtWidgets.QFileDialog.FileMode.AnyFile, default_suffix,
        )
        if not res or not dialog.selectedFiles():
            return None
        return dialog.selectedFiles()[0]

    async def confirm(self, title: str, caption: str) -> bool:
        msg = QtWidgets.QMessageBox(
            QtWidgets.QMessageBox.Icon.Question,
            title,
            caption,
            QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel,
            self._parent,
        )
        fut = asyncio.Future()
        msg.finished.connect(lambda r: fut.set_result(r))
        msg.open()
        return (await fut) == QtWidgets.QMessageBox.Ok


class GUIProxySettings(ProxySettings):
    FIRST_RUN: bool = SettingDescriptor(True)

    """Persistent settings backed by QSettings"""
    def __init__(self, settings: QtCore.QSettings):
        super().__init__()
        self._settings_obj = settings

    def get_setting(self, name: str) -> Any:
        val: Any = self._settings_obj.value(name, defaultValue=dataclasses.MISSING)
        if val is dataclasses.MISSING:
            return val
        return json.loads(val)

    def set_setting(self, name: str, val: Any):
        self._settings_obj.setValue(name, json.dumps(val))

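
# Sketch (not from the changeset) of the JSON-over-QSettings scheme above:
# values round-trip through json so native types survive storage. The
# org/app names match the ones used in gui_main() below; the "FILTERS"
# payload is hypothetical.
def _example_settings_roundtrip():
    settings = GUIProxySettings(QtCore.QSettings("SaladDais", "hippolyzer"))
    settings.set_setting("FILTERS", {"Chat only": "ChatFromSimulator"})
    assert settings.get_setting("FILTERS") == {"Chat only": "ChatFromSimulator"}
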

def nonFatalExceptions(f):
    @functools.wraps(f)
@@ -157,7 +196,35 @@ def nonFatalExceptions(f):
    return _wrapper

class ProxyGUI(QtWidgets.QMainWindow):
def buildReplacements(session: Session, region: ProxiedRegion):
    if not session or not region:
        return {}
    selected = session.selected
    agent_object = region.objects.lookup_fullid(session.agent_id)
    selected_local = selected.object_local
    selected_object = None
    if selected_local:
        # We may or may not have an object for this
        selected_object = region.objects.lookup_localid(selected_local)
    return {
        "SELECTED_LOCAL": selected_local,
        "SELECTED_FULL": selected_object.FullID if selected_object else None,
        "SELECTED_PARCEL_LOCAL": selected.parcel_local,
        "SELECTED_PARCEL_FULL": selected.parcel_full,
        "SELECTED_SCRIPT_ITEM": selected.script_item,
        "SELECTED_TASK_ITEM": selected.task_item,
        "AGENT_ID": session.agent_id,
        "AGENT_LOCAL": agent_object.LocalID if agent_object else None,
        "SESSION_ID": session.id,
        "AGENT_POS": agent_object.Position if agent_object else None,
        "NULL_KEY": UUID(),
        "RANDOM_KEY": UUID.random,
        "CIRCUIT_CODE": session.circuit_code,
        "REGION_HANDLE": region.handle,
    }

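
# Hypothetical sketch (not from the changeset) of how the replacement table
# above is consumed: the message builder substitutes [[NAME]] tokens in a
# human-readable message before serializing it, e.g.:
#
#   OUT ObjectSelect
#   [AgentData]
#     AgentID = [[AGENT_ID]]
#     SessionID = [[SESSION_ID]]
#   [ObjectData]
#     ObjectLocalID = [[SELECTED_LOCAL]]
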

class MessageLogWindow(QtWidgets.QMainWindow):
    DEFAULT_IGNORE = "StartPingCheck CompletePingCheck PacketAck SimulatorViewerTimeMessage SimStats " \
                     "AgentUpdate AgentAnimation AvatarAnimation ViewerEffect CoarseLocationUpdate LayerData " \
                     "CameraConstraint ObjectUpdateCached RequestMultipleObjects ObjectUpdate ObjectUpdateCompressed " \
@@ -166,44 +233,65 @@ class ProxyGUI(QtWidgets.QMainWindow):
                     "AvatarRenderInfo FirestormBridge ObjectAnimation ParcelDwellRequest ParcelAccessListRequest " \
                     "ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
                     "ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
                     "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
                     "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
                     "AgentFOV GenericStreamingMessage".split(" ")
    DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"

    def __init__(self):
        super().__init__()
    textRequest: QtWidgets.QTextEdit

    def __init__(
            self, settings: GUIProxySettings, session_manager: GUISessionManager,
            log_live_messages: bool, parent: Optional[QtWidgets.QWidget] = None,
    ):
        super().__init__(parent=parent)
        loadUi(MAIN_WINDOW_UI_PATH, self)

        self.settings = QtCore.QSettings("SaladDais", "hippolyzer")
        if parent:
            self.setWindowTitle("Message Log")
            self.menuBar.setEnabled(False)  # type: ignore
            self.menuBar.hide()  # type: ignore

        self._selectedEntry: Optional[AbstractMessageLogEntry] = None

        self.model = MessageLogModel(parent=self.tableView)
        self.settings = settings
        self.sessionManager = session_manager
        if log_live_messages:
            self.model = MessageLogModel(parent=self.tableView)
            session_manager.message_logger.loggers.append(self.model)
        else:
            self.model = MessageLogModel(parent=self.tableView, maxlen=None)
        self.tableView.setModel(self.model)
        self.model.rowsAboutToBeInserted.connect(self.beforeInsert)
        self.model.rowsInserted.connect(self.afterInsert)
        self.tableView.selectionModel().selectionChanged.connect(self._messageSelected)
        self.checkBeautify.clicked.connect(self._showSelectedMessage)
        self.checkPause.clicked.connect(self._setPaused)
        self._setFilter(self.DEFAULT_FILTER)
        self.setFilter(self.DEFAULT_FILTER)
        self.btnClearLog.clicked.connect(self.model.clear)
        self.lineEditFilter.editingFinished.connect(self._setFilter)
        self.lineEditFilter.editingFinished.connect(self.setFilter)
        self.btnMessageBuilder.clicked.connect(self._sendToMessageBuilder)
        self.btnCopyRepr.clicked.connect(self._copyRepr)
        self.actionInstallHTTPSCerts.triggered.connect(self._installHTTPSCerts)
        self.actionInstallHTTPSCerts.triggered.connect(self.installHTTPSCerts)
        self.actionManageAddons.triggered.connect(self._manageAddons)
        self.actionManageFilters.triggered.connect(self._manageFilters)
        self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)
        self.actionProxyRemotelyAccessible.setChecked(
            self.settings.value("RemotelyAccessible", False, type=bool))

        self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
        self.actionProxySSLInsecure.setChecked(self.settings.SSL_INSECURE)
        self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
        self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
        self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
        self.actionProxySSLInsecure.triggered.connect(self._setProxySSLInsecure)
        self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
        self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
        self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)
        self.actionImportLogEntries.triggered.connect(self._importLogEntries)
        self.actionExportLogEntries.triggered.connect(self._exportLogEntries)

        self._filterMenu = QtWidgets.QMenu()
        self._populateFilterMenu()
        self.toolButtonFilter.setMenu(self._filterMenu)

        self.sessionManager = GUISessionManager(self.model)
        self.interactionManager = GUIInteractionManager(self)
        AddonManager.UI = self.interactionManager

        self._shouldScrollOnInsert = True
        self.tableView.horizontalHeader().resizeSection(MessageLogHeader.Host, 80)
        self.tableView.horizontalHeader().resizeSection(MessageLogHeader.Method, 60)
@@ -212,40 +300,46 @@ class ProxyGUI(QtWidgets.QMainWindow):

        self.textResponse.hide()

    def closeEvent(self, event) -> None:
        loggers = self.sessionManager.message_logger.loggers
        if self.model in loggers:
            loggers.remove(self.model)
        super().closeEvent(event)

    def _populateFilterMenu(self):
        def _addFilterAction(text, filter_str):
            filter_action = QtWidgets.QAction(text, self)
            filter_action.triggered.connect(lambda: self._setFilter(filter_str))
            filter_action = QtGui.QAction(text, self)
            filter_action.triggered.connect(lambda: self.setFilter(filter_str))
            self._filterMenu.addAction(filter_action)

        self._filterMenu.clear()

        _addFilterAction("Default", self.DEFAULT_FILTER)
        filters = self.getFilterDict()
        filters = self.settings.FILTERS
        for preset_name, preset_filter in filters.items():
            _addFilterAction(preset_name, preset_filter)

    def getFilterDict(self):
        return json.loads(str(self.settings.value("Filters", "{}")))
        return self.settings.FILTERS

    def setFilterDict(self, val: dict):
        self.settings.setValue("Filters", json.dumps(val))
        self.settings.FILTERS = val
        self._populateFilterMenu()

    def _manageFilters(self):
        dialog = FilterDialog(self)
        dialog.exec_()
        dialog.exec()

    @nonFatalExceptions
    def _setFilter(self, filter_str=None):
    def setFilter(self, filter_str=None):
        if filter_str is None:
            filter_str = self.lineEditFilter.text()
        else:
            self.lineEditFilter.setText(filter_str)
        self.model.setFilter(filter_str)
        self.model.set_filter(filter_str)

    def _setPaused(self, checked):
        self.model.setPaused(checked)
        self.model.set_paused(checked)

    def _messageSelected(self, selected, _deselected):
        indexes = selected.indexes()
@@ -269,10 +363,24 @@ class ProxyGUI(QtWidgets.QMainWindow):
            return
        req = entry.request(
            beautify=self.checkBeautify.isChecked(),
            replacements=self.buildReplacements(entry.session, entry.region),
            replacements=buildReplacements(entry.session, entry.region),
        )
        resp = entry.response(beautify=self.checkBeautify.isChecked())
        self.textRequest.setPlainText(req)
        # The string has a map of fields and their associated positions within the string,
        # use that to highlight any individual fields the filter matched on.
        if isinstance(req, SpannedString):
            for field in self.model.filter.match(entry, short_circuit=False).fields:
                field_span = req.spans.get(field)
                if not field_span:
                    continue
                cursor = self.textRequest.textCursor()
                cursor.setPosition(field_span[0], QtGui.QTextCursor.MoveAnchor)
                cursor.setPosition(field_span[1], QtGui.QTextCursor.KeepAnchor)
                highlight_format = QtGui.QTextBlockFormat()
                highlight_format.setBackground(QtCore.Qt.yellow)
                cursor.setBlockFormat(highlight_format)

        resp = entry.response(beautify=self.checkBeautify.isChecked())
        if resp:
            self.textResponse.show()
            self.textResponse.setPlainText(resp)

@@ -294,7 +402,7 @@ class ProxyGUI(QtWidgets.QMainWindow):
        win.show()
        msg = self._selectedEntry
        beautify = self.checkBeautify.isChecked()
        replacements = self.buildReplacements(msg.session, msg.region)
        replacements = buildReplacements(msg.session, msg.region)
        win.setMessageText(msg.request(beautify=beautify, replacements=replacements))

    @nonFatalExceptions
@@ -310,37 +418,43 @@ class ProxyGUI(QtWidgets.QMainWindow):
        win = MessageBuilderWindow(self, self.sessionManager)
        win.show()

    def buildReplacements(self, session: Session, region: ProxiedRegion):
        if not session or not region:
            return {}
        selected = session.selected
        agent_object = region.objects.lookup_fullid(session.agent_id)
        selected_local = selected.object_local
        selected_object = None
        if selected_local:
            # We may or may not have an object for this
            selected_object = region.objects.lookup_localid(selected_local)
        return {
            "SELECTED_LOCAL": selected_local,
            "SELECTED_FULL": selected_object.FullID if selected_object else None,
            "SELECTED_PARCEL_LOCAL": selected.parcel_local,
            "SELECTED_PARCEL_FULL": selected.parcel_full,
            "SELECTED_SCRIPT_ITEM": selected.script_item,
            "SELECTED_TASK_ITEM": selected.task_item,
            "AGENT_ID": session.agent_id,
            "AGENT_LOCAL": agent_object.LocalID if agent_object else None,
            "SESSION_ID": session.id,
            "AGENT_POS": agent_object.Position if agent_object else None,
            "NULL_KEY": UUID(),
            "RANDOM_KEY": UUID.random,
            "CIRCUIT_CODE": session.circuit_code,
            "REGION_HANDLE": region.handle,
        }
    def _openNewMessageLogWindow(self):
        win: QtWidgets.QMainWindow = MessageLogWindow(
            self.settings, self.sessionManager, log_live_messages=True, parent=self)
        win.setFilter(self.lineEditFilter.text())
        win.show()
        win.activateWindow()

    def _installHTTPSCerts(self):
    @asyncSlot()
    async def _importLogEntries(self):
        log_file = await AddonManager.UI.open_file(
            caption="Import Log Entries", filter_str="Hippolyzer Logs (*.hippolog)"
        )
        if not log_file:
            return
        win = MessageLogWindow(self.settings, self.sessionManager, log_live_messages=False, parent=self)
        win.setFilter(self.lineEditFilter.text())
        with open(log_file, "rb") as f:
            entries = import_log_entries(f.read())
        for entry in entries:
            win.model.add_log_entry(entry)
        win.show()
        win.activateWindow()

    @asyncSlot()
    async def _exportLogEntries(self):
        log_file = await AddonManager.UI.save_file(
            caption="Export Log Entries", filter_str="Hippolyzer Logs (*.hippolog)", default_suffix="hippolog",
        )
        if not log_file:
            return
        with open(log_file, "wb") as f:
            f.write(export_log_entries(self.model))

    def installHTTPSCerts(self):
        msg = QtWidgets.QMessageBox()
        msg.setText("This will install the proxy's HTTPS certificate in the config dir"
                    " of any installed viewers, continue?")
        msg.setText("Would you like to install the proxy's HTTPS certificate in the config dir"
                    " of any installed viewers so that HTTPS connections will work?")
        yes_btn = msg.addButton("Yes", QtWidgets.QMessageBox.NoRole)
        msg.addButton("No", QtWidgets.QMessageBox.NoRole)
        msg.exec()
@@ -348,7 +462,7 @@ class ProxyGUI(QtWidgets.QMainWindow):
        if clicked_btn is not yes_btn:
            return

        master = create_proxy_master("127.0.0.1", -1, HTTPFlowContext())
        master = create_http_proxy("127.0.0.1", -1, HTTPFlowContext())
        dirs = setup_ca_everywhere(master)

        msg = QtWidgets.QMessageBox()
@@ -359,20 +473,32 @@ class ProxyGUI(QtWidgets.QMainWindow):
        msg.exec()

    def _setProxyRemotelyAccessible(self, checked: bool):
        self.settings.setValue("RemotelyAccessible", checked)
        self.sessionManager.settings.REMOTELY_ACCESSIBLE = checked
        msg = QtWidgets.QMessageBox()
        msg.setText("Remote accessibility setting changes will take effect on next run")
        msg.exec()

    def _setProxySSLInsecure(self, checked: bool):
        self.sessionManager.settings.SSL_INSECURE = checked
        msg = QtWidgets.QMessageBox()
        msg.setText("SSL security setting changes will take effect on next run")
        msg.exec()

    def _setUseViewerObjectCache(self, checked: bool):
        self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked

    def _setRequestMissingObjects(self, checked: bool):
        self.sessionManager.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS = checked

    def _manageAddons(self):
        dialog = AddonDialog(self)
        dialog.exec_()
        dialog.exec()

    def getAddonList(self) -> List[str]:
        return json.loads(str(self.settings.value("Addons", "[]")))
        return self.sessionManager.settings.ADDON_SCRIPTS

    def setAddonList(self, val: List[str]):
        self.settings.setValue("Addons", json.dumps(val))
        self.sessionManager.settings.ADDON_SCRIPTS = val


BANNED_HEADERS = ("content-length", "host")

@@ -410,7 +536,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
    def __init__(self, parent, session_manager):
        super().__init__(parent=parent)
        loadUi(MESSAGE_BUILDER_UI_PATH, self)
        self.templateDict = TemplateDictionary()
        self.templateDict = DEFAULT_TEMPLATE_DICT
        self.llsdSerializer = LLSDMessageSerializer()
        self.sessionManager: SessionManager = session_manager
        self.regionModel = RegionListModel(self, self.sessionManager)
@@ -450,12 +576,12 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        message_names = sorted(x.name for x in self.templateDict)

        for message_name in message_names:
            if self.templateDict[message_name].msg_trust:
            if self.templateDict[message_name].trusted:
                self.comboTrusted.addItem(message_name)
            else:
                self.comboUntrusted.addItem(message_name)

        cap_names = sorted(set(itertools.chain(*[r.caps.keys() for r in self.regionModel.regions])))
        cap_names = sorted(set(itertools.chain(*[r.cap_urls.keys() for r in self.regionModel.regions])))
        for cap_name in cap_names:
            if cap_name.endswith("ProxyWrapper"):
                continue
@@ -486,7 +612,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
                break
        self.textRequest.setPlainText(
            f"""{method} [[{cap_name}]]{path}{params} HTTP/1.1
# {region.caps.get(cap_name, "<unknown URI>")}
# {region.cap_urls.get(cap_name, "<unknown URI>")}
{headers}
{body}"""
        )
@@ -497,7 +623,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        self.textRequest.clear()

        template = self.templateDict[message_name]
        msg = ProxiedMessage(message_name, direction=Direction.OUT)
        msg = Message(message_name, direction=Direction.OUT)

        for tmpl_block in template.blocks:
            num_blocks = tmpl_block.number or 1
@@ -508,7 +634,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
            msg_block = Block(tmpl_block.name, **fill_vars)
            msg.add_block(msg_block)
        self.textRequest.setPlainText(
            msg.to_human_string(replacements={}, beautify=True, template=template)
            HumanMessageSerializer.to_human_string(msg, replacements={}, beautify=True, template=template)
        )

    def _getVarPlaceholder(self, msg, block, var):
@@ -539,24 +665,9 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        if var.name in ("TaskID", "ObjectID"):
            return VerbatimHumanVal("[[SELECTED_FULL]]")

        if var.type.is_int:
            return 0
        elif var.type.is_float:
            return 0.0
        elif var.type == MsgType.MVT_LLUUID:
            return UUID()
        elif var.type == MsgType.MVT_BOOL:
            return False
        elif var.type == MsgType.MVT_VARIABLE:
            return ""
        elif var.type in (MsgType.MVT_LLVector3, MsgType.MVT_LLVector3d, MsgType.MVT_LLQuaternion):
            return VerbatimHumanVal("(0.0, 0.0, 0.0)")
        elif var.type == MsgType.MVT_LLVector4:
            return VerbatimHumanVal("(0.0, 0.0, 0.0, 0.0)")
        elif var.type == MsgType.MVT_FIXED:
            return b"\x00" * var.size
        elif var.type == MsgType.MVT_IP_ADDR:
            return "0.0.0.0"
        default_val = var.default_value
        if default_val is not None:
            return default_val
        return VerbatimHumanVal("")

@nonFatalExceptions
|
||||
@@ -564,10 +675,12 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
|
||||
session, region = self._getTarget()
|
||||
|
||||
msg_text = self.textRequest.toPlainText()
|
||||
replacements = self.parent().buildReplacements(session, region)
|
||||
replacements = buildReplacements(session, region)
|
||||
|
||||
if re.match(r"\A\s*(in|out)\s+", msg_text, re.I):
|
||||
sender_func = self._sendLLUDPMessage
|
||||
elif re.match(r"\A\s*(eq)\s+", msg_text, re.I):
|
||||
sender_func = self._sendEQMessage
|
||||
elif re.match(r"\A.*http/[0-9.]+\r?\n", msg_text, re.I):
|
||||
sender_func = self._sendHTTPMessage
|
||||
else:
|
||||
@@ -591,24 +704,42 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         env = self._buildEnv(session, region)
         # We specifically want to allow `eval()` in messages since
         # messages from here are trusted.
-        msg = ProxiedMessage.from_human_string(msg_text, replacements, env, safe=False)
+        msg = HumanMessageSerializer.from_human_string(msg_text, replacements, env, safe=False)
         if self.checkLLUDPViaCaps.isChecked():
             if msg.direction == Direction.IN:
-                region.eq_manager.queue_event(
-                    self.llsdSerializer.serialize(msg, as_dict=True)
-                )
+                region.eq_manager.inject_message(msg)
             else:
                 self._sendHTTPRequest(
                     "POST",
-                    region.caps["UntrustedSimulatorMessage"],
+                    region.cap_urls["UntrustedSimulatorMessage"],
                     {"Content-Type": "application/llsd+xml", "Accept": "application/llsd+xml"},
                     self.llsdSerializer.serialize(msg),
                 )
         else:
             transport = None
-            if self.checkOffCircuit.isChecked():
-                transport = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-            region.circuit.send_message(msg, transport=transport)
+            off_circuit = self.checkOffCircuit.isChecked()
+            if off_circuit:
+                transport = SocketUDPTransport(socket.socket(socket.AF_INET, socket.SOCK_DGRAM))
+            region.circuit.send(msg, transport=transport)
+            if off_circuit:
+                transport.close()

+    def _sendEQMessage(self, session, region: Optional[ProxiedRegion], msg_text: str, replacements: dict):
+        if not session or not region:
+            raise RuntimeError("Need a valid session and region to send EQ event")
+        message_line, _, body = (x.strip() for x in msg_text.partition("\n"))
+        message_name = message_line.rsplit(" ", 1)[-1]
+
+        env = self._buildEnv(session, region)
+
+        def directive_handler(m):
+            return self._handleHTTPDirective(env, replacements, False, m)
+        body = re.sub(rb"<!HIPPO(\w+)\[\[(.*?)]]>", directive_handler, body.encode("utf8"), flags=re.S)
+
+        region.eq_manager.inject_event({
+            "message": message_name,
+            "body": llsd.parse_xml(body),
+        })
+
     def _sendHTTPMessage(self, session, region, msg_text: str, replacements: dict):
         env = self._buildEnv(session, region)
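The new `eq` branch parses a message whose first line ends with the event name and whose remaining lines are an LLSD XML body; HIPPO directives are substituted into the raw bytes before the body is parsed. A minimal example of what such a message might look like (the event name, body field, and the use of `session` inside the eval are illustrative assumptions, not taken from this diff):

    eq SomeEventName
    <llsd>
      <map>
        <key>agent_id</key>
        <uuid><!HIPPOEVAL[[str(session.agent_id)]]></uuid>
      </map>
    </llsd>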
@@ -631,7 +762,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         cap_name = match.group(1)
         cap_url = session.global_caps.get(cap_name)
         if not cap_url:
-            cap_url = region.caps.get(cap_name)
+            cap_url = region.cap_urls.get(cap_name)
         if not cap_url:
             raise ValueError("Don't have a Cap for %s" % cap_name)
         uri = cap_url + match.group(2)
@@ -668,10 +799,13 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         elif directive == b"UNESCAPE":
             val = unescaped_contents
         elif directive == b"EVAL":
-            val = proxy_eval(contents.decode("utf8").strip(), globals_={**env, **replacements})
+            val = subfield_eval(contents.decode("utf8").strip(), globals_={**env, **replacements})
             val = _coerce_to_bytes(val)
         elif directive == b"REPL":
-            val = _coerce_to_bytes(replacements[contents.decode("utf8").strip()])
+            repl = replacements[contents.decode("utf8").strip()]
+            if callable(repl):
+                repl = repl()
+            val = _coerce_to_bytes(repl)
         else:
             raise ValueError(f"Unknown directive {directive}")
@@ -683,7 +817,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         return val

     def _sendHTTPRequest(self, method, uri, headers, body):
-        caps_client = CapsClient()
+        caps_client = ProxyCapsClient(self.sessionManager.settings)

         async def _send_request():
             req = caps_client.request(method, uri, headers=headers, data=body)
@@ -692,13 +826,13 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
             # enough for the full response to pass through the proxy
             await resp.read()

-        asyncio.create_task(_send_request())
+        create_logged_task(_send_request(), "Send HTTP Request")


 class AddonDialog(QtWidgets.QDialog):
     listAddons: QtWidgets.QListWidget

-    def __init__(self, parent: ProxyGUI):
+    def __init__(self, parent: MessageLogWindow):
         super().__init__()

         loadUi(ADDON_DIALOG_UI_PATH, self)
@@ -749,7 +883,7 @@ class AddonDialog(QtWidgets.QDialog):
 class FilterDialog(QtWidgets.QDialog):
     listFilters: QtWidgets.QListWidget

-    def __init__(self, parent: ProxyGUI):
+    def __init__(self, parent: MessageLogWindow):
         super().__init__()

         loadUi(FILTER_DIALOG_UI_PATH, self)
@@ -793,19 +927,30 @@ def gui_main():
     app = QtWidgets.QApplication(sys.argv)
     loop = QEventLoop(app)
     asyncio.set_event_loop(loop)
-    window = ProxyGUI()
+    settings = GUIProxySettings(QtCore.QSettings("SaladDais", "hippolyzer"))
+    session_manager = GUISessionManager(settings)
+    window = MessageLogWindow(settings, session_manager, log_live_messages=True)
+    AddonManager.UI = GUIInteractionManager(window)
     timer = QtCore.QTimer(app)
     timer.timeout.connect(window.sessionManager.checkRegions)
+    timer.timeout.connect(window.model.appendQueuedEntries)
     timer.start(100)
     signal.signal(signal.SIGINT, lambda *args: QtWidgets.QApplication.quit())
     window.show()
-    remote_access = window.settings.value("RemotelyAccessible", False, type=bool)
     http_host = None
-    if remote_access:
+    if window.sessionManager.settings.REMOTELY_ACCESSIBLE:
         http_host = "0.0.0.0"
+    if settings.FIRST_RUN:
+        settings.FIRST_RUN = False
+        # Automatically offer to install the HTTPS certs on first run.
+        window.installHTTPSCerts()
     start_proxy(
         session_manager=window.sessionManager,
         extra_addon_paths=window.getAddonList(),
         proxy_host=http_host,
+        ssl_insecure=settings.SSL_INSECURE,
     )


 if __name__ == "__main__":
     multiprocessing.freeze_support()
     gui_main()
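`create_logged_task` stands in for the bare `asyncio.create_task` call so that exceptions raised by fire-and-forget tasks are reported instead of silently vanishing. A minimal sketch of what such a helper could look like (an assumption about its behavior, not the project's actual implementation):

    import asyncio
    import logging

    LOG = logging.getLogger(__name__)

    def create_logged_task(coro, name: str) -> asyncio.Task:
        # Schedule the coroutine; nothing will ever await the returned task,
        # so surface any exception through the log instead.
        task = asyncio.ensure_future(coro)

        def _log_exc(t: asyncio.Task):
            if not t.cancelled() and t.exception() is not None:
                LOG.error("Task %r failed", name, exc_info=t.exception())

        task.add_done_callback(_log_exc)
        return task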
@@ -193,7 +193,7 @@
        </size>
       </property>
       <property name="styleSheet">
-       <string notr="true">color: rgb(80, 0, 0)</string>
+       <string notr="true"/>
       </property>
       <property name="tabChangesFocus">
        <bool>true</bool>
@@ -213,7 +213,7 @@
      </widget>
      <widget class="QPlainTextEdit" name="textResponse">
       <property name="styleSheet">
-       <string notr="true">color: rgb(0, 0, 80)</string>
+       <string notr="true"/>
       </property>
       <property name="tabChangesFocus">
        <bool>true</bool>
@@ -245,7 +245,7 @@
      <x>0</x>
      <y>0</y>
      <width>700</width>
-     <height>22</height>
+     <height>29</height>
     </rect>
    </property>
    <widget class="QMenu" name="menuFile">
@@ -256,12 +256,19 @@
     <bool>true</bool>
    </property>
    <addaction name="actionOpenMessageBuilder"/>
+   <addaction name="actionOpenNewMessageLogWindow"/>
+   <addaction name="separator"/>
+   <addaction name="actionImportLogEntries"/>
+   <addaction name="actionExportLogEntries"/>
    <addaction name="separator"/>
    <addaction name="actionInstallHTTPSCerts"/>
    <addaction name="actionManageAddons"/>
    <addaction name="actionManageFilters"/>
    <addaction name="separator"/>
    <addaction name="actionProxyRemotelyAccessible"/>
+   <addaction name="actionUseViewerObjectCache"/>
+   <addaction name="actionRequestMissingObjects"/>
+   <addaction name="actionProxySSLInsecure"/>
   </widget>
   <addaction name="menuFile"/>
  </widget>
@@ -299,6 +306,54 @@
    <string>Make the proxy accessible from other devices on the network</string>
   </property>
  </action>
+ <action name="actionUseViewerObjectCache">
+  <property name="checkable">
+   <bool>true</bool>
+  </property>
+  <property name="text">
+   <string>Use Viewer Object Cache</string>
+  </property>
+  <property name="toolTip">
+   <string>Can help make the proxy aware of certain objects, but can cause slowdowns</string>
+  </property>
+ </action>
+ <action name="actionRequestMissingObjects">
+  <property name="checkable">
+   <bool>true</bool>
+  </property>
+  <property name="text">
+   <string>Automatically Request Missing Objects</string>
+  </property>
+  <property name="toolTip">
+   <string>Force the proxy to request objects that it doesn't know about due to cache misses</string>
+  </property>
+ </action>
+ <action name="actionOpenNewMessageLogWindow">
+  <property name="text">
+   <string>Open New Message Log Window</string>
+  </property>
+ </action>
+ <action name="actionImportLogEntries">
+  <property name="text">
+   <string>Import Log Entries</string>
+  </property>
+ </action>
+ <action name="actionExportLogEntries">
+  <property name="text">
+   <string>Export Log Entries</string>
+  </property>
+ </action>
+ <action name="actionProxySSLInsecure">
+  <property name="checkable">
+   <bool>true</bool>
+  </property>
+  <property name="text">
+   <string>Allow Insecure SSL Connections</string>
+  </property>
+  <property name="toolTip">
+   <string>Allow invalid SSL certificates from upstream connections</string>
+  </property>
+ </action>
 </widget>
 <resources/>
 <connections/>
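The three new checkable actions are plain toggles in the .ui file; they still need to be connected to the corresponding settings flags in the window code. A hypothetical wiring sketch (the `toggled`/`connect` pattern is standard Qt, but the handler below is an assumption, not code from this changeset):

    # Inside the window's __init__, after loadUi():
    self.actionProxySSLInsecure.toggled.connect(
        # Hypothetical: mirror the checkbox state into the SSL_INSECURE setting
        lambda checked: setattr(self.sessionManager.settings, "SSL_INSECURE", checked)
    )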
125 hippolyzer/lib/base/anim_utils.py Normal file
@@ -0,0 +1,125 @@
"""
Assorted utilities to make creating animations from scratch easier
"""

import copy
from typing import List, Union, Mapping

from hippolyzer.lib.base.datatypes import Vector3, Quaternion
from hippolyzer.lib.base.llanim import PosKeyframe, RotKeyframe, JOINTS_DICT, Joint
from hippolyzer.lib.base.mesh_skeleton import AVATAR_SKELETON
from hippolyzer.lib.base.multidict import OrderedMultiDict


def smooth_step(t: float):
    t = max(0.0, min(1.0, t))
    return t * t * (3 - 2 * t)


def rot_interp(r0: Quaternion, r1: Quaternion, t: float):
    """
    Bad quaternion interpolation

    TODO: This is definitely not correct yet seems to work ok? Implement slerp.
    """
    # Ignore W
    r0 = r0.data(3)
    r1 = r1.data(3)
    return Quaternion(*map(lambda pair: ((pair[0] * (1.0 - t)) + (pair[1] * t)), zip(r0, r1)))


def unique_frames(frames: List[Union[PosKeyframe, RotKeyframe]]):
    """Drop frames where time and coordinate are exact duplicates of another frame"""
    new_frames = []
    for frame in frames:
        # TODO: fudge factor for float comparison instead
        if frame not in new_frames:
            new_frames.append(frame)
    return new_frames


def shift_keyframes(frames: List[Union[PosKeyframe, RotKeyframe]], num: int):
    """
    Shift keyframes around by `num` frames

    Assumes keyframes occur at a set cadence, and that first and last keyframe are at the same coord.
    """

    # Get rid of duplicate frames
    frames = unique_frames(frames)
    pop_idx = -1
    insert_idx = 0
    if num < 0:
        insert_idx = len(frames) - 1
        pop_idx = 0
        num = -num
    old_times = [f.time for f in frames]
    new_frames = frames.copy()
    # Drop last, duped frame. We'll copy the first frame to replace it later
    new_frames.pop(-1)
    for _ in range(num):
        new_frames.insert(insert_idx, new_frames.pop(pop_idx))

    # Put first frame back on the end
    new_frames.append(copy.copy(new_frames[0]))

    assert len(old_times) == len(new_frames)
    assert new_frames[0] == new_frames[-1]
    # Make the times of the shifted keyframes match up with the previous timeline
    for old_time, new_frame in zip(old_times, new_frames):
        new_frame.time = old_time
    return new_frames


def smooth_pos(start: Vector3, end: Vector3, inter_frames: int, time: float, duration: float) -> List[PosKeyframe]:
    """Generate keyframes to smoothly interpolate between two positions"""
    frames = [PosKeyframe(time=time, pos=start)]
    for i in range(0, inter_frames):
        t = (i + 1) / (inter_frames + 1)
        smooth_t = smooth_step(t)
        pos = Vector3(smooth_t, smooth_t, smooth_t).interpolate(start, end)
        frames.append(PosKeyframe(time=time + (t * duration), pos=pos))
    return frames + [PosKeyframe(time=time + duration, pos=end)]


def smooth_rot(start: Quaternion, end: Quaternion, inter_frames: int, time: float, duration: float)\
        -> List[RotKeyframe]:
    """Generate keyframes to smoothly interpolate between two rotations"""
    frames = [RotKeyframe(time=time, rot=start)]
    for i in range(0, inter_frames):
        t = (i + 1) / (inter_frames + 1)
        smooth_t = smooth_step(t)
        frames.append(RotKeyframe(time=time + (t * duration), rot=rot_interp(start, end, smooth_t)))
    return frames + [RotKeyframe(time=time + duration, rot=end)]


def mirror_joints(joints_dict: Mapping[str, Joint]) -> JOINTS_DICT:
    """Mirror a joints dict so left / right are swapped, including transformations"""
    new_joints: JOINTS_DICT = OrderedMultiDict()

    for joint_name, joint in joints_dict.items():
        inverse_joint_node = AVATAR_SKELETON[joint_name].inverse
        if not inverse_joint_node:
            new_joints[joint_name] = joint
            continue

        # Okay, this is one we have to actually mirror
        new_joint = Joint(joint.priority, [], [])

        for rot_keyframe in joint.rot_keyframes:
            new_joint.rot_keyframes.append(RotKeyframe(
                time=rot_keyframe.time,
                # Just need to mirror on yaw and roll
                rot=Quaternion.from_euler(*(rot_keyframe.rot.to_euler() * Vector3(-1, 1, -1)))
            ))

        for pos_keyframe in joint.pos_keyframes:
            new_joint.pos_keyframes.append(PosKeyframe(
                time=pos_keyframe.time,
                # Y is left / right so just negate it.
                pos=pos_keyframe.pos * Vector3(1, -1, 1)
            ))

        new_joints[inverse_joint_node.name] = new_joint

    return new_joints
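rot_interp above is a straight component-wise lerp, as its docstring admits. The slerp its TODO asks for would look roughly like this (assuming `Quaternion.data()` yields `(x, y, z, w)` and the constructor takes the same order, which is what the rest of this file implies):

    import math

    def slerp(r0, r1, t: float):
        q0 = list(r0.data())
        q1 = list(r1.data())
        dot = sum(a * b for a, b in zip(q0, q1))
        if dot < 0.0:
            # Negate one endpoint so we take the short way around
            q1 = [-x for x in q1]
            dot = -dot
        if dot > 0.9995:
            # Nearly parallel: fall back to a normalized lerp
            res = [a + t * (b - a) for a, b in zip(q0, q1)]
            mag = math.sqrt(sum(x * x for x in res))
            return Quaternion(*(x / mag for x in res))
        theta_0 = math.acos(dot)
        theta = theta_0 * t
        s0 = math.cos(theta) - dot * math.sin(theta) / math.sin(theta_0)
        s1 = math.sin(theta) / math.sin(theta_0)
        return Quaternion(*(s0 * a + s1 * b for a, b in zip(q0, q1)))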
330 hippolyzer/lib/base/colladatools.py Normal file
@@ -0,0 +1,330 @@
# This currently implements basic LLMesh -> Collada.
#
# TODO:
# * inverse, Collada -> LLMesh (for simple cases, maybe using impasse rather than pycollada)
# * round-tripping tests, LLMesh->Collada->LLMesh
# * * Can't really test using Collada->LLMesh->Collada because Collada->LLMesh is almost always
#     going to be lossy due to how SL represents vertex data and materials compared to what
#     Collada allows.
# * Eventually scrap this and just use GLTF instead once we know we have the semantics correct
# * * Collada was just easier to bootstrap given that it's the only officially supported input format
# * * Collada tooling sucks and even LL is moving away from it
# * * Ensuring LLMesh->Collada and LLMesh->GLTF conversion don't differ semantically is easy via assimp.

import logging
import os.path
import secrets
import sys
from typing import Dict, Optional

import collada
import collada.source
from collada import E
from lxml import etree
import numpy as np
import transformations

from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.serialization import BufferReader
from hippolyzer.lib.base.mesh import (
    LLMeshSerializer,
    MeshAsset,
    positions_from_domain,
    SkinSegmentDict,
    llsd_to_mat4,
)

LOG = logging.getLogger(__name__)
DIR = os.path.dirname(os.path.realpath(__file__))


def mat4_to_collada(mat: np.ndarray) -> np.ndarray:
    return mat.flatten(order='C')


def mesh_to_collada(ll_mesh: MeshAsset, include_skin=True) -> collada.Collada:
    dae = collada.Collada()
    axis = collada.asset.UP_AXIS.Z_UP
    dae.assetInfo.upaxis = axis
    scene = collada.scene.Scene("scene", [llmesh_to_node(ll_mesh, dae, include_skin=include_skin)])

    dae.scenes.append(scene)
    dae.scene = scene
    return dae


def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
                   include_skin=True, node_transform: Optional[np.ndarray] = None) -> collada.scene.Node:
    if node_transform is None:
        node_transform = np.identity(4)

    should_skin = False
    skin_seg = ll_mesh.segments.get('skin')
    bind_shape_matrix = None
    if include_skin and skin_seg:
        bind_shape_matrix = llsd_to_mat4(skin_seg["bind_shape_matrix"])
        should_skin = True
        # Transform from the skin will be applied on the controller, not the node
        node_transform = np.identity(4)

    if not uniq:
        uniq = secrets.token_urlsafe(4)

    geom_nodes = []
    node_name = f"mainnode{uniq}"
    # TODO: do the other LODs?
    for submesh_num, submesh in enumerate(ll_mesh.segments["high_lod"]):
        # Make sure none of our IDs collide with those of other nodes
        sub_uniq = uniq + str(submesh_num)

        range_xyz = positions_from_domain(submesh["Position"], submesh["PositionDomain"])
        xyz = np.array([x.data() for x in range_xyz])

        range_uv = positions_from_domain(submesh['TexCoord0'], submesh['TexCoord0Domain'])
        uv = np.array([x.data() for x in range_uv]).flatten()

        norms = np.array([x.data() for x in submesh["Normal"]])

        effect = collada.material.Effect(
            id=f"effect{sub_uniq}",
            params=[],
            specular=(0.0, 0.0, 0.0, 0.0),
            reflectivity=(0.0, 0.0, 0.0, 0.0),
            emission=(0.0, 0.0, 0.0, 0.0),
            ambient=(0.0, 0.0, 0.0, 0.0),
            reflective=0.0,
            shadingtype="blinn",
            shininess=0.0,
            diffuse=(1.0, 1.0, 1.0),
        )
        mat = collada.material.Material(f"material{sub_uniq}", f"material{sub_uniq}", effect)

        dae.materials.append(mat)
        dae.effects.append(effect)

        vert_src = collada.source.FloatSource(f"verts-array{sub_uniq}", xyz.flatten(), ("X", "Y", "Z"))
        norm_src = collada.source.FloatSource(f"norms-array{sub_uniq}", norms.flatten(), ("X", "Y", "Z"))
        # UV maps have to have the same name or they'll behave weirdly when objects are merged.
        uv_src = collada.source.FloatSource("uvs-array", np.array(uv), ("U", "V"))

        geom = collada.geometry.Geometry(dae, f"geometry{sub_uniq}", "geometry", [vert_src, norm_src, uv_src])

        input_list = collada.source.InputList()
        input_list.addInput(0, 'VERTEX', f'#verts-array{sub_uniq}', set="0")
        input_list.addInput(0, 'NORMAL', f'#norms-array{sub_uniq}', set="0")
        input_list.addInput(0, 'TEXCOORD', '#uvs-array', set="0")

        tri_idxs = np.array(submesh["TriangleList"]).flatten()
        matnode = collada.scene.MaterialNode(f"materialref{sub_uniq}", mat, inputs=[])
        tri_set = geom.createTriangleSet(tri_idxs, input_list, f'materialref{sub_uniq}')
        geom.primitives.append(tri_set)
        dae.geometries.append(geom)

        if should_skin:
            joint_names = np.array(skin_seg['joint_names'], dtype=object)
            joints_source = collada.source.NameSource(f"joint-names{sub_uniq}", joint_names, ("JOINT",))
            # PyCollada has a bug where it doesn't set the source URI correctly. Fix it.
            accessor = joints_source.xmlnode.find(f"{dae.tag('technique_common')}/{dae.tag('accessor')}")
            if not accessor.get('source').startswith('#'):
                accessor.set('source', f"#{accessor.get('source')}")

            flattened_bind_poses = []
            for bind_pose in skin_seg['inverse_bind_matrix']:
                flattened_bind_poses.append(mat4_to_collada(llsd_to_mat4(bind_pose)))
            flattened_bind_poses = np.array(flattened_bind_poses)
            inv_bind_source = _create_mat4_source(f"bind-poses{sub_uniq}", flattened_bind_poses, "TRANSFORM")

            weight_joint_idxs = []
            weights = []
            vert_weight_counts = []
            cur_weight_idx = 0
            for vert_weights in submesh['Weights']:
                vert_weight_counts.append(len(vert_weights))
                for vert_weight in vert_weights:
                    weights.append(vert_weight.weight)
                    weight_joint_idxs.append(vert_weight.joint_idx)
                    weight_joint_idxs.append(cur_weight_idx)
                    cur_weight_idx += 1

            weights_source = collada.source.FloatSource(f"skin-weights{sub_uniq}", np.array(weights), ("WEIGHT",))
            # We need to make a controller for each material since materials are essentially distinct meshes
            # in SL, with their own distinct sets of weights and vertex data.
            controller_node = E.controller(
                E.skin(
                    E.bind_shape_matrix(' '.join(str(x) for x in mat4_to_collada(bind_shape_matrix))),
                    joints_source.xmlnode,
                    inv_bind_source.xmlnode,
                    weights_source.xmlnode,
                    E.joints(
                        E.input(semantic="JOINT", source=f"#joint-names{sub_uniq}"),
                        E.input(semantic="INV_BIND_MATRIX", source=f"#bind-poses{sub_uniq}")
                    ),
                    E.vertex_weights(
                        E.input(semantic="JOINT", source=f"#joint-names{sub_uniq}", offset="0"),
                        E.input(semantic="WEIGHT", source=f"#skin-weights{sub_uniq}", offset="1"),
                        E.vcount(' '.join(str(x) for x in vert_weight_counts)),
                        E.v(' '.join(str(x) for x in weight_joint_idxs)),
                        count=str(len(submesh['Weights']))
                    ),
                    source=f"#geometry{sub_uniq}"
                ),
                id=f"Armature-{sub_uniq}",
                name=node_name
            )
            controller = collada.controller.Controller.load(dae, {}, controller_node)
            dae.controllers.append(controller)
            geom_node = collada.scene.ControllerNode(controller, [matnode])
        else:
            geom_node = collada.scene.GeometryNode(geom, [matnode])

        geom_nodes.append(geom_node)

    node = collada.scene.Node(
        node_name,
        children=geom_nodes,
        transforms=[collada.scene.MatrixTransform(mat4_to_collada(node_transform))],
    )
    if should_skin:
        # We need a skeleton per _mesh asset_ because you could have incongruous skeletons
        # within the same linkset.
        # TODO: can we maintain some kind of skeleton cache, where if this skeleton has no conflicts
        # with another skeleton in the cache, we just use that skeleton and add any additional joints?
        skel_root = load_skeleton_nodes()
        transform_skeleton(skel_root, dae, skin_seg)
        skel = collada.scene.Node.load(dae, skel_root, {})
        skel.children.append(node)
        skel.id = f"Skel-{uniq}"
        skel.save()
        node = skel
    return node


def load_skeleton_nodes() -> etree.ElementBase:
    # TODO: this sucks. Can't we construct nodes with the appropriate transformation
    # matrices from the data in `avatar_skeleton.xml`?
    skel_path = get_resource_filename("lib/base/data/male_collada_joints.xml")
    with open(skel_path, 'r') as f:
        return etree.fromstring(f.read())


def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_seg: SkinSegmentDict,
                       include_unreferenced_bones=False):
    """Update skeleton XML nodes to account for joint translations in the mesh"""
    joint_nodes: Dict[str, collada.scene.Node] = {}
    for skel_node in skel_root.iter():
        # xpath is loathsome so this is easier.
        if skel_node.tag != dae.tag('node') or skel_node.get('type') != 'JOINT':
            continue
        joint_nodes[skel_node.get('name')] = collada.scene.Node.load(dae, skel_node, {})
    for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
        joint_node = joint_nodes[joint_name]
        joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
        joint_node.matrix = mat4_to_collada(transformations.compose_matrix(translate=joint_decomp[3]))
        # Update the underlying XML element with the new transform matrix
        joint_node.save()

    if not include_unreferenced_bones:
        needed_hierarchy = set()
        for skel_node in joint_nodes.values():
            skel_node = skel_node.xmlnode
            if skel_node.get('name') in skin_seg['joint_names']:
                # Add this joint and any ancestors to the list of needed joints
                while skel_node is not None:
                    needed_hierarchy.add(skel_node.get('name'))
                    skel_node = skel_node.getparent()

        for skel_node in joint_nodes.values():
            skel_node = skel_node.xmlnode
            if skel_node.get('name') not in needed_hierarchy:
                skel_node.getparent().remove(skel_node)

    pelvis_offset = skin_seg.get('pelvis_offset')

    # TODO: should we even do this here? It's not present in the collada, just
    # something that's specified in the uploader before conversion to LLMesh.
    if pelvis_offset and 'mPelvis' in joint_nodes:
        pelvis_node = joint_nodes['mPelvis']
        # Column-major!
        pelvis_node.matrix[3][2] += pelvis_offset
        pelvis_node.save()


def _create_mat4_source(name: str, data: np.ndarray, semantic: str):
    # PyCollada has no way to make a source with a float4x4 semantic. Do it a bad way.
    # Note that collada demands column-major matrices whereas LLSD mesh has them row-major!
    source = collada.source.FloatSource(name, data, tuple(f"M{x}" for x in range(16)))
    accessor = source.xmlnode[1][0]
    for child in list(accessor):
        accessor.remove(child)
    accessor.append(E.param(name=semantic, type="float4x4"))
    return source


def fix_weird_bind_matrices(skin_seg: SkinSegmentDict) -> None:
    """
    Fix weird-looking bind matrices to have sensible scaling and rotations

    Sometimes we get enormous inverse bind matrices (each component 10k+) and tiny
    bind shape matrix components. This detects inverse bind shape matrices
    with weird scales and tries to set them to what they "should" be without
    the weird inverted scaling.
    """

    # Sometimes we get mesh assets that have the vertex data naturally in y-up orientation,
    # and get re-oriented to z-up not through the bind shape matrix, but through the
    # transforms in the inverse bind matrices!
    #
    # Blender, for one, does not like this very much, and generally won't generate mesh
    # assets like this, as explained here https://developer.blender.org/T38660.
    # In vanilla Blender, these mesh assets will show up scaled and rotated _only_ according
    # to the bind shape matrix, which may end up with the model 25 meters tall and sitting
    # on its side.
    #
    # https://avalab.org/avastar/292/knowledge/compare-workbench/, while somewhat outdated,
    # has some information on rest pose vs default pose and scaling that I believe is relevant.
    # https://github.com/KhronosGroup/glTF-Blender-IO/issues/994 as well.
    #
    # While trying to figure out what was going on, I searched for something like
    # "inverse bind matrix scale collada", "bind pose scale blender", etc. Pretty much every
    # result was either a bug filed by, or a question asked by the creator of Avastar, or an SL user.
    # I think that says a lot about how annoying it is to author mesh for SL in particular.
    #
    # I spent a good month or so tearing my hair out over this wondering how these values could
    # even be possible. I wasn't sure how I should write mesh import code if I don't understand
    # how to interpret existing data, or how it even ended up the way it did. Turns out I wasn't
    # misinterpreting the data, the data really is just weird.
    #
    # I'd also had the idea that you could sniff which body a given rigged asset was meant
    # for by doing trivial matching on the inverse bind matrices, but obviously that isn't true!
    #
    # Basically:
    # 1) Maya is evil and generates evil, this evil bleeds into SL's assets through transforms.
    # 2) Blender is also evil, but in a manner that doesn't agree with Maya's evil.
    # 3) Collada was a valiant effort, but is evil in practice. Seemingly simple Collada
    #    files are interpreted completely differently by Blender, Maya, and sometimes SL.
    # 4) Those three evils collude to make an interop nightmare for everyone like "oh my rigger
    #    rigs using Maya and now my model is huge and all my normals are fucked on reimport"
    # 5) Yes, there's still good reasons to be using Avastar in 2022 even though nobody authoring
    #    rigged mesh for any other use has to use something similar.

    if not skin_seg['joint_names']:
        return

    # TODO: calculate the correct inverse bind matrix scale & rotations from avatar_skeleton.xml
    # definitions. If the rotation and scale factors are the same across all inverse bind matrices then
    # they can be moved over to the bind shape matrix to keep Blender happy.
    # Maybe add a scaled / rotated empty as a parent for the armature instead?
    return


def main():
    # Take an llmesh file as an argument and spit out basename-converted.dae
    with open(sys.argv[1], "rb") as f:
        reader = BufferReader("<", f.read())

    mesh = mesh_to_collada(reader.read(LLMeshSerializer(parse_segment_contents=True)))
    mesh.write(sys.argv[1].rsplit(".", 1)[0] + "-converted.dae")


if __name__ == "__main__":
    main()
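main() above covers the command-line case; converting from Python is just a direct call to mesh_to_collada. For example, to emit plain geometry with no controllers or skeleton (the input file name here is illustrative):

    from hippolyzer.lib.base.serialization import BufferReader
    from hippolyzer.lib.base.mesh import LLMeshSerializer
    from hippolyzer.lib.base.colladatools import mesh_to_collada

    with open("some_asset.llmesh", "rb") as f:
        reader = BufferReader("<", f.read())
    ll_mesh = reader.read(LLMeshSerializer(parse_segment_contents=True))
    # include_skin=False gives plain GeometryNodes with no controllers / skeleton.
    dae = mesh_to_collada(ll_mesh, include_skin=False)
    dae.write("some_asset.dae")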
17503 hippolyzer/lib/base/data/avatar_lad.xml Normal file
File diff suppressed because it is too large
232 hippolyzer/lib/base/data/avatar_skeleton.xml Normal file
@@ -0,0 +1,232 @@
<linden_skeleton num_bones="133" num_collision_volumes="26" version="2.0">
<bone aliases="hip avatar_mPelvis" connected="false" end="0.000 0.000 0.084" group="Torso" name="mPelvis" pivot="0.000000 0.000000 1.067015" pos="0.000 0.000 1.067" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.030 0.000 0.095" group="Collision" name="PELVIS" pos="-0.01 0 -0.02" rot="0.000000 8.00000 0.000000" scale="0.12 0.16 0.17" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="BUTT" pos="-0.06 0 -0.1" rot="0.000000 0.00000 0.000000" scale="0.1 0.1 0.1" support="base"/>
<bone connected="true" end="0.000 0.000 -0.084" group="Spine" name="mSpine1" pivot="0.000000 0.000000 0.084073" pos="0.000 0.000 0.084" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.084" group="Spine" name="mSpine2" pivot="0.000000 0.000000 -0.084073" pos="0.000 0.000 -0.084" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone aliases="abdomen avatar_mTorso" connected="true" end="-0.015 0.000 0.205" group="Torso" name="mTorso" pivot="0.000000 0.000000 0.084073" pos="0.000 0.000 0.084" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.028 0.000 0.094" group="Collision" name="BELLY" pos="0.028 0 0.04" rot="0.000000 8.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<collision_volume end="0.000 0.100 0.000" group="Collision" name="LEFT_HANDLE" pos="0.0 0.10 0.058" rot="0.000000 0.00000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="0.000 -0.100 0.000" group="Collision" name="RIGHT_HANDLE" pos="0.0 -0.10 0.058" rot="0.000000 0.00000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="LOWER_BACK" pos="0.0 0.0 0.023" rot="0.000000 0.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<bone connected="true" end="0.015 0.000 -0.205" group="Spine" name="mSpine3" pivot="-0.015368 0.000000 0.204877" pos="-0.015 0.000 0.205" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 0.000 0.205" group="Spine" name="mSpine4" pivot="0.015368 0.000000 -0.204877" pos="0.015 0.000 -0.205" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone aliases="chest avatar_mChest" connected="true" end="-0.010 0.000 0.250" group="Torso" name="mChest" pivot="-0.015368 0.000000 0.204877" pos="-0.015 0.000 0.205" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="-0.096 0.000 0.152" group="Collision" name="CHEST" pos="0.028 0 0.07" rot="0.000000 -10.00000 0.000000" scale="0.11 0.15 0.2" support="base"/>
<collision_volume end="0.080 0.000 -0.006" group="Collision" name="LEFT_PEC" pos="0.119 0.082 0.042" rot="0.000000 4.29000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="0.080 0.000 -0.006" group="Collision" name="RIGHT_PEC" pos="0.119 -0.082 0.042" rot="0.000000 4.29000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="UPPER_BACK" pos="0.0 0.0 0.017" rot="0.000000 0.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<bone aliases="neck avatar_mNeck" connected="true" end="0.000 0.000 0.077" group="Torso" name="mNeck" pivot="-0.009507 0.000000 0.251108" pos="-0.010 0.000 0.251" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 0.080" group="Collision" name="NECK" pos="0.0 0 0.02" rot="0.000000 0.000000 0.000000" scale="0.05 0.06 0.08" support="base"/>
<bone aliases="head avatar_mHead" connected="true" end="0.000 0.000 0.079" group="Torso" name="mHead" pivot="0.000000 -0.000000 0.075630" pos="0.000 -0.000 0.076" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 0.100" group="Collision" name="HEAD" pos="0.02 0 0.07" rot="0.000000 0.000000 0.000000" scale="0.11 0.09 0.12" support="base"/>
<bone aliases="figureHair avatar_mSkull" connected="false" end="0.000 0.000 0.033" group="Extra" name="mSkull" pivot="0.000000 0.000000 0.079000" pos="0.000 0.000 0.079" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone aliases="avatar_mEyeRight" connected="false" end="0.025 0.000 0.000" group="Extra" name="mEyeRight" pivot="0.098466 -0.036000 0.079000" pos="0.098 -0.036 0.079" rot="0.000000 0.000000 -0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone aliases="avatar_mEyeLeft" connected="false" end="0.025 0.000 0.000" group="Extra" name="mEyeLeft" pivot="0.098461 0.036000 0.079000" pos="0.098 0.036 0.079" rot="0.000000 -0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone connected="false" end="0.020 0.000 0.000" group="Face" name="mFaceRoot" pivot="0.025000 0.000000 0.045000" pos="0.025 0.000 0.045" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceEyeAltRight" pivot="0.073466 -0.036000 0.0339300" pos="0.073 -0.036 0.034" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceEyeAltLeft" pivot="0.073461 0.036000 0.0339300" pos="0.073 0.036 0.034" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.004 0.018" group="Face" name="mFaceForeheadLeft" pivot="0.061 0.035 0.083" pos="0.061 0.035 0.083" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 -0.004 0.018" group="Face" name="mFaceForeheadRight" pivot="0.061 -0.035 0.083" pos="0.061 -0.035 0.083" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.023 0.013 0.000" group="Eyes" name="mFaceEyebrowOuterLeft" pivot="0.064 0.051 0.048" pos="0.064 0.051 0.048" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.000" group="Eyes" name="mFaceEyebrowCenterLeft" pivot="0.070 0.043 0.056" pos="0.070 0.043 0.056" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.026 0.000 0.000" group="Eyes" name="mFaceEyebrowInnerLeft" pivot="0.075 0.022 0.051" pos="0.075 0.022 0.051" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.023 -0.013 0.000" group="Eyes" name="mFaceEyebrowOuterRight" pivot="0.064 -0.051 0.048" pos="0.064 -0.051 0.048" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.000" group="Eyes" name="mFaceEyebrowCenterRight" pivot="0.070 -0.043 0.056" pos="0.070 -0.043 0.056" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.026 0.000 0.000" group="Eyes" name="mFaceEyebrowInnerRight" pivot="0.075 -0.022 0.051" pos="0.075 -0.022 0.051" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.005" group="Eyes" name="mFaceEyeLidUpperLeft" pivot="0.073 0.036 0.034" pos="0.073 0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.000 -0.007" group="Eyes" name="mFaceEyeLidLowerLeft" pivot="0.073 0.036 0.034" pos="0.073 0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.005" group="Eyes" name="mFaceEyeLidUpperRight" pivot="0.073 -0.036 0.034" pos="0.073 -0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.000 -0.007" group="Eyes" name="mFaceEyeLidLowerRight" pivot="0.073 -0.036 0.034" pos="0.073 -0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="-0.019 0.018 0.025" group="Ears" name="mFaceEar1Left" pivot="0.000 0.080 0.002" pos="0.000 0.080 0.002" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.033" group="Ears" name="mFaceEar2Left" pivot="-0.019 0.018 0.025" pos="-0.019 0.018 0.025" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="-0.019 -0.018 0.025" group="Ears" name="mFaceEar1Right" pivot="0.000 -0.080 0.002" pos="0.000 -0.080 0.002" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.033" group="Ears" name="mFaceEar2Right" pivot="-0.019 -0.018 0.025" pos="-0.019 -0.018 0.025" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="0.015 0.004 0.000" group="Face" name="mFaceNoseLeft" pivot="0.086 0.015 -0.004" pos="0.086 0.015 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceNoseCenter" pivot="0.102 0.000 0.000" pos="0.102 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.015 -0.004 0.000" group="Face" name="mFaceNoseRight" pivot="0.086 -0.015 -0.004" pos="0.086 -0.015 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.013 0.030 0.000" group="Face" name="mFaceCheekLowerLeft" pivot="0.050 0.034 -0.031" pos="0.050 0.034 -0.031" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 0.015 0.000" group="Face" name="mFaceCheekUpperLeft" pivot="0.070 0.034 -0.005" pos="0.070 0.034 -0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.013 -0.030 0.000" group="Face" name="mFaceCheekLowerRight" pivot="0.050 -0.034 -0.031" pos="0.050 -0.034 -0.031" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 -0.015 0.000" group="Face" name="mFaceCheekUpperRight" pivot="0.070 -0.034 -0.005" pos="0.070 -0.034 -0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.059 0.000 -0.039" group="Mouth" name="mFaceJaw" pivot="-0.001 0.000 -0.015" pos="-0.001 0.000 -0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.021 0.000 -0.018" group="Mouth" name="mFaceChin" pivot="0.074 0.000 -0.054" pos="0.074 0.000 -0.054" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.035 0.000 0.000" group="Mouth" name="mFaceTeethLower" pivot="0.021 0.000 -0.039" pos="0.021 0.000 -0.039" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.034 0.017 0.005" group="Lips" name="mFaceLipLowerLeft" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.034 -0.017 0.005" group="Lips" name="mFaceLipLowerRight" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.040 0.000 0.002" group="Lips" name="mFaceLipLowerCenter" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 0.000 0.007" group="Mouth" name="mFaceTongueBase" pivot="0.039 0.000 0.005" pos="0.039 0.000 0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.010 0.000 0.000" group="Mouth" name="mFaceTongueTip" pivot="0.022 0.000 0.007" pos="0.022 0.000 0.007" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.017 0.000 0.000" group="Face" name="mFaceJawShaper" pivot="0.000 0.000 0.000" pos="0.000 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.036 0.000 0.000" group="Face" name="mFaceForeheadCenter" pivot="0.069 0.000 0.065" pos="0.069 0.000 0.065" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.014 0.000 0.000" group="Nose" name="mFaceNoseBase" pivot="0.094 0.000 -0.016" pos="0.094 0.000 -0.016" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.035 0.000 0.000" group="Mouth" name="mFaceTeethUpper" pivot="0.020 0.000 -0.030" pos="0.020 0.000 -0.030" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.041 0.015 0.000" group="Lips" name="mFaceLipUpperLeft" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.041 -0.015 0.000" group="Lips" name="mFaceLipUpperRight" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.045 0.051 0.000" group="Lips" name="mFaceLipCornerLeft" pivot="0.028 -0.019 -0.010" pos="0.028 -0.019 -0.010" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.045 -0.051 0.000" group="Lips" name="mFaceLipCornerRight" pivot="0.028 0.019 -0.010" pos="0.028 0.019 -0.010" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.043 0.000 0.002" group="Lips" name="mFaceLipUpperCenter" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="0.016 0.000 0.000" group="Face" name="mFaceEyecornerInnerLeft" pivot="0.075 0.017 0.032" pos="0.075 0.017 0.032" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.016 0.000 0.000" group="Face" name="mFaceEyecornerInnerRight" pivot="0.075 -0.017 0.032" pos="0.075 -0.017 0.032" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.015 0.000 0.008" group="Nose" name="mFaceNoseBridge" pivot="0.091 0.000 0.020" pos="0.091 0.000 0.020" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone aliases="lCollar avatar_mCollarLeft" connected="false" end="0.000 0.079 0.000" group="Arms" name="mCollarLeft" pivot="-0.020927 0.084665 0.165396" pos="-0.021 0.085 0.165" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.100 0.000" group="Collision" name="L_CLAVICLE" pos="0.02 0 0.02" rot="0.000000 0.00000 0.000000" scale="0.07 0.14 0.05" support="base"/>
<bone aliases="lShldr avatar_mShoulderLeft" connected="true" end="0.000 0.247 0.000" group="Arms" name="mShoulderLeft" pivot="0.000000 0.079000 -0.000000" pos="0.000 0.079 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.130 -0.003" group="Collision" name="L_UPPER_ARM" pos="0.0 0.12 0.01" rot="-5.000000 0.00000 0.000000" scale="0.05 0.17 0.05" support="base"/>
<bone aliases="lForeArm avatar_mElbowLeft" connected="true" end="0.000 0.205 0.000" group="Arms" name="mElbowLeft" pivot="0.000000 0.248000 0.000000" pos="0.000 0.248 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.100 -0.001" group="Collision" name="L_LOWER_ARM" pos="0.0 0.1 0.0" rot="-3.000000 0.00000 0.000000" scale="0.04 0.14 0.04" support="base"/>
<bone aliases="lHand avatar_mWristLeft" connected="true" end="0.000 0.060 0.000" group="Arms" name="mWristLeft" pivot="-0.000000 0.204846 0.000000" pos="-0.000 0.205 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.005 0.049 -0.001" group="Collision" name="L_HAND" pos="0.01 0.05 0.0" rot="-3.000000 0.00000 -10.000000" scale="0.05 0.08 0.03" support="base"/>
<bone connected="false" end="-0.001 0.040 -0.006" group="Hand" name="mHandMiddle1Left" pivot="0.013 0.101 0.015" pos="0.013 0.101 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.001 0.049 -0.008" group="Hand" name="mHandMiddle2Left" pivot="-0.001 0.040 -0.006" pos="-0.001 0.040 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.002 0.033 -0.006" group="Hand" name="mHandMiddle3Left" pivot="-0.001 0.049 -0.008" pos="-0.001 0.049 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.017 0.036 -0.006" group="Hand" name="mHandIndex1Left" pivot="0.038 0.097 0.015" pos="0.038 0.097 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.014 0.032 -0.006" group="Hand" name="mHandIndex2Left" pivot="0.017 0.036 -0.006" pos="0.017 0.036 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.011 0.025 -0.004" group="Hand" name="mHandIndex3Left" pivot="0.014 0.032 -0.006" pos="0.014 0.032 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.013 0.038 -0.008" group="Hand" name="mHandRing1Left" pivot="-0.010 0.099 0.009" pos="-0.010 0.099 0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 0.040 -0.009" group="Hand" name="mHandRing2Left" pivot="-0.013 0.038 -0.008" pos="-0.013 0.038 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.010 0.028 -0.006" group="Hand" name="mHandRing3Left" pivot="-0.013 0.040 -0.009" pos="-0.013 0.040 -0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.024 0.025 -0.006" group="Hand" name="mHandPinky1Left" pivot="-0.031 0.095 0.003" pos="-0.031 0.095 0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 0.018 -0.004" group="Hand" name="mHandPinky2Left" pivot="-0.024 0.025 -0.006" pos="-0.024 0.025 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 0.016 -0.004" group="Hand" name="mHandPinky3Left" pivot="-0.015 0.018 -0.004" pos="-0.015 0.018 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.028 0.032 0.000" group="Hand" name="mHandThumb1Left" pivot="0.031 0.026 0.004" pos="0.031 0.026 0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.023 0.031 0.000" group="Hand" name="mHandThumb2Left" pivot="0.028 0.032 -0.001" pos="0.028 0.032 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.015 0.025 0.000" group="Hand" name="mHandThumb3Left" pivot="0.023 0.031 -0.001" pos="0.023 0.031 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone aliases="rCollar avatar_mCollarRight" connected="false" end="0.000 -0.079 0.000" group="Arms" name="mCollarRight" pivot="-0.020927 -0.085000 0.165396" pos="-0.021 -0.085 0.165" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.100 0.000" group="Collision" name="R_CLAVICLE" pos="0.02 0 0.02" rot="0.000000 0.00000 0.000000" scale="0.07 0.14 0.05" support="base"/>
<bone aliases="rShldr avatar_mShoulderRight" connected="true" end="0.000 -0.247 0.000" group="Arms" name="mShoulderRight" pivot="0.000000 -0.079418 -0.000000" pos="0.000 -0.079 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.130 -0.003" group="Collision" name="R_UPPER_ARM" pos="0.0 -0.12 0.01" rot="5.000000 0.00000 0.000000" scale="0.05 0.17 0.05" support="base"/>
<bone aliases="rForeArm avatar_mElbowRight" connected="true" end="0.000 -0.205 0.000" group="Arms" name="mElbowRight" pivot="0.000000 -0.248000 -0.000000" pos="0.000 -0.248 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.100 -0.001" group="Collision" name="R_LOWER_ARM" pos="0.0 -0.1 0.0" rot="3.000000 0.00000 0.000000" scale="0.04 0.14 0.04" support="base"/>
<bone aliases="rHand avatar_mWristRight" connected="true" end="0.000 -0.060 0.000" group="Arms" name="mWristRight" pivot="-0.000000 -0.205000 -0.000000" pos="0.000 -0.205 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.005 -0.049 -0.001" group="Collision" name="R_HAND" pos="0.01 -0.05 0.0" rot="3.000000 0.00000 10.000000" scale="0.05 0.08 0.03" support="base"/>
<bone connected="false" end="-0.001 -0.040 -0.006" group="Hand" name="mHandMiddle1Right" pivot="0.013 -0.101 0.015" pos="0.013 -0.101 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.001 -0.049 -0.008" group="Hand" name="mHandMiddle2Right" pivot="-0.001 -0.040 -0.006" pos="-0.001 -0.040 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.002 -0.033 -0.006" group="Hand" name="mHandMiddle3Right" pivot="-0.001 -0.049 -0.008" pos="-0.001 -0.049 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.017 -0.036 -0.006" group="Hand" name="mHandIndex1Right" pivot="0.038 -0.097 0.015" pos="0.038 -0.097 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.014 -0.032 -0.006" group="Hand" name="mHandIndex2Right" pivot="0.017 -0.036 -0.006" pos="0.017 -0.036 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.011 -0.025 -0.004" group="Hand" name="mHandIndex3Right" pivot="0.014 -0.032 -0.006" pos="0.014 -0.032 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.013 -0.038 -0.008" group="Hand" name="mHandRing1Right" pivot="-0.010 -0.099 0.009" pos="-0.010 -0.099 0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 -0.040 -0.009" group="Hand" name="mHandRing2Right" pivot="-0.013 -0.038 -0.008" pos="-0.013 -0.038 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.010 -0.028 -0.006" group="Hand" name="mHandRing3Right" pivot="-0.013 -0.040 -0.009" pos="-0.013 -0.040 -0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.024 -0.025 -0.006" group="Hand" name="mHandPinky1Right" pivot="-0.031 -0.095 0.003" pos="-0.031 -0.095 0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 -0.018 -0.004" group="Hand" name="mHandPinky2Right" pivot="-0.024 -0.025 -0.006" pos="-0.024 -0.025 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 -0.016 -0.004" group="Hand" name="mHandPinky3Right" pivot="-0.015 -0.018 -0.004" pos="-0.015 -0.018 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.028 -0.032 0.000" group="Hand" name="mHandThumb1Right" pivot="0.031 -0.026 0.004" pos="0.031 -0.026 0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.023 -0.031 0.000" group="Hand" name="mHandThumb2Right" pivot="0.028 -0.032 -0.001" pos="0.028 -0.032 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.015 -0.025 0.000" group="Hand" name="mHandThumb3Right" pivot="0.023 -0.031 -0.001" pos="0.023 -0.031 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.061 0.000 0.000" group="Wing" name="mWingsRoot" pivot="-0.014 0.000 0.000" pos="-0.014 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="-0.168 0.169 0.067" group="Wing" name="mWing1Left" pivot="-0.099 0.105 0.181" pos="-0.099 0.105 0.181" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.181 0.183 0.000" group="Wing" name="mWing2Left" pivot="-0.168 0.169 0.067" pos="-0.168 0.169 0.067" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.171 0.173 0.000" group="Wing" name="mWing3Left" pivot="-0.181 0.183 0.000" pos="-0.181 0.183 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.146 0.132 0.000" group="Wing" name="mWing4Left" pivot="-0.171 0.173 0.000" pos="-0.171 0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="true" end="-0.068 0.062 -0.159" group="Wing" name="mWing4FanLeft" pivot="-0.171 0.173 0.000" pos="-0.171 0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.168 -0.169 0.067" group="Wing" name="mWing1Right" pivot="-0.099 -0.105 0.181" pos="-0.099 -0.105 0.181" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.181 -0.183 0.000" group="Wing" name="mWing2Right" pivot="-0.168 -0.169 0.067" pos="-0.168 -0.169 0.067" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.171 -0.173 0.000" group="Wing" name="mWing3Right" pivot="-0.181 -0.183 0.000" pos="-0.181 -0.183 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.146 -0.132 0.000" group="Wing" name="mWing4Right" pivot="-0.171 -0.173 0.000" pos="-0.171 -0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="true" end="-0.068 -0.062 -0.159" group="Wing" name="mWing4FanRight" pivot="-0.171 -0.173 0.000" pos="-0.171 -0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone aliases="rThigh avatar_mHipRight" connected="false" end="-0.001 0.049 -0.491" group="Legs" name="mHipRight" pivot="0.033620 -0.128806 -0.041086" pos="0.034 -0.129 -0.041" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="0.000 0.000 -0.200" group="Collision" name="R_UPPER_LEG" pos="-0.02 0.05 -0.22" rot="0.000000 0.00000 0.000000" scale="0.09 0.09 0.32" support="base"/>
|
||||
<bone aliases="rShin avatar_mKneeRight" connected="true" end="-0.029 0.000 -0.469" group="Legs" name="mKneeRight" pivot="-0.000780 0.048635 -0.490922" pos="-0.001 0.049 -0.491" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="-0.010 0.000 -0.150" group="Collision" name="R_LOWER_LEG" pos="-0.02 0.0 -0.2" rot="0.000000 0.00000 0.000000" scale="0.06 0.06 0.25" support="base"/>
|
||||
<bone aliases="rFoot avatar_mAnkleRight" connected="true" end="0.112 0.000 -0.061" group="Legs" name="mAnkleRight" pivot="-0.028869 0.000000 -0.468494" pos="-0.029 0.000 -0.468" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="0.089 0.000 -0.026" group="Collision" name="R_FOOT" pos="0.077 0.0 -0.041" rot="0.000000 10.00000 0.000000" scale="0.13 0.05 0.05" support="base"/>
|
||||
<bone aliases="avatar_mFootRight" connected="true" end="0.105 -0.010 0.000" group="Extra" name="mFootRight" pivot="0.111956 -0.000000 -0.060637" pos="0.112 -0.000 -0.061" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<bone aliases="avatar_mToeRight" connected="false" end="0.020 0.000 0.000" group="Extra" name="mToeRight" pivot="0.105399 -0.010408 -0.000104" pos="0.109 0.000 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
<bone aliases="lThigh avatar_mHipLeft" connected="false" end="-0.001 -0.046 -0.491" group="Legs" name="mHipLeft" pivot="0.033757 0.126765 -0.040998" pos="0.034 0.127 -0.041" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="0.000 0.000 -0.200" group="Collision" name="L_UPPER_LEG" pos="-0.02 -0.05 -0.22" rot="0.000000 0.00000 0.000000" scale="0.09 0.09 0.32" support="base"/>
|
||||
<bone aliases="lShin avatar_mKneeLeft" connected="true" end="-0.029 0.001 -0.469" group="Legs" name="mKneeLeft" pivot="-0.000887 -0.045568 -0.491053" pos="-0.001 -0.046 -0.491" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="-0.010 0.000 -0.150" group="Collision" name="L_LOWER_LEG" pos="-0.02 0.0 -0.2" rot="0.000000 0.00000 0.000000" scale="0.06 0.06 0.25" support="base"/>
|
||||
<bone aliases="lFoot avatar_mAnkleLeft" connected="true" end="0.112 0.000 -0.061" group="Legs" name="mAnkleLeft" pivot="-0.028887 0.001378 -0.468449" pos="-0.029 0.001 -0.468" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<collision_volume end="0.089 0.000 -0.026" group="Collision" name="L_FOOT" pos="0.077 0.0 -0.041" rot="0.000000 10.00000 0.000000" scale="0.13 0.05 0.05" support="base"/>
|
||||
<bone aliases="avatar_mFootLeft" connected="true" end="0.105 0.008 0.001" group="Extra" name="mFootLeft" pivot="0.111956 -0.000000 -0.060620" pos="0.112 -0.000 -0.061" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
|
||||
<bone aliases="avatar_mToeLeft" connected="false" end="0.020 0.000 0.000" group="Extra" name="mToeLeft" pivot="0.105387 0.008270 0.000871" pos="0.109 0.000 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
<bone connected="false" end="-0.197 0.000 0.000" group="Tail" name="mTail1" pivot="-0.116 0.000 0.047" pos="-0.116 0.000 0.047" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.168 0.000 0.000" group="Tail" name="mTail2" pivot="-0.197 0.000 0.000" pos="-0.197 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.142 0.000 0.000" group="Tail" name="mTail3" pivot="-0.168 0.000 0.000" pos="-0.168 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.112 0.000 0.000" group="Tail" name="mTail4" pivot="-0.142 0.000 0.000" pos="-0.142 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.094 0.000 0.000" group="Tail" name="mTail5" pivot="-0.112 0.000 0.000" pos="-0.112 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.089 0.000 0.000" group="Tail" name="mTail6" pivot="-0.094 0.000 0.000" pos="-0.094 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
<bone connected="false" end="0.004 0.000 -0.066" group="Groin" name="mGroin" pivot="0.064 0.000 -0.097" pos="0.064 0.000 -0.097" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
|
||||
<bone connected="false" end="-0.204 0.000 0.000" group="Limb" name="mHindLimbsRoot" pivot="-0.200 0.000 0.084" pos="-0.200 0.000 0.084" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="false" end="0.002 -0.046 -0.491" group="Limb" name="mHindLimb1Left" pivot="-0.204 0.129 -0.125" pos="-0.204 0.129 -0.125" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.030 -0.003 -0.468" group="Limb" name="mHindLimb2Left" pivot="0.002 -0.046 -0.491" pos="0.002 -0.046 -0.491" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="0.112 0.000 -0.061" group="Limb" name="mHindLimb3Left" pivot="-0.030 -0.003 -0.468" pos="-0.030 -0.003 -0.468" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="0.105 0.008 0.000" group="Limb" name="mHindLimb4Left" pivot="0.112 0.000 -0.061" pos="0.112 0.000 -0.061" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
<bone connected="false" end="0.002 0.046 -0.491" group="Limb" name="mHindLimb1Right" pivot="-0.204 -0.129 -0.125" pos="-0.204 -0.129 -0.125" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="-0.030 0.003 -0.468" group="Limb" name="mHindLimb2Right" pivot="0.002 0.046 -0.491" pos="0.002 0.046 -0.491" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="0.112 0.000 -0.061" group="Limb" name="mHindLimb3Right" pivot="-0.030 0.003 -0.468" pos="-0.030 0.003 -0.468" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
|
||||
<bone connected="true" end="0.105 -0.008 0.000" group="Limb" name="mHindLimb4Right" pivot="0.112 0.000 -0.061" pos="0.112 0.000 -0.061" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</bone>
|
||||
</linden_skeleton>
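
The skeleton file above describes each joint as a <bone> element whose pos/pivot attributes are parent-relative offsets, end marks the bone tip, and support distinguishes legacy "base" joints from Bento "extended" ones. A minimal sketch of walking the hierarchy with the standard library (the local filename is an assumption, not something this diff pins down):

    import xml.etree.ElementTree as ET

    def walk_bones(elem, depth=0):
        # Both <bone> and <collision_volume> children carry name/pos/support
        for child in elem:
            if child.tag in ("bone", "collision_volume"):
                print("  " * depth + f"{child.get('name')} pos={child.get('pos')} ({child.get('support')})")
                walk_bones(child, depth + 1)

    walk_bones(ET.parse("avatar_skeleton.xml").getroot())  # assumed path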
485  hippolyzer/lib/base/male_collada_joints.xml  Normal file
@@ -0,0 +1,485 @@
<!-- from http://wiki.secondlife.com/wiki/Project_Bento_Resources_and_Information collada -->
<node id="Avatar" name="Avatar" type="NODE" xmlns="http://www.collada.org/2005/11/COLLADASchema">
  <translate sid="location">0 0 0</translate>
  <rotate sid="rotationZ">0 0 1 0</rotate>
  <rotate sid="rotationY">0 1 0 0</rotate>
  <rotate sid="rotationX">1 0 0 0</rotate>
  <scale sid="scale">1 1 1</scale>
  <node id="mPelvis" name="mPelvis" sid="mPelvis" type="JOINT">
    <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 1.067 0 0 0 1</matrix>
    <node id="PELVIS" name="PELVIS" sid="PELVIS" type="JOINT">
      <matrix sid="transform">1 0 0 -0.01 0 1 0 0 0 0 1 -0.02 0 0 0 1</matrix>
    </node>
    <node id="BUTT" name="BUTT" sid="BUTT" type="JOINT">
      <matrix sid="transform">1 0 0 -0.06 0 1 0 0 0 0 1 -0.1 0 0 0 1</matrix>
    </node>
    <node id="mSpine1" name="mSpine1" sid="mSpine1" type="JOINT">
      <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.084 0 0 0 1</matrix>
      <node id="mSpine2" name="mSpine2" sid="mSpine2" type="JOINT">
        <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 -0.084 0 0 0 1</matrix>
        <node id="mTorso" name="mTorso" sid="mTorso" type="JOINT">
          <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.084 0 0 0 1</matrix>
          <node id="BELLY" name="BELLY" sid="BELLY" type="JOINT">
            <matrix sid="transform">1 0 0 0.028 0 1 0 0 0 0 1 0.04 0 0 0 1</matrix>
          </node>
          <node id="LEFT_HANDLE" name="LEFT_HANDLE" sid="LEFT_HANDLE" type="JOINT">
            <matrix sid="transform">1 0 0 0 0 1 0 0.1 0 0 1 0.058 0 0 0 1</matrix>
          </node>
          <node id="RIGHT_HANDLE" name="RIGHT_HANDLE" sid="RIGHT_HANDLE" type="JOINT">
            <matrix sid="transform">1 0 0 0 0 1 0 -0.1 0 0 1 0.058 0 0 0 1</matrix>
          </node>
          <node id="LOWER_BACK" name="LOWER_BACK" sid="LOWER_BACK" type="JOINT">
            <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.023 0 0 0 1</matrix>
          </node>
          <node id="mSpine3" name="mSpine3" sid="mSpine3" type="JOINT">
            <matrix sid="transform">1 0 0 -0.015 0 1 0 0 0 0 1 0.205 0 0 0 1</matrix>
            <node id="mSpine4" name="mSpine4" sid="mSpine4" type="JOINT">
              <matrix sid="transform">1 0 0 0.015 0 1 0 0 0 0 1 -0.205 0 0 0 1</matrix>
              <node id="mChest" name="mChest" sid="mChest" type="JOINT">
                <matrix sid="transform">1 0 0 -0.015 0 1 0 0 0 0 1 0.205 0 0 0 1</matrix>
                <node id="CHEST" name="CHEST" sid="CHEST" type="JOINT">
                  <matrix sid="transform">1 0 0 0.028 0 1 0 0 0 0 1 0.07 0 0 0 1</matrix>
                </node>
                <node id="LEFT_PEC" name="LEFT_PEC" sid="LEFT_PEC" type="JOINT">
                  <matrix sid="transform">1 0 0 0.119 0 1 0 0.082 0 0 1 0.042 0 0 0 1</matrix>
                </node>
                <node id="RIGHT_PEC" name="RIGHT_PEC" sid="RIGHT_PEC" type="JOINT">
                  <matrix sid="transform">1 0 0 0.119 0 1 0 -0.082 0 0 1 0.042 0 0 0 1</matrix>
                </node>
                <node id="UPPER_BACK" name="UPPER_BACK" sid="UPPER_BACK" type="JOINT">
                  <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.017 0 0 0 1</matrix>
                </node>
                <node id="mNeck" name="mNeck" sid="mNeck" type="JOINT">
                  <matrix sid="transform">1 0 0 -0.01 0 1 0 0 0 0 1 0.251 0 0 0 1</matrix>
                  <node id="NECK" name="NECK" sid="NECK" type="JOINT">
                    <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.02 0 0 0 1</matrix>
                  </node>
                  <node id="mHead" name="mHead" sid="mHead" type="JOINT">
                    <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.076 0 0 0 1</matrix>
                    <node id="HEAD" name="HEAD" sid="HEAD" type="JOINT">
                      <matrix sid="transform">1 0 0 0.02 0 1 0 0 0 0 1 0.07 0 0 0 1</matrix>
                    </node>
                    <node id="mSkull" name="mSkull" sid="mSkull" type="JOINT">
                      <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0.079 0 0 0 1</matrix>
                    </node>
                    <node id="mEyeRight" name="mEyeRight" sid="mEyeRight" type="JOINT">
                      <matrix sid="transform">1 0 0 0.098 0 1 0 -0.036 0 0 1 0.079 0 0 0 1</matrix>
                    </node>
                    <node id="mEyeLeft" name="mEyeLeft" sid="mEyeLeft" type="JOINT">
                      <matrix sid="transform">1 0 0 0.098 0 1 0 0.036 0 0 1 0.079 0 0 0 1</matrix>
                    </node>
                    <node id="mFaceRoot" name="mFaceRoot" sid="mFaceRoot" type="JOINT">
                      <matrix sid="transform">1 0 0 0.025 0 1 0 0 0 0 1 0.045 0 0 0 1</matrix>
                      <node id="mFaceEyeAltRight" name="mFaceEyeAltRight" sid="mFaceEyeAltRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 -0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyeAltLeft" name="mFaceEyeAltLeft" sid="mFaceEyeAltLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceForeheadLeft" name="mFaceForeheadLeft" sid="mFaceForeheadLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.061 0 1 0 0.035 0 0 1 0.083 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceForeheadRight" name="mFaceForeheadRight" sid="mFaceForeheadRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.061 0 1 0 -0.035 0 0 1 0.083 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowOuterLeft" name="mFaceEyebrowOuterLeft" sid="mFaceEyebrowOuterLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.064 0 1 0 0.051 0 0 1 0.048 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowCenterLeft" name="mFaceEyebrowCenterLeft" sid="mFaceEyebrowCenterLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.07 0 1 0 0.043 0 0 1 0.056 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowInnerLeft" name="mFaceEyebrowInnerLeft" sid="mFaceEyebrowInnerLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.075 0 1 0 0.022 0 0 1 0.051 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowOuterRight" name="mFaceEyebrowOuterRight" sid="mFaceEyebrowOuterRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.064 0 1 0 -0.051 0 0 1 0.048 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowCenterRight" name="mFaceEyebrowCenterRight" sid="mFaceEyebrowCenterRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.07 0 1 0 -0.043 0 0 1 0.056 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyebrowInnerRight" name="mFaceEyebrowInnerRight" sid="mFaceEyebrowInnerRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.075 0 1 0 -0.022 0 0 1 0.051 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyeLidUpperLeft" name="mFaceEyeLidUpperLeft" sid="mFaceEyeLidUpperLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyeLidLowerLeft" name="mFaceEyeLidLowerLeft" sid="mFaceEyeLidLowerLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyeLidUpperRight" name="mFaceEyeLidUpperRight" sid="mFaceEyeLidUpperRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 -0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyeLidLowerRight" name="mFaceEyeLidLowerRight" sid="mFaceEyeLidLowerRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.073 0 1 0 -0.036 0 0 1 0.034 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEar1Left" name="mFaceEar1Left" sid="mFaceEar1Left" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 0.08 0 0 1 0.002 0 0 0 1</matrix>
                        <node id="mFaceEar2Left" name="mFaceEar2Left" sid="mFaceEar2Left" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.019 0 1 0 0.018 0 0 1 0.025 0 0 0 1</matrix>
                        </node>
                      </node>
                      <node id="mFaceEar1Right" name="mFaceEar1Right" sid="mFaceEar1Right" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 -0.08 0 0 1 0.002 0 0 0 1</matrix>
                        <node id="mFaceEar2Right" name="mFaceEar2Right" sid="mFaceEar2Right" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.019 0 1 0 -0.018 0 0 1 0.025 0 0 0 1</matrix>
                        </node>
                      </node>
                      <node id="mFaceNoseLeft" name="mFaceNoseLeft" sid="mFaceNoseLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.086 0 1 0 0.015 0 0 1 -0.004 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceNoseCenter" name="mFaceNoseCenter" sid="mFaceNoseCenter" type="JOINT">
                        <matrix sid="transform">1 0 0 0.102 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceNoseRight" name="mFaceNoseRight" sid="mFaceNoseRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.086 0 1 0 -0.015 0 0 1 -0.004 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceCheekLowerLeft" name="mFaceCheekLowerLeft" sid="mFaceCheekLowerLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.05 0 1 0 0.034 0 0 1 -0.031 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceCheekUpperLeft" name="mFaceCheekUpperLeft" sid="mFaceCheekUpperLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.07 0 1 0 0.034 0 0 1 -0.005 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceCheekLowerRight" name="mFaceCheekLowerRight" sid="mFaceCheekLowerRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.05 0 1 0 -0.034 0 0 1 -0.031 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceCheekUpperRight" name="mFaceCheekUpperRight" sid="mFaceCheekUpperRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.07 0 1 0 -0.034 0 0 1 -0.005 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceJaw" name="mFaceJaw" sid="mFaceJaw" type="JOINT">
                        <matrix sid="transform">1 0 0 -0.001 0 1 0 0 0 0 1 -0.015 0 0 0 1</matrix>
                        <node id="mFaceChin" name="mFaceChin" sid="mFaceChin" type="JOINT">
                          <matrix sid="transform">1 0 0 0.074 0 1 0 0 0 0 1 -0.054 0 0 0 1</matrix>
                        </node>
                        <node id="mFaceTeethLower" name="mFaceTeethLower" sid="mFaceTeethLower" type="JOINT">
                          <matrix sid="transform">1 0 0 0.021 0 1 0 0 0 0 1 -0.039 0 0 0 1</matrix>
                          <node id="mFaceLipLowerLeft" name="mFaceLipLowerLeft" sid="mFaceLipLowerLeft" type="JOINT">
                            <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                          </node>
                          <node id="mFaceLipLowerRight" name="mFaceLipLowerRight" sid="mFaceLipLowerRight" type="JOINT">
                            <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                          </node>
                          <node id="mFaceLipLowerCenter" name="mFaceLipLowerCenter" sid="mFaceLipLowerCenter" type="JOINT">
                            <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                          </node>
                          <node id="mFaceTongueBase" name="mFaceTongueBase" sid="mFaceTongueBase" type="JOINT">
                            <matrix sid="transform">1 0 0 0.039 0 1 0 0 0 0 1 0.005 0 0 0 1</matrix>
                            <node id="mFaceTongueTip" name="mFaceTongueTip" sid="mFaceTongueTip" type="JOINT">
                              <matrix sid="transform">1 0 0 0.022 0 1 0 0 0 0 1 0.007 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                      </node>
                      <node id="mFaceJawShaper" name="mFaceJawShaper" sid="mFaceJawShaper" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceForeheadCenter" name="mFaceForeheadCenter" sid="mFaceForeheadCenter" type="JOINT">
                        <matrix sid="transform">1 0 0 0.069 0 1 0 0 0 0 1 0.065 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceNoseBase" name="mFaceNoseBase" sid="mFaceNoseBase" type="JOINT">
                        <matrix sid="transform">1 0 0 0.094 0 1 0 0 0 0 1 -0.016 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceTeethUpper" name="mFaceTeethUpper" sid="mFaceTeethUpper" type="JOINT">
                        <matrix sid="transform">1 0 0 0.02 0 1 0 0 0 0 1 -0.03 0 0 0 1</matrix>
                        <node id="mFaceLipUpperLeft" name="mFaceLipUpperLeft" sid="mFaceLipUpperLeft" type="JOINT">
                          <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 -0.003 0 0 0 1</matrix>
                        </node>
                        <node id="mFaceLipUpperRight" name="mFaceLipUpperRight" sid="mFaceLipUpperRight" type="JOINT">
                          <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 -0.003 0 0 0 1</matrix>
                        </node>
                        <node id="mFaceLipCornerLeft" name="mFaceLipCornerLeft" sid="mFaceLipCornerLeft" type="JOINT">
                          <matrix sid="transform">1 0 0 0.028 0 1 0 -0.019 0 0 1 -0.01 0 0 0 1</matrix>
                        </node>
                        <node id="mFaceLipCornerRight" name="mFaceLipCornerRight" sid="mFaceLipCornerRight" type="JOINT">
                          <matrix sid="transform">1 0 0 0.028 0 1 0 0.019 0 0 1 -0.01 0 0 0 1</matrix>
                        </node>
                        <node id="mFaceLipUpperCenter" name="mFaceLipUpperCenter" sid="mFaceLipUpperCenter" type="JOINT">
                          <matrix sid="transform">1 0 0 0.045 0 1 0 0 0 0 1 -0.003 0 0 0 1</matrix>
                        </node>
                      </node>
                      <node id="mFaceEyecornerInnerLeft" name="mFaceEyecornerInnerLeft" sid="mFaceEyecornerInnerLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0.075 0 1 0 0.017 0 0 1 0.032 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceEyecornerInnerRight" name="mFaceEyecornerInnerRight" sid="mFaceEyecornerInnerRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0.075 0 1 0 -0.017 0 0 1 0.032 0 0 0 1</matrix>
                      </node>
                      <node id="mFaceNoseBridge" name="mFaceNoseBridge" sid="mFaceNoseBridge" type="JOINT">
                        <matrix sid="transform">1 0 0 0.091 0 1 0 0 0 0 1 0.02 0 0 0 1</matrix>
                      </node>
                    </node>
                  </node>
                </node>
                <node id="mCollarLeft" name="mCollarLeft" sid="mCollarLeft" type="JOINT">
                  <matrix sid="transform">1 0 0 -0.021 0 1 0 0.085 0 0 1 0.165 0 0 0 1</matrix>
                  <node id="L_CLAVICLE" name="L_CLAVICLE" sid="L_CLAVICLE" type="JOINT">
                    <matrix sid="transform">1 0 0 0.02 0 1 0 0 0 0 1 0.02 0 0 0 1</matrix>
                  </node>
                  <node id="mShoulderLeft" name="mShoulderLeft" sid="mShoulderLeft" type="JOINT">
                    <matrix sid="transform">1 0 0 0 0 1 0 0.079 0 0 1 0 0 0 0 1</matrix>
                    <node id="L_UPPER_ARM" name="L_UPPER_ARM" sid="L_UPPER_ARM" type="JOINT">
                      <matrix sid="transform">1 0 0 0 0 1 0 0.12 0 0 1 0.01 0 0 0 1</matrix>
                    </node>
                    <node id="mElbowLeft" name="mElbowLeft" sid="mElbowLeft" type="JOINT">
                      <matrix sid="transform">1 0 0 0 0 1 0 0.248 0 0 1 0 0 0 0 1</matrix>
                      <node id="L_LOWER_ARM" name="L_LOWER_ARM" sid="L_LOWER_ARM" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 0.1 0 0 1 0 0 0 0 1</matrix>
                      </node>
                      <node id="mWristLeft" name="mWristLeft" sid="mWristLeft" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 0.205 0 0 1 0 0 0 0 1</matrix>
                        <node id="L_HAND" name="L_HAND" sid="L_HAND" type="JOINT">
                          <matrix sid="transform">1 0 0 0.01 0 1 0 0.05 0 0 1 0 0 0 0 1</matrix>
                        </node>
                        <node id="mHandMiddle1Left" name="mHandMiddle1Left" sid="mHandMiddle1Left" type="JOINT">
                          <matrix sid="transform">1 0 0 0.013 0 1 0 0.101 0 0 1 0.015 0 0 0 1</matrix>
                          <node id="mHandMiddle2Left" name="mHandMiddle2Left" sid="mHandMiddle2Left" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.001 0 1 0 0.04 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandMiddle3Left" name="mHandMiddle3Left" sid="mHandMiddle3Left" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.001 0 1 0 0.049 0 0 1 -0.008 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandIndex1Left" name="mHandIndex1Left" sid="mHandIndex1Left" type="JOINT">
                          <matrix sid="transform">1 0 0 0.038 0 1 0 0.097 0 0 1 0.015 0 0 0 1</matrix>
                          <node id="mHandIndex2Left" name="mHandIndex2Left" sid="mHandIndex2Left" type="JOINT">
                            <matrix sid="transform">1 0 0 0.017 0 1 0 0.036 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandIndex3Left" name="mHandIndex3Left" sid="mHandIndex3Left" type="JOINT">
                              <matrix sid="transform">1 0 0 0.014 0 1 0 0.032 0 0 1 -0.006 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandRing1Left" name="mHandRing1Left" sid="mHandRing1Left" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.01 0 1 0 0.099 0 0 1 0.009 0 0 0 1</matrix>
                          <node id="mHandRing2Left" name="mHandRing2Left" sid="mHandRing2Left" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.013 0 1 0 0.038 0 0 1 -0.008 0 0 0 1</matrix>
                            <node id="mHandRing3Left" name="mHandRing3Left" sid="mHandRing3Left" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.013 0 1 0 0.04 0 0 1 -0.009 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandPinky1Left" name="mHandPinky1Left" sid="mHandPinky1Left" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.031 0 1 0 0.095 0 0 1 0.003 0 0 0 1</matrix>
                          <node id="mHandPinky2Left" name="mHandPinky2Left" sid="mHandPinky2Left" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.024 0 1 0 0.025 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandPinky3Left" name="mHandPinky3Left" sid="mHandPinky3Left" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.015 0 1 0 0.018 0 0 1 -0.004 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandThumb1Left" name="mHandThumb1Left" sid="mHandThumb1Left" type="JOINT">
                          <matrix sid="transform">1 0 0 0.031 0 1 0 0.026 0 0 1 0.004 0 0 0 1</matrix>
                          <node id="mHandThumb2Left" name="mHandThumb2Left" sid="mHandThumb2Left" type="JOINT">
                            <matrix sid="transform">1 0 0 0.028 0 1 0 0.032 0 0 1 -0.001 0 0 0 1</matrix>
                            <node id="mHandThumb3Left" name="mHandThumb3Left" sid="mHandThumb3Left" type="JOINT">
                              <matrix sid="transform">1 0 0 0.023 0 1 0 0.031 0 0 1 -0.001 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                      </node>
                    </node>
                  </node>
                </node>
                <node id="mCollarRight" name="mCollarRight" sid="mCollarRight" type="JOINT">
                  <matrix sid="transform">1 0 0 -0.021 0 1 0 -0.085 0 0 1 0.165 0 0 0 1</matrix>
                  <node id="R_CLAVICLE" name="R_CLAVICLE" sid="R_CLAVICLE" type="JOINT">
                    <matrix sid="transform">1 0 0 0.02 0 1 0 0 0 0 1 0.02 0 0 0 1</matrix>
                  </node>
                  <node id="mShoulderRight" name="mShoulderRight" sid="mShoulderRight" type="JOINT">
                    <matrix sid="transform">1 0 0 0 0 1 0 -0.079 0 0 1 0 0 0 0 1</matrix>
                    <node id="R_UPPER_ARM" name="R_UPPER_ARM" sid="R_UPPER_ARM" type="JOINT">
                      <matrix sid="transform">1 0 0 0 0 1 0 -0.12 0 0 1 0.01 0 0 0 1</matrix>
                    </node>
                    <node id="mElbowRight" name="mElbowRight" sid="mElbowRight" type="JOINT">
                      <matrix sid="transform">1 0 0 0 0 1 0 -0.248 0 0 1 0 0 0 0 1</matrix>
                      <node id="R_LOWER_ARM" name="R_LOWER_ARM" sid="R_LOWER_ARM" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 -0.1 0 0 1 0 0 0 0 1</matrix>
                      </node>
                      <node id="mWristRight" name="mWristRight" sid="mWristRight" type="JOINT">
                        <matrix sid="transform">1 0 0 0 0 1 0 -0.205 0 0 1 0 0 0 0 1</matrix>
                        <node id="R_HAND" name="R_HAND" sid="R_HAND" type="JOINT">
                          <matrix sid="transform">1 0 0 0.01 0 1 0 -0.05 0 0 1 0 0 0 0 1</matrix>
                        </node>
                        <node id="mHandMiddle1Right" name="mHandMiddle1Right" sid="mHandMiddle1Right" type="JOINT">
                          <matrix sid="transform">1 0 0 0.013 0 1 0 -0.101 0 0 1 0.015 0 0 0 1</matrix>
                          <node id="mHandMiddle2Right" name="mHandMiddle2Right" sid="mHandMiddle2Right" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.001 0 1 0 -0.04 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandMiddle3Right" name="mHandMiddle3Right" sid="mHandMiddle3Right" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.001 0 1 0 -0.049 0 0 1 -0.008 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandIndex1Right" name="mHandIndex1Right" sid="mHandIndex1Right" type="JOINT">
                          <matrix sid="transform">1 0 0 0.038 0 1 0 -0.097 0 0 1 0.015 0 0 0 1</matrix>
                          <node id="mHandIndex2Right" name="mHandIndex2Right" sid="mHandIndex2Right" type="JOINT">
                            <matrix sid="transform">1 0 0 0.017 0 1 0 -0.036 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandIndex3Right" name="mHandIndex3Right" sid="mHandIndex3Right" type="JOINT">
                              <matrix sid="transform">1 0 0 0.014 0 1 0 -0.032 0 0 1 -0.006 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandRing1Right" name="mHandRing1Right" sid="mHandRing1Right" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.01 0 1 0 -0.099 0 0 1 0.009 0 0 0 1</matrix>
                          <node id="mHandRing2Right" name="mHandRing2Right" sid="mHandRing2Right" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.013 0 1 0 -0.038 0 0 1 -0.008 0 0 0 1</matrix>
                            <node id="mHandRing3Right" name="mHandRing3Right" sid="mHandRing3Right" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.013 0 1 0 -0.04 0 0 1 -0.009 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandPinky1Right" name="mHandPinky1Right" sid="mHandPinky1Right" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.031 0 1 0 -0.095 0 0 1 0.003 0 0 0 1</matrix>
                          <node id="mHandPinky2Right" name="mHandPinky2Right" sid="mHandPinky2Right" type="JOINT">
                            <matrix sid="transform">1 0 0 -0.024 0 1 0 -0.025 0 0 1 -0.006 0 0 0 1</matrix>
                            <node id="mHandPinky3Right" name="mHandPinky3Right" sid="mHandPinky3Right" type="JOINT">
                              <matrix sid="transform">1 0 0 -0.015 0 1 0 -0.018 0 0 1 -0.004 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                        <node id="mHandThumb1Right" name="mHandThumb1Right" sid="mHandThumb1Right" type="JOINT">
                          <matrix sid="transform">1 0 0 0.031 0 1 0 -0.026 0 0 1 0.004 0 0 0 1</matrix>
                          <node id="mHandThumb2Right" name="mHandThumb2Right" sid="mHandThumb2Right" type="JOINT">
                            <matrix sid="transform">1 0 0 0.028 0 1 0 -0.032 0 0 1 -0.001 0 0 0 1</matrix>
                            <node id="mHandThumb3Right" name="mHandThumb3Right" sid="mHandThumb3Right" type="JOINT">
                              <matrix sid="transform">1 0 0 0.023 0 1 0 -0.031 0 0 1 -0.001 0 0 0 1</matrix>
                            </node>
                          </node>
                        </node>
                      </node>
                    </node>
                  </node>
                </node>
                <node id="mWingsRoot" name="mWingsRoot" sid="mWingsRoot" type="JOINT">
                  <matrix sid="transform">1 0 0 -0.014 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
                  <node id="mWing1Left" name="mWing1Left" sid="mWing1Left" type="JOINT">
                    <matrix sid="transform">1 0 0 -0.099 0 1 0 0.105 0 0 1 0.181 0 0 0 1</matrix>
                    <node id="mWing2Left" name="mWing2Left" sid="mWing2Left" type="JOINT">
                      <matrix sid="transform">1 0 0 -0.168 0 1 0 0.169 0 0 1 0.067 0 0 0 1</matrix>
                      <node id="mWing3Left" name="mWing3Left" sid="mWing3Left" type="JOINT">
                        <matrix sid="transform">1 0 0 -0.181 0 1 0 0.183 0 0 1 0 0 0 0 1</matrix>
                        <node id="mWing4Left" name="mWing4Left" sid="mWing4Left" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.171 0 1 0 0.173 0 0 1 0 0 0 0 1</matrix>
                        </node>
                        <node id="mWing4FanLeft" name="mWing4FanLeft" sid="mWing4FanLeft" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.171 0 1 0 0.173 0 0 1 0 0 0 0 1</matrix>
                        </node>
                      </node>
                    </node>
                  </node>
                  <node id="mWing1Right" name="mWing1Right" sid="mWing1Right" type="JOINT">
                    <matrix sid="transform">1 0 0 -0.099 0 1 0 -0.105 0 0 1 0.181 0 0 0 1</matrix>
                    <node id="mWing2Right" name="mWing2Right" sid="mWing2Right" type="JOINT">
                      <matrix sid="transform">1 0 0 -0.168 0 1 0 -0.169 0 0 1 0.067 0 0 0 1</matrix>
                      <node id="mWing3Right" name="mWing3Right" sid="mWing3Right" type="JOINT">
                        <matrix sid="transform">1 0 0 -0.181 0 1 0 -0.183 0 0 1 0 0 0 0 1</matrix>
                        <node id="mWing4Right" name="mWing4Right" sid="mWing4Right" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.171 0 1 0 -0.173 0 0 1 0 0 0 0 1</matrix>
                        </node>
                        <node id="mWing4FanRight" name="mWing4FanRight" sid="mWing4FanRight" type="JOINT">
                          <matrix sid="transform">1 0 0 -0.171 0 1 0 -0.173 0 0 1 0 0 0 0 1</matrix>
                        </node>
                      </node>
                    </node>
                  </node>
                </node>
              </node>
            </node>
          </node>
        </node>
      </node>
    </node>
    <node id="mHipRight" name="mHipRight" sid="mHipRight" type="JOINT">
      <matrix sid="transform">1 0 0 0.034 0 1 0 -0.129 0 0 1 -0.041 0 0 0 1</matrix>
      <node id="R_UPPER_LEG" name="R_UPPER_LEG" sid="R_UPPER_LEG" type="JOINT">
        <matrix sid="transform">1 0 0 -0.02 0 1 0 0.05 0 0 1 -0.22 0 0 0 1</matrix>
      </node>
      <node id="mKneeRight" name="mKneeRight" sid="mKneeRight" type="JOINT">
        <matrix sid="transform">1 0 0 -0.001 0 1 0 0.049 0 0 1 -0.491 0 0 0 1</matrix>
        <node id="R_LOWER_LEG" name="R_LOWER_LEG" sid="R_LOWER_LEG" type="JOINT">
          <matrix sid="transform">1 0 0 -0.02 0 1 0 0 0 0 1 -0.2 0 0 0 1</matrix>
        </node>
        <node id="mAnkleRight" name="mAnkleRight" sid="mAnkleRight" type="JOINT">
          <matrix sid="transform">1 0 0 -0.029 0 1 0 0 0 0 1 -0.468 0 0 0 1</matrix>
          <node id="R_FOOT" name="R_FOOT" sid="R_FOOT" type="JOINT">
            <matrix sid="transform">1 0 0 0.077 0 1 0 0 0 0 1 -0.041 0 0 0 1</matrix>
          </node>
          <node id="mFootRight" name="mFootRight" sid="mFootRight" type="JOINT">
            <matrix sid="transform">1 0 0 0.112 0 1 0 0 0 0 1 -0.061 0 0 0 1</matrix>
            <node id="mToeRight" name="mToeRight" sid="mToeRight" type="JOINT">
              <matrix sid="transform">1 0 0 0.109 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
            </node>
          </node>
        </node>
      </node>
    </node>
    <node id="mHipLeft" name="mHipLeft" sid="mHipLeft" type="JOINT">
      <matrix sid="transform">1 0 0 0.034 0 1 0 0.127 0 0 1 -0.041 0 0 0 1</matrix>
      <node id="L_UPPER_LEG" name="L_UPPER_LEG" sid="L_UPPER_LEG" type="JOINT">
        <matrix sid="transform">1 0 0 -0.02 0 1 0 -0.05 0 0 1 -0.22 0 0 0 1</matrix>
      </node>
      <node id="mKneeLeft" name="mKneeLeft" sid="mKneeLeft" type="JOINT">
        <matrix sid="transform">1 0 0 -0.001 0 1 0 -0.046 0 0 1 -0.491 0 0 0 1</matrix>
        <node id="L_LOWER_LEG" name="L_LOWER_LEG" sid="L_LOWER_LEG" type="JOINT">
          <matrix sid="transform">1 0 0 -0.02 0 1 0 0 0 0 1 -0.2 0 0 0 1</matrix>
        </node>
        <node id="mAnkleLeft" name="mAnkleLeft" sid="mAnkleLeft" type="JOINT">
          <matrix sid="transform">1 0 0 -0.029 0 1 0 0.001 0 0 1 -0.468 0 0 0 1</matrix>
          <node id="L_FOOT" name="L_FOOT" sid="L_FOOT" type="JOINT">
            <matrix sid="transform">1 0 0 0.077 0 1 0 0 0 0 1 -0.041 0 0 0 1</matrix>
          </node>
          <node id="mFootLeft" name="mFootLeft" sid="mFootLeft" type="JOINT">
            <matrix sid="transform">1 0 0 0.112 0 1 0 0 0 0 1 -0.061 0 0 0 1</matrix>
            <node id="mToeLeft" name="mToeLeft" sid="mToeLeft" type="JOINT">
              <matrix sid="transform">1 0 0 0.109 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
            </node>
          </node>
        </node>
      </node>
    </node>
    <node id="mTail1" name="mTail1" sid="mTail1" type="JOINT">
      <matrix sid="transform">1 0 0 -0.116 0 1 0 0 0 0 1 0.047 0 0 0 1</matrix>
      <node id="mTail2" name="mTail2" sid="mTail2" type="JOINT">
        <matrix sid="transform">1 0 0 -0.197 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
        <node id="mTail3" name="mTail3" sid="mTail3" type="JOINT">
          <matrix sid="transform">1 0 0 -0.168 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
          <node id="mTail4" name="mTail4" sid="mTail4" type="JOINT">
            <matrix sid="transform">1 0 0 -0.142 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
            <node id="mTail5" name="mTail5" sid="mTail5" type="JOINT">
              <matrix sid="transform">1 0 0 -0.112 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
              <node id="mTail6" name="mTail6" sid="mTail6" type="JOINT">
                <matrix sid="transform">1 0 0 -0.094 0 1 0 0 0 0 1 0 0 0 0 1</matrix>
              </node>
            </node>
          </node>
        </node>
      </node>
    </node>
    <node id="mGroin" name="mGroin" sid="mGroin" type="JOINT">
      <matrix sid="transform">1 0 0 0.064 0 1 0 0 0 0 1 -0.097 0 0 0 1</matrix>
    </node>
    <node id="mHindLimbsRoot" name="mHindLimbsRoot" sid="mHindLimbsRoot" type="JOINT">
      <matrix sid="transform">1 0 0 -0.2 0 1 0 0 0 0 1 0.084 0 0 0 1</matrix>
      <node id="mHindLimb1Left" name="mHindLimb1Left" sid="mHindLimb1Left" type="JOINT">
        <matrix sid="transform">1 0 0 -0.204 0 1 0 0.129 0 0 1 -0.125 0 0 0 1</matrix>
        <node id="mHindLimb2Left" name="mHindLimb2Left" sid="mHindLimb2Left" type="JOINT">
          <matrix sid="transform">1 0 0 0.002 0 1 0 -0.046 0 0 1 -0.491 0 0 0 1</matrix>
          <node id="mHindLimb3Left" name="mHindLimb3Left" sid="mHindLimb3Left" type="JOINT">
            <matrix sid="transform">1 0 0 -0.03 0 1 0 -0.003 0 0 1 -0.468 0 0 0 1</matrix>
            <node id="mHindLimb4Left" name="mHindLimb4Left" sid="mHindLimb4Left" type="JOINT">
              <matrix sid="transform">1 0 0 0.112 0 1 0 0 0 0 1 -0.061 0 0 0 1</matrix>
            </node>
          </node>
        </node>
      </node>
      <node id="mHindLimb1Right" name="mHindLimb1Right" sid="mHindLimb1Right" type="JOINT">
        <matrix sid="transform">1 0 0 -0.204 0 1 0 -0.129 0 0 1 -0.125 0 0 0 1</matrix>
        <node id="mHindLimb2Right" name="mHindLimb2Right" sid="mHindLimb2Right" type="JOINT">
          <matrix sid="transform">1 0 0 0.002 0 1 0 0.046 0 0 1 -0.491 0 0 0 1</matrix>
          <node id="mHindLimb3Right" name="mHindLimb3Right" sid="mHindLimb3Right" type="JOINT">
            <matrix sid="transform">1 0 0 -0.03 0 1 0 0.003 0 0 1 -0.468 0 0 0 1</matrix>
            <node id="mHindLimb4Right" name="mHindLimb4Right" sid="mHindLimb4Right" type="JOINT">
              <matrix sid="transform">1 0 0 0.112 0 1 0 0 0 0 1 -0.061 0 0 0 1</matrix>
            </node>
          </node>
        </node>
      </node>
    </node>
  </node>
</node>
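
Each joint in the COLLADA file above carries a row-major 4x4 <matrix sid="transform">, so the parent-relative translation sits at flattened indices 3, 7 and 11; those offsets line up with the pivot/pos values in the skeleton XML. A hedged sketch of extracting them (the local filename is an assumption; the namespace comes from the xmlns above):

    import xml.etree.ElementTree as ET

    NS = "{http://www.collada.org/2005/11/COLLADASchema}"

    def joint_offsets(path="male_collada_joints.xml"):  # assumed path
        for node in ET.parse(path).getroot().iter(NS + "node"):
            mat = node.find(NS + "matrix")
            if node.get("type") == "JOINT" and mat is not None:
                m = [float(v) for v in mat.text.split()]
                # Row-major: translation is the last column of the first three rows
                yield node.get("sid"), (m[3], m[7], m[11])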
@@ -18,6 +18,8 @@ You should have received a copy of the GNU Lesser General Public License
 along with this program; if not, write to the Free Software Foundation,
 Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
 from __future__ import annotations

+import ast
+import enum
 import hashlib
@@ -27,6 +29,7 @@ import math
 from typing import *

 import recordclass
+import transformations

 logger = getLogger('hippolyzer.lib.base.datatypes')

@@ -36,12 +39,13 @@ class _IterableStub:
     __iter__: Callable


-class TupleCoord(recordclass.datatuple, _IterableStub):  # type: ignore
-    __options__ = {
-        "fast_new": False,
-    }
+RAD_TO_DEG = 180 / math.pi
+
+
+class TupleCoord(recordclass.RecordClass, _IterableStub):
+    def __init__(self, *args):
+        # Only to help typing, doesn't actually do anything.
+        # All the important stuff happens in `__new__()`
+        pass

     @classmethod
@@ -58,6 +62,9 @@ class TupleCoord(recordclass.datatuple, _IterableStub):  # type: ignore
     def __abs__(self):
         return self.__class__(*(abs(x) for x in self))

+    def __neg__(self):
+        return self.__class__(*(-x for x in self))
+
     def __add__(self, other):
         return self.__class__(*(x + y for x, y in zip(self, other)))

@@ -215,6 +222,15 @@ class Quaternion(TupleCoord):
             )
         return super().__mul__(other)

+    @classmethod
+    def from_transformations(cls, coord) -> Quaternion:
+        """Convert to W (S) last form"""
+        return cls(coord[1], coord[2], coord[3], coord[0])
+
+    def to_transformations(self) -> Tuple[float, float, float, float]:
+        """Convert to W (S) first form for use with the transformations lib"""
+        return self.W, self.X, self.Y, self.Z
+
     @classmethod
     def from_euler(cls, roll, pitch, yaw, degrees=False):
         if degrees:
@@ -236,6 +252,9 @@ class Quaternion(TupleCoord):

         return cls(X=x, Y=y, Z=z, W=w)

+    def to_euler(self) -> Vector3:
+        return Vector3(*transformations.euler_from_quaternion(self.to_transformations()))
+
     def data(self, wanted_components=None):
         if wanted_components == 3:
             return self.X, self.Y, self.Z
@@ -244,6 +263,7 @@ class Quaternion(TupleCoord):

 class UUID(uuid.UUID):
     _NULL_UUID_STR = '00000000-0000-0000-0000-000000000000'
+    ZERO: UUID
     __slots__ = ()

     def __init__(self, val: Union[uuid.UUID, str, None] = None, bytes=None, int=None):
@@ -268,18 +288,25 @@ class UUID(uuid.UUID):
         return self.__class__(int=self.int ^ other.int)


+UUID.ZERO = UUID()
+
+
 class JankStringyBytes(bytes):
     """
     Treat bytes as UTF8 if used in string context

     Sinful, but necessary evil for now since templates don't specify what's
-    binary and what's a string.
+    binary and what's a string. There are also certain fields where the value
+    may be either binary _or_ a string, depending on the context.
     """
     __slots__ = ()

     def __str__(self):
         return self.rstrip(b"\x00").decode("utf8", errors="replace")

+    def __bool__(self):
+        return not (super().__eq__(b"") or super().__eq__(b"\x00"))
+
     def __eq__(self, other):
         if isinstance(other, str):
             return str(self) == other
@@ -288,23 +315,96 @@ class JankStringyBytes(bytes):
     def __ne__(self, other):
         return not self.__eq__(other)

+    def __contains__(self, item):
+        if isinstance(item, str):
+            return item in str(self)
+        return item in bytes(self)
+
+    def __add__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(bytes(self) + other)
+        return str(self) + other
+
+    def __radd__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(other + bytes(self))
+        return other + str(self)
+
+    def lower(self):
+        return str(self).lower()
+
+    def upper(self):
+        return str(self).upper()
+
+    def startswith(self, __prefix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__prefix, str):
+            return str(self).startswith(__prefix)
+        return super().startswith(__prefix)
+
+    def endswith(self, __prefix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__prefix, str):
+            return str(self).endswith(__prefix)
+        return super().endswith(__prefix)


 class RawBytes(bytes):
+    __slots__ = ()
     pass


+_T = TypeVar("_T")
+
+
+class Pretty(Generic[_T]):
+    """Wrapper for var values so Messages will know to serialize"""
+    __slots__ = ("value",)
+
+    def __init__(self, value: _T):
+        self.value: _T = value
+
+
 class StringEnum(str, enum.Enum):
     def __str__(self):
         return self.value


-class TaggedUnion(recordclass.datatuple):  # type: ignore
+class IntEnum(enum.IntEnum):
+    # Give a special repr() that'll eval in a REPL.
+    def __repr__(self):
+        return f"{self.__class__.__name__}.{self.name}"
+
+
+class IntFlag(enum.IntFlag):
+    def __repr__(self):
+        # Make an ORed together version of the flags based on the POD version
+        flags = flags_to_pod(type(self), self)
+        flags = " | ".join(
+            (f"{self.__class__.__name__}.{v}" if isinstance(v, str) else str(v))
+            for v in flags
+        )
+        return f"({flags})"
+
+
+def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int], ...]:
+    # Shove any bits not represented in the IntFlag into an int
+    left_over = val
+    for flag in iter(flag_cls):
+        left_over &= ~flag.value
+    extra = (int(left_over),) if left_over else ()
+    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
+
+
+class TaggedUnion(recordclass.RecordClass):
     tag: Any
     value: Any


 __all__ = [
     "Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
-    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion"
+    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
+    "IntEnum", "IntFlag", "flags_to_pod", "Pretty", "RAD_TO_DEG"
 ]

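Taken together, the datatypes changes above give Quaternion a lossless bridge to the transformations library (which wants W first, while SL serializes W last) and give the new IntFlag a REPL-friendly repr. A short usage sketch of the added helpers (the flag class here is illustrative):

    from hippolyzer.lib.base.datatypes import IntFlag, Quaternion, flags_to_pod

    q = Quaternion.from_euler(0.0, 0.0, 90.0, degrees=True)
    # to_transformations()/from_transformations() only reorder components,
    # so the round-trip is exact
    assert q == Quaternion.from_transformations(q.to_transformations())

    class Perms(IntFlag):
        COPY = 1
        MODIFY = 2

    # Bits with no named flag are kept as a trailing int rather than dropped
    assert flags_to_pod(Perms, 0b101) == ("COPY", 4)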
@@ -18,17 +18,20 @@ You should have received a copy of the GNU Lesser General Public License
 along with this program; if not, write to the Free Software Foundation,
 Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
+import asyncio
+import logging

-from logging import getLogger
+from hippolyzer.lib.base.helpers import create_logged_task

-logger = getLogger('utilities.events')
+LOG = logging.getLogger(__name__)


 class Event:
     """ an object containing data which will be passed out to all subscribers """

-    def __init__(self):
+    def __init__(self, name=None):
         self.subscribers = []
+        self.name = name

     def subscribe(self, handler, *args, one_shot=False, predicate=None, **kwargs):
         """ establish the subscribers (handlers) to this event """
@@ -38,7 +41,8 @@ class Event:

         return self

-    def _handler_key(self, handler):
+    @staticmethod
+    def _handler_key(handler):
         return handler[:3]

     def unsubscribe(self, handler, *args, **kwargs):
@@ -52,24 +56,37 @@ class Event:
             raise ValueError(f"Handler {handler!r} is not subscribed to this event.")
         return self

+    def _create_async_wrapper(self, handler, args, inner_args, kwargs):
+        # Note that unsubscription may be delayed due to asyncio scheduling :)
+        async def _run_handler_wrapper():
+            unsubscribe = await handler(args, *inner_args, **kwargs)
+            if unsubscribe:
+                _ = self.unsubscribe(handler, *inner_args, **kwargs)
+        return _run_handler_wrapper
+
     def notify(self, args):
-        for handler in self.subscribers[:]:
-            instance, inner_args, kwargs, one_shot, predicate = handler
+        for subscriber in self.subscribers[:]:
+            handler, inner_args, kwargs, one_shot, predicate = subscriber
             if predicate and not predicate(args):
                 continue
             if one_shot:
-                self.unsubscribe(instance, *inner_args, **kwargs)
-            if instance(args, *inner_args, **kwargs):
-                self.unsubscribe(instance, *inner_args, **kwargs)
+                self.unsubscribe(handler, *inner_args, **kwargs)
+            if asyncio.iscoroutinefunction(handler):
+                create_logged_task(self._create_async_wrapper(handler, args, inner_args, kwargs)(), self.name, LOG)
+            else:
+                try:
+                    if handler(args, *inner_args, **kwargs) and not one_shot:
+                        self.unsubscribe(handler, *inner_args, **kwargs)
+                except:
+                    # One handler failing shouldn't prevent notification of other handlers.
+                    LOG.exception(f"Failed in handler for {self.name}")

-    def get_subscriber_count(self):
+    def __len__(self):
         return len(self.subscribers)

     def clear_subscribers(self):
         self.subscribers.clear()
         return self

     __iadd__ = subscribe
     __isub__ = unsubscribe
     __call__ = notify
-    __len__ = get_subscriber_count

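With the Event changes above, handlers may be plain callables or coroutine functions (the latter are scheduled through create_logged_task), and a sync handler unsubscribes itself by returning a truthy value. A small sketch of the resulting API (handler and event names are illustrative):

    ev = Event(name="object_updates")

    def on_update(update):
        print(update)
        return True  # truthy return value unsubscribes this handler

    ev += on_update      # __iadd__ is subscribe
    ev("hello")          # __call__ is notify
    assert len(ev) == 0  # __len__ is now a real method reporting subscriber count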
@@ -176,7 +176,7 @@ class MessageTemplateNotFound(MessageSystemError):
         self.template = template

     def __str__(self):
-        return "No message template found, context: '%s'" % self.context
+        return "No message template found for %s, context: '%s'" % (self.template, self.context)


 class MessageTemplateParsingError(MessageSystemError):
@@ -347,7 +347,7 @@ class RegionCapNotAvailable(RegionDomainError):

 class RegionMessageError(RegionDomainError):
     """ an error raised when a region does not have a connection
-    over which it can send UDP messages
+    over which it can send UDP messages

     accepts a region object as an attribute
528  hippolyzer/lib/base/gltftools.py  Normal file
@@ -0,0 +1,528 @@
"""
|
||||
WIP LLMesh -> glTF converter, for testing eventual glTF -> LLMesh conversion logic.
|
||||
"""
|
||||
# TODO:
|
||||
# * Simple tests
|
||||
# * Round-tripping skinning data from Blender-compatible glTF back to LLMesh (maybe through rig retargeting?)
|
||||
# * Panda3D-glTF viewer for LLMesh? The glTFs seem to work fine in Panda3D-glTF's `gltf-viewer`.
|
||||
# * Check if skew and projection components of transform matrices are ignored in practice as the spec requires.
|
||||
# I suppose this would render some real assets impossible to represent with glTF.
|
||||
|
||||
import dataclasses
|
||||
import math
|
||||
import pprint
|
||||
import sys
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import *
|
||||
|
||||
import gltflib
|
||||
import numpy as np
|
||||
import transformations
|
||||
|
||||
from hippolyzer.lib.base.datatypes import Vector3
|
||||
from hippolyzer.lib.base.mesh import (
|
||||
LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict, VertexWeight, llsd_to_mat4
|
||||
)
|
||||
from hippolyzer.lib.base.mesh_skeleton import AVATAR_SKELETON
|
||||
from hippolyzer.lib.base.serialization import BufferReader
|
||||
|
||||
|
||||
class IdentityList(list):
|
||||
"""
|
||||
List, but does index() by object identity, not equality
|
||||
|
||||
GLTF references objects by their index within some list, but we prefer to pass around
|
||||
actual object references internally. If we don't do this, then when we try and get
|
||||
a GLTF reference to a given object via `.index()` then we could end up actually getting
|
||||
a reference to some other object that just happens to be equal. This was causing issues
|
||||
with all primitives ending up with the same material, due to the default material's value
|
||||
being the same across all primitives.
|
||||
"""
|
||||
def index(self, value, start: Optional[int] = None, stop: Optional[int] = None) -> int:
|
||||
view = self[start:stop]
|
||||
for i, x in enumerate(view):
|
||||
if x is value:
|
||||
if start:
|
||||
return i + start
|
||||
return i
|
||||
raise ValueError(value)
|
||||
|
||||
|
||||
def sl_to_gltf_coords(coords):
|
||||
"""
|
||||
SL (X, Y, Z) -> GL (X, Z, Y), as GLTF commandeth
|
||||
|
||||
Note that this will only work when reordering axes, flipping an axis is more complicated.
|
||||
"""
|
||||
return coords[0], coords[2], coords[1], *coords[3:]
|
||||
|
||||
|
||||
def sl_to_gltf_uv(uv):
|
||||
"""Flip the V coordinate of a UV to match glTF convention"""
|
||||
return [uv[0], -uv[1]]
|
||||
|
||||
|
||||
def sl_mat4_to_gltf(mat: np.ndarray) -> List[float]:
|
||||
"""
|
||||
Convert an SL Mat4 to the glTF coordinate system
|
||||
|
||||
This should only be done immediately before storing the matrix in a glTF structure!
|
||||
"""
|
||||
# TODO: This is probably not correct. We definitely need to flip Z but there's
|
||||
# probably a better way to do it.
|
||||
decomp = [sl_to_gltf_coords(x) for x in transformations.decompose_matrix(mat)]
|
||||
trans = decomp[3]
|
||||
decomp[3] = (trans[0], trans[1], -trans[2])
|
||||
return list(transformations.compose_matrix(*decomp).flatten(order='F'))
|
||||
|
||||
|
||||
# Mat3 to convert points from SL coordinate space to GLTF coordinate space
|
||||
POINT_TO_GLTF_MAT = transformations.compose_matrix(angles=(-(math.pi / 2), 0, 0))[:3, :3]
|
||||
|
||||
|
||||
def sl_vec3_array_to_gltf(vec_list: np.ndarray) -> np.ndarray:
|
||||
new_array = []
|
||||
for x in vec_list:
|
||||
new_array.append(POINT_TO_GLTF_MAT.dot(x))
|
||||
return np.array(new_array)
|
||||
|
||||
|
||||
def sl_weights_to_gltf(sl_weights: List[List[VertexWeight]]) -> Tuple[np.ndarray, np.ndarray]:
|
||||
"""Convert SL Weights to separate JOINTS_0 and WEIGHTS_0 vec4 arrays"""
|
||||
joints = np.zeros((len(sl_weights), 4), dtype=np.uint8)
|
||||
weights = np.zeros((len(sl_weights), 4), dtype=np.float32)
|
||||
|
||||
for i, vert_weights in enumerate(sl_weights):
|
||||
# We need to re-normalize these since the quantization can mess them up
|
||||
collected_weights = []
|
||||
for j, vert_weight in enumerate(vert_weights):
|
||||
joints[i, j] = vert_weight.joint_idx
|
||||
collected_weights.append(vert_weight.weight)
|
||||
weight_sum = sum(collected_weights)
|
||||
if weight_sum:
|
||||
for j, weight in enumerate(collected_weights):
|
||||
weights[i, j] = weight / weight_sum
|
||||
|
||||
return joints, weights
|
||||
|
||||
|
||||
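# NOTE (illustrative, not part of the original file): the renormalization above
# makes each vertex's weights sum to 1.0, e.g. (assuming a
# VertexWeight(joint_idx, weight) construction order):
#
#     sl_weights_to_gltf([[VertexWeight(3, 0.6), VertexWeight(7, 0.59)]])
#     # -> JOINTS_0 row [3, 7, 0, 0], WEIGHTS_0 row ~[0.504, 0.496, 0.0, 0.0]
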
def normalize_vec3(a):
    norm = np.linalg.norm(a)
    if norm == 0:
        return a
    return a / norm


def apply_bind_shape_matrix(bind_shape_matrix: np.ndarray, verts: np.ndarray, norms: np.ndarray) \
        -> Tuple[np.ndarray, np.ndarray]:
    """
    Apply the bind shape matrix to the mesh data

    glTF expects all verts and normals to be in armature-local space so that mesh data can be shared
    between differently-oriented armatures. Or something.
    # https://github.com/KhronosGroup/glTF-Blender-IO/issues/566#issuecomment-523119339

    glTF also doesn't have a concept of a "bind shape matrix" like Collada does
    per its skinning docs, so we have to mix it into the mesh data manually.
    See https://github.com/KhronosGroup/glTF-Tutorials/blob/master/gltfTutorial/gltfTutorial_020_Skins.md
    """
    scale, _, angles, translation, _ = transformations.decompose_matrix(bind_shape_matrix)
    scale_mat = transformations.compose_matrix(scale=scale)[:3, :3]
    rot_mat = transformations.euler_matrix(*angles)[:3, :3]
    rot_scale_mat = scale_mat @ np.linalg.inv(rot_mat)

    # Apply the SRT transform to each vert
    verts = (verts @ rot_scale_mat) + translation

    # Our scale is unlikely to be uniform, so we have to fix up our normals as well.
    # https://paroj.github.io/gltut/Illumination/Tut09%20Normal%20Transformation.html
    inv_transpose_mat = np.transpose(np.linalg.inv(bind_shape_matrix)[:3, :3])
    new_norms = [normalize_vec3(inv_transpose_mat @ norm) for norm in norms]

    return verts, np.array(new_norms)

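# NOTE (illustrative, not part of the original file): the inverse-transpose above
# is the standard normal-matrix rule: if vertices transform by M, normals must
# transform by (M^-1)^T to stay perpendicular under non-uniform scale.
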
@dataclasses.dataclass
|
||||
class JointContext:
|
||||
node: gltflib.Node
|
||||
# Original matrix for the bone, may have custom translation, but otherwise the same.
|
||||
orig_matrix: np.ndarray
|
||||
# xform that must be applied to inverse bind matrices to account for the changed bone
|
||||
fixup_matrix: np.ndarray
|
||||
|
||||
|
||||
JOINT_CONTEXT_DICT = Dict[str, JointContext]
|
||||
|
||||
|
||||
class GLTFBuilder:
|
||||
def __init__(self, blender_compatibility=False):
|
||||
self.scene = gltflib.Scene(nodes=IdentityList())
|
||||
self.model = gltflib.GLTFModel(
|
||||
asset=gltflib.Asset(version="2.0"),
|
||||
accessors=IdentityList(),
|
||||
nodes=IdentityList(),
|
||||
materials=IdentityList(),
|
||||
buffers=IdentityList(),
|
||||
bufferViews=IdentityList(),
|
||||
meshes=IdentityList(),
|
||||
skins=IdentityList(),
|
||||
scenes=IdentityList((self.scene,)),
|
||||
extensionsUsed=["KHR_materials_specular"],
|
||||
scene=0,
|
||||
)
|
||||
self.gltf = gltflib.GLTF(
|
||||
model=self.model,
|
||||
resources=IdentityList(),
|
||||
)
|
||||
self.blender_compatibility = blender_compatibility
|
||||
|
||||
    def add_nodes_from_llmesh(self, mesh: MeshAsset, name: str, mesh_transform: Optional[np.ndarray] = None):
        """Build a glTF version of a mesh asset, appending it and its armature to the scene root"""
        # TODO: mesh data instancing?
        #  consider https://github.com/KhronosGroup/glTF-Blender-IO/issues/1634.
        if mesh_transform is None:
            mesh_transform = np.identity(4)

        skin_seg: Optional[SkinSegmentDict] = mesh.segments.get('skin')
        skin = None
        if skin_seg:
            mesh_transform = llsd_to_mat4(skin_seg['bind_shape_matrix'])
            joint_ctxs = self.add_joints(skin_seg)

            # Give our armature a root node and parent the pelvis to it
            armature_node = self.add_node("Armature")
            self.scene.nodes.append(self.model.nodes.index(armature_node))
            armature_node.children.append(self.model.nodes.index(joint_ctxs['mPelvis'].node))
            skin = self.add_skin("Armature", joint_ctxs, skin_seg)
            skin.skeleton = self.model.nodes.index(armature_node)

        primitives = []
        # Just the high LOD for now
        for submesh in mesh.segments['high_lod']:
            verts = np.array(positions_from_domain(submesh['Position'], submesh['PositionDomain']))
            norms = np.array(submesh['Normal'])
            tris = np.array(submesh['TriangleList'])
            joints = np.array([])
            weights = np.array([])
            range_uv = np.array([])
            if "TexCoord0" in submesh:
                range_uv = np.array(positions_from_domain(submesh['TexCoord0'], submesh['TexCoord0Domain']))
            if 'Weights' in submesh:
                joints, weights = sl_weights_to_gltf(submesh['Weights'])

            if skin:
                # Convert verts and norms to armature-local space
                verts, norms = apply_bind_shape_matrix(mesh_transform, verts, norms)

            primitives.append(self.add_primitive(
                tris=tris,
                positions=verts,
                normals=norms,
                uvs=range_uv,
                joints=joints,
                weights=weights,
            ))

        mesh_node = self.add_node(
            name,
            self.add_mesh(name, primitives),
            transform=mesh_transform,
        )
        if skin:
            # Node translation isn't relevant; we're going to use the bind matrices.
            # If you pull this into Blender you may want to untick "Guess Original Bind Pose",
            # it guesses that based on the inverse bind matrices, which may have Maya poisoning.
            # TODO: Maybe we could automatically undo that by comparing expected bone scale and rot
            #  to scale and rot in the inverse bind matrices, and applying fixups to the
            #  bind shape matrix and inverse bind matrices?
            mesh_node.matrix = None
            mesh_node.skin = self.model.skins.index(skin)

        self.scene.nodes.append(self.model.nodes.index(mesh_node))

    def add_node(
        self,
        name: str,
        mesh: Optional[gltflib.Mesh] = None,
        transform: Optional[np.ndarray] = None,
    ) -> gltflib.Node:
        node = gltflib.Node(
            name=name,
            mesh=self.model.meshes.index(mesh) if mesh else None,
            matrix=sl_mat4_to_gltf(transform) if transform is not None else None,
            children=[],
        )
        self.model.nodes.append(node)
        return node

    def add_mesh(
        self,
        name: str,
        primitives: List[gltflib.Primitive],
    ) -> gltflib.Mesh:
        for i, prim in enumerate(primitives):
            # Give the materials a name relating to what "face" they belong to
            self.model.materials[prim.material].name = f"{name}.{i:03}"
        mesh = gltflib.Mesh(name=name, primitives=primitives)
        self.model.meshes.append(mesh)
        return mesh

    def add_primitive(
        self,
        tris: np.ndarray,
        positions: np.ndarray,
        normals: np.ndarray,
        uvs: np.ndarray,
        weights: np.ndarray,
        joints: np.ndarray,
    ) -> gltflib.Primitive:
        # Make a Material for the primitive. Materials pretty much _are_ the primitives in
        # LLMesh, so just make them both in one go. We need a unique material for each primitive.
        material = gltflib.Material(
            pbrMetallicRoughness=gltflib.PBRMetallicRoughness(
                baseColorFactor=[1.0, 1.0, 1.0, 1.0],
                metallicFactor=0.0,
                roughnessFactor=0.0,
            ),
            extensions={
                "KHR_materials_specular": {
                    "specularFactor": 0.0,
                    "specularColorFactor": [0, 0, 0]
                },
            }
        )
        self.model.materials.append(material)

        attributes = gltflib.Attributes(
            POSITION=self.maybe_add_vec_array(sl_vec3_array_to_gltf(positions), gltflib.AccessorType.VEC3),
            NORMAL=self.maybe_add_vec_array(sl_vec3_array_to_gltf(normals), gltflib.AccessorType.VEC3),
            TEXCOORD_0=self.maybe_add_vec_array(np.array([sl_to_gltf_uv(uv) for uv in uvs]), gltflib.AccessorType.VEC2),
            JOINTS_0=self.maybe_add_vec_array(joints, gltflib.AccessorType.VEC4, gltflib.ComponentType.UNSIGNED_BYTE),
            WEIGHTS_0=self.maybe_add_vec_array(weights, gltflib.AccessorType.VEC4),
        )

        return gltflib.Primitive(
            attributes=attributes,
            indices=self.model.accessors.index(self.add_scalars(tris)),
            material=self.model.materials.index(material),
            mode=gltflib.PrimitiveMode.TRIANGLES,
        )

    def add_scalars(self, scalars: np.ndarray) -> gltflib.Accessor:
        """
        Add a potentially multidimensional array of scalars, returning the accessor

        Generally only used for triangle indices
        """
        scalar_bytes = scalars.astype(np.uint32).flatten().tobytes()
        buffer_view = self.add_buffer_view(scalar_bytes, None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=gltflib.ComponentType.UNSIGNED_INT,
            count=scalars.size,  # use the flattened size!
            type=gltflib.AccessorType.SCALAR.value,  # type: ignore
            min=[int(scalars.min())],  # type: ignore
            max=[int(scalars.max())],  # type: ignore
        )
        self.model.accessors.append(accessor)
        return accessor

    def maybe_add_vec_array(
        self,
        vecs: np.ndarray,
        vec_type: gltflib.AccessorType,
        component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT,
    ) -> Optional[int]:
        if not vecs.size:
            return None
        accessor = self.add_vec_array(vecs, vec_type, component_type)
        return self.model.accessors.index(accessor)

    def add_vec_array(
        self,
        vecs: np.ndarray,
        vec_type: gltflib.AccessorType,
        component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT
    ) -> gltflib.Accessor:
        """
        Add a two-dimensional array of vecs (positions, normals, weights, UVs), returning the accessor

        Vec type may be a vec2, vec3, or a vec4.
        """
        # Pretty much all of these are float32 except the ones that aren't
        dtype = np.float32
        if component_type == gltflib.ComponentType.UNSIGNED_BYTE:
            dtype = np.uint8
        vec_data = vecs.astype(dtype).tobytes()
        buffer_view = self.add_buffer_view(vec_data, target=None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=component_type,
            count=len(vecs),
            type=vec_type.value,  # type: ignore
            min=vecs.min(axis=0).tolist(),  # type: ignore
            max=vecs.max(axis=0).tolist(),  # type: ignore
        )
        self.model.accessors.append(accessor)
        return accessor

    def add_buffer_view(self, data: bytes, target: Optional[gltflib.BufferTarget]) -> gltflib.BufferView:
        """Create a buffer view and associated buffer and resource for a blob of data"""
        resource = gltflib.FileResource(filename=f"res-{uuid.uuid4()}.bin", data=data)
        self.gltf.resources.append(resource)

        buffer = gltflib.Buffer(uri=resource.filename, byteLength=len(resource.data))
        self.model.buffers.append(buffer)

        buffer_view = gltflib.BufferView(
            buffer=self.model.buffers.index(buffer),
            byteLength=buffer.byteLength,
            byteOffset=0,
            target=target
        )
        self.model.bufferViews.append(buffer_view)
        return buffer_view

    def add_joints(self, skin: SkinSegmentDict) -> JOINT_CONTEXT_DICT:
        # There may be some joints not present in the mesh that we need to add to reach the mPelvis root
        required_joints = set()
        for joint_name in skin['joint_names']:
            joint_node = AVATAR_SKELETON[joint_name]
            required_joints.add(joint_node)
            required_joints.update(joint_node.ancestors)

        # If this is present, it may override the joint positions from the skeleton definition
        if 'alt_inverse_bind_matrix' in skin:
            joint_overrides = dict(zip(skin['joint_names'], skin['alt_inverse_bind_matrix']))
        else:
            joint_overrides = {}

        built_joints: JOINT_CONTEXT_DICT = {}
        for joint in required_joints:
            joint_matrix = joint.matrix

            # Do we have a joint position override that would affect joint_matrix?
            override = joint_overrides.get(joint.name)
            if override:
                decomp = list(transformations.decompose_matrix(joint_matrix))
                # We specifically only want the translation from the override!
                translation = transformations.translation_from_matrix(llsd_to_mat4(override))
                # Only do it if the difference is over 0.1mm though
                if Vector3.dist(Vector3(*translation), joint.translation) > 0.0001:
                    decomp[3] = translation
                    joint_matrix = transformations.compose_matrix(*decomp)

            # Do we need to mess with the bone's matrices to make Blender cooperate?
            orig_matrix = joint_matrix
            fixup_matrix = np.identity(4)
            if self.blender_compatibility:
                joint_matrix, fixup_matrix = self._fix_blender_joint(joint_matrix)

            # TODO: populate "extras" here with the metadata the Blender collada stuff uses to store
            #  "bind_mat" and "rest_mat" so we can go back to our original matrices when exporting
            #  from blender to .dae!
            gltf_joint = self.add_node(joint.name, transform=joint_matrix)

            # Store the node along with any fixups we may need to apply to the bind matrices later
            built_joints[joint.name] = JointContext(gltf_joint, orig_matrix, fixup_matrix)

        # Add each joint to the child list of their respective parent
        for joint_name, joint_ctx in built_joints.items():
            if parent_name := AVATAR_SKELETON[joint_name].parent_name:
                built_joints[parent_name].node.children.append(self.model.nodes.index(joint_ctx.node))
        return built_joints

    def _fix_blender_joint(self, joint_matrix: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """
        Split a joint matrix into a joint matrix and fixup matrix

        If we don't account for weird scaling on the collision volumes, then
        Blender freaks out. This is an issue in Blender, where it doesn't
        apply the inverse bind matrices relative to the scale and rotation of
        the bones themselves, as it should per the glTF spec. Blender's glTF loader
        tries to recover from this by applying certain transforms as a pose, but
        the damage has been done by that point. Nobody else really runs into
        this because they have the good sense not to use some nightmare abomination
        rig with scaling and rotation on the skeleton like SL does.

        Blender will _only_ correctly handle the translation component of the joint;
        any other transforms need to be mixed into the inverse bind matrices themselves.
        There's no internal concept of bone scale or rot in Blender right now.

        Should investigate an Avastar-style approach of optionally retargeting
        to a Blender-compatible rig with translation-only bones, and modifying
        the bind matrices to accommodate. The glTF importer supports metadata through
        the "extras" fields, so we can potentially abuse the "bind_mat" metadata field
        that Blender already uses for the "Keep Bind Info" Collada import / export hack.

        For context:
        * https://github.com/KhronosGroup/glTF-Blender-IO/issues/1305
        * https://developer.blender.org/T38660 (these are Collada, but still relevant)
        * https://developer.blender.org/T29246
        * https://developer.blender.org/T50412
        * https://developer.blender.org/T53620 (FBX but still relevant)
        """
        scale, shear, angles, translate, projection = transformations.decompose_matrix(joint_matrix)
        joint_matrix = transformations.compose_matrix(translate=translate)
        fixup_matrix = transformations.compose_matrix(scale=scale, angles=angles)
        return joint_matrix, fixup_matrix

    def add_skin(self, name: str, joint_nodes: JOINT_CONTEXT_DICT, skin_seg: SkinSegmentDict) -> gltflib.Skin:
        joints_arr = []
        for joint_name in skin_seg['joint_names']:
            joint_ctx = joint_nodes[joint_name]
            joints_arr.append(self.model.nodes.index(joint_ctx.node))

        inv_binds = []
        for joint_name, inv_bind in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
            joint_ctx = joint_nodes[joint_name]
            inv_bind = joint_ctx.fixup_matrix @ llsd_to_mat4(inv_bind)
            inv_binds.append(sl_mat4_to_gltf(inv_bind))
        inv_binds_data = np.array(inv_binds, dtype=np.float32).tobytes()
        buffer_view = self.add_buffer_view(inv_binds_data, target=None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=gltflib.ComponentType.FLOAT,
            count=len(inv_binds),
            type=gltflib.AccessorType.MAT4.value,  # type: ignore
        )
        self.model.accessors.append(accessor)
        accessor_idx = self.model.accessors.index(accessor)

        skin = gltflib.Skin(name=name, joints=joints_arr, inverseBindMatrices=accessor_idx)
        self.model.skins.append(skin)
        return skin

    def finalize(self):
        """Clean up the mesh to pass the glTF smell test, should be done last"""
        def _nullify_empty_lists(dc):
            for field in dataclasses.fields(dc):
                # Empty lists should be replaced with None
                if getattr(dc, field.name) == []:
                    setattr(dc, field.name, None)

        for node in self.model.nodes:
            _nullify_empty_lists(node)
        _nullify_empty_lists(self.model)
        return self.gltf


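# Illustrative sketch (not part of the original class): assuming the
# transformations module composes M = T @ R @ Z @ S, _fix_blender_joint()'s
# split is lossless for shear-free joints -- the translation-only joint times
# the fixup reproduces the original matrix, and because add_skin() premultiplies
# each inverse bind matrix by the fixup, J @ inv_bind == T @ (F @ inv_bind).
def _demo_blender_fixup_roundtrip():
    joint = transformations.compose_matrix(
        scale=(1.0, 1.2, 1.0), angles=(0.0, 0.0, 0.3), translate=(0.0, 0.1, 0.0))
    scale, _shear, angles, translate, _proj = transformations.decompose_matrix(joint)
    trans_only = transformations.compose_matrix(translate=translate)
    fixup = transformations.compose_matrix(scale=scale, angles=angles)
    assert np.allclose(trans_only @ fixup, joint)

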
def main():
    # Take an llmesh file as an argument and spit out basename-converted.gltf
    with open(sys.argv[1], "rb") as f:
        reader = BufferReader("<", f.read())

    filename = Path(sys.argv[1]).stem
    mesh: MeshAsset = reader.read(LLMeshSerializer(parse_segment_contents=True))

    builder = GLTFBuilder(blender_compatibility=True)
    builder.add_nodes_from_llmesh(mesh, filename)
    gltf = builder.finalize()

    pprint.pprint(gltf.model)
    gltf.export_glb(sys.argv[1].rsplit(".", 1)[0] + "-converted.gltf")


if __name__ == "__main__":
    main()
@@ -1,6 +1,14 @@
from __future__ import annotations

import asyncio
import codecs
import functools
import logging
import os

import lazy_object_proxy
import pkg_resources
import re
import weakref
from pprint import PrettyPrinter
from typing import *
@@ -14,7 +22,7 @@ def _with_patched_multidict(f):
    # There's no way to tell pprint "hey, this is a dict,
    # this is how you access its items." A lot of the formatting logic
    # is in the module-level `_safe_repr()` which we don't want to mess with.
    # Instead, pretend our MultiDict has dict's __repr__ and while we're inside
    # Instead, pretend our MultiDict has dict's __repr__ while we're inside
    # calls to pprint. Hooray.
    orig_repr = MultiDict.__repr__
    if orig_repr is dict.__repr__:
@@ -62,6 +70,9 @@ class HippoPrettyPrinter(PrettyPrinter):
        return f"({reprs})"

    def pformat(self, obj: object, *args, **kwargs) -> str:
        # Unwrap lazy object proxies before pprinting them
        if isinstance(obj, lazy_object_proxy.Proxy):
            obj = obj.__wrapped__
        if isinstance(obj, (bytes, str)):
            return self._str_format(obj)
        return self._base_pformat(obj, *args, **kwargs)
@@ -121,3 +132,84 @@ def proxify(obj: Union[Callable[[], _T], weakref.ReferenceType, _T]) -> _T:
    if obj is not None and not isinstance(obj, weakref.ProxyTypes):
        return weakref.proxy(obj)
    return obj


class BiDiDict(Generic[_T]):
    """Dictionary for bidirectional lookups"""
    def __init__(self, values: Dict[_T, _T]):
        self.forward = {**values}
        self.backward = {value: key for (key, value) in values.items()}


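# Illustrative usage (not part of the original module): lookups work in
# either direction through the `forward` and `backward` dicts.
def _demo_bidi_dict():
    pairs = BiDiDict({"add": "remove"})
    assert pairs.forward["add"] == "remove"
    assert pairs.backward["remove"] == "add"

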
def bytes_unescape(val: bytes) -> bytes:
    # Only in CPython. bytes -> bytes with escape decoding.
    # https://stackoverflow.com/a/23151714
    return codecs.escape_decode(val)[0]  # type: ignore


def bytes_escape(val: bytes) -> bytes:
    # Try to keep newlines as-is
    return re.sub(rb"(?<!\\)\\n", b"\n", codecs.escape_encode(val)[0])  # type: ignore


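# Illustrative round-trip (not part of the original module): escaping keeps
# literal newlines readable while other control bytes stay escaped.
def _demo_bytes_escape_roundtrip():
    escaped = bytes_escape(b"a\nb\x00")
    assert escaped == b"a\nb\\x00"
    assert bytes_unescape(escaped) == b"a\nb\x00"

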
def get_resource_filename(resource_filename: str):
    return pkg_resources.resource_filename("hippolyzer", resource_filename)


def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[Sequence[_T], None, None]:
    while chunkable:
        yield chunkable[:chunk_size]
        chunkable = chunkable[chunk_size:]


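# Illustrative usage (not part of the original module): chunks preserve order
# and the final chunk holds whatever remainder is left over.
def _demo_to_chunks():
    assert list(to_chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]

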
def get_mtime(path):
    try:
        return os.stat(path).st_mtime
    except OSError:
        return None


def fut_logger(name: str, logger: logging.Logger, fut: asyncio.Future, *args) -> None:
    """Callback suitable for exception logging in `Future.add_done_callback()`"""
    if not fut.cancelled() and fut.exception():
        if isinstance(fut.exception(), asyncio.CancelledError):
            # Don't really care if the task was just cancelled
            return
        logger.exception(f"Failed in task for {name}", exc_info=fut.exception())


def add_future_logger(
    fut: asyncio.Future,
    name: Optional[str] = None,
    logger: Optional[logging.Logger] = None,
):
    """Add a logger to Futures that will never be directly `await`ed, logging exceptions"""
    fut.add_done_callback(functools.partial(fut_logger, name, logger or logging.getLogger()))


def create_logged_task(
    coro: Coroutine,
    name: Optional[str] = None,
    logger: Optional[logging.Logger] = None,
) -> asyncio.Task:
    task = asyncio.create_task(coro, name=name)
    add_future_logger(task, name, logger)
    return task


def reorient_coord(coord, new_orientation, min_val: int | float = 0):
    """
    Reorient a coordinate instance such that its components are negated and transposed appropriately.

    For ex:
        reorient_coord((1,2,3), (3,-2,-1)) == (3,-2,-1)
    """
    min_val = abs(min_val)
    coords = []
    for axis in new_orientation:
        axis_idx = abs(axis) - 1
        new_coord = coord[axis_idx] if axis >= 0 else min_val - coord[axis_idx]
        coords.append(new_coord)
    if coord.__class__ in (list, tuple):
        return coord.__class__(coords)
    return coord.__class__(*coords)

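
# Illustrative usage (not part of the original module): axes are 1-based, and
# a negative axis negates the component relative to min_val (e.g. mirroring a
# coordinate within a 256m region).
def _demo_reorient_coord():
    assert reorient_coord((1, 2, 3), (3, -2, -1)) == (3, -2, -1)
    assert reorient_coord((10, 20, 30), (1, -2, 3), min_val=256) == (10, 236, 30)
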
749  hippolyzer/lib/base/inventory.py  Normal file
@@ -0,0 +1,749 @@
"""
Parse the horrible legacy inventory-related format.

It's typically only used for object contents now.
"""

# TODO: Maybe handle CRC calculation? Does anything care about that?
#  I don't think anything in the viewer actually looks at the result
#  of the CRC check for UDP stuff.

from __future__ import annotations

import abc
import asyncio
import dataclasses
import datetime as dt
import inspect
import logging
import secrets
import struct
import weakref
from io import StringIO
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.legacy_schema import (
    parse_schema_line,
    SchemaBase,
    SchemaDate,
    SchemaFieldSerializer,
    SchemaHexInt,
    SchemaInt,
    SchemaLLSD,
    SchemaMultilineStr,
    SchemaParsingError,
    SchemaStr,
    SchemaUUID,
    schema_field,
)
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.templates import SaleType, InventoryType, LookupIntEnum, AssetType, FolderType

MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")
LOG = logging.getLogger(__name__)
_T = TypeVar("_T")


class SchemaFlagField(SchemaHexInt):
    """Like a hex int, but must be serialized as bytes in LLSD due to being a U32"""
    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> int:
        # Sometimes values in S32 range will just come through normally
        if isinstance(val, int):
            return val

        if flavor == "legacy":
            return struct.unpack("!I", val)[0]
        return val

    @classmethod
    def to_llsd(cls, val: int, flavor: str) -> Any:
        if flavor == "legacy":
            return struct.pack("!I", val)
        return val


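# Illustrative round-trip (not part of the original module): legacy LLSD
# carries U32 flags as four big-endian bytes, which from_llsd() unpacks again.
def _demo_schema_flag_field():
    packed = SchemaFlagField.to_llsd(0x80000000, "legacy")
    assert packed == b"\x80\x00\x00\x00"
    assert SchemaFlagField.from_llsd(packed, "legacy") == 0x80000000

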
class SchemaEnumField(SchemaStr, Generic[_T]):
    def __init__(self, enum_cls: Type[LookupIntEnum]):
        super().__init__()
        self._enum_cls = enum_cls

    def deserialize(self, val: str) -> _T:
        return self._enum_cls.from_lookup_name(val)

    def serialize(self, val: _T) -> str:
        return self._enum_cls(val).to_lookup_name()

    def from_llsd(self, val: Union[str, int], flavor: str) -> _T:
        if flavor == "legacy":
            return self.deserialize(val)
        return self._enum_cls(val)

    def to_llsd(self, val: _T, flavor: str) -> Union[int, str]:
        if flavor == "legacy":
            return self.serialize(val)
        return int(val)


def _yield_schema_tokens(reader: StringIO):
    in_bracket = False
    # empty str == EOF in Python
    while line := reader.readline():
        line = line.strip()
        # Whitespace-only lines are automatically skipped
        if not line:
            continue
        try:
            key, val = parse_schema_line(line)
        except SchemaParsingError:
            # Can happen if there's a malformed multi-line string, just
            # skip by it.
            LOG.warning(f"Found invalid inventory line {line!r}")
            continue
        if key == "{":
            if in_bracket:
                LOG.warning("Found multiple opening brackets inside structure, "
                            "was a nested structure not handled?")
            in_bracket = True
            continue
        if key == "}":
            if not in_bracket:
                LOG.warning("Unexpected closing bracket")
            in_bracket = False
            break
        yield key, val
    if in_bracket:
        LOG.warning("Reached EOF while inside a bracket")


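# Illustrative sketch (not part of the original module) of the bracketed,
# tab-separated format the tokenizer above consumes:
def _demo_yield_schema_tokens():
    reader = StringIO("{\n\tsale_type\tnot\n\tsale_price\t10\n}\n")
    assert list(_yield_schema_tokens(reader)) == [("sale_type", "not"), ("sale_price", "10")]

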
class InventoryBase(SchemaBase):
    SCHEMA_NAME: ClassVar[str]

    @classmethod
    def from_reader(cls, reader: StringIO, read_header=False) -> InventoryBase:
        tok_iter = _yield_schema_tokens(reader)
        # Someone else hasn't already read the header for us
        if read_header:
            schema_name, _ = next(tok_iter)
            if schema_name != cls.SCHEMA_NAME:
                raise ValueError(f"Expected schema name {schema_name!r} to be {cls.SCHEMA_NAME!r}")

        fields = cls._get_fields_dict()
        obj_dict = {}
        for key, val in tok_iter:
            if key in fields:
                field: dataclasses.Field = fields[key]
                spec = field.metadata.get("spec")
                # Not a real key, an internal var on our dataclass
                if not spec:
                    LOG.warning(f"Internal key {key!r}")
                    continue

                spec_cls = spec
                if not inspect.isclass(spec_cls):
                    spec_cls = spec_cls.__class__
                # some kind of nested structure like sale_info
                if issubclass(spec_cls, SchemaBase):
                    obj_dict[key] = spec.from_reader(reader)
                elif issubclass(spec_cls, SchemaFieldSerializer):
                    obj_dict[key] = spec.deserialize(val)
                else:
                    raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
            else:
                LOG.warning(f"Unknown key {key!r}")
        return cls._obj_from_dict(obj_dict)

    def to_writer(self, writer: StringIO):
        writer.write(f"\t{self.SCHEMA_NAME}")
        if self.SCHEMA_NAME == "permissions":
            writer.write(" 0\n")
        else:
            writer.write("\t0\n")
        writer.write("\t{\n")

        # Make sure the ID field always comes first, if there is one.
        fields_dict: Dict[str, dataclasses.Field] = {}
        if hasattr(self, "ID_ATTR"):
            fields_dict = {getattr(self, "ID_ATTR"): dataclasses.field()}
        # update()ing will put all fields that aren't yet in the dict after the ID attr.
        fields_dict.update(self._get_fields_dict())

        for field_name, field in fields_dict.items():
            spec = field.metadata.get("spec")
            # Not meant to be serialized
            if not spec:
                continue
            if field.metadata.get("llsd_only"):
                continue

            val = getattr(self, field_name)
            if val is None and not field.metadata.get("include_none"):
                continue

            spec_cls = spec
            if not inspect.isclass(spec_cls):
                spec_cls = spec_cls.__class__
            # Some kind of nested structure like sale_info
            if isinstance(val, SchemaBase):
                val.to_writer(writer)
            elif issubclass(spec_cls, SchemaFieldSerializer):
                writer.write(f"\t\t{field_name}\t{spec.serialize(val)}\n")
            else:
                raise ValueError(f"Bad inventory spec {spec!r}")
        writer.write("\t}\n")


class InventoryDifferences(NamedTuple):
    changed: List[InventoryNodeBase]
    removed: List[InventoryNodeBase]


class InventoryModel(InventoryBase):
    def __init__(self):
        self.nodes: Dict[UUID, InventoryNodeBase] = {}
        self.root: Optional[InventoryContainerBase] = None
        self.any_dirty = asyncio.Event()

    @classmethod
    def from_reader(cls, reader: StringIO, read_header=False) -> InventoryModel:
        model = cls()
        for key, value in _yield_schema_tokens(reader):
            if key == "inv_object":
                obj = InventoryObject.from_reader(reader)
                if obj is not None:
                    model.add(obj)
            elif key == "inv_category":
                cat = InventoryCategory.from_reader(reader)
                if cat is not None:
                    model.add(cat)
            elif key == "inv_item":
                item = InventoryItem.from_reader(reader)
                if item is not None:
                    model.add(item)
            else:
                LOG.warning("Unknown key {0}".format(key))
        return model

    @classmethod
    def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> Self:
        model = cls()
        for obj_dict in llsd_val:
            obj = None
            for inv_type in INVENTORY_TYPES:
                if inv_type.ID_ATTR in obj_dict:
                    if (obj := inv_type.from_llsd(obj_dict, flavor)) is not None:
                        model.add(obj)
                    break
            if obj is None:
                LOG.warning(f"Unknown object type {obj_dict!r}")
        return model

    @property
    def ordered_nodes(self) -> Iterable[InventoryNodeBase]:
        yield from self.all_containers
        yield from self.all_items

    @property
    def all_containers(self) -> Iterable[InventoryContainerBase]:
        for node in self.nodes.values():
            if isinstance(node, InventoryContainerBase):
                yield node

    @property
    def dirty_categories(self) -> Iterable[InventoryCategory]:
        for node in self.nodes.values():
            if isinstance(node, InventoryCategory) and node.version == InventoryCategory.VERSION_NONE:
                yield node

    @property
    def all_items(self) -> Iterable[InventoryItem]:
        for node in self.nodes.values():
            if not isinstance(node, InventoryContainerBase):
                yield node  # type: ignore

    def __eq__(self, other):
        if not isinstance(other, InventoryModel):
            return False
        return set(self.nodes.values()) == set(other.nodes.values())

    def to_writer(self, writer: StringIO):
        for node in self.ordered_nodes:
            node.to_writer(writer)

    def to_llsd(self, flavor: str = "legacy"):
        return list(node.to_llsd(flavor) for node in self.ordered_nodes)

    def add(self, node: InventoryNodeBase):
        if node.node_id in self.nodes:
            raise KeyError(f"{node.node_id} already exists in the inventory model")

        self.nodes[node.node_id] = node
        if isinstance(node, InventoryContainerBase):
            if node.parent_id == UUID.ZERO:
                self.root = node
        node.model = weakref.proxy(self)
        return node

    def update(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
        """Update an existing node, optionally only updating specific fields"""
        if node.node_id not in self.nodes:
            raise KeyError(f"{node.node_id} not in the inventory model")

        orig_node = self.nodes[node.node_id]
        if node.__class__ != orig_node.__class__:
            raise ValueError(f"Tried to update {orig_node!r} from non-matching {node!r}")

        if not update_fields:
            # Update everything but the model parameter
            update_fields = node.get_field_names()
        for field_name in update_fields:
            setattr(orig_node, field_name, getattr(node, field_name))
        return orig_node

    def upsert(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
        """Add or update a node"""
        if node.node_id in self.nodes:
            return self.update(node, update_fields)
        return self.add(node)

    def unlink(self, node: InventoryNodeBase, single_only: bool = False) -> Sequence[InventoryNodeBase]:
        """Unlink a node and its descendants from the tree, returning the removed nodes"""
        assert node.model == self
        if node == self.root:
            self.root = None
        unlinked = [node]
        if isinstance(node, InventoryContainerBase) and not single_only:
            for child in node.children:
                unlinked.extend(self.unlink(child))
        self.nodes.pop(node.node_id, None)
        node.model = None
        return unlinked

    def get_differences(self, other: InventoryModel) -> InventoryDifferences:
        # Includes modified things with the same ID
        changed_in_other = []
        removed_in_other = []

        other_keys = set(other.nodes.keys())
        our_keys = set(self.nodes.keys())

        # Removed
        for key in our_keys - other_keys:
            removed_in_other.append(self.nodes[key])

        # Updated
        for key in other_keys.intersection(our_keys):
            other_node = other.nodes[key]
            if other_node != self.nodes[key]:
                changed_in_other.append(other_node)

        # Added
        for key in other_keys - our_keys:
            changed_in_other.append(other.nodes[key])
        return InventoryDifferences(
            changed=changed_in_other,
            removed=removed_in_other,
        )

    def flag_if_dirty(self):
        if any(self.dirty_categories):
            self.any_dirty.set()

    def __getitem__(self, item: UUID) -> InventoryNodeBase:
        return self.nodes[item]

    def __contains__(self, item: UUID):
        return item in self.nodes

    def get(self, key: UUID) -> Optional[InventoryNodeBase]:
        return self.nodes.get(key)

    def get_category(self, key: UUID) -> InventoryCategory:
        node = self.get(key)
        if not isinstance(node, InventoryCategory):
            raise ValueError(f"{node!r} is not a category")
        return node

    def get_item(self, key: UUID) -> InventoryItem:
        node = self.get(key)
        if not isinstance(node, InventoryItem):
            raise ValueError(f"{node!r} is not an item")
        return node


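# Illustrative sketch (not part of the original module; assumes AssetType.OBJECT
# exists in the templates enum): get_differences() reports nodes added to or
# changed in `other`, plus nodes that are missing from it.
def _demo_inventory_model_diff():
    ours, theirs = InventoryModel(), InventoryModel()
    obj = InventoryObject(parent_id=UUID.ZERO, type=AssetType.OBJECT,
                          obj_id=UUID.random(), name="root object")
    ours.add(obj)
    diff = ours.get_differences(theirs)
    assert diff.removed == [obj] and diff.changed == []

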
@dataclasses.dataclass
class InventoryPermissions(InventoryBase):
    SCHEMA_NAME: ClassVar[str] = "permissions"

    base_mask: int = schema_field(SchemaHexInt)
    owner_mask: int = schema_field(SchemaHexInt)
    group_mask: int = schema_field(SchemaHexInt)
    everyone_mask: int = schema_field(SchemaHexInt)
    next_owner_mask: int = schema_field(SchemaHexInt)
    creator_id: UUID = schema_field(SchemaUUID)
    owner_id: UUID = schema_field(SchemaUUID)
    last_owner_id: UUID = schema_field(SchemaUUID)
    group_id: UUID = schema_field(SchemaUUID)
    # Nothing actually cares about this, but it could be there.
    # It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
    is_owner_group: Optional[int] = schema_field(SchemaInt, default=None, llsd_only=True)

    @classmethod
    def make_default(cls) -> Self:
        return cls(
            base_mask=0xFFffFFff,
            owner_mask=0xFFffFFff,
            group_mask=0,
            everyone_mask=0,
            next_owner_mask=0x82000,
            creator_id=UUID.ZERO,
            owner_id=UUID.ZERO,
            last_owner_id=UUID.ZERO,
            group_id=UUID.ZERO,
            is_owner_group=None,
        )


@dataclasses.dataclass
class InventorySaleInfo(InventoryBase):
    SCHEMA_NAME: ClassVar[str] = "sale_info"

    sale_type: SaleType = schema_field(SchemaEnumField(SaleType))
    sale_price: int = schema_field(SchemaInt)

    @classmethod
    def make_default(cls) -> Self:
        return cls(sale_type=SaleType.NOT, sale_price=10)


class _HasBaseNodeAttrs(abc.ABC):
    """
    Only exists so that we can assert that all subclasses should have this without forcing
    a particular serialization order, as would happen if this was present on InventoryNodeBase.
    """
    name: str
    type: AssetType


@dataclasses.dataclass
class InventoryNodeBase(InventoryBase, _HasBaseNodeAttrs):
    ID_ATTR: ClassVar[str]

    parent_id: Optional[UUID] = schema_field(SchemaUUID)

    model: Optional[InventoryModel] = dataclasses.field(
        default=None, init=False, hash=False, compare=False, repr=False
    )

    @classmethod
    def get_field_names(cls) -> Set[str]:
        return set(cls._get_fields_dict().keys()) - {"model"}

    @property
    def node_id(self) -> UUID:
        return getattr(self, self.ID_ATTR)

    @node_id.setter
    def node_id(self, val: UUID):
        setattr(self, self.ID_ATTR, val)

    @property
    def parent(self) -> Optional[InventoryContainerBase]:
        return self.model.nodes.get(self.parent_id)

    def unlink(self) -> Sequence[InventoryNodeBase]:
        return self.model.unlink(self)

    @classmethod
    def _obj_from_dict(cls, obj_dict):
        # Bad entry, ignore
        # TODO: Check on these. might be symlinks or something.
        if obj_dict.get("type") == "-1":
            LOG.warning(f"Skipping bad object with type == -1: {obj_dict!r}")
            return None
        return super()._obj_from_dict(obj_dict)

    def __hash__(self):
        return hash(self.node_id)

    def __iter__(self) -> Iterator[InventoryNodeBase]:
        return iter(())

    def __contains__(self, item) -> bool:
        return item in tuple(self)


@dataclasses.dataclass
class InventoryContainerBase(InventoryNodeBase):
    type: AssetType = schema_field(SchemaEnumField(AssetType))

    @property
    def children(self) -> Sequence[InventoryNodeBase]:
        return tuple(
            x for x in self.model.nodes.values()
            if x.parent_id == self.node_id
        )

    @property
    def descendents(self) -> List[InventoryNodeBase]:
        new_children: List[InventoryNodeBase] = [self]
        descendents = []
        while new_children:
            to_check = new_children[:]
            new_children.clear()
            for obj in to_check:
                if isinstance(obj, InventoryContainerBase):
                    for child in obj.children:
                        if child in descendents:
                            continue
                        new_children.append(child)
                        descendents.append(child)
                else:
                    if obj not in descendents:
                        descendents.append(obj)
        return descendents

    def __getitem__(self, item: Union[int, str]) -> InventoryNodeBase:
        if isinstance(item, int):
            return self.children[item]

        for child in self.children:
            if child.name == item:
                return child
        raise KeyError(f"{item!r} not found in children")

    def __iter__(self) -> Iterator[InventoryNodeBase]:
        return iter(self.children)

    def get_or_create_subcategory(self, name: str) -> InventoryCategory:
        for child in self:
            if child.name == name and isinstance(child, InventoryCategory):
                return child
        child = InventoryCategory(
            name=name,
            cat_id=UUID.random(),
            parent_id=self.node_id,
            type=AssetType.CATEGORY,
            pref_type=FolderType.NONE,
            owner_id=getattr(self, 'owner_id', UUID.ZERO),
            version=1,
        )
        self.model.add(child)
        return child

    # So autogenerated __hash__ doesn't kill our inherited one
    __hash__ = InventoryNodeBase.__hash__


@dataclasses.dataclass
class InventoryObject(InventoryContainerBase):
    SCHEMA_NAME: ClassVar[str] = "inv_object"
    ID_ATTR: ClassVar[str] = "obj_id"

    obj_id: UUID = schema_field(SchemaUUID)
    name: str = schema_field(SchemaMultilineStr)
    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)

    __hash__ = InventoryNodeBase.__hash__


@dataclasses.dataclass
class InventoryCategory(InventoryContainerBase):
    ID_ATTR: ClassVar[str] = "cat_id"
    # AIS calls this something else...
    ID_ATTR_AIS: ClassVar[str] = "category_id"
    SCHEMA_NAME: ClassVar[str] = "inv_category"
    VERSION_NONE: ClassVar[int] = -1

    cat_id: UUID = schema_field(SchemaUUID)
    pref_type: FolderType = schema_field(SchemaEnumField(FolderType), llsd_name="preferred_type")
    name: str = schema_field(SchemaMultilineStr)
    owner_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
    version: int = schema_field(SchemaInt, default=VERSION_NONE, llsd_only=True)
    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)

    def to_folder_data(self) -> Block:
        return Block(
            "FolderData",
            FolderID=self.cat_id,
            ParentID=self.parent_id,
            CallbackID=0,
            Type=self.pref_type,
            Name=self.name,
        )

    @classmethod
    def from_folder_data(cls, block: Block):
        return cls(
            cat_id=block["FolderID"],
            parent_id=block["ParentID"],
            pref_type=block["Type"],
            name=block["Name"],
            type=AssetType.CATEGORY,
        )

    @classmethod
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
        if flavor == "ais" and "type" not in inv_dict:
            inv_dict = inv_dict.copy()
            inv_dict["type"] = AssetType.CATEGORY
        return super().from_llsd(inv_dict, flavor)

    def to_llsd(self, flavor: str = "legacy"):
        payload = super().to_llsd(flavor)
        if flavor == "ais":
            # AIS already knows the inventory type is category
            payload.pop("type", None)
        return payload

    @classmethod
    def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
        fields = super()._get_fields_dict(llsd_flavor)
        if llsd_flavor == "ais":
            # These have different names though
            fields["type_default"] = fields.pop("preferred_type")
            fields["agent_id"] = fields.pop("owner_id")
            fields["category_id"] = fields.pop("cat_id")
        return fields

    __hash__ = InventoryNodeBase.__hash__


@dataclasses.dataclass
class InventoryItem(InventoryNodeBase):
    SCHEMA_NAME: ClassVar[str] = "inv_item"
    ID_ATTR: ClassVar[str] = "item_id"

    item_id: UUID = schema_field(SchemaUUID)
    permissions: InventoryPermissions = schema_field(InventoryPermissions)
    asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
    shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
    type: Optional[AssetType] = schema_field(SchemaEnumField(AssetType), default=None)
    inv_type: Optional[InventoryType] = schema_field(SchemaEnumField(InventoryType), default=None)
    flags: Optional[int] = schema_field(SchemaFlagField, default=None)
    sale_info: Optional[InventorySaleInfo] = schema_field(InventorySaleInfo, default=None)
    name: Optional[str] = schema_field(SchemaMultilineStr, default=None)
    desc: Optional[str] = schema_field(SchemaMultilineStr, default=None)
    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
    """Specifically for script metadata, generally just experience info"""
    thumbnail: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)
    """Generally just a dict with the thumbnail UUID in it"""
    creation_date: Optional[dt.datetime] = schema_field(SchemaDate, llsd_name="created_at", default=None)

    __hash__ = InventoryNodeBase.__hash__

    @property
    def true_asset_id(self) -> UUID:
        if self.asset_id is not None:
            return self.asset_id
        return self.shadow_id ^ MAGIC_ID

    def to_inventory_data(self, block_name: str = "InventoryData") -> Block:
        return Block(
            block_name,
            ItemID=self.item_id,
            FolderID=self.parent_id,
            CallbackID=0,
            CreatorID=self.permissions.creator_id,
            OwnerID=self.permissions.owner_id,
            GroupID=self.permissions.group_id,
            BaseMask=self.permissions.base_mask,
            OwnerMask=self.permissions.owner_mask,
            GroupMask=self.permissions.group_mask,
            EveryoneMask=self.permissions.everyone_mask,
            NextOwnerMask=self.permissions.next_owner_mask,
            GroupOwned=self.permissions.owner_id == UUID.ZERO and self.permissions.group_id != UUID.ZERO,
            AssetID=self.true_asset_id,
            Type=self.type,
            InvType=self.inv_type,
            Flags=self.flags,
            SaleType=self.sale_info.sale_type,
            SalePrice=self.sale_info.sale_price,
            Name=self.name,
            Description=self.desc,
            CreationDate=SchemaDate.to_llsd(self.creation_date, "legacy"),
            # Meaningless here
            CRC=secrets.randbits(32),
        )

    @classmethod
    def from_inventory_data(cls, block: Block):
        return cls(
            item_id=block["ItemID"],
            # Might be under one of two names
            parent_id=block.get("ParentID", block["FolderID"]),
            permissions=InventoryPermissions(
                creator_id=block["CreatorID"],
                owner_id=block["OwnerID"],
                # Unknown, not sent in this schema
                last_owner_id=block.get("LastOwnerID", UUID.ZERO),
                group_id=block["GroupID"],
                base_mask=block["BaseMask"],
                owner_mask=block["OwnerMask"],
                group_mask=block["GroupMask"],
                everyone_mask=block["EveryoneMask"],
                next_owner_mask=block["NextOwnerMask"],
            ),
            # May be missing in UpdateInventoryItem
            asset_id=block.get("AssetID"),
            type=AssetType(block["Type"]),
            inv_type=InventoryType(block["InvType"]),
            flags=block["Flags"],
            sale_info=InventorySaleInfo(
                sale_type=SaleType(block["SaleType"]),
                sale_price=block["SalePrice"],
            ),
            name=block["Name"],
            desc=block["Description"],
            creation_date=SchemaDate.from_llsd(block["CreationDate"], "legacy"),
        )

    def to_llsd(self, flavor: str = "legacy"):
        val = super().to_llsd(flavor=flavor)
        if flavor == "ais":
            # There's little chance this differs from owner ID; just set it in place.
            val["agent_id"] = val["permissions"]["owner_id"]
            if val["type"] == AssetType.LINK:
                # For link items, there is no asset, only a linked ID.
                val["linked_id"] = val.pop("asset_id")
                # These don't exist either
                val.pop("permissions", None)
                val.pop("sale_info", None)
        return val

    @classmethod
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
        if flavor == "ais" and "linked_id" in inv_dict:
            # Links get represented differently than other items for whatever reason.
            # This is incredibly annoying; under *NIX there's nothing really special about symlinks.
            inv_dict = inv_dict.copy()
            # Fill this in since it needs to be there
            if "permissions" not in inv_dict:
                inv_dict["permissions"] = InventoryPermissions(
                    base_mask=0xFFffFFff,
                    owner_mask=0xFFffFFff,
                    group_mask=0xFFffFFff,
                    everyone_mask=0,
                    next_owner_mask=0xFFffFFff,
                    creator_id=UUID.ZERO,
                    owner_id=UUID.ZERO,
                    last_owner_id=UUID.ZERO,
                    group_id=UUID.ZERO,
                ).to_llsd("ais")
            if "sale_info" not in inv_dict:
                inv_dict["sale_info"] = InventorySaleInfo(
                    sale_type=SaleType.NOT,
                    sale_price=0,
                ).to_llsd("ais")
            if "type" not in inv_dict:
                inv_dict["type"] = AssetType.LINK

            # In the context of symlinks, asset id means linked item ID.
            # This is also how indra stores symlinks. Why the asymmetry in AIS if none of the
            # consumers actually want it? Who knows.
            inv_dict["asset_id"] = inv_dict.pop("linked_id")
        return super().from_llsd(inv_dict, flavor)


INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)
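

# Illustrative sketch (not part of the original module): shadow_id is just the
# asset ID XORed with MAGIC_ID, so XORing a second time recovers the asset ID,
# which is exactly what InventoryItem.true_asset_id does above.
def _demo_shadow_id_roundtrip():
    asset_id = UUID.random()
    shadow_id = asset_id ^ MAGIC_ID
    assert shadow_id ^ MAGIC_ID == asset_id
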
@@ -1,19 +1,12 @@
import os
import tempfile
from io import BytesIO
from typing import *

import defusedxml.cElementTree
import defusedxml.ElementTree
from glymur import jp2box, Jp2k

# Replace glymur's ElementTree with a safe one
jp2box.ET = defusedxml.cElementTree


SL_DEFAULT_ENCODE = {
    "cratios": (1920.0, 480.0, 120.0, 30.0, 10.0),
    "irreversible": True,
}
jp2box.ET = defusedxml.ElementTree


class BufferedJp2k(Jp2k):
@@ -24,12 +17,7 @@ class BufferedJp2k(Jp2k):
    based on filename, so this is the least brittle approach.
    """

    def __init__(self, contents: bytes, encode_kwargs: Optional[Dict] = None):
        if encode_kwargs is None:
            self.encode_kwargs = SL_DEFAULT_ENCODE.copy()
        else:
            self.encode_kwargs = encode_kwargs

    def __init__(self, contents: bytes):
        stream = BytesIO(contents)
        self.temp_file = tempfile.NamedTemporaryFile(delete=False)
        stream.seek(0)
@@ -44,11 +32,12 @@ class BufferedJp2k(Jp2k):
        os.remove(self.temp_file.name)
        self.temp_file = None

    def _write(self, img_array, verbose=False, **kwargs):
        # Glymur normally only lets you control encode params when a write happens within
        # the constructor. Keep around the encode params from the constructor and pass
        # them to successive write calls.
        return super()._write(img_array, verbose=False, **self.encode_kwargs, **kwargs)
    def _populate_cparams(self, img_array):
        if self._cratios is None:
            self._cratios = (1920.0, 480.0, 120.0, 30.0, 10.0)
        if self._irreversible is None:
            self.irreversible = True
        return super()._populate_cparams(img_array)

    def __bytes__(self):
        with open(self.temp_file.name, "rb") as f:

@@ -1,255 +0,0 @@
"""
Parse the horrible legacy inventory format

It's typically only used for object contents now.
"""
from __future__ import annotations

import abc
import dataclasses
import datetime as dt
import itertools
import logging
import re
import weakref
from typing import *

from hippolyzer.lib.base.datatypes import UUID

LOG = logging.getLogger(__name__)
MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")


def _parse_str(val: str):
    return val.rstrip("|")


def _int_from_hex(val: str):
    return int(val, 16)


def _parse_date(val: str):
    return dt.datetime.utcfromtimestamp(int(val))


class InventoryParsingError(Exception):
    pass


def _inv_field(spec: Union[Callable, Type], *, default=dataclasses.MISSING, init=True, repr=True,  # noqa
               hash=None, compare=True) -> dataclasses.Field:  # noqa
    """Describe a field in the inventory schema and the shape of its value"""
    return dataclasses.field(
        metadata={"spec": spec}, default=default, init=init,
        repr=repr, hash=hash, compare=compare
    )


# The schema is meant to allow multi-line strings, but in practice
# it does not due to scanf() shenanigans. This is fine.
_INV_TOKEN_RE = re.compile(r'\A\s*([^\s]+)(\s+([^\t\r\n]+))?$')


def _parse_inv_line(line: str):
    g = _INV_TOKEN_RE.search(line)
    if not g:
        raise InventoryParsingError("%r doesn't match the token regex" % line)
    return g.group(1), g.group(3)


def _yield_inv_tokens(line_iter: Iterator[str]):
    in_bracket = False
    for line in line_iter:
        line = line.strip()
        if not line:
            continue
        try:
            key, val = _parse_inv_line(line)
        except InventoryParsingError:
            # Can happen if there's a malformed multi-line string, just
            # skip by it.
            LOG.warning(f"Found invalid inventory line {line!r}")
            continue
        if key == "{":
            if in_bracket:
                LOG.warning("Found multiple opening brackets inside structure, "
                            "was a nested structure not handled?")
            in_bracket = True
            continue
        if key == "}":
            in_bracket = False
            break
        yield key, val
    if in_bracket:
        raise LOG.warning("Reached EOF while inside a bracket")


class InventoryModel:
    def __init__(self):
        self.containers: Dict[UUID, InventoryContainerBase] = {}
        self.items: Dict[UUID, InventoryItem] = {}
        self.root: Optional[InventoryContainerBase] = None

    @classmethod
    def from_str(cls, text: str):
        return cls.from_iter(iter(text.splitlines()))

    @classmethod
    def from_bytes(cls, data: bytes):
        return cls.from_str(data.decode("utf8"))

    @classmethod
    def from_iter(cls, line_iter: Iterator[str]) -> InventoryModel:
        model = cls()
        for key, value in _yield_inv_tokens(line_iter):
            if key == "inv_object":
                obj = InventoryObject.from_iter(line_iter)
                if obj is not None:
                    model.add_container(obj)
            elif key == "inv_category":
                cat = InventoryCategory.from_iter(line_iter)
                if cat is not None:
                    model.add_container(cat)
            elif key == "inv_item":
                item = InventoryItem.from_iter(line_iter)
                if item is not None:
                    model.add_item(item)
            else:
                LOG.warning("Unknown key {0}".format(key))
        model.reparent_nodes()
        return model

    def add_container(self, container: InventoryContainerBase):
        self.containers[container.node_id] = container
        container.model = weakref.proxy(self)

    def add_item(self, item: InventoryItem):
        self.items[item.item_id] = item
        item.model = weakref.proxy(self)

    def reparent_nodes(self):
        self.root = None
        for container in self.containers.values():
            container.children.clear()
            if container.parent_id == UUID():
                self.root = container
        for obj in itertools.chain(self.items.values(), self.containers.values()):
            if not obj.parent_id or obj.parent_id == UUID():
                continue
            parent_container = self.containers.get(obj.parent_id)
            if not parent_container:
                LOG.warning("{0} had an invalid parent {1}".format(obj, obj.parent_id))
                continue
            parent_container.children.append(obj)


@dataclasses.dataclass
class InventoryBase(abc.ABC):
    @classmethod
    def _fields_dict(cls):
        return {f.name: f for f in dataclasses.fields(cls)}

    @classmethod
    def from_iter(cls, line_iter: Iterator[str]):
        fields = cls._fields_dict()
        obj = {}
        for key, val in _yield_inv_tokens(line_iter):
            if key in fields:
                field: dataclasses.Field = fields[key]
                spec = field.metadata.get("spec")
                # Not a real key, an internal var on our dataclass
                if not spec:
                    LOG.warning(f"Internal key {key!r}")
                    continue
                # some kind of nested structure like sale_info
                if isinstance(spec, type) and issubclass(spec, InventoryBase):
                    obj[key] = spec.from_iter(line_iter)
                else:
                    obj[key] = spec(val)
            else:
                LOG.warning(f"Unknown key {key!r}")

        # Bad entry, ignore
        # TODO: Check on these. might be symlinks or something.
        if obj.get("type") == "-1":
            LOG.warning(f"Skipping bad object with type == -1: {obj!r}")
            return None
        return cls(**obj)  # type: ignore


@dataclasses.dataclass
class InventoryPermissions(InventoryBase):
    base_mask: int = _inv_field(_int_from_hex)
    owner_mask: int = _inv_field(_int_from_hex)
    group_mask: int = _inv_field(_int_from_hex)
    everyone_mask: int = _inv_field(_int_from_hex)
    next_owner_mask: int = _inv_field(_int_from_hex)
    creator_id: UUID = _inv_field(UUID)
    owner_id: UUID = _inv_field(UUID)
    last_owner_id: UUID = _inv_field(UUID)
    group_id: UUID = _inv_field(UUID)


@dataclasses.dataclass
class InventorySaleInfo(InventoryBase):
    sale_type: str = _inv_field(str)
    sale_price: int = _inv_field(int)


@dataclasses.dataclass
class InventoryNodeBase(InventoryBase):
    ID_ATTR: ClassVar[str]
    parent_id: Optional[UUID] = _inv_field(UUID)
    model: Optional[InventoryModel] = dataclasses.field(default=None, init=False)

    @property
    def node_id(self) -> UUID:
        return getattr(self, self.ID_ATTR)

    @property
    def parent(self):
        return self.model.containers.get(self.parent_id)


@dataclasses.dataclass
class InventoryContainerBase(InventoryNodeBase):
    type: str = _inv_field(str)
    name: str = _inv_field(_parse_str)
    children: List[InventoryNodeBase] = dataclasses.field(default_factory=list, init=False)


@dataclasses.dataclass
class InventoryObject(InventoryContainerBase):
    ID_ATTR: ClassVar[str] = "obj_id"
    obj_id: UUID = _inv_field(UUID)


@dataclasses.dataclass
class InventoryCategory(InventoryContainerBase):
    ID_ATTR: ClassVar[str] = "cat_id"
    cat_id: UUID = _inv_field(UUID)
    pref_type: str = _inv_field(str)
    owner_id: UUID = _inv_field(UUID)
    version: int = _inv_field(int)


@dataclasses.dataclass
class InventoryItem(InventoryNodeBase):
    ID_ATTR: ClassVar[str] = "item_id"
    item_id: UUID = _inv_field(UUID)
    type: str = _inv_field(str)
    inv_type: str = _inv_field(str)
    flags: int = _inv_field(_int_from_hex)
    name: str = _inv_field(_parse_str)
    desc: str = _inv_field(_parse_str)
    creation_date: dt.datetime = _inv_field(_parse_date)
    permissions: InventoryPermissions = _inv_field(InventoryPermissions)
    sale_info: InventorySaleInfo = _inv_field(InventorySaleInfo)
    asset_id: Optional[UUID] = _inv_field(UUID, default=None)
    shadow_id: Optional[UUID] = _inv_field(UUID, default=None)

    @property
    def true_asset_id(self) -> UUID:
        if self.asset_id is not None:
            return self.asset_id
        return self.shadow_id ^ MAGIC_ID
266  hippolyzer/lib/base/legacy_schema.py  Normal file
@@ -0,0 +1,266 @@
"""
Legacy line-oriented schema parser base classes

Used for task inventory and wearables.
"""
from __future__ import annotations

import abc
import calendar
import dataclasses
import datetime as dt
import inspect
import logging
import re
from io import StringIO
from typing import *

import hippolyzer.lib.base.llsd as llsd

from hippolyzer.lib.base.datatypes import UUID

LOG = logging.getLogger(__name__)
_T = TypeVar("_T")


class SchemaFieldSerializer(abc.ABC, Generic[_T]):
    @classmethod
    @abc.abstractmethod
    def deserialize(cls, val: str) -> _T:
        pass

    @classmethod
    @abc.abstractmethod
    def serialize(cls, val: _T) -> str:
        pass

    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> _T:
        return val

    @classmethod
    def to_llsd(cls, val: _T, flavor: str) -> Any:
        return val


class SchemaDate(SchemaFieldSerializer[dt.datetime]):
    @classmethod
    def deserialize(cls, val: str) -> dt.datetime:
        return dt.datetime.fromtimestamp(int(val), dt.timezone.utc)

    @classmethod
    def serialize(cls, val: dt.datetime) -> str:
        return str(calendar.timegm(val.utctimetuple()))

    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> dt.datetime:
        return dt.datetime.fromtimestamp(val, dt.timezone.utc)

    @classmethod
    def to_llsd(cls, val: dt.datetime, flavor: str):
        return calendar.timegm(val.utctimetuple())


class SchemaHexInt(SchemaFieldSerializer[int]):
    @classmethod
    def deserialize(cls, val: str) -> int:
        return int(val, 16)

    @classmethod
    def serialize(cls, val: int) -> str:
        return "%08x" % val


class SchemaInt(SchemaFieldSerializer[int]):
    @classmethod
    def deserialize(cls, val: str) -> int:
        return int(val)

    @classmethod
    def serialize(cls, val: int) -> str:
        return str(val)


class SchemaMultilineStr(SchemaFieldSerializer[str]):
    @classmethod
    def deserialize(cls, val: str) -> str:
        # llinventory claims that it will parse multiple lines until it finds
        # an "|" terminator. That's not true. Use llinventory's _actual_ behaviour.
        return val.partition("|")[0]

    @classmethod
    def serialize(cls, val: str) -> str:
        return val + "|"
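A quick round-trip sketch for the serializers above (nothing assumed beyond this file):

assert SchemaHexInt.deserialize("000000ff") == 255
assert SchemaHexInt.serialize(255) == "000000ff"

# SchemaMultilineStr truncates at the first "|", matching llinventory's
# actual (not documented) behaviour:
assert SchemaMultilineStr.deserialize("a nice desc|trailing junk") == "a nice desc"
assert SchemaMultilineStr.serialize("a nice desc") == "a nice desc|"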
class SchemaStr(SchemaFieldSerializer[str]):
    @classmethod
    def deserialize(cls, val: str) -> str:
        return val

    @classmethod
    def serialize(cls, val: str) -> str:
        return val


class SchemaUUID(SchemaFieldSerializer[UUID]):
    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> UUID:
        # FetchInventory2 will return a string, but we want a UUID. It's not an issue
        # for us to return a UUID there because it'll just cast to string if
        # that's what it wants.
        return UUID(val)

    @classmethod
    def deserialize(cls, val: str) -> UUID:
        return UUID(val)

    @classmethod
    def serialize(cls, val: UUID) -> str:
        return str(val)


class SchemaLLSD(SchemaFieldSerializer[_T]):
    """Arbitrary LLSD embedded in a field"""
    @classmethod
    def deserialize(cls, val: str) -> _T:
        return llsd.parse_xml(val.partition("|")[0].encode("utf8"))

    @classmethod
    def serialize(cls, val: _T) -> str:
        # Don't include the XML header
        return llsd.format_xml(val).split(b">", 1)[1].decode("utf8") + "\n|"


_SCHEMA_SPEC = Union[Type[Union["SchemaBase", SchemaFieldSerializer]], SchemaFieldSerializer]


def schema_field(spec: _SCHEMA_SPEC, *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False,
                 include_none=False) -> dataclasses.Field:  # noqa
    """Describe a field in the inventory schema and the shape of its value"""
    return dataclasses.field(  # noqa
        metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only, "include_none": include_none},
        default=default, init=init, repr=repr, hash=hash, compare=compare,
    )


class SchemaParsingError(Exception):
    pass


# The schema is meant to allow multi-line strings, but in practice
# it does not due to scanf() shenanigans. This is fine.
_SCHEMA_LINE_TOKENS_RE = re.compile(r'\A\s*([^\s]+)(\s+([^\t\r\n]+))?$')


def parse_schema_line(line: str):
    g = _SCHEMA_LINE_TOKENS_RE.search(line)
    if not g:
        raise SchemaParsingError(f"{line!r} doesn't match the token regex")
    return g.group(1), g.group(3)
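The tokenizer's behaviour, read straight off the regex above: one non-whitespace key, then an optional value running to the end of the line.

assert parse_schema_line("\tcreation_date\t1201049459") == ("creation_date", "1201049459")
assert parse_schema_line("{") == ("{", None)   # bare tokens have no value group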
@dataclasses.dataclass
class SchemaBase(abc.ABC):
    @classmethod
    def _get_fields_dict(cls, llsd_flavor: Optional[str] = None) -> Dict[str, dataclasses.Field]:
        fields_dict = {}
        for field in dataclasses.fields(cls):
            field_name = field.name
            if llsd_flavor:
                field_name = field.metadata.get("llsd_name") or field_name
            fields_dict[field_name] = field
        return fields_dict

    @classmethod
    def from_str(cls, text: str) -> Self:
        return cls.from_reader(StringIO(text))

    @classmethod
    @abc.abstractmethod
    def from_reader(cls: Type[_T], reader: StringIO) -> _T:
        pass

    @classmethod
    def from_bytes(cls, data: bytes) -> Self:
        return cls.from_str(data.decode("utf8"))

    @classmethod
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
        fields = cls._get_fields_dict(llsd_flavor=flavor)
        obj_dict = {}
        try:
            for key, val in inv_dict.items():
                if key in fields:
                    field = fields[key]
                    key = field.name
                    spec = field.metadata.get("spec")
                    # Not a real key, an internal var on our dataclass
                    if not spec:
                        LOG.warning(f"Internal key {key!r}")
                        continue

                    spec_cls = spec
                    if not inspect.isclass(spec_cls):
                        spec_cls = spec_cls.__class__

                    # Some kind of nested structure like sale_info
                    if issubclass(spec_cls, SchemaBase):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    elif issubclass(spec_cls, SchemaFieldSerializer):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    else:
                        raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
                else:
                    if flavor != "ais":
                        # AIS has a number of different fields that are irrelevant depending on
                        # what exactly sent the payload
                        LOG.warning(f"Unknown key {key!r}")
        except:
            LOG.error(f"Failed to parse inventory schema: {inv_dict!r}")
            raise
        return cls._obj_from_dict(obj_dict)

    def to_bytes(self) -> bytes:
        return self.to_str().encode("utf8")

    def to_str(self) -> str:
        writer = StringIO()
        self.to_writer(writer)
        writer.seek(0)
        return writer.read()

    def to_llsd(self, flavor: str = "legacy"):
        obj_dict = {}
        for field_name, field in self._get_fields_dict(llsd_flavor=flavor).items():
            spec = field.metadata.get("spec")
            # Not meant to be serialized
            if not spec:
                continue

            val = getattr(self, field.name)
            if val is None:
                continue

            spec_cls = spec
            if not inspect.isclass(spec_cls):
                spec_cls = spec_cls.__class__

            # Some kind of nested structure like sale_info
            if isinstance(val, SchemaBase):
                val = val.to_llsd(flavor)
            elif issubclass(spec_cls, SchemaFieldSerializer):
                val = spec.to_llsd(val, flavor)
            else:
                raise ValueError(f"Bad inventory spec {spec!r}")
            obj_dict[field_name] = val
        return obj_dict

    @abc.abstractmethod
    def to_writer(self, writer: StringIO):
        pass

    @classmethod
    def _obj_from_dict(cls, obj_dict: Dict) -> Self:
        return cls(**obj_dict)  # type: ignore
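As a hedged sketch of how a concrete schema type plugs into SchemaBase: fields declare their wire shape via schema_field(), and from_llsd()/to_llsd() walk that metadata. ExampleRecord and its fields are invented for illustration; the line-oriented reader/writer hooks are stubbed out.

@dataclasses.dataclass
class ExampleRecord(SchemaBase):  # hypothetical, not part of hippolyzer
    item_id: UUID = schema_field(SchemaUUID)
    flags: int = schema_field(SchemaHexInt, default=0)
    desc: str = schema_field(SchemaMultilineStr, default="")

    @classmethod
    def from_reader(cls, reader: StringIO) -> "ExampleRecord":
        raise NotImplementedError  # line-oriented parsing elided in this sketch

    def to_writer(self, writer: StringIO):
        raise NotImplementedError


rec = ExampleRecord.from_llsd({"item_id": "a2e76fcd-9360-4f6d-a924-000000000003"})
assert rec.to_llsd()["item_id"] == rec.item_id  # spec classes round-trip via LLSD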
@@ -15,6 +15,8 @@ CONSTRAINT_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(Constraint))
POSKEYFRAME_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(PosKeyframe))
ROTKEYFRAME_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(RotKeyframe))

+JOINTS_DICT = OrderedMultiDict[str, "Joint"]
+

@dataclasses.dataclass
class Animation:
@@ -29,7 +31,7 @@ class Animation:
    ease_in_duration: float = se.dataclass_field(se.F32)
    ease_out_duration: float = se.dataclass_field(se.F32)
    hand_pose: HandPose = se.dataclass_field(lambda: se.IntEnum(HandPose, se.U32), default=0)
-    joints: OrderedMultiDict[str, Joint] = se.dataclass_field(se.MultiDictAdapter(
+    joints: JOINTS_DICT = se.dataclass_field(se.MultiDictAdapter(
        se.Collection(se.U32, se.Tuple(se.CStr(), JOINT_DATACLASS)),
    ))
    constraints: List[Constraint] = se.dataclass_field(
@@ -1,20 +1,27 @@
import calendar
import datetime
import struct
import typing
import uuid
import zlib

-from llbase.llsd import *
+from llsd import *
# So we can directly reference the original wrapper funcs where necessary
-import llbase.llsd
+import llsd as base_llsd
+from llsd.base import is_string, is_unicode

from hippolyzer.lib.base.datatypes import *


-class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
+class HippoLLSDBaseFormatter(base_llsd.base.LLSDBaseFormatter):
    UUID: callable
    ARRAY: callable
    BINARY: callable

    def __init__(self):
        super().__init__()
        self.type_map[UUID] = self.UUID
        self.type_map[JankStringyBytes] = self.BINARY
        self.type_map[Vector2] = self.TUPLECOORD
        self.type_map[Vector3] = self.TUPLECOORD
        self.type_map[Vector4] = self.TUPLECOORD
@@ -24,44 +31,131 @@ class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
        return self.ARRAY(v.data())


-class HippoLLSDXMLFormatter(llbase.llsd.LLSDXMLFormatter, HippoLLSDBaseFormatter):
+class HippoLLSDXMLFormatter(base_llsd.serde_xml.LLSDXMLFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
        super().__init__()

+    def _generate(self, something):
+        if isinstance(something, int) and type(something) is not int:
+            # The lookup in the underlying library will fail if we don't convert IntEnums to actual ints.
+            something = int(something)
+        return super()._generate(something)
+
+
+class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
+    def __init__(self):
+        super().__init__()
-
-
-class HippoLLSDXMLPrettyFormatter(llbase.llsd.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
-    def __init__(self):
-        super().__init__()


-def format_pretty_xml(val: typing.Any):
+def format_pretty_xml(val: typing.Any) -> bytes:
    return HippoLLSDXMLPrettyFormatter().format(val)


-def format_xml(val: typing.Any):
+def format_xml(val: typing.Any) -> bytes:
    return HippoLLSDXMLFormatter().format(val)


-class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBaseFormatter):
+class HippoLLSDNotationFormatter(base_llsd.serde_notation.LLSDNotationFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
        super().__init__()

    def STRING(self, v):
        # llbase's notation LLSD encoder isn't suitable for generating line-delimited
        # LLSD because the string formatter leaves \n unencoded, unlike indra's llcommon.
        # Add our own escaping rule.
        return super().STRING(v).replace(b"\n", b"\\n")


-def format_notation(val: typing.Any):
+def format_notation(val: typing.Any) -> bytes:
    return HippoLLSDNotationFormatter().format(val)


-def format_binary(val: typing.Any, with_header=True):
-    val = llbase.llsd.format_binary(val)
-    if not with_header:
-        return val.split(b"\n", 1)[1]
+def format_binary(val: typing.Any, with_header=True) -> bytes:
+    val = _format_binary_recurse(val)
+    if with_header:
+        return b'<?llsd/binary?>\n' + val
    return val
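A sketch of the header toggle above, using the integer tag from the recursive formatter that follows ('i' plus a 4-byte big-endian value):

assert format_binary(1, with_header=False) == b'i\x00\x00\x00\x01'
assert format_binary(1) == b'<?llsd/binary?>\ni\x00\x00\x00\x01'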
-class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
# This is copied almost wholesale from https://bitbucket.org/lindenlab/llbase/src/master/llbase/llsd.py
# With a few minor changes to make serialization round-trip correctly. It's evil.
+def _format_binary_recurse(something) -> bytes:
+    """Binary formatter workhorse."""
+    def _format_list(list_something):
+        array_builder = [b'[' + struct.pack('!i', len(list_something))]
+        for item in list_something:
+            array_builder.append(_format_binary_recurse(item))
+        array_builder.append(b']')
+        return b''.join(array_builder)
+
+    if something is None:
+        return b'!'
+    elif isinstance(something, LLSD):
+        return _format_binary_recurse(something.thing)
+    elif isinstance(something, bool):
+        if something:
+            return b'1'
+        else:
+            return b'0'
+    elif isinstance(something, int):
+        try:
+            return b'i' + struct.pack('!i', something)
+        except (OverflowError, struct.error) as exc:
+            raise LLSDSerializationError(str(exc), something)
+    elif isinstance(something, float):
+        try:
+            return b'r' + struct.pack('!d', something)
+        except SystemError as exc:
+            raise LLSDSerializationError(str(exc), something)
+    elif isinstance(something, uuid.UUID):
+        return b'u' + something.bytes
+    elif isinstance(something, (binary, JankStringyBytes)):
+        return b'b' + struct.pack('!i', len(something)) + something
+    elif is_string(something):
+        if is_unicode(something):
+            something = something.encode("utf8")
+        return b's' + struct.pack('!i', len(something)) + something
+    elif isinstance(something, uri):
+        return b'l' + struct.pack('!i', len(something)) + something.encode("utf8")
+    elif isinstance(something, datetime.datetime):
+        return b'd' + struct.pack('<d', something.timestamp())
+    elif isinstance(something, datetime.date):
+        seconds_since_epoch = calendar.timegm(something.timetuple())
+        return b'd' + struct.pack('<d', seconds_since_epoch)
+    elif isinstance(something, (list, tuple)):
+        return _format_list(something)
+    elif isinstance(something, dict):
+        map_builder = [b'{' + struct.pack('!i', len(something))]
+        for key, value in something.items():
+            if isinstance(key, str):
+                key = key.encode("utf8")
+            map_builder.append(b'k' + struct.pack('!i', len(key)) + key)
+            map_builder.append(_format_binary_recurse(value))
+        map_builder.append(b'}')
+        return b''.join(map_builder)
+    else:
+        try:
+            return _format_list(list(something))
+        except TypeError:
+            raise LLSDSerializationError(
+                "Cannot serialize unknown type: %s (%s)" %
+                (type(something), something))
+
+
+class HippoLLSDBinaryParser(base_llsd.serde_binary.LLSDBinaryParser):
+    def __init__(self):
+        super().__init__()
+        self._dispatch[ord('u')] = lambda: UUID(bytes=self._getc(16))
+        self._dispatch[ord('d')] = self._parse_date
+
+    def _parse_date(self):
+        seconds = struct.unpack("<d", self._getc(8))[0]
+        try:
+            return datetime.datetime.fromtimestamp(seconds, tz=datetime.timezone.utc)
+        except OverflowError as exc:
+            # A garbage seconds value can cause utcfromtimestamp() to raise
+            # OverflowError: timestamp out of range for platform time_t
+            self._error(exc, -8)

    def _parse_string(self):
        # LLSD's C++ API lets you stuff binary in a string field even though it's only
@@ -74,22 +168,26 @@ class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
        return bytes_val


+# Python uses one, C++ uses the other, and everyone's unhappy.
+_BINARY_HEADERS = (b'<? LLSD/Binary ?>', b'<?llsd/binary?>')
+
+
def parse_binary(data: bytes):
-    if data.startswith(b'<?llsd/binary?>'):
+    if any(data.startswith(x) for x in _BINARY_HEADERS):
        data = data.split(b'\n', 1)[1]
    return HippoLLSDBinaryParser().parse(data)
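Both header spellings are stripped before parsing, so either flavor of producer round-trips. A sketch:

assert parse_binary(b'<?llsd/binary?>\ni\x00\x00\x00\x01') == 1
assert parse_binary(b'<? LLSD/Binary ?>\ni\x00\x00\x00\x01') == 1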


def parse_xml(data: bytes):
-    return llbase.llsd.parse_xml(data)
+    return base_llsd.parse_xml(data)


def parse_notation(data: bytes):
-    return llbase.llsd.parse_notation(data)
+    return base_llsd.parse_notation(data)


def zip_llsd(val: typing.Any):
-    return zlib.compress(format_binary(val, with_header=False))
+    return zlib.compress(format_binary(val, with_header=False), level=zlib.Z_BEST_COMPRESSION)
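A hedged round-trip sketch for the pair above, assuming unzip_llsd (whose body is elided in this hunk) mirrors zip_llsd by inflating and parsing headerless binary LLSD:

payload = {"name": "Object", "scale": 2.5}
assert unzip_llsd(zip_llsd(payload)) == payload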


def unzip_llsd(data: bytes):
@@ -101,13 +199,13 @@ def parse(data: bytes):
    # content-type is usually nonsense.
    try:
        data = data.lstrip()
-        if data.startswith(b'<?llsd/binary?>'):
+        if any(data.startswith(x) for x in _BINARY_HEADERS):
            return parse_binary(data)
        elif data.startswith(b'<'):
            return parse_xml(data)
        else:
            return parse_notation(data)
    except KeyError as e:
-        raise llbase.llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
+        raise base_llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
    except TypeError as e:
-        raise llbase.llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))
+        raise base_llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))
@@ -11,21 +11,75 @@ from typing import *
import zlib
from copy import deepcopy

import numpy as np
import recordclass

from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.datatypes import Vector3, Vector2, UUID, TupleCoord
from hippolyzer.lib.base.llsd import zip_llsd, unzip_llsd
from hippolyzer.lib.base.serialization import ParseContext

LOG = logging.getLogger(__name__)


def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
    return np.array(mat).reshape((4, 4), order='F')


def mat4_to_llsd(mat: np.ndarray) -> List[float]:
    return list(mat.flatten(order='F'))


@dataclasses.dataclass
class MeshAsset:
    header: MeshHeaderDict = dataclasses.field(default_factory=dict)
    segments: MeshSegmentDict = dataclasses.field(default_factory=dict)
    raw_segments: Dict[str, bytes] = dataclasses.field(default_factory=dict)

    @classmethod
    def make_triangle(cls) -> MeshAsset:
        """Make an asset representing an un-rigged single-sided mesh triangle"""
        inst = cls()
        inst.header = {
            "version": 1,
            "high_lod": {"offset": 0, "size": 0},
            "physics_mesh": {"offset": 0, "size": 0},
            "physics_convex": {"offset": 0, "size": 0},
        }
        base_lod: LODSegmentDict = {
            'Normal': [
                Vector3(-0.0, -0.0, -1.0),
                Vector3(-0.0, -0.0, -1.0),
                Vector3(-0.0, -0.0, -1.0)
            ],
            'PositionDomain': {'Max': [0.5, 0.5, 0.0], 'Min': [-0.5, -0.5, 0.0]},
            'Position': [
                Vector3(0.0, 0.0, 0.0),
                Vector3(1.0, 0.0, 0.0),
                Vector3(0.5, 1.0, 0.0)
            ],
            'TexCoord0Domain': {'Max': [1.0, 1.0], 'Min': [0.0, 0.0]},
            'TexCoord0': [
                Vector2(0.0, 0.0),
                Vector2(1.0, 0.0),
                Vector2(0.5, 1.0)
            ],
            'TriangleList': [[0, 1, 2]],
        }
        inst.segments['physics_mesh'] = [deepcopy(base_lod)]
        inst.segments['high_lod'] = [deepcopy(base_lod)]
        convex_segment: PhysicsConvexSegmentDict = {
            'BoundingVerts': [
                Vector3(-0.0, 1.0, -1.0),
                Vector3(-1.0, -1.0, -1.0),
                Vector3(1.0, -1.0, -1.0)
            ],
            'Max': [0.5, 0.5, 0.0],
            'Min': [-0.5, -0.5, 0.0]
        }
        inst.segments['physics_convex'] = convex_segment
        return inst
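A minimal sanity-check sketch against the factory above, using only fields it populates:

mesh = MeshAsset.make_triangle()
assert mesh.header["version"] == 1
# One material in the high LOD, a single tri over three verts:
assert mesh.segments["high_lod"][0]["TriangleList"] == [[0, 1, 2]]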
    def iter_lods(self) -> Generator[List[LODSegmentDict], None, None]:
        for lod_name, lod_val in self.segments.items():
            if lod_name.endswith("_lod"):
@@ -39,6 +93,7 @@ class MeshAsset:

# These TypedDicts describe the expected shape of the LLSD in the mesh
# header and various segments. They're mainly for type hinting.
class MeshHeaderDict(TypedDict, total=False):
    """Header of the mesh file, includes offsets & sizes for segments' LLSD"""
    version: int
    creator: UUID
    date: dt.datetime
@@ -54,6 +109,7 @@ class MeshHeaderDict(TypedDict, total=False):


class SegmentHeaderDict(TypedDict):
    """Standard shape for segment references within the header"""
    offset: int
    size: int

@@ -73,6 +129,7 @@ class PhysicsHavokSegmentHeaderDict(PhysicsSegmentHeaderDict, total=False):


class PhysicsCostDataHeaderDict(TypedDict, total=False):
    """Cost of physical representation, populated by server"""
    decomposition: float
    decomposition_discounted_vertices: int
    decomposition_hulls: int
@@ -85,6 +142,7 @@ class PhysicsCostDataHeaderDict(TypedDict, total=False):


class MeshSegmentDict(TypedDict, total=False):
    """Dict of segments unpacked using the MeshHeaderDict"""
    high_lod: List[LODSegmentDict]
    medium_lod: List[LODSegmentDict]
    low_lod: List[LODSegmentDict]
@@ -96,6 +154,7 @@ class MeshSegmentDict(TypedDict, total=False):


class LODSegmentDict(TypedDict, total=False):
    """Represents a single entry within the material list of a LOD segment"""
    # Only present if True and no geometry
    NoGeometry: bool
    # -1.0 - 1.0
@@ -113,45 +172,59 @@ class LODSegmentDict(TypedDict, total=False):


class DomainDict(TypedDict):
    """Description of the real range for quantized coordinates"""
    # number of elems depends on what the domain is for, Vec2 or Vec3
    Max: List[float]
    Min: List[float]


-class VertexWeight(recordclass.datatuple):  # type: ignore
+class VertexWeight(recordclass.RecordClass):
    """Vertex weight for a specific joint on a specific vertex"""
    # index of the joint within the joint_names list in the skin segment
    joint_idx: int
    # 0.0 - 1.0
    weight: float


class SkinSegmentDict(TypedDict, total=False):
    """Rigging information"""
    joint_names: List[str]
-    # model -> world transform matrix for model
+    # model -> world transform mat4 for model
    bind_shape_matrix: List[float]
-    # world -> joint local transform matrices
+    # world -> joint local transform mat4s
    inverse_bind_matrix: List[List[float]]
-    # offset matrices for joints, translation-only.
-    # Not sure what these are relative to, base joint or model <0,0,0>.
+    # Transform mat4s for the joint nodes themselves.
+    # The matrices may have scale or other components, but only the
+    # translation component will be used by the viewer.
+    # All translations are relative to the joint's parent.
    alt_inverse_bind_matrix: List[List[float]]
    lock_scale_if_joint_position: bool
    pelvis_offset: float


class PhysicsConvexSegmentDict(DomainDict, total=False):
    """
    Data for convex hull collisions, populated by the client

    Min / Max pos domain vals are inline, unlike for LODs, so this inherits from DomainDict
    """
    # Indices into the Positions list
    HullList: List[int]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
    Positions: List[Vector3]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
    BoundingVerts: List[Vector3]


class PhysicsHavokSegmentDict(TypedDict, total=False):
-    HullMassProps: MassPropsDict
-    MOPP: MOPPDict
-    MeshDecompMassProps: MassPropsDict
+    """Cached data for Havok collisions, populated by sim and not used by client."""
+    HullMassProps: HavokMassPropsDict
+    MOPP: HavokMOPPDict
+    MeshDecompMassProps: HavokMassPropsDict
    WeldingData: bytes


-class MassPropsDict(TypedDict, total=False):
+class HavokMassPropsDict(TypedDict, total=False):
    # Vec, center of mass
    CoM: List[float]
    # 9 floats, Mat3?
@@ -160,7 +233,7 @@ class MassPropsDict(TypedDict, total=False):
    volume: float


-class MOPPDict(TypedDict, total=False):
+class HavokMOPPDict(TypedDict, total=False):
    """Memory Optimized Partial Polytope"""
    BuildType: int
    MoppData: bytes
@@ -169,8 +242,11 @@ class MOPPDict(TypedDict, total=False):


def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):
-    # Used for turning positions into their actual positions within the mesh / domain
-    # for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
+    Used for turning positions into their actual positions within the mesh / domain
+
+    for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
    lower = domain['Min']
    upper = domain['Max']
    return [
@@ -179,7 +255,7 @@ def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):


def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
-    # Used for turning positions into their actual positions within the mesh / domain
+    """Used for turning actual positions back into their quantized positions within the domain"""
    lower = domain['Min']
    upper = domain['Max']
    return [
@@ -187,7 +263,47 @@ def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
    ]
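A worked example of the domain mapping, assuming the elided loop body is a per-component lerp from the quantized [0, 1] range into [Min, Max]:

domain: DomainDict = {"Min": [-0.5, -0.5, 0.0], "Max": [0.5, 0.5, 0.0]}
# A vert quantized to the midpoint on every axis lands at the domain's center:
(center,) = positions_from_domain([Vector3(0.5, 0.5, 0.5)], domain)
assert center == Vector3(0.0, 0.0, 0.0)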


class VertexWeights(se.SerializableBase):
    """Serializer for a list of joint weights on a single vertex"""
    INFLUENCE_LIMIT = 4
    INFLUENCE_TERM = 0xFF

    @classmethod
    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
        if len(vals) > cls.INFLUENCE_LIMIT:
            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
        for val in vals:
            joint_idx, influence = val
            writer.write(se.U8, joint_idx)
            writer.write(se.U16, round(influence * 0xFFFF), ctx=ctx)
        if len(vals) != cls.INFLUENCE_LIMIT:
            writer.write(se.U8, cls.INFLUENCE_TERM)

    @classmethod
    def deserialize(cls, reader: se.Reader, ctx=None):
        # NOTE: normally you'd want to do something like arrange this into a nicely
        # aligned byte array with zero padding so that you could vectorize the decoding.
        # In cases where having a vertex with no weights is semantically equivalent to
        # having a vertex _with_ weights of a value of 0.0 that's fine. This isn't the case
        # in LL's implementation of mesh:
        #
        # https://bitbucket.org/lindenlab/viewer/src/d31a83fb946c49a38376ea3b312b5380d0c8c065/indra/llmath/llvolume.cpp#lines-2560:2628
        #
        # Consider the difference between handling of b"\x00\x00\x00\xFF" and b"\xFF" with the above logic.
        # To simplify round-tripping while preserving those semantics, we don't do a vectorized decode.
        # I had a vectorized numpy version, but those requirements made everything a bit of a mess.
        influence_list = []
        for _ in range(cls.INFLUENCE_LIMIT):
            joint_idx = reader.read_bytes(1)[0]
            if joint_idx == cls.INFLUENCE_TERM:
                break
            weight = reader.read(se.U16, ctx=ctx) / 0xFFFF
            influence_list.append(VertexWeight(joint_idx, weight))
        return influence_list


class SegmentSerializer:
    """Serializer for binary fields within an LLSD object"""
    def __init__(self, templates):
        self._templates: Dict[str, se.SerializableBase] = templates

@@ -217,43 +333,46 @@ class SegmentSerializer:
        return new_segment


-class VertexWeights(se.SerializableBase):
-    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
-    INFLUENCE_LIMIT = 4
-    INFLUENCE_TERM = 0xFF
+class VecListAdapter(se.Adapter):
+    def __init__(self, child_spec: se.SERIALIZABLE_TYPE, vec_type: Type):
+        super().__init__(child_spec)
+        self.vec_type = vec_type

-    @classmethod
-    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
-        if len(vals) > cls.INFLUENCE_LIMIT:
-            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
-        for val in vals:
-            joint_idx, influence = val
-            writer.write(se.U8, joint_idx)
-            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
-        if len(vals) != cls.INFLUENCE_LIMIT:
-            writer.write(se.U8, cls.INFLUENCE_TERM)
+    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
+        return val

-    @classmethod
-    def deserialize(cls, reader: se.Reader, ctx=None):
-        influence_list = []
-        for _ in range(cls.INFLUENCE_LIMIT):
-            joint_idx = reader.read(se.U8)
-            if joint_idx == cls.INFLUENCE_TERM:
-                break
-            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
-        return influence_list
+    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
+        new_vals = []
+        for elem in val:
+            new_vals.append(self.vec_type(*elem))
+        return new_vals


LE_U16: np.dtype = np.dtype(np.uint16).newbyteorder('<')  # noqa


LOD_SEGMENT_SERIALIZER = SegmentSerializer({
    # 16-bit indices to the verts making up the tri. Imposes a 16-bit
    # upper limit on verts in any given material in the mesh.
-    "TriangleList": se.Collection(None, se.Collection(3, se.U16)),
+    "TriangleList": se.ExprAdapter(
+        se.NumPyArray(se.BytesGreedy(), LE_U16, 3),
+        decode_func=lambda x: x.tolist(),
+    ),
    # These are used to interpolate between values in their respective domains
    # Each position represents a single vert.
-    "Position": se.Collection(None, se.Vector3U16(0.0, 1.0)),
-    "TexCoord0": se.Collection(None, se.Vector2U16(0.0, 1.0)),
-    # Normals have a static domain between -1 and 1
-    "Normal": se.Collection(None, se.Vector3U16(0.0, 1.0)),
+    "Position": VecListAdapter(
+        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), 0.0, 1.0),
+        Vector3,
+    ),
+    "TexCoord0": VecListAdapter(
+        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 2), 0.0, 1.0),
+        Vector2,
+    ),
+    # Normals have a static domain between -1 and 1, so we just use that rather than 0.0 - 1.0.
+    "Normal": VecListAdapter(
+        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), -1.0, 1.0),
+        Vector3,
+    ),
    "Weights": se.Collection(None, VertexWeights)
})

@@ -265,6 +384,7 @@ class LLMeshSerializer(se.SerializableBase):
    KNOWN_SEGMENTS = ("lowest_lod", "low_lod", "medium_lod", "high_lod",
                      "physics_mesh", "physics_convex", "skin", "physics_havok")

    # Define unpackers for specific binary fields within the parsed LLSD segments
    SEGMENT_TEMPLATES: Dict[str, SegmentSerializer] = {
        "lowest_lod": LOD_SEGMENT_SERIALIZER,
        "low_lod": LOD_SEGMENT_SERIALIZER,
182  hippolyzer/lib/base/mesh_skeleton.py  Normal file
@@ -0,0 +1,182 @@
from __future__ import annotations

import copy
import dataclasses
import re
import weakref
from typing import *

import transformations
from lxml import etree

from hippolyzer.lib.base.datatypes import Vector3, RAD_TO_DEG
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.mesh import MeshAsset, SkinSegmentDict, llsd_to_mat4

MAYBE_JOINT_REF = Optional[str]
SKELETON_REF = Optional[Callable[[], "Skeleton"]]


@dataclasses.dataclass
class JointNode:
    name: str
    parent_name: MAYBE_JOINT_REF
    skeleton: SKELETON_REF
    translation: Vector3
    pivot: Vector3  # pivot point for the joint, generally the same as translation
    rotation: Vector3  # Euler rotation in degrees
    scale: Vector3
    type: str  # bone or collision_volume
    support: str

    def __hash__(self):
        return hash((self.name, self.type))

    @property
    def matrix(self):
        return transformations.compose_matrix(
            scale=tuple(self.scale),
            angles=tuple(self.rotation / RAD_TO_DEG),
            translate=tuple(self.translation),
        )

    @property
    def parent(self) -> Optional[JointNode]:
        if self.parent_name:
            return self.skeleton()[self.parent_name]
        return None

    @property
    def index(self) -> int:
        bone_idx = 0
        for node in self.skeleton().joint_dict.values():
            if node.type != "bone":
                continue
            if self is node:
                return bone_idx
            bone_idx += 1
        raise KeyError(f"{self.name!r} doesn't exist in skeleton")

    @property
    def ancestors(self) -> Sequence[JointNode]:
        joint_node = self
        skeleton = self.skeleton()
        ancestors: List[JointNode] = []
        while joint_node.parent_name:
            joint_node = skeleton.joint_dict.get(joint_node.parent_name)
            ancestors.append(joint_node)
        return ancestors

    @property
    def children(self) -> Sequence[JointNode]:
        children: List[JointNode] = []
        for node in self.skeleton().joint_dict.values():
            if node.parent_name and node.parent_name == self.name:
                children.append(node)
        return children

    @property
    def inverse(self) -> Optional[JointNode]:
        l_re = re.compile(r"(.*?(?:_|\b))L((?:_|\b).*)")
        r_re = re.compile(r"(.*?(?:_|\b))R((?:_|\b).*)")

        inverse_name = None
        if "Left" in self.name:
            inverse_name = self.name.replace("Left", "Right")
        elif "LEFT" in self.name:
            inverse_name = self.name.replace("LEFT", "RIGHT")
        elif l_re.match(self.name):
            inverse_name = re.sub(l_re, r"\1R\2", self.name)
        elif "Right" in self.name:
            inverse_name = self.name.replace("Right", "Left")
        elif "RIGHT" in self.name:
            inverse_name = self.name.replace("RIGHT", "LEFT")
        elif r_re.match(self.name):
            inverse_name = re.sub(r_re, r"\1L\2", self.name)

        if inverse_name:
            return self.skeleton().joint_dict.get(inverse_name)
        return None

    @property
    def descendents(self) -> Set[JointNode]:
        descendents: Set[JointNode] = set()
        ancestors: Set[str] = {self.name}
        last_ancestors: Set[str] = set()
        while last_ancestors != ancestors:
            last_ancestors = ancestors.copy()
            for node in self.skeleton().joint_dict.values():
                if node.parent_name and node.parent_name in ancestors:
                    ancestors.add(node.name)
                    descendents.add(node)
        return descendents

class Skeleton:
    def __init__(self, root_node: Optional[etree.ElementBase] = None):
        self.joint_dict: Dict[str, JointNode] = {}
        if root_node is not None:
            self._parse_node_children(root_node, None)

    def __getitem__(self, item: str) -> JointNode:
        return self.joint_dict[item]

    def clone(self) -> Self:
        val = copy.deepcopy(self)
        skel_ref = weakref.ref(val)
        for joint in val.joint_dict.values():
            joint.skeleton = skel_ref
        return val

    def _parse_node_children(self, node: etree.ElementBase, parent_name: MAYBE_JOINT_REF):
        name = node.get('name')
        joint = JointNode(
            name=name,
            parent_name=parent_name,
            skeleton=weakref.ref(self),
            translation=_get_vec_attr(node, "pos", Vector3()),
            pivot=_get_vec_attr(node, "pivot", Vector3()),
            rotation=_get_vec_attr(node, "rot", Vector3()),
            scale=_get_vec_attr(node, "scale", Vector3(1, 1, 1)),
            support=node.get('support', 'base'),
            type=node.tag,
        )
        self.joint_dict[name] = joint
        for child in node.iterchildren():
            self._parse_node_children(child, joint.name)

    def merge_mesh_skeleton(self, mesh: MeshAsset) -> None:
        """Update this skeleton with a skeleton definition from a mesh asset"""
        skin_seg: Optional[SkinSegmentDict] = mesh.segments.get('skin')
        if not skin_seg:
            return

        for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
            # We're only meant to use the translation component from the alt inverse bind matrix.
            joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
            joint_node = self.joint_dict.get(joint_name)
            if not joint_node:
                continue
            joint_node.translation = Vector3(*joint_decomp[3])

        if pelvis_offset := skin_seg.get('pelvis_offset'):
            # TODO: Should we even do this?
            pelvis_node = self["mPelvis"]
            pelvis_node.translation += Vector3(0, 0, pelvis_offset)


def _get_vec_attr(node, attr_name: str, default: Vector3) -> Vector3:
    attr_val = node.get(attr_name, None)
    if not attr_val:
        return default
    return Vector3(*(float(x) for x in attr_val.split(" ") if x))


def load_avatar_skeleton() -> Skeleton:
    skel_path = get_resource_filename("lib/base/data/avatar_skeleton.xml")
    with open(skel_path, 'r') as f:
        skel_root = etree.fromstring(f.read())
    return Skeleton(skel_root.getchildren()[0])


AVATAR_SKELETON = load_avatar_skeleton()
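A short usage sketch for the module-level skeleton (joint names come from avatar_skeleton.xml; mPelvis is referenced above):

skel = AVATAR_SKELETON.clone()   # mutate a copy, never the shared default
pelvis = skel["mPelvis"]
for child in pelvis.children:    # direct children found via parent_name back-refs
    print(child.name, child.type, child.translation)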
@@ -19,5 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
-
-

158  hippolyzer/lib/base/message/circuit.py  Normal file
@@ -0,0 +1,158 @@
from __future__ import annotations

import abc
import asyncio
import copy
import dataclasses
import datetime as dt
import logging
from collections import deque
from typing import *
from typing import Optional

from .message_handler import MessageHandler
from ..network.transport import AbstractUDPTransport, UDPPacket, Direction, ADDR_TUPLE
from .message import Block, Message
from .msgtypes import PacketFlags
from .udpserializer import UDPMessageSerializer


@dataclasses.dataclass
class ReliableResendInfo:
    last_resent: dt.datetime
    message: Message
    completed: asyncio.Future = dataclasses.field(default_factory=asyncio.Future)
    tries_left: int = 10


class Circuit:
    def __init__(
            self,
            near_host: Optional[ADDR_TUPLE],
            far_host: ADDR_TUPLE,
            transport: Optional[AbstractUDPTransport] = None,
    ):
        self.near_host: Optional[ADDR_TUPLE] = near_host
        self.host: ADDR_TUPLE = far_host
        self.is_alive = True
        self.transport = transport
        self.serializer = UDPMessageSerializer()
        self.last_packet_at = dt.datetime.now()
        self.packet_id_base = 0
        self.unacked_reliable: Dict[Tuple[Direction, int], ReliableResendInfo] = {}
        self.resend_every: float = 3.0
        # Reliable messages that we've already seen and handled, for resend suppression
        self.seen_reliable: deque[int] = deque(maxlen=1_000)
    def _send_prepared_message(self, message: Message, transport=None):
        try:
            serialized = self.serializer.serialize(message)
        except:
            logging.exception(f"Failed to serialize: {message.to_dict()!r}")
            raise
        return self.send_datagram(serialized, message.direction, transport=transport)

    def disconnect(self):
        self.packet_id_base = 0
        self.unacked_reliable.clear()
        self.is_alive = False

    def send_datagram(self, data: bytes, direction: Direction, transport=None):
        self.last_packet_at = dt.datetime.now()
        src_addr, dst_addr = self.host, self.near_host
        if direction == Direction.OUT:
            src_addr, dst_addr = self.near_host, self.host

        packet = UDPPacket(src_addr, dst_addr, data, direction)
        (transport or self.transport).send_packet(packet)
        return packet

    def prepare_message(self, message: Message):
        if message.finalized:
            raise RuntimeError(f"Trying to re-send finalized {message!r}")
        message.packet_id = self.packet_id_base
        self.packet_id_base += 1
        if message.acks:
            message.send_flags |= PacketFlags.ACK
        else:
            message.send_flags &= ~PacketFlags.ACK
        # If it was queued, it's not anymore
        message.queued = False
        message.finalized = True
        return True

    def send(self, message: Message, transport=None) -> UDPPacket:
        if self.prepare_message(message):
            # If the message originates from us then we're responsible for resends.
            if message.reliable and message.synthetic and not transport:
                self.unacked_reliable[(message.direction, message.packet_id)] = ReliableResendInfo(
                    last_resent=dt.datetime.now(),
                    message=message,
                )
            return self._send_prepared_message(message, transport)

    def send_reliable(self, message: Message, transport=None) -> asyncio.Future:
        """send() wrapper that always sends reliably and allows `await`ing ACK receipt"""
        if not message.synthetic:
            raise ValueError("Not able to send non-synthetic message reliably!")
        message.send_flags |= PacketFlags.RELIABLE
        self.send(message, transport)
        return self.unacked_reliable[(message.direction, message.packet_id)].completed
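A hedged sketch of awaiting delivery; the StartPingCheck block layout is assumed from the standard message template:

async def ping(circuit: Circuit) -> None:
    msg = Message("StartPingCheck", Block("PingID", PingID=0, OldestUnacked=0))
    try:
        await circuit.send_reliable(msg)   # resolves once the peer ACKs
    except TimeoutError:
        logging.warning("peer never acked within the resend budget")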
    def collect_acks(self, message: Message):
        effective_acks = list(message.acks)
        if message.name == "PacketAck":
            effective_acks.extend(x["ID"] for x in message["Packets"])
        for ack in effective_acks:
            resend_info = self.unacked_reliable.pop((~message.direction, ack), None)
            if resend_info:
                resend_info.completed.set_result(None)

    def resend_unacked(self):
        for resend_info in list(self.unacked_reliable.values()):
            # Not time to attempt a resend yet
            if dt.datetime.now() - resend_info.last_resent < dt.timedelta(seconds=self.resend_every):
                continue

            msg = copy.copy(resend_info.message)
            resend_info.tries_left -= 1
            # We were on our last try and we never received an ack
            if not resend_info.tries_left:
                logging.warning(f"Giving up on unacked {msg.packet_id}")
                del self.unacked_reliable[(msg.direction, msg.packet_id)]
                resend_info.completed.set_exception(TimeoutError("Exceeded resend limit"))
                continue
            resend_info.last_resent = dt.datetime.now()
            msg.send_flags |= PacketFlags.RESENT
            self._send_prepared_message(msg)

    def send_acks(self, to_ack: Sequence[int], direction=Direction.OUT, packet_id=None):
        logging.debug("%r acking %r" % (direction, to_ack))
        # TODO: maybe tack this onto `.acks` for next message?
        message = Message('PacketAck', *[Block('Packets', ID=x) for x in to_ack])
        message.packet_id = packet_id
        message.direction = direction
        self.send(message)

    def track_reliable(self, packet_id: int) -> bool:
        """Tracks a reliable packet, returning whether it's a new message"""
        if packet_id in self.seen_reliable:
            return False
        self.seen_reliable.append(packet_id)
        return True

    def __repr__(self):
        return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)


class ConnectionHolder(abc.ABC):
    """
    Any object that has both a circuit and a message handler

    Preferred to explicitly passing around a circuit, message handler pair
    because generally a ConnectionHolder represents a region or a client.
    The same region or client may have multiple different circuits across the
    lifetime of a session (due to region restarts, etc.)
    """
    circuit: Optional[Circuit]
    message_handler: MessageHandler[Message, str]
@@ -20,8 +20,8 @@ along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""

-import os
+from hippolyzer.lib.base.helpers import get_resource_filename

-msg_tmpl = open(os.path.join(os.path.dirname(__file__), 'message_template.msg'))
-with open(os.path.join(os.path.dirname(__file__), 'message.xml'), "rb") as _f:
+msg_tmpl = open(get_resource_filename("lib/base/message/data/message_template.msg"))
+with open(get_resource_filename("lib/base/message/data/message.xml"), "rb") as _f:
    msg_details = _f.read()
(File diff suppressed because it is too large)
@@ -29,7 +29,10 @@ from hippolyzer.lib.base.message.msgtypes import MsgType

PACKER = Callable[[Any], bytes]
UNPACKER = Callable[[bytes], Any]
+LLSD_PACKER = Callable[[Any], Any]
+LLSD_UNPACKER = Callable[[Any], Any]
SPEC = Tuple[UNPACKER, PACKER]
+LLSD_SPEC = Tuple[LLSD_UNPACKER, LLSD_PACKER]


def _pack_string(pack_string):
@@ -64,6 +67,21 @@ def _make_tuplecoord_spec(typ: Type[TupleCoord], struct_fmt: str,
    return lambda x: typ(*struct_obj.unpack(x)), _packer


+def _make_llsd_tuplecoord_spec(typ: Type[TupleCoord], needed_elems: Optional[int] = None):
+    if needed_elems is None:
+        # Number of elems needed matches the number in the coord type
+        def _packer(x):
+            return list(x)
+    else:
+        # Special case, we only want to pack some of the components.
+        # Mostly for Quaternion since we don't actually need to send W.
+        def _packer(x):
+            if isinstance(x, TupleCoord):
+                x = x.data()
+            return list(x.data(needed_elems))
+    return lambda x: typ(*x), _packer
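A sketch of the simple path through the helper above (needed_elems=None); equality assumes TupleCoord's usual component-wise __eq__:

unpack, pack = _make_llsd_tuplecoord_spec(Vector3)
assert pack(Vector3(1.0, 2.0, 3.0)) == [1.0, 2.0, 3.0]  # TupleCoords iterate their components
assert unpack([1.0, 2.0, 3.0]) == Vector3(1.0, 2.0, 3.0)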


def _unpack_specs(cls):
    cls.UNPACKERS = {k: v[0] for (k, v) in cls.SPECS.items()}
    cls.PACKERS = {k: v[1] for (k, v) in cls.SPECS.items()}
@@ -78,7 +96,7 @@ class TemplateDataPacker:
        MsgType.MVT_S8: _make_struct_spec('b'),
        MsgType.MVT_U8: _make_struct_spec('B'),
        MsgType.MVT_BOOL: _make_struct_spec('B'),
-        MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: x.bytes),
+        MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: UUID(x).bytes),
        MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
        MsgType.MVT_IP_PORT: _make_struct_spec('!H'),
        MsgType.MVT_U16: _make_struct_spec('<H'),
@@ -110,10 +128,15 @@ class TemplateDataPacker:

class LLSDDataPacker(TemplateDataPacker):
    # Some template var types aren't directly representable in LLSD, so they
    # get encoded to binary fields.
-    SPECS = {
+    SPECS: Dict[MsgType, LLSD_SPEC] = {
        MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
        # LLSD ints are technically bound to S32 range.
        MsgType.MVT_U32: _make_struct_spec('!I'),
        MsgType.MVT_U64: _make_struct_spec('!Q'),
        MsgType.MVT_S64: _make_struct_spec('!q'),
        # These are arrays in LLSD, we need to turn them into coords.
        MsgType.MVT_LLVector3: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector3d: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector4: _make_llsd_tuplecoord_spec(Vector4),
        MsgType.MVT_LLQuaternion: _make_llsd_tuplecoord_spec(Quaternion, needed_elems=3)
    }
@@ -5,14 +5,13 @@ from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.message.data_packer import LLSDDataPacker
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.template import MessageTemplateVariable
-from hippolyzer.lib.base.message.template_dict import TemplateDictionary
-
+from hippolyzer.lib.base.message.template_dict import TemplateDictionary, DEFAULT_TEMPLATE_DICT

VAR_PAIR = Tuple[dict, MessageTemplateVariable]


class LLSDMessageSerializer:
-    DEFAULT_TEMPLATE = TemplateDictionary()
+    DEFAULT_TEMPLATE = DEFAULT_TEMPLATE_DICT

    def __init__(self, message_template=None, message_cls: Type[Message] = Message):
        if message_template is not None:
@@ -18,40 +18,65 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from __future__ import annotations

import copy
import enum
import importlib
import itertools
import logging
import os
import uuid
from typing import *

-from .. import serialization as se
-from ..datatypes import *
-from .msgtypes import PacketFlags
+from hippolyzer.lib.base.datatypes import *
+import hippolyzer.lib.base.serialization as se
+import hippolyzer.lib.base.templates as templates
+from hippolyzer.lib.base.datatypes import Pretty
+from hippolyzer.lib.base.message.msgtypes import PacketFlags
+from hippolyzer.lib.base.network.transport import Direction, ADDR_TUPLE

BLOCK_DICT = Dict[str, "MsgBlockList"]
VAR_TYPE = Union[TupleCoord, bytes, str, float, int, Tuple, UUID]

+_TEMPLATES_MTIME = os.stat(templates.__file__).st_mtime
+
+
+def maybe_reload_templates():
+    # Templates may be modified at runtime during development, check
+    # if they've changed since startup and reload if they have.
+    global _TEMPLATES_MTIME
+    templates_mtime = os.stat(templates.__file__).st_mtime
+
+    if _TEMPLATES_MTIME is None or _TEMPLATES_MTIME < templates_mtime:
+        print("Reloading templates")
+        try:
+            importlib.reload(templates)  # type: ignore
+            _TEMPLATES_MTIME = templates_mtime
+        except:
+            logging.exception("Failed to reload templates!")
+

class Block:
    """
    base representation of a block
    Block expects a name, and kwargs for variables (var_name = value)
    """
    __slots__ = ('name', 'size', 'vars', 'message_name', '_ser_cache', 'fill_missing',)
+    PARENT_MESSAGE_NAME: ClassVar[Optional[str]] = None

-    def __init__(self, name, /, fill_missing=False, **kwargs):
+    def __init__(self, name, /, *, fill_missing=False, **kwargs):
        self.name = name
        self.size = 0
-        self.message_name: Optional[str] = None
+        self.message_name: Optional[str] = self.PARENT_MESSAGE_NAME
        self.vars: Dict[str, VAR_TYPE] = {}
        self._ser_cache: Dict[str, Any] = {}
        self.fill_missing = fill_missing
        for var_name, val in kwargs.items():
            self[var_name] = val

-    def get_variable(self, var_name):
-        return self.vars.get(var_name)
+    def get(self, var_name, default: Optional[VAR_TYPE] = None) -> Optional[VAR_TYPE]:
+        return self.vars.get(var_name, default)

    def __contains__(self, item):
        return item in self.vars
@@ -60,6 +85,9 @@ class Block:
        return self.vars[name]

    def __setitem__(self, key, value):
+        if isinstance(value, Pretty):
+            return self.serialize_var(key, value.value)
+
        # These don't pickle well since they're likely to get hot-reloaded
        if isinstance(value, (enum.IntEnum, enum.IntFlag)):
            value = int(value)
@@ -129,24 +157,7 @@ class Block:
                continue
            # We have a serializer, include the pretty output in the repr,
            # using the _ suffix so the builder knows it needs to be serialized.
-            deserialized = self.deserialize_var(key)
-            type_name = type(deserialized).__name__
-            # TODO: replace __repr__ for these in a context manager so nested
-            #  Enums / Flags get handled correctly as well. The point of the
-            #  pretty repr() is to make messages directly paste-able into code.
-            if isinstance(deserialized, enum.IntEnum):
-                deserialized = f"{type_name}.{deserialized.name}"
-            elif isinstance(deserialized, enum.IntFlag):
-                # Make an ORed together version of the flags based on the POD version
-                flags = se.flags_to_pod(type(deserialized), deserialized)
-                flags = " | ".join(
-                    (f"{type_name}.{v}" if isinstance(v, str) else str(v))
-                    for v in flags
-                )
-                deserialized = f"({flags})"
-            else:
-                deserialized = repr(deserialized)
-            block_vars[f"{key}_"] = deserialized
+            block_vars[f"{key}_"] = repr(self.deserialize_var(key))
        else:
            block_vars = self.vars
@@ -175,18 +186,24 @@ class MsgBlockList(List["Block"]):
|
||||
|
||||
|
||||
class Message:
|
||||
__slots__ = ("name", "send_flags", "_packet_id", "acks", "body_boundaries", "queued",
|
||||
"offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized")
|
||||
__slots__ = ("name", "send_flags", "packet_id", "acks", "body_boundaries", "queued",
|
||||
"offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized",
|
||||
"direction", "meta", "synthetic", "dropped", "sender", "unknown_message")
|
||||
|
||||
def __init__(self, name, *args, packet_id=None, flags=0, acks=None, direction=None):
|
||||
# TODO: Do this on a timer or something.
|
||||
maybe_reload_templates()
|
||||
|
||||
def __init__(self, name, *args, packet_id=None, flags=0, acks=None):
|
||||
self.name = name
|
||||
self.send_flags = flags
|
||||
self._packet_id: Optional[int] = packet_id # aka, sequence number
|
||||
self.packet_id: Optional[int] = packet_id # aka, sequence number
|
||||
|
||||
self.acks = acks if acks is not None else tuple()
|
||||
self.body_boundaries = (-1, -1)
|
||||
self.unknown_message = False
|
||||
self.offset = 0
|
||||
self.raw_extra = b""
|
||||
self.direction: Direction = direction if direction is not None else Direction.OUT
|
||||
# For lazy deserialization
|
||||
self.raw_body = None
|
||||
self.deserializer = None
|
||||
@@ -196,23 +213,17 @@ class Message:
|
||||
# Whether message is owned by the queue or should be sent immediately
|
||||
self.queued: bool = False
|
||||
self._blocks: BLOCK_DICT = {}
|
||||
self.meta = {}
|
||||
self.synthetic = packet_id is None
|
||||
self.dropped = False
|
||||
self.sender: Optional[ADDR_TUPLE] = None
|
||||
|
||||
self.add_blocks(args)
|
||||
|
||||
@property
|
||||
def packet_id(self) -> Optional[int]:
|
||||
return self._packet_id
|
||||
|
||||
@packet_id.setter
|
||||
def packet_id(self, val: Optional[int]):
|
||||
self._packet_id = val
|
||||
# Changing packet ID clears the finalized flag
|
||||
self.finalized = False
|
||||
|
||||
def add_blocks(self, block_list):
|
||||
# can have a list of blocks if it is multiple or variable
|
||||
for block in block_list:
|
||||
if type(block) == list:
|
||||
if type(block) is list:
|
||||
for bl in block:
|
||||
self.add_block(bl)
|
||||
else:
|
||||
@@ -256,7 +267,7 @@ class Message:
|
||||
block.message_name = self.name
|
||||
block.finalize()
|
||||
|
||||
def get_block(self, block_name: str, default=None, /) -> Optional[Block]:
|
||||
def get_blocks(self, block_name: str, default=None, /) -> Optional[MsgBlockList]:
|
||||
return self.blocks.get(block_name, default)
|
||||
|
||||
@property
|
||||
@@ -278,10 +289,10 @@ class Message:
|
||||
|
||||
def ensure_parsed(self):
|
||||
# This is a little magic, think about whether we want this.
|
||||
if self.raw_body and self.deserializer():
|
||||
if self.raw_body and self.deserializer and self.deserializer():
|
||||
self.deserializer().parse_message_body(self)
|
||||
|
||||
def to_dict(self):
|
||||
def to_dict(self, extended=False):
|
||||
""" A dict representation of a message.
|
||||
|
||||
This is the form used for templated messages sent via EQ.
|
||||
@@ -297,6 +308,18 @@ class Message:
|
||||
new_vars[var_name] = val
|
||||
dict_blocks.append(new_vars)
|
||||
|
||||
if extended:
|
||||
base_repr.update({
|
||||
"packet_id": self.packet_id,
|
||||
"meta": self.meta.copy(),
|
||||
"dropped": self.dropped,
|
||||
"synthetic": self.synthetic,
|
||||
"direction": self.direction.name,
|
||||
"send_flags": int(self.send_flags),
|
||||
"extra": self.extra,
|
||||
"acks": self.acks,
|
||||
})
|
||||
|
||||
return base_repr
|
||||
|
||||
@classmethod
|
||||
@@ -306,9 +329,38 @@ class Message:
|
||||
msg.create_block_list(block_type)
|
||||
for block in blocks:
|
||||
msg.add_block(Block(block_type, **block))
|
||||
|
||||
if 'packet_id' in dict_val:
|
||||
# extended format
|
||||
msg.packet_id = dict_val['packet_id']
|
||||
msg.meta = dict_val['meta']
|
||||
msg.dropped = dict_val['dropped']
|
||||
msg.synthetic = dict_val['synthetic']
|
||||
msg.direction = Direction[dict_val['direction']]
|
||||
msg.send_flags = dict_val['send_flags']
|
||||
msg.extra = dict_val['extra']
|
||||
msg.acks = dict_val['acks']
|
||||
return msg
|
||||
|
||||
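For reference, a minimal sketch of the extended round trip these hunks add (the message contents are illustrative, and the classmethod reading `dict_val` above is assumed to be `Message.from_dict`):

msg = Message("AgentUpdate", Block("AgentData", State=0), direction=Direction.OUT)
as_dict = msg.to_dict(extended=True)   # adds packet_id, meta, direction, flags, acks
clone = Message.from_dict(as_dict)     # the 'packet_id' key selects the extended path
assert clone.direction == Direction.OUT and clone.synthetic == msg.synthetic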
    @classmethod
    def from_eq_event(cls, event) -> Message:
        # If this isn't a templated message (like some EQ-only events are),
        # then we wrap it in a synthetic `Message` so that the API for handling
        # both EQ-only and templated message events can be the same. Ick.
        msg = cls(event["message"])
        if isinstance(event["body"], dict):
            msg.add_block(Block("EventData", **event["body"]))
        else:
            # Shouldn't be any events that have anything other than a dict
            # as a body, but just to be sure...
            msg.add_block(Block("EventData", Data=event["body"]))
        msg.synthetic = True
        return msg

    def invalidate_caches(self):
        # Don't have any caches if we haven't even parsed
        if self.raw_body:
            return
        for blocks in self.blocks.values():
            for block in blocks:
                block.invalidate_caches()
@@ -331,7 +383,7 @@ class Message:
        block_reprs = sep.join(x.repr(pretty=pretty) for x in itertools.chain(*self.blocks.values()))
        if block_reprs:
            block_reprs = sep + block_reprs
        return f"{self.name!r}{block_reprs}"
        return f"{self.name!r}{block_reprs}, direction=Direction.{self.direction.name}"

    def repr(self, pretty=False):
        self.ensure_parsed()
@@ -341,14 +393,30 @@ class Message:
        message_copy = copy.deepcopy(self)

        # Set the queued flag so the original will be dropped and acks will be sent
        self.queued = True
        if not self.finalized:
            self.queued = True

        # Original was dropped so let's make sure we have clean acks and packet id
        message_copy.acks = tuple()
        message_copy.send_flags &= ~PacketFlags.ACK
        message_copy.packet_id = None
        message_copy.dropped = False
        message_copy.finalized = False
        message_copy.queued = False
        return message_copy
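Illustrative use of the reworked take() above: the original stays behind to be dropped and acked, while the caller gets a clean copy it can mutate and re-inject:

owned = msg.take()
assert owned.packet_id is None      # copy gets a fresh sequence number when sent
assert not owned.acks and not owned.dropped and not owned.queued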
    def to_summary(self):
        string = ""
        for block_name, block_list in self.blocks.items():
            for block in block_list:
                for var_name, val in block.items():
                    if block.name == "AgentData" and var_name in ("AgentID", "SessionID"):
                        continue
                    if string:
                        string += ", "
                    string += f"{var_name}={_trunc_repr(val, 10)}"
        return string

    def __repr__(self):
        return self.repr()

@@ -356,3 +424,16 @@ class Message:
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.to_dict() == other.to_dict()


def _trunc_repr(val, max_len):
    if isinstance(val, (uuid.UUID, TupleCoord)):
        val = str(val)
    repr_val = repr(val)
    if isinstance(val, str):
        repr_val = repr_val[1:-1]
    if isinstance(val, bytes):
        repr_val = repr_val[2:-1]
    if len(repr_val) > max_len:
        return repr_val[:max_len] + "…"
    return repr_val
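A couple of concrete checks of _trunc_repr's behavior (quote stripping first, then truncation):

assert _trunc_repr("hello world", 5) == "hello…"
assert _trunc_repr(b"\x00\x01", 10) == "\\x00\\x01"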
@@ -20,7 +20,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from logging import getLogger

from llbase import llsd
import llsd

from hippolyzer.lib.base.message.data import msg_details


@@ -1,53 +1,19 @@
import ast
import base64
import importlib
import logging
import math
import os
import re
import uuid
from typing import *

import hippolyzer.lib.base.datatypes
from hippolyzer.lib.base.datatypes import *
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.helpers import HippoPrettyPrinter
from hippolyzer.lib.base.message.message import Message, Block, PacketFlags
import hippolyzer.lib.proxy.templates as templates
from hippolyzer.lib.base.message.msgtypes import MsgBlockType
from hippolyzer.lib.base.message.template import MessageTemplate
from hippolyzer.lib.proxy.packets import Direction

_TEMPLATES_MTIME = os.stat(templates.__file__).st_mtime


def _maybe_reload_templates():
    # Templates may be modified at runtime during development, check
    # if they've changed since startup and reload if they have.
    global _TEMPLATES_MTIME
    templates_mtime = os.stat(templates.__file__).st_mtime

    if _TEMPLATES_MTIME is None or _TEMPLATES_MTIME < templates_mtime:
        print("Reloading templates")
        try:
            importlib.reload(templates)  # type: ignore
            _TEMPLATES_MTIME = templates_mtime
        except:
            logging.exception("Failed to reload templates!")


def _trunc_repr(val, max_len):
    if isinstance(val, (uuid.UUID, TupleCoord)):
        val = str(val)
    repr_val = repr(val)
    if isinstance(val, str):
        repr_val = repr_val[1:-1]
    if isinstance(val, bytes):
        repr_val = repr_val[2:-1]
    if len(repr_val) > max_len:
        return repr_val[:max_len] + "…"
    return repr_val
from .. import datatypes
from .. import llsd
from .. import serialization as se
from ..helpers import HippoPrettyPrinter
from ..network.transport import Direction
from .msgtypes import PacketFlags, MsgBlockType
from .template import MessageTemplate
from .message import Message, Block, maybe_reload_templates


class VerbatimHumanVal(str):
@@ -58,135 +24,31 @@ def _filtered_exports(mod):
    return {k: getattr(mod, k) for k in mod.__all__}


def proxy_eval(eval_str: str, globals_=None, locals_=None):
def subfield_eval(eval_str: str, globals_=None, locals_=None):
    return eval(
        eval_str,
        {
            "llsd": llsd,
            "base64": base64,
            "math": math,
            **_filtered_exports(hippolyzer.lib.base.datatypes),
            **_filtered_exports(datatypes),
            **(globals_ or {})},
        locals_
    )
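A sketch of what subfield_eval exposes to embedded expressions; UUID is assumed to be among the names exported by datatypes.__all__, and the UUID value is hypothetical:

assert subfield_eval("base64.b64encode(b'hi')") == b"aGk="
assert subfield_eval("math.floor(1.5)") == 1
key = subfield_eval("UUID('2d6d9e73-a74a-4ab8-a177-f17b4b472985')")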
class ProxiedMessage(Message):
    __slots__ = ("meta", "injected", "dropped", "direction")
TextSpan = Tuple[int, int]
SpanDict = Dict[Tuple[Union[str, int], ...], TextSpan]

    def __init__(self, *args, direction=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.direction = direction if direction is not None else Direction.OUT
        self.meta = {}
        self.injected = False
        self.dropped = False
        _maybe_reload_templates()

    def to_human_string(self, replacements=None, beautify=False,
                        template: Optional[MessageTemplate] = None):
        replacements = replacements or {}
        _maybe_reload_templates()
        string = ""
        if self.direction is not None:
            string += f'{self.direction.name} '
        string += self.name
        if self.packet_id is not None:
            string += f'\n# {self.packet_id}: {PacketFlags(self.send_flags)!r}'
        string += f'{", DROPPED" if self.dropped else ""}{", INJECTED" if self.injected else ""}'
        if self.extra:
            string += f'\n# EXTRA: {self.extra!r}'
        string += '\n\n'
class SpannedString(str):
    spans: SpanDict = {}

        for block_name, block_list in self.blocks.items():
            block_suffix = ""
            if template and template.get_block(block_name).block_type == MsgBlockType.MBT_VARIABLE:
                block_suffix = ' # Variable'
            for block in block_list:
                string += f"[{block_name}]{block_suffix}\n"
                for var_name, val in block.items():
                    string += self._format_var(block, var_name, val, replacements, beautify)
        return string

    def _format_var(self, block, var_name, var_val, replacements, beautify=False):
        string = ""
        # Check if we have a more human-readable way to present this field
        ser_key = (self.name, block.name, var_name)
        serializer = se.SUBFIELD_SERIALIZERS.get(ser_key)
        field_prefix = ""
        if isinstance(var_val, VerbatimHumanVal):
            var_data = var_val
        elif isinstance(var_val, (uuid.UUID, TupleCoord)):
            var_data = str(var_val)
        elif isinstance(var_val, (str, bytes)) and not serializer:
            var_data = self._multi_line_pformat(var_val)
        else:
            var_data = repr(var_val)
        if serializer and beautify and not isinstance(var_val, VerbatimHumanVal):
            try:
                pretty_data = serializer.deserialize(block, var_val, pod=True)
                if pretty_data is not se.UNSERIALIZABLE:
                    string += f" {var_name} =| {self._multi_line_pformat(pretty_data)}"
                    if serializer.AS_HEX and isinstance(var_val, int):
                        var_data = hex(var_val)
                    if serializer.ORIG_INLINE:
                        string += f" #{var_data}\n"
                        return string
                    else:
                        string += "\n"
                    # Human-readable version should be used, orig data is commented out
                    field_prefix = "#"
            except:
                logging.exception(f"Failed in subfield serializer {ser_key!r}")
        if beautify:
            if block.name == "AgentData":
                if var_name == "AgentID" and var_val == replacements.get("AGENT_ID"):
                    var_data = "[[AGENT_ID]]"
                elif var_name == "SessionID" and var_val == replacements.get("SESSION_ID"):
                    var_data = "[[SESSION_ID]]"
            if "CircuitCode" in var_name or ("Code" in var_name and "Circuit" in block.name):
                if var_val == replacements.get("CIRCUIT_CODE"):
                    var_data = "[[CIRCUIT_CODE]]"
        string += f" {field_prefix}{var_name} = {var_data}\n"
        return string

    @staticmethod
    def _multi_line_pformat(val):
        printer = HippoPrettyPrinter(width=100)
        val = printer.pformat(val)
        newstr = ""
        # Now we need to rebuild this to add in the appropriate
        # line continuations.
        lines = list(val.splitlines())
        first_line = True
        while lines:
            line = lines.pop(0)
            prefix = ""
            suffix = ""
            if first_line:
                first_line = False
            else:
                prefix = " "

            if lines:
                suffix = " \\\n"
            newstr += f"{prefix}{line}{suffix}"
        return newstr

    def to_summary(self):
        string = ""
        for block_name, block_list in self.blocks.items():
            for block in block_list:
                for var_name, val in block.items():
                    if block.name == "AgentData" and var_name in ("AgentID", "SessionID"):
                        continue
                    if string:
                        string += ", "
                    string += f"{var_name}={_trunc_repr(val, 10)}"
        return string

class HumanMessageSerializer:
    @classmethod
    def from_human_string(cls, string, replacements=None, env=None, safe=True):
        _maybe_reload_templates()
        maybe_reload_templates()
        replacements = replacements or {}
        env = env or {}
        first_line = True
@@ -200,9 +62,16 @@ class ProxiedMessage(Message):
                continue

            if first_line:
                direction, message_name = line.split(" ", 1)
                msg = ProxiedMessage(message_name)
                first_split = [x for x in line.split(" ") if x]
                direction, message_name = first_split[:2]
                options = [x.strip("[]") for x in first_split[2:]]
                msg = Message(message_name)
                msg.direction = Direction[direction.upper()]
                for option in options:
                    if option in PacketFlags.__members__:
                        msg.send_flags |= PacketFlags[option]
                    elif re.match(r"^\d+$", option):
                        msg.send_flags |= int(option)
                first_line = False
                continue

@@ -240,14 +109,14 @@ class ProxiedMessage(Message):
                    var_val = tuple(float(x) for x in var_val.split(","))
                # UUID-ish
                elif re.match(r"\A\w+-\w+-.*", var_val):
                    var_val = UUID(var_val)
                    var_val = datatypes.UUID(var_val)
                else:
                    var_val = ast.literal_eval(var_val)

                # Normally gross, but necessary for expressiveness in built messages
                # unless a metalanguage is added.
                if evaled:
                    var_val = proxy_eval(
                    var_val = subfield_eval(
                        var_val,
                        globals_={**env, **replacements},
                        locals_={"block": cur_block}
@@ -265,6 +134,110 @@ class ProxiedMessage(Message):
                cur_block[var_name] = var_val
        return msg
    def _args_repr(self, pretty=False):
        base = super()._args_repr(pretty=pretty)
        return f"{base}, direction=Direction.{self.direction.name}"
    @classmethod
    def to_human_string(cls, msg: Message, replacements=None, beautify=False,
                        template: Optional[MessageTemplate] = None) -> SpannedString:
        replacements = replacements or {}
        maybe_reload_templates()
        spans: SpanDict = {}
        string = ""
        if msg.direction is not None:
            string += f'{msg.direction.name} '
        string += msg.name
        flags = msg.send_flags
        for poss_flag in iter(PacketFlags):
            if flags & poss_flag:
                flags &= ~poss_flag
                string += f" [{poss_flag.name}]"
        # Make sure flags with unknown meanings don't get lost
        if flags:
            string += f" [{int(flags)}]"
        if msg.packet_id is not None:
            string += f'\n# ID: {msg.packet_id}'
        string += f'{", DROPPED" if msg.dropped else ""}{", SYNTHETIC" if msg.synthetic else ""}'
        if msg.extra:
            string += f'\n# EXTRA: {msg.extra!r}'
        string += '\n\n'

        for block_name, block_list in msg.blocks.items():
            block_suffix = ""
            if template and template.get_block(block_name).block_type == MsgBlockType.MBT_VARIABLE:
                block_suffix = ' # Variable'
            for block_num, block in enumerate(block_list):
                string += f"[{block_name}]{block_suffix}\n"
                for var_name, val in block.items():
                    start_len = len(string)
                    string += cls._format_var(msg, block, var_name, val, replacements, beautify)
                    end_len = len(string)
                    # Store the spans for each var so we can highlight specific matches
                    spans[(msg.name, block_name, block_num, var_name)] = (start_len, end_len)
                    string += "\n"
        spanned = SpannedString(string)
        spanned.spans = spans
        return spanned

    @classmethod
    def _format_var(cls, msg, block, var_name, var_val, replacements, beautify=False):
        string = ""
        # Check if we have a more human-readable way to present this field
        ser_key = (msg.name, block.name, var_name)
        serializer = se.SUBFIELD_SERIALIZERS.get(ser_key)
        field_prefix = ""
        if isinstance(var_val, VerbatimHumanVal):
            var_data = var_val
        elif isinstance(var_val, (uuid.UUID, datatypes.TupleCoord)):
            var_data = str(var_val)
        elif isinstance(var_val, (str, bytes)) and not serializer:
            var_data = cls._multi_line_pformat(var_val)
        else:
            var_data = repr(var_val)
        if serializer and beautify and not isinstance(var_val, VerbatimHumanVal):
            try:
                pretty_data = serializer.deserialize(block, var_val, pod=True)
                if pretty_data is not se.UNSERIALIZABLE:
                    string += f" {var_name} =| {cls._multi_line_pformat(pretty_data)}"
                    if serializer.AS_HEX and isinstance(var_val, int):
                        var_data = hex(var_val)
                    if serializer.ORIG_INLINE:
                        string += f" #{var_data}"
                        return string
                    else:
                        string += "\n"
                    # Human-readable version should be used, orig data is commented out
                    field_prefix = "#"
            except:
                logging.exception(f"Failed in subfield serializer {ser_key!r}")
        if beautify:
            if block.name == "AgentData":
                if var_name == "AgentID" and var_val == replacements.get("AGENT_ID"):
                    var_data = "[[AGENT_ID]]"
                elif var_name == "SessionID" and var_val == replacements.get("SESSION_ID"):
                    var_data = "[[SESSION_ID]]"
            if "CircuitCode" in var_name or ("Code" in var_name and "Circuit" in block.name):
                if var_val == replacements.get("CIRCUIT_CODE"):
                    var_data = "[[CIRCUIT_CODE]]"
        string += f" {field_prefix}{var_name} = {var_data}"
        return string

    @staticmethod
    def _multi_line_pformat(val):
        printer = HippoPrettyPrinter(width=100)
        val = printer.pformat(val)
        newstr = ""
        # Now we need to rebuild this to add in the appropriate
        # line continuations.
        lines = list(val.splitlines())
        first_line = True
        while lines:
            line = lines.pop(0)
            prefix = ""
            suffix = ""
            if first_line:
                first_line = False
            else:
                prefix = " "

            if lines:
                suffix = " \\\n"
            newstr += f"{prefix}{line}{suffix}"
        return newstr
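An end-to-end sketch of the two directions (the message text is illustrative, and the exact accepted syntax is assumed from to_human_string's output format): from_human_string parses a "DIRECTION Name [FLAG]" header plus "[Block]" sections, and to_human_string returns a SpannedString whose .spans maps (message, block, block_num, var) to character offsets:

msg = HumanMessageSerializer.from_human_string(
    "OUT ChatFromViewer [RELIABLE]\n[ChatData]\n  Channel = 0")
text = HumanMessageSerializer.to_human_string(msg)
span = text.spans[("ChatFromViewer", "ChatData", 0, "Channel")]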
@@ -28,36 +28,36 @@ from hippolyzer.lib.base.events import Event

LOG = logging.getLogger(__name__)
_T = TypeVar("_T")
_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
MESSAGE_NAMES = Union[str, Iterable[str]]
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]]` apparently?
MESSAGE_NAMES = Iterable[Union[_K, str]]


class MessageHandler(Generic[_T]):
    def __init__(self):
        self.handlers: Dict[str, Event] = {}
class MessageHandler(Generic[_T, _K]):
    def __init__(self, take_by_default: bool = True):
        self.handlers: Dict[_K, Event] = {}
        self.take_by_default = take_by_default

    def register(self, message_name: str) -> Event:
    def register(self, message_name: _K) -> Event:
        LOG.debug('Creating a monitor for %s' % message_name)
        return self.handlers.setdefault(message_name, Event())
        return self.handlers.setdefault(message_name, Event(message_name))

    def subscribe(self, message_name: str, handler: MESSAGE_HANDLER) -> Event:
    def subscribe(self, message_name: Union[_K, Literal["*"]], handler: MESSAGE_HANDLER):
        notifier = self.register(message_name)
        notifier.subscribe(handler)
        return notifier

    def _subscribe_all(self, message_names: MESSAGE_NAMES, handler: MESSAGE_HANDLER,
                       predicate: Optional[PREDICATE] = None) -> List[Event]:
        if isinstance(message_names, str):
            message_names = (message_names,)
        notifiers = [self.register(name) for name in message_names]
        for n in notifiers:
            n.subscribe(handler, predicate=predicate)
        return notifiers

    @contextlib.contextmanager
    def subscribe_async(self, message_names: MESSAGE_NAMES, take: bool = True,
                        predicate: Optional[PREDICATE] = None) -> ContextManager[Callable[[], Awaitable[_T]]]:
    def subscribe_async(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
                        take: Optional[bool] = None) -> Generator[Callable[[], Awaitable[_T]], None, None]:
        """
        Subscribe to a set of messages matching predicate while within a block

@@ -69,6 +69,8 @@ class MessageHandler(Generic[_T]):
        If a subscriber is just an observer that will never drop or modify a message, take=False
        may be used and messages will be sent as usual.
        """
        if take is None:
            take = self.take_by_default
        msg_queue = asyncio.Queue()

        def _handler_wrapper(message: _T):
@@ -79,14 +81,21 @@ class MessageHandler(Generic[_T]):

        notifiers = self._subscribe_all(message_names, _handler_wrapper, predicate=predicate)

        async def _get_wrapper():
            msg = await msg_queue.get()
            # Consumption is completion
            msg_queue.task_done()
            return msg

        try:
            yield msg_queue.get
            yield _get_wrapper
        finally:
            for n in notifiers:
                n.unsubscribe(_handler_wrapper)
            return None
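Usage sketch for the reworked subscribe_async (the message name is illustrative; take=False makes this a pure observer, per the docstring above):

async def log_chat(handler: MessageHandler):
    with handler.subscribe_async(("ChatFromSimulator",), take=False) as get_msg:
        while True:
            msg = await get_msg()  # the wrapper marks the queue item done on consumption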
    def wait_for(self, message_names: MESSAGE_NAMES,
                 predicate: Optional[PREDICATE] = None, timeout=None, take=True) -> Awaitable[_T]:
    def wait_for(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
                 timeout: Optional[float] = None, take: Optional[bool] = None) -> Awaitable[_T]:
        """
        Wait for a single instance of one of message_names matching predicate

@@ -95,16 +104,18 @@ class MessageHandler(Generic[_T]):
        sequence of packets, since multiple packets may come in after the future has already
        been marked completed, causing some to be missed.
        """
        if isinstance(message_names, str):
            message_names = (message_names,)
        if take is None:
            take = self.take_by_default
        notifiers = [self.register(name) for name in message_names]

        fut = asyncio.get_event_loop().create_future()
        loop = asyncio.get_event_loop_policy().get_event_loop()
        fut = loop.create_future()
        timeout_task = None

        async def _canceller():
            await asyncio.sleep(timeout)
            fut.set_exception(asyncio.exceptions.TimeoutError("Timed out waiting for packet"))
            if not fut.done():
                fut.set_exception(asyncio.exceptions.TimeoutError("Timed out waiting for packet"))
                for n in notifiers:
                    n.unsubscribe(_handler)

@@ -117,7 +128,8 @@ class MessageHandler(Generic[_T]):
            # Whatever was awaiting this future now owns this message
            if take:
                message = message.take()
            fut.set_result(message)
            if not fut.done():
                fut.set_result(message)
            # Make sure to unregister this handler for all message types
            for n in notifiers:
                n.unsubscribe(_handler)
@@ -126,7 +138,7 @@ class MessageHandler(Generic[_T]):
            notifier.subscribe(_handler, predicate=predicate)
        return fut
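And the one-shot form; as the docstring warns, wait_for is not suitable for consuming a sequence of packets:

try:
    handshake = await handler.wait_for(("RegionHandshake",), timeout=5.0)
except asyncio.TimeoutError:
    handshake = None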
    def is_handled(self, message_name: str):
    def is_handled(self, message_name: _K):
        return message_name in self.handlers

    def handle(self, message: _T):
@@ -134,7 +146,7 @@ class MessageHandler(Generic[_T]):
        # Always try to call wildcard handlers
        self._handle_type('*', message)

    def _handle_type(self, name: str, message: _T):
    def _handle_type(self, name: Union[_K, Literal["*"]], message: _T):
        handler = self.handlers.get(name)
        if not handler:
            return

@@ -47,7 +47,6 @@ class MsgBlockType:
    MBT_SINGLE = 0
    MBT_MULTIPLE = 1
    MBT_VARIABLE = 2
    MBT_String_List = ['Single', 'Multiple', 'Variable']


class PacketFlags(enum.IntFlag):
@@ -55,6 +54,8 @@ class PacketFlags(enum.IntFlag):
    RELIABLE = 0x40
    RESENT = 0x20
    ACK = 0x10
    # Not a real flag, just used for display.
    EQ = 1 << 10


# frequency for messages
@@ -62,28 +63,23 @@ class PacketFlags(enum.IntFlag):
# = '\xFF\xFF'
# = '\xFF'
# = ''
class MsgFrequency:
    FIXED_FREQUENCY_MESSAGE = -1  # marking it
    LOW_FREQUENCY_MESSAGE = 4
    MEDIUM_FREQUENCY_MESSAGE = 2
    HIGH_FREQUENCY_MESSAGE = 1
class MsgFrequency(enum.IntEnum):
    FIXED = -1  # marking it
    LOW = 4
    MEDIUM = 2
    HIGH = 1


class MsgTrust:
    LL_NOTRUST = 0
    LL_TRUSTED = 1
class MsgEncoding(enum.IntEnum):
    UNENCODED = 0
    ZEROCODED = 1


class MsgEncoding:
    LL_UNENCODED = 0
    LL_ZEROCODED = 1


class MsgDeprecation:
    LL_DEPRECATED = 0
    LL_UDPDEPRECATED = 1
    LL_UDPBLACKLISTED = 2
    LL_NOTDEPRECATED = 3
class MsgDeprecation(enum.IntEnum):
    DEPRECATED = 0
    UDPDEPRECATED = 1
    UDPBLACKLISTED = 2
    NOTDEPRECATED = 3


# message variable types
@@ -21,7 +21,8 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import typing

from .msgtypes import MsgType, MsgBlockType
from .msgtypes import MsgType, MsgBlockType, MsgFrequency
from ..datatypes import UUID


class MessageTemplateVariable:
@@ -36,7 +37,7 @@ class MessageTemplateVariable:
        return f"{self.__class__.__name__}(name={self.name!r}, tp={self.type!r}, size={self.size!r})"

    @property
    def probably_binary(self):
    def probably_binary(self) -> bool:
        if self._probably_binary is not None:
            return self._probably_binary

@@ -48,7 +49,7 @@ class MessageTemplateVariable:
        return self._probably_binary

    @property
    def probably_text(self):
    def probably_text(self) -> bool:
        if self._probably_text is not None:
            return self._probably_text

@@ -56,62 +57,76 @@ class MessageTemplateVariable:
            self._probably_text = False
        else:
            self._probably_text = any(x in self.name for x in (
                "Name", "Text", "Title", "Description", "Message", "Label", "Method"))
                "Name", "Text", "Title", "Description", "Message", "Label", "Method", "Filename",
            ))
            self._probably_text = self._probably_text and self.name != "NameValue"
        return self._probably_text

    @property
    def default_value(self):
        if self.type.is_int:
            return 0
        elif self.type.is_float:
            return 0.0
        elif self.type == MsgType.MVT_LLUUID:
            return UUID()
        elif self.type == MsgType.MVT_BOOL:
            return False
        elif self.type == MsgType.MVT_VARIABLE:
            if self.probably_binary:
                return b""
            if self.probably_text:
                return ""
            return b""
        elif self.type in (MsgType.MVT_LLVector3, MsgType.MVT_LLVector3d, MsgType.MVT_LLQuaternion):
            return 0.0, 0.0, 0.0
        elif self.type == MsgType.MVT_LLVector4:
            return 0.0, 0.0, 0.0, 0.0
        elif self.type == MsgType.MVT_FIXED:
            return b"\x00" * self.size
        elif self.type == MsgType.MVT_IP_ADDR:
            return "0.0.0.0"
        return None


class MessageTemplateBlock:
    def __init__(self, name):
        self.variables: typing.List[MessageTemplateVariable] = []
        self.variable_map: typing.Dict[str, MessageTemplateVariable] = {}
        self.name = name
        self.block_type = 0
        self.block_type: MsgBlockType = MsgBlockType.MBT_SINGLE
        self.number = 0

    def add_variable(self, var):
    def add_variable(self, var: MessageTemplateVariable):
        self.variable_map[var.name] = var
        self.variables.append(var)

    def get_variable(self, name):
    def get_variable(self, name) -> MessageTemplateVariable:
        return self.variable_map[name]


class MessageTemplate(object):
    frequency_strings = {-1: 'fixed', 1: 'high', 2: 'medium', 4: 'low'}  # strings for printout
    deprecation_strings = ["Deprecated", "UDPDeprecated", "UDPBlackListed", "NotDeprecated"]  # using _as_string methods
    encoding_strings = ["Unencoded", "Zerocoded"]  # etc
    trusted_strings = ["Trusted", "NotTrusted"]  # etc LDE 24oct2008

class MessageTemplate:
    def __init__(self, name):
        self.blocks: typing.List[MessageTemplateBlock] = []
        self.block_map: typing.Dict[str, MessageTemplateBlock] = {}

        # this is the function or object that will handle this type of message
        self.received_count = 0

        self.name = name
        self.frequency = None
        self.msg_num = 0
        self.msg_freq_num_bytes = None
        self.msg_trust = None
        self.msg_deprecation = None
        self.msg_encoding = None
        self.frequency: typing.Optional[MsgFrequency] = None
        self.num = 0
        # Frequency + msg num as bytes
        self.freq_num_bytes = None
        self.trusted = False
        self.deprecation = None
        self.encoding = None

    def add_block(self, block):
    def add_block(self, block: MessageTemplateBlock):
        self.block_map[block.name] = block
        self.blocks.append(block)

    def get_block(self, name):
    def get_block(self, name) -> MessageTemplateBlock:
        return self.block_map[name]

    def get_msg_freq_num_len(self):
        if self.frequency == -1:
        if self.frequency == MsgFrequency.FIXED:
            return 4
        return self.frequency

    def get_frequency_as_string(self):
        return MessageTemplate.frequency_strings[self.frequency]

    def get_deprecation_as_string(self):
        return MessageTemplate.deprecation_strings[self.msg_deprecation]
@@ -27,25 +27,35 @@ from .template import MessageTemplate
from .template_parser import MessageTemplateParser


DEFAULT_PARSER = MessageTemplateParser(msg_tmpl)


class TemplateDictionary:
    """the dictionary with all known templates"""

    def __init__(self, template_list=None, message_template=None):
        if template_list is None:
            if message_template is None:
                parser = MessageTemplateParser(msg_tmpl)
                parser = DEFAULT_PARSER
            else:
                parser = MessageTemplateParser(message_template)
            template_list = parser.message_templates

        self.template_list: typing.List[MessageTemplate] = template_list

        self.template_list: typing.List[MessageTemplate] = []
        # maps name to template
        self.message_templates = {}
        self.message_templates: typing.Dict[str, MessageTemplate] = {}

        # maps (freq,num) to template
        self.message_dict = {}

        self.load_templates(template_list)

    def load_templates(self, template_list):
        self.template_list.clear()
        self.template_list.extend(template_list)
        self.message_templates.clear()
        self.message_dict.clear()

        self.build_dictionaries(template_list)
        self.build_message_ids()

@@ -58,32 +68,32 @@ class TemplateDictionary:

            # do a mapping of type to a string for easier reference
            frequency_str = ''
            if template.frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
            if template.frequency == MsgFrequency.FIXED:
                frequency_str = "Fixed"
            elif template.frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
            elif template.frequency == MsgFrequency.LOW:
                frequency_str = "Low"
            elif template.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
            elif template.frequency == MsgFrequency.MEDIUM:
                frequency_str = "Medium"
            elif template.frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
            elif template.frequency == MsgFrequency.HIGH:
                frequency_str = "High"

            self.message_dict[(frequency_str,
                               template.msg_num)] = template
                               template.num)] = template

    def build_message_ids(self):
        for template in list(self.message_templates.values()):
            frequency = template.frequency
            num_bytes = None
            if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
            if frequency == MsgFrequency.FIXED:
                # have to do this because Fixed messages are stored as a long in the template
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.msg_num)
            elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
                num_bytes = b'\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
                num_bytes = struct.pack("B", template.msg_num)
            template.msg_freq_num_bytes = num_bytes
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.LOW:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.num)
            elif frequency == MsgFrequency.MEDIUM:
                num_bytes = b'\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.HIGH:
                num_bytes = struct.pack("B", template.num)
            template.freq_num_bytes = num_bytes
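For reference, the wire prefixes this produces (standard LLUDP message-number encoding):

import struct
assert struct.pack("B", 1) == b"\x01"                                # High: 1 byte
assert b"\xff" + struct.pack("B", 1) == b"\xff\x01"                  # Medium: 2 bytes
assert b"\xff\xff" + struct.pack("!H", 1) == b"\xff\xff\x00\x01"     # Low: 4 bytes
assert b"\xff\xff\xff" + struct.pack("B", 1) == b"\xff\xff\xff\x01"  # Fixed: 4 bytes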
    def get_template_by_name(self, template_name) -> typing.Optional[MessageTemplate]:
        return self.message_templates.get(template_name)
@@ -99,3 +109,6 @@ class TemplateDictionary:

    def __iter__(self):
        return iter(self.template_list)


DEFAULT_TEMPLATE_DICT = TemplateDictionary()

@@ -22,7 +22,7 @@ import struct
import re

from . import template
from .msgtypes import MsgFrequency, MsgTrust, MsgEncoding
from .msgtypes import MsgFrequency, MsgEncoding
from .msgtypes import MsgDeprecation, MsgBlockType, MsgType
from ..exc import MessageTemplateParsingError, MessageTemplateNotFound

@@ -112,67 +112,69 @@ class MessageTemplateParser:
        frequency = None
        freq_str = match.group(2)
        if freq_str == 'Low':
            frequency = MsgFrequency.LOW_FREQUENCY_MESSAGE
            frequency = MsgFrequency.LOW
        elif freq_str == 'Medium':
            frequency = MsgFrequency.MEDIUM_FREQUENCY_MESSAGE
            frequency = MsgFrequency.MEDIUM
        elif freq_str == 'High':
            frequency = MsgFrequency.HIGH_FREQUENCY_MESSAGE
            frequency = MsgFrequency.HIGH
        elif freq_str == 'Fixed':
            frequency = MsgFrequency.FIXED_FREQUENCY_MESSAGE
            frequency = MsgFrequency.FIXED

        new_template.frequency = frequency

        msg_num = int(match.group(3), 0)
        if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
        if frequency == MsgFrequency.FIXED:
            # have to do this because Fixed messages are stored as a long in the template
            msg_num &= 0xff
            msg_num_bytes = struct.pack('!BBBB', 0xff, 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.LOW:
            msg_num_bytes = struct.pack('!BBH', 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.MEDIUM:
            msg_num_bytes = struct.pack('!BB', 0xff, msg_num)
        elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.HIGH:
            msg_num_bytes = struct.pack('!B', msg_num)
        else:
            raise Exception("don't know about frequency %s" % frequency)

        new_template.msg_num = msg_num
        new_template.msg_freq_num_bytes = msg_num_bytes
        new_template.num = msg_num
        new_template.freq_num_bytes = msg_num_bytes

        msg_trust = None
        msg_trust_str = match.group(4)
        if msg_trust_str == 'Trusted':
            msg_trust = MsgTrust.LL_TRUSTED
            msg_trust = True
        elif msg_trust_str == 'NotTrusted':
            msg_trust = MsgTrust.LL_NOTRUST
            msg_trust = False
        else:
            raise ValueError(f"Invalid trust {msg_trust_str}")

        new_template.msg_trust = msg_trust
        new_template.trusted = msg_trust

        msg_encoding = None
        msg_encoding_str = match.group(5)
        if msg_encoding_str == 'Unencoded':
            msg_encoding = MsgEncoding.LL_UNENCODED
            msg_encoding = MsgEncoding.UNENCODED
        elif msg_encoding_str == 'Zerocoded':
            msg_encoding = MsgEncoding.LL_ZEROCODED
            msg_encoding = MsgEncoding.ZEROCODED
        else:
            raise ValueError(f"Invalid encoding {msg_encoding_str}")

        new_template.msg_encoding = msg_encoding
        new_template.encoding = msg_encoding

        msg_dep = None
        msg_dep_str = match.group(7)
        if msg_dep_str:
            if msg_dep_str == 'Deprecated':
                msg_dep = MsgDeprecation.LL_DEPRECATED
                msg_dep = MsgDeprecation.DEPRECATED
            elif msg_dep_str == 'UDPDeprecated':
                msg_dep = MsgDeprecation.LL_UDPDEPRECATED
                msg_dep = MsgDeprecation.UDPDEPRECATED
            elif msg_dep_str == 'UDPBlackListed':
                msg_dep = MsgDeprecation.LL_UDPBLACKLISTED
                msg_dep = MsgDeprecation.UDPBLACKLISTED
            elif msg_dep_str == 'NotDeprecated':
                msg_dep = MsgDeprecation.LL_NOTDEPRECATED
                msg_dep = MsgDeprecation.NOTDEPRECATED
        else:
            msg_dep = MsgDeprecation.LL_NOTDEPRECATED
            msg_dep = MsgDeprecation.NOTDEPRECATED
        if msg_dep is None:
            raise MessageTemplateParsingError("Unknown msg_dep field %s" % match.group(0))
        new_template.msg_deprecation = msg_dep
        new_template.deprecation = msg_dep

        return new_template
@@ -26,7 +26,7 @@ from logging import getLogger
from hippolyzer.lib.base.datatypes import JankStringyBytes
from hippolyzer.lib.base.settings import Settings
from .template import MessageTemplateVariable
from .template_dict import TemplateDictionary
from .template_dict import DEFAULT_TEMPLATE_DICT
from .msgtypes import MsgType, MsgBlockType, PacketLayout
from .data_packer import TemplateDataPacker
from .message import Message, Block
@@ -62,14 +62,13 @@ def _parse_msg_num(reader: se.BufferReader):


class UDPMessageDeserializer:
    DEFAULT_TEMPLATE = TemplateDictionary()
    DEFAULT_TEMPLATE = DEFAULT_TEMPLATE_DICT

    def __init__(self, settings=None, message_cls: Type[Message] = Message):
    def __init__(self, settings=None):
        self.settings = settings or Settings()
        self.template_dict = self.DEFAULT_TEMPLATE
        self.message_cls = message_cls

    def deserialize(self, msg_buff: bytes):
    def deserialize(self, msg_buff: bytes) -> Message:
        msg = self._parse_message_header(msg_buff)
        if not self.settings.ENABLE_DEFERRED_PACKET_PARSING:
            try:
@@ -85,7 +84,8 @@ class UDPMessageDeserializer:

        reader = se.BufferReader("!", data)

        msg: Message = self.message_cls("Placeholder")
        msg: Message = Message("Placeholder")
        msg.synthetic = False
        msg.send_flags = reader.read(se.U8)
        msg.packet_id = reader.read(se.U32)

@@ -126,8 +126,14 @@ class UDPMessageDeserializer:
        frequency, num = _parse_msg_num(reader)
        current_template = self.template_dict.get_template_by_pair(frequency, num)
        if current_template is None:
            raise exc.MessageTemplateNotFound("deserializing data")
        msg.name = current_template.name
            if self.settings.ALLOW_UNKNOWN_MESSAGES:
                LOG.warning(f"Unknown message type {frequency}:{num}")
                msg.unknown_message = True
                msg.name = "UnknownMessage:%d" % num
            else:
                raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
        else:
            msg.name = current_template.name

        # extra field, see note regarding msg.offset
        msg.raw_extra = reader.read_bytes(msg.offset)
@@ -143,6 +149,12 @@ class UDPMessageDeserializer:
        # Already parsed if we don't have a raw body
        if not raw_body:
            return

        if msg.unknown_message:
            # We can't parse this, we don't know anything about it
            msg.deserializer = None
            return

        msg.raw_body = None
        msg.deserializer = None

@@ -157,7 +169,6 @@ class UDPMessageDeserializer:
        reader.seek(current_template.get_msg_freq_num_len() + msg.offset)

        for tmpl_block in current_template.blocks:
            LOG.debug("Parsing %s:%s" % (msg.name, tmpl_block.name))
            # EOF?
            if not len(reader):
                # Seems like even some "Single" blocks are optional?
@@ -180,7 +191,6 @@ class UDPMessageDeserializer:

            for i in range(repeat_count):
                current_block = Block(tmpl_block.name)
                LOG.debug("Adding block %s" % current_block.name)
                msg.add_block(current_block)

                for tmpl_variable in tmpl_block.variables:
@@ -222,11 +232,17 @@ class UDPMessageDeserializer:
            if tmpl_variable.probably_binary:
                return unpacked_data
            # Truncated strings need to be treated carefully
            if tmpl_variable.probably_text and unpacked_data.endswith(b"\x00"):
                try:
                    return unpacked_data.decode("utf8").rstrip("\x00")
                except UnicodeDecodeError:
                    return JankStringyBytes(unpacked_data)
            if tmpl_variable.probably_text:
                # If it has a null terminator, let's try to decode it first.
                # We don't want to do this if there isn't one, because that may change
                # the meaning of the data.
                if unpacked_data.endswith(b"\x00"):
                    try:
                        return unpacked_data.decode("utf8").rstrip("\x00")
                    except UnicodeDecodeError:
                        pass
                # Failed, return jank stringy bytes
                return JankStringyBytes(unpacked_data)
            elif tmpl_variable.type in {MsgType.MVT_FIXED, MsgType.MVT_VARIABLE}:
                # No idea if this should be bytes or a string... make an object that's sort of both.
                return JankStringyBytes(unpacked_data)
@@ -26,7 +26,7 @@ from .data_packer import TemplateDataPacker
from .message import Message, MsgBlockList
from .msgtypes import MsgType, MsgBlockType
from .template import MessageTemplateVariable, MessageTemplateBlock
from .template_dict import TemplateDictionary
from .template_dict import TemplateDictionary, DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base import exc
from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.datatypes import RawBytes
@@ -35,7 +35,7 @@ logger = getLogger('message.udpserializer')


class UDPMessageSerializer:
    DEFAULT_TEMPLATE = TemplateDictionary(None)
    DEFAULT_TEMPLATE = DEFAULT_TEMPLATE_DICT

    def __init__(self, message_template=None):
        if message_template is not None:
@@ -45,7 +45,7 @@ class UDPMessageSerializer:

    def serialize(self, msg: Message):
        current_template = self.template_dict.get_template_by_name(msg.name)
        if current_template is None:
        if current_template is None and msg.raw_body is None:
            raise exc.MessageSerializationError("message name", "invalid message name")

        # Header and trailers are all big-endian
@@ -69,13 +69,13 @@ class UDPMessageSerializer:
        # frequency and message number. The template stores it because it doesn't
        # change per template.
        body_writer = se.BufferWriter("<")
        body_writer.write_bytes(current_template.msg_freq_num_bytes)
        body_writer.write_bytes(current_template.freq_num_bytes)
        body_writer.write_bytes(msg.extra)

        # We're going to pop off keys as we go, so shallow copy the dict.
        blocks = copy.copy(msg.blocks)

        missing_block = None
        missing_blocks: List[MessageTemplateBlock] = []
        # Iterate based on the order of the blocks in the message template
        for tmpl_block in current_template.blocks:
            block_list = blocks.pop(tmpl_block.name, None)
@@ -83,13 +83,21 @@ class UDPMessageSerializer:
            # omitted by SL. Not an error unless another block containing data follows it.
            # Keep track.
            if block_list is None:
                missing_block = tmpl_block.name
                missing_blocks.append(tmpl_block)
                logger.debug("No block %s, bailing out" % tmpl_block.name)
                continue
            # Had a missing block before, but we found one later in the template?
            elif missing_block:
                raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_block}")
            self._serialize_block(body_writer, tmpl_block, block_list)
            # Had a missing block before, but we specified one defined later in the template?
            elif missing_blocks:
                if not all(x.block_type == MsgBlockType.MBT_VARIABLE for x in missing_blocks):
                    raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_blocks!r}")
                # This is okay, we just need to put empty blocks for all the variable blocks that came before.
                # Normally we wouldn't even put these to match SL behavior, but in this case we need the
                # empty blocks so the decoder will decode these as the correct block type.
                for missing_block in missing_blocks:
                    self._serialize_block_list(body_writer, missing_block, MsgBlockList())
                missing_blocks.clear()

            self._serialize_block_list(body_writer, tmpl_block, block_list)
        if blocks:
            raise KeyError(f"Unexpected {tuple(blocks.keys())!r} blocks in {msg.name}")

@@ -105,8 +113,8 @@ class UDPMessageSerializer:
        writer.write(se.U8, len(msg.acks))
        return writer.copy_buffer()

    def _serialize_block(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
                         block_list: MsgBlockList):
    def _serialize_block_list(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
                              block_list: MsgBlockList):
        block_count = len(block_list)
        # Multiple block type means there is a static number of blocks
        if tmpl_block.block_type == MsgBlockType.MBT_MULTIPLE:

@@ -19,6 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
hippolyzer/lib/base/network/caps_client.py (new file, 174 lines)
@@ -0,0 +1,174 @@
from __future__ import annotations

import asyncio
import copy
import dataclasses
from types import TracebackType
from typing import *

import aiohttp
import multidict

from hippolyzer.lib.base import llsd as llsd_lib


class CapsClientResponse(aiohttp.ClientResponse):
    """
    Not actually instantiated, used for lying to the type system
    since we'll dynamically put this onto a ClientResponse instance.
    Will fail isinstance().
    """
    async def read_llsd(self) -> Any:
        raise NotImplementedError()


class _HippoSessionRequestContextManager:
    """
    _SessionRequestContextManager but with a symmetrical API

    aiohttp.request() and aiohttp.ClientSession.request() have different APIs.
    One is sync returning a context manager, one is async returning a coro.
    aiohttp.request() also doesn't accept the arguments that we need for custom
    SSL contexts. To deal with requests that have existing sessions and those without,
    just give them both the same wrapper and don't close the session on context manager
    exit if it wasn't our session.
    """
    __slots__ = ("_coro", "_resp", "_session", "_session_owned")

    def __init__(
            self,
            coro: Coroutine[asyncio.Future[Any], None, aiohttp.ClientResponse],
            session: aiohttp.ClientSession,
            session_owned: bool = True,
    ) -> None:
        self._coro = coro
        self._resp: Optional[aiohttp.ClientResponse] = None
        self._session = session
        self._session_owned = session_owned

    async def __aenter__(self) -> CapsClientResponse:
        try:
            self._resp = await self._coro

            # We don't control creation of the ClientResponse, so tack on
            # a convenience method for reading LLSD.
            async def _read_llsd():
                return llsd_lib.parse_xml(await self._resp.read())
            self._resp.read_llsd = _read_llsd
        except BaseException:
            if self._session_owned:
                await self._session.close()
            raise
        else:
            # intentionally fooling the type system
            return self._resp  # type: ignore

    async def __aexit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc: Optional[BaseException],
            tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        if self._session_owned:
            await self._session.close()


CAPS_DICT = Union[
    Mapping[str, str],
    multidict.MultiDict[str],
]


class CapsClient:
    def __init__(self, caps: Optional[CAPS_DICT] = None, session: Optional[aiohttp.ClientSession] = None) -> None:
        self._caps = caps
        self._session = session

    def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
        return cap_or_url, headers, proxy, ssl

    def _get_caps(self) -> Optional[CAPS_DICT]:
        return self._caps

    def request(self, method: str, cap_or_url: str, *, path: str = "", data: Any = None,
                headers: Optional[Dict] = None, session: Optional[aiohttp.ClientSession] = None,
                llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
                proxy: Optional[str] = None, skip_auto_headers: Optional[Sequence[str]] = None,
                **kwargs) -> _HippoSessionRequestContextManager:
        if cap_or_url.startswith("http"):
            if path:
                raise ValueError("Specifying both path and a full URL not supported")
        else:
            caps = self._get_caps()
            if caps is None:
                raise RuntimeError(f"Need a caps dict to request a Cap like {cap_or_url}")
            if cap_or_url not in caps:
                raise KeyError(f"{cap_or_url} is not a full URL and not a Cap")
            cap_or_url = caps[cap_or_url]
            if path:
                cap_or_url += path

        if params is not None:
            for pname, pval in params.items():
                if not isinstance(pval, str):
                    params[pname] = str(pval)

        session_owned = False
        # Use an existing session if we have one to take advantage of connection pooling
        # otherwise create one
        session = session or self._session
        if session is None:
            session_owned = True
            session = aiohttp.ClientSession(
                connector=aiohttp.TCPConnector(force_close=True),
                connector_owner=True
            )

        if headers is None:
            headers = {}
        else:
            headers = copy.copy(headers)

        # Use sentinel val so explicit `None` can be passed
        if llsd is not dataclasses.MISSING:
            data = llsd_lib.format_xml(llsd)
            # Sometimes needed even on GETs.
            if "Content-Type" not in headers:
                headers["Content-Type"] = "application/llsd+xml"
        # Always present, usually ignored by the server.
        if "Accept" not in headers:
            headers["Accept"] = "application/llsd+xml"
        # Ask to keep the connection open if we're sharing a session
        if not session_owned:
            headers["Connection"] = "keep-alive"
            headers["Keep-alive"] = "300"

        ssl = kwargs.pop('ssl', None)
        cap_or_url, headers, proxy, ssl = self._request_fixups(cap_or_url, headers, proxy, ssl)

        resp = session._request(method, cap_or_url, data=data, headers=headers,  # noqa: need internal call
                                params=params, ssl=ssl, proxy=proxy,
                                skip_auto_headers=skip_auto_headers or ("User-Agent",), **kwargs)
        return _HippoSessionRequestContextManager(resp, session, session_owned=session_owned)

    def get(self, cap_or_url: str, *, path: str = "", headers: Optional[dict] = None,
            session: Optional[aiohttp.ClientSession] = None, params: Optional[Dict[str, Any]] = None,
            proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("GET", cap_or_url=cap_or_url, path=path, headers=headers,
                            session=session, params=params, proxy=proxy, **kwargs)

    def post(self, cap_or_url: str, *, path: str = "", data: Any = None,
             headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
             llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
             proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("POST", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
                            llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)

    def put(self, cap_or_url: str, *, path: str = "", data: Any = None,
            headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
            llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
            proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("PUT", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
                            llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)
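Usage sketch (the cap name and URL are made up; read_llsd is the helper patched onto the response in __aenter__):

async def fetch_seed_info():
    client = CapsClient(caps={"Seed": "https://sim.example.com/cap/1234"})
    async with client.get("Seed") as resp:
        resp.raise_for_status()
        return await resp.read_llsd()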
hippolyzer/lib/base/network/transport.py (new file, 76 lines)
@@ -0,0 +1,76 @@
import abc
import asyncio
import enum
import socket
from typing import *


ADDR_TUPLE = Tuple[str, int]


class Direction(enum.Enum):
    OUT = enum.auto()
    IN = enum.auto()

    def __invert__(self):
        if self == self.OUT:
            return self.IN
        return self.OUT


class UDPPacket:
    def __init__(
            self,
            src_addr: Optional[ADDR_TUPLE],
            dst_addr: ADDR_TUPLE,
            data: bytes,
            direction: Direction
    ):
        self.src_addr = src_addr
        self.dst_addr = dst_addr
        self.data = data
        self.direction = direction
        self.meta = {}

    @property
    def outgoing(self):
        return self.direction == Direction.OUT

    @property
    def incoming(self):
        return self.direction == Direction.IN

    @property
    def far_addr(self):
        if self.outgoing:
            return self.dst_addr
        return self.src_addr

    def __repr__(self):
        return f"<{self.__class__.__name__} src_addr={self.src_addr!r} dst_addr={self.dst_addr!r} data={self.data!r}>"
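Quick behavioral check (addresses made up): far_addr returns whichever end is remote for the packet's direction, and ~ flips a Direction:

pkt = UDPPacket(("127.0.0.1", 9000), ("203.0.113.5", 13000), b"\x00", Direction.OUT)
assert pkt.far_addr == ("203.0.113.5", 13000)
assert ~Direction.OUT is Direction.IN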
class AbstractUDPTransport(abc.ABC):
|
||||
__slots__ = ()
|
||||
|
||||
@abc.abstractmethod
|
||||
def send_packet(self, packet: UDPPacket) -> None:
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def close(self) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class SocketUDPTransport(AbstractUDPTransport):
|
||||
def __init__(self, transport: Union[asyncio.DatagramTransport, socket.socket]):
|
||||
super().__init__()
|
||||
self.transport = transport
|
||||
|
||||
def send_packet(self, packet: UDPPacket) -> None:
|
||||
if not packet.outgoing:
|
||||
raise ValueError(f"{self.__class__.__name__} can only send outbound packets")
|
||||
self.transport.sendto(packet.data, packet.dst_addr)
|
||||
|
||||
def close(self) -> None:
|
||||
self.transport.close()
|
||||
@@ -18,208 +18,127 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from __future__ import annotations

import dataclasses
import logging
import struct
from typing import *

import lazy_object_proxy
import recordclass

-from hippolyzer.lib.base.datatypes import Vector3, Quaternion, Vector4
+from hippolyzer.lib.base.datatypes import Vector3, Quaternion, Vector4, UUID, TaggedUnion
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.namevalue import NameValueCollection
import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as tmpls


-class Object:
-    """ represents an Object
-
-    Initialize the Object class instance
-
-    >>> obj = Object()
-    """
+class Object(recordclass.RecordClass, use_weakref=True):  # type: ignore
+    LocalID: Optional[int] = None
+    State: Optional[int] = None
+    FullID: Optional[UUID] = None
+    CRC: Optional[int] = None
+    PCode: Optional[tmpls.PCode] = None
+    Material: Optional[tmpls.MCode] = None
+    ClickAction: Optional[tmpls.ClickAction] = None
+    Scale: Optional[Vector3] = None
+    ParentID: Optional[int] = None
+    # Actually contains a weakref proxy
+    Parent: Optional[Object] = None
+    UpdateFlags: Optional[int] = None
+    PathCurve: Optional[int] = None
+    ProfileCurve: Optional[int] = None
+    PathBegin: Optional[int] = None
+    PathEnd: Optional[int] = None
+    PathScaleX: Optional[int] = None
+    PathScaleY: Optional[int] = None
+    PathShearX: Optional[int] = None
+    PathShearY: Optional[int] = None
+    PathTwist: Optional[int] = None
+    PathTwistBegin: Optional[int] = None
+    PathRadiusOffset: Optional[int] = None
+    PathTaperX: Optional[int] = None
+    PathTaperY: Optional[int] = None
+    PathRevolutions: Optional[int] = None
+    PathSkew: Optional[int] = None
+    ProfileBegin: Optional[int] = None
+    ProfileEnd: Optional[int] = None
+    ProfileHollow: Optional[int] = None
+    TextureEntry: Optional[tmpls.TextureEntryCollection] = None
+    TextureAnim: Optional[tmpls.TextureAnim] = None
+    NameValue: Optional[Any] = None
+    Data: Optional[Any] = None
+    Text: Optional[str] = None
+    TextColor: Optional[bytes] = None
+    MediaURL: Optional[str] = None
+    PSBlock: Optional[Dict] = None
+    ExtraParams: Optional[Dict[tmpls.ExtraParamType, Any]] = None
+    Sound: Optional[UUID] = None
+    OwnerID: Optional[UUID] = None
+    SoundGain: Optional[float] = None
+    SoundFlags: Optional[int] = None
+    SoundRadius: Optional[float] = None
+    JointType: Optional[int] = None
+    JointPivot: Optional[int] = None
+    JointAxisOrAnchor: Optional[int] = None
+    TreeSpecies: Optional[int] = None
+    ScratchPad: Optional[bytes] = None
+    ObjectCosts: Optional[Dict] = None
+    ChildIDs: Optional[List[int]] = None
+    # Same as parent, contains weakref proxies.
+    Children: Optional[List[Object]] = None
+
+    FootCollisionPlane: Optional[Vector4] = None
+    Position: Optional[Vector3] = None
+    Velocity: Optional[Vector3] = None
+    Acceleration: Optional[Vector3] = None
+    Rotation: Optional[Quaternion] = None
+    AngularVelocity: Optional[Vector3] = None

-    __slots__ = (
-        "LocalID",
-        "State",
-        "FullID",
-        "CRC",
-        "PCode",
-        "Material",
-        "ClickAction",
-        "Scale",
-        "ParentID",
-        "UpdateFlags",
-        "PathCurve",
-        "ProfileCurve",
-        "PathBegin",
-        "PathEnd",
-        "PathScaleX",
-        "PathScaleY",
-        "PathShearX",
-        "PathShearY",
-        "PathTwist",
-        "PathTwistBegin",
-        "PathRadiusOffset",
-        "PathTaperX",
-        "PathTaperY",
-        "PathRevolutions",
-        "PathSkew",
-        "ProfileBegin",
-        "ProfileEnd",
-        "ProfileHollow",
-        "TextureEntry",
-        "TextureAnim",
-        "NameValue",
-        "Data",
-        "Text",
-        "TextColor",
-        "MediaURL",
-        "PSBlock",
-        "ExtraParams",
-        "Sound",
-        "OwnerID",
-        "SoundGain",
-        "SoundFlags",
-        "SoundRadius",
-        "JointType",
-        "JointPivot",
-        "JointAxisOrAnchor",
-        "TreeSpecies",
-        "ObjectCosts",
-        "FootCollisionPlane",
-        "Position",
-        "Velocity",
-        "Acceleration",
-        "Rotation",
-        "AngularVelocity",
-        "CreatorID",
-        "GroupID",
-        "CreationDate",
-        "BaseMask",
-        "OwnerMask",
-        "GroupMask",
-        "EveryoneMask",
-        "NextOwnerMask",
-        "OwnershipCost",
-        "SaleType",
-        "SalePrice",
-        "AggregatePerms",
-        "AggregatePermTextures",
-        "AggregatePermTexturesOwner",
-        "Category",
-        "InventorySerial",
-        "ItemID",
-        "FolderID",
-        "FromTaskID",
-        "LastOwnerID",
-        "Name",
-        "Description",
-        "TouchName",
-        "SitName",
-        "TextureID",
-        "ChildIDs",
-        "Children",
-        "Parent",
-        "ScratchPad",
-        "__weakref__",
-    )
+    # from ObjectProperties
+    CreatorID: Optional[UUID] = None
+    GroupID: Optional[UUID] = None
+    CreationDate: Optional[int] = None
+    BaseMask: Optional[int] = None
+    OwnerMask: Optional[int] = None
+    GroupMask: Optional[int] = None
+    EveryoneMask: Optional[int] = None
+    NextOwnerMask: Optional[int] = None
+    OwnershipCost: Optional[int] = None
+    # TaxRate
+    SaleType: Optional[int] = None
+    SalePrice: Optional[int] = None
+    AggregatePerms: Optional[int] = None
+    AggregatePermTextures: Optional[int] = None
+    AggregatePermTexturesOwner: Optional[int] = None
+    Category: Optional[int] = None
+    InventorySerial: Optional[int] = None
+    ItemID: Optional[UUID] = None
+    FolderID: Optional[UUID] = None
+    FromTaskID: Optional[UUID] = None
+    LastOwnerID: Optional[UUID] = None
+    Name: Optional[str] = None
+    Description: Optional[str] = None
+    TouchName: Optional[str] = None
+    SitName: Optional[str] = None
+    TextureID: Optional[List[UUID]] = None
+    RegionHandle: Optional[int] = None
+    Animations: Optional[List[UUID]] = None

-    def __init__(self, *, ID=None, LocalID=None, State=None, FullID=None, CRC=None, PCode=None, Material=None,
-                 ClickAction=None, Scale=None, ParentID=None, UpdateFlags=None, PathCurve=None, ProfileCurve=None,
-                 PathBegin=None, PathEnd=None, PathScaleX=None, PathScaleY=None, PathShearX=None, PathShearY=None,
-                 PathTwist=None, PathTwistBegin=None, PathRadiusOffset=None, PathTaperX=None, PathTaperY=None,
-                 PathRevolutions=None, PathSkew=None, ProfileBegin=None, ProfileEnd=None, ProfileHollow=None,
-                 TextureEntry=None, TextureAnim=None, NameValue=None, Data=None, Text=None, TextColor=None,
-                 MediaURL=None, PSBlock=None, ExtraParams=None, Sound=None, OwnerID=None, SoundGain=None,
-                 SoundFlags=None, SoundRadius=None, JointType=None, JointPivot=None, JointAxisOrAnchor=None,
-                 FootCollisionPlane=None, Position=None, Velocity=None, Acceleration=None, Rotation=None,
-                 AngularVelocity=None, TreeSpecies=None, ObjectCosts=None, ScratchPad=None):
+    def __init__(self, **_kwargs):
        """ set up the object attributes """

-        self.LocalID = LocalID or ID  # U32
-        self.State = State  # U8
-        self.FullID = FullID  # LLUUID
-        self.CRC = CRC  # U32 // TEMPORARY HACK FOR JAMES
-        self.PCode = PCode  # U8
-        self.Material = Material  # U8
-        self.ClickAction = ClickAction  # U8
-        self.Scale = Scale  # LLVector3
-        self.ParentID = ParentID  # U32
-        # Actually contains a weakref proxy
-        self.Parent: Optional[Object] = None
-        self.UpdateFlags = UpdateFlags  # U32 // U32, see object_flags.h
-        self.PathCurve = PathCurve  # U8
-        self.ProfileCurve = ProfileCurve  # U8
-        self.PathBegin = PathBegin  # U16 // 0 to 1, quanta = 0.01
-        self.PathEnd = PathEnd  # U16 // 0 to 1, quanta = 0.01
-        self.PathScaleX = PathScaleX  # U8 // 0 to 1, quanta = 0.01
-        self.PathScaleY = PathScaleY  # U8 // 0 to 1, quanta = 0.01
-        self.PathShearX = PathShearX  # U8 // -.5 to .5, quanta = 0.01
-        self.PathShearY = PathShearY  # U8 // -.5 to .5, quanta = 0.01
-        self.PathTwist = PathTwist  # S8 // -1 to 1, quanta = 0.01
-        self.PathTwistBegin = PathTwistBegin  # S8 // -1 to 1, quanta = 0.01
-        self.PathRadiusOffset = PathRadiusOffset  # S8 // -1 to 1, quanta = 0.01
-        self.PathTaperX = PathTaperX  # S8 // -1 to 1, quanta = 0.01
-        self.PathTaperY = PathTaperY  # S8 // -1 to 1, quanta = 0.01
-        self.PathRevolutions = PathRevolutions  # U8 // 0 to 3, quanta = 0.015
-        self.PathSkew = PathSkew  # S8 // -1 to 1, quanta = 0.01
-        self.ProfileBegin = ProfileBegin  # U16 // 0 to 1, quanta = 0.01
-        self.ProfileEnd = ProfileEnd  # U16 // 0 to 1, quanta = 0.01
-        self.ProfileHollow = ProfileHollow  # U16 // 0 to 1, quanta = 0.01
-        self.TextureEntry = TextureEntry  # Variable 2
-        self.TextureAnim = TextureAnim  # Variable 1
-        self.NameValue = NameValue  # Variable 2
-        self.Data = Data  # Variable 2
-        self.Text = Text  # Variable 1 // llSetText() hovering text
-        self.TextColor = TextColor  # Fixed 4 // actually, a LLColor4U
-        self.MediaURL = MediaURL  # Variable 1 // URL for web page, movie, etc.
-        self.PSBlock = PSBlock  # Variable 1
-        self.ExtraParams = ExtraParams or {}  # Variable 1
-        self.Sound = Sound  # LLUUID
-        self.OwnerID = OwnerID  # LLUUID // HACK object's owner id, only set if non-null sound, for muting
-        self.SoundGain = SoundGain  # F32
-        self.SoundFlags = SoundFlags  # U8
-        self.SoundRadius = SoundRadius  # F32 // cutoff radius
-        self.JointType = JointType  # U8
-        self.JointPivot = JointPivot  # LLVector3
-        self.JointAxisOrAnchor = JointAxisOrAnchor  # LLVector3
-        self.TreeSpecies = TreeSpecies
-        self.ScratchPad = ScratchPad
-        self.ObjectCosts = ObjectCosts or {}
+        self.ExtraParams = self.ExtraParams or {}  # Variable 1
+        self.ObjectCosts = self.ObjectCosts or {}
        self.ChildIDs = []
+        self.Animations = self.Animations or []
        # Same as parent, contains weakref proxies.
        self.Children: List[Object] = []

-        # from ObjectUpdateCompressed
-        self.FootCollisionPlane: Optional[Vector4] = FootCollisionPlane
-        self.Position: Optional[Vector3] = Position
-        self.Velocity: Optional[Vector3] = Velocity
-        self.Acceleration: Optional[Vector3] = Acceleration
-        self.Rotation: Optional[Quaternion] = Rotation
-        self.AngularVelocity: Optional[Vector3] = AngularVelocity
-
-        # from ObjectProperties
-        self.CreatorID = None
-        self.GroupID = None
-        self.CreationDate = None
-        self.BaseMask = None
-        self.OwnerMask = None
-        self.GroupMask = None
-        self.EveryoneMask = None
-        self.NextOwnerMask = None
-        self.OwnershipCost = None
-        # TaxRate
-        self.SaleType = None
-        self.SalePrice = None
-        self.AggregatePerms = None
-        self.AggregatePermTextures = None
-        self.AggregatePermTexturesOwner = None
-        self.Category = None
-        self.InventorySerial = None
-        self.ItemID = None
-        self.FolderID = None
-        self.FromTaskID = None
-        self.LastOwnerID = None
-        self.Name = None
-        self.Description = None
-        self.TouchName = None
-        self.SitName = None
-        self.TextureID = None

    @property
    def GlobalPosition(self) -> Vector3:
        return handle_to_global_pos(self.RegionHandle) + self.RegionPosition

    @property
    def RegionPosition(self) -> Vector3:
@@ -243,23 +162,320 @@ class Object:
        # TODO: Cache this and dirty cache if ancestor updates rot?
        return self.Rotation * self.Parent.RegionRotation

    @property
    def AncestorsKnown(self) -> bool:
        obj = self
        while obj.ParentID:
            if not obj.Parent:
                return False
            obj = obj.Parent
        return True

    def update_properties(self, properties: Dict[str, Any]) -> Set[str]:
        """ takes a dictionary of attribute:value and makes it so """
        updated_properties = set()
        for key, val in properties.items():
            if hasattr(self, key):
-                old_val = getattr(self, key, val)
+                old_val = getattr(self, key, dataclasses.MISSING)
-                # Don't check equality if we're using a lazy proxy,
-                # parsing is deferred until we actually use it.
-                is_proxy = isinstance(val, lazy_object_proxy.Proxy)
-                if is_proxy or old_val != val:
+                if any(isinstance(x, lazy_object_proxy.Proxy) for x in (old_val, val)):
+                    # TODO: be smarter about this. Can we store the raw bytes and
+                    # compare those if it's an unparsed object?
+                    is_updated = old_val is not val
+                else:
+                    is_updated = old_val != val
+                if is_updated:
                    updated_properties.add(key)
                setattr(self, key, val)
        return updated_properties

+    def to_dict(self):
+        val = recordclass.asdict(self)
+        del val["Children"]
+        del val["Parent"]
+        return val

    @property
    def Ancestors(self) -> List[Object]:
        obj = self
        ancestors = []
        while obj.Parent:
            obj = obj.Parent
            ancestors.append(obj)
        return ancestors

    @property
    def Descendents(self) -> List[Object]:
        new_children = [self]
        descendents = []
        while new_children:
            to_check = new_children[:]
            new_children.clear()
            for obj in to_check:
                for child in obj.Children:
                    new_children.append(child)
                    descendents.append(child)
        return descendents


def handle_to_gridxy(handle: int) -> Tuple[int, int]:
    return (handle >> 32) // 256, (handle & 0xFFffFFff) // 256


def gridxy_to_handle(x: int, y: int):
    return ((x * 256) << 32) | (y * 256)


def handle_to_global_pos(handle: int) -> Vector3:
    return Vector3(handle >> 32, handle & 0xFFffFFff)
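# Illustrative sketch (not part of the diff): a region handle packs the
# region's global X coordinate into the high 32 bits and its global Y into
# the low 32 bits, both in meters and region-aligned (multiples of 256).
# 1000 * 256 = 256000 and 2000 * 256 = 512000, so:
assert gridxy_to_handle(1000, 2000) == (256000 << 32) | 512000
assert handle_to_gridxy(gridxy_to_handle(1000, 2000)) == (1000, 2000)
# handle_to_global_pos() returns the region's south-west corner, so
# corner + region-local position = global position (see Object.GlobalPosition).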
def normalize_object_update(block: Block, handle: int):
    object_data = {
        "RegionHandle": handle,
        "FootCollisionPlane": None,
        "SoundFlags": block["Flags"],
        "SoundGain": block["Gain"],
        "SoundRadius": block["Radius"],
        **dict(block.items()),
        "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
        "NameValue": block.deserialize_var("NameValue", make_copy=False),
        "TextureAnim": block.deserialize_var("TextureAnim", make_copy=False),
        "ExtraParams": block.deserialize_var("ExtraParams", make_copy=False) or {},
        "ClickAction": block.deserialize_var("ClickAction", make_copy=False),
        "PSBlock": block.deserialize_var("PSBlock", make_copy=False).value,
        "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
        "State": block.deserialize_var("State", make_copy=False),
        **block.deserialize_var("ObjectData", make_copy=False).value,
    }
    object_data["LocalID"] = object_data.pop("ID")
    # Empty == not updated
    if not object_data["TextureEntry"]:
        object_data.pop("TextureEntry")
    # OwnerID is only set in this packet if a sound is playing. Don't allow
    # ObjectUpdates to clobber _real_ OwnerIDs we had from ObjectProperties
    # with a null UUID.
    if object_data["OwnerID"] == UUID.ZERO:
        del object_data["OwnerID"]
    del object_data["Flags"]
    del object_data["Gain"]
    del object_data["Radius"]
    del object_data["ObjectData"]
    return object_data


def normalize_terse_object_update(block: Block, handle: int):
    object_data = {
        **block.deserialize_var("Data", make_copy=False),
        **dict(block.items()),
        "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
        "RegionHandle": handle,
    }
    object_data["LocalID"] = object_data.pop("ID")
    object_data.pop("Data")
    # Empty == not updated
    if object_data["TextureEntry"] is None:
        object_data.pop("TextureEntry")
    return object_data


def normalize_object_update_compressed_data(data: bytes):
    # Shared by ObjectUpdateCompressed and VOCache case
    compressed = FastObjectUpdateCompressedDataDeserializer.read(data)
    # TODO: ObjectUpdateCompressed doesn't provide a default value for unused
    #  fields, whereas ObjectUpdate and friends do (TextColor, etc.)
    #  need some way to normalize ObjectUpdates so they won't appear to have
    #  changed just because an ObjectUpdate got sent with a default value
    # Only used for determining which sections are present
    del compressed["Flags"]

    # Unlike other ObjectUpdate types, a null value in an ObjectUpdateCompressed
    # always means that there is no value, not that the value hasn't changed
    # from the client's view. Use the default value when that happens.
    ps_block = compressed.pop("PSBlockNew", None)
    if ps_block is None:
        ps_block = compressed.pop("PSBlock", None)
    if ps_block is None:
        ps_block = TaggedUnion(0, None)
    compressed.pop("PSBlock", None)
    if compressed["NameValue"] is None:
        compressed["NameValue"] = NameValueCollection()
    if compressed["Text"] is None:
        compressed["Text"] = b""
        compressed["TextColor"] = b""
    if compressed["MediaURL"] is None:
        compressed["MediaURL"] = b""
    if compressed["AngularVelocity"] is None:
        compressed["AngularVelocity"] = Vector3()
    if compressed["SoundFlags"] is None:
        compressed["SoundFlags"] = 0
        compressed["SoundGain"] = 0.0
        compressed["SoundRadius"] = 0.0
        compressed["Sound"] = UUID.ZERO
    if compressed["TextureEntry"] is None:
        compressed["TextureEntry"] = tmpls.TextureEntryCollection()

    object_data = {
        "PSBlock": ps_block.value,
        # Parent flag not set means explicitly un-parented
        "ParentID": compressed.pop("ParentID", None) or 0,
        "LocalID": compressed.pop("ID"),
        **compressed,
    }
    # Don't clobber OwnerID in case the object has a proper one from
    # a previous ObjectProperties. OwnerID isn't expected to be populated
    # on ObjectUpdates unless an attached sound is playing.
    if object_data["OwnerID"] == UUID.ZERO:
        del object_data["OwnerID"]
    return object_data


def normalize_object_update_compressed(block: Block, handle: int):
    compressed = normalize_object_update_compressed_data(block["Data"])
    compressed["UpdateFlags"] = block.deserialize_var("UpdateFlags", make_copy=False)
    compressed["RegionHandle"] = handle
    return compressed


class SimpleStructReader(se.BufferReader):
    def read_struct(self, spec: struct.Struct, peek=False) -> Tuple[Any, ...]:
        val = spec.unpack_from(self._buffer, self._pos)
        if not peek:
            self._pos += spec.size
        return val

    def read_bytes_null_term(self) -> bytes:
        old_offset = self._pos
        while self._buffer[self._pos] != 0:
            self._pos += 1
        val = self._buffer[old_offset:self._pos]
        self._pos += 1
        return val


class FastObjectUpdateCompressedDataDeserializer:
    HEADER_STRUCT = struct.Struct("<16sIBBIBB3f3f3fI16s")
    ANGULAR_VELOCITY_STRUCT = struct.Struct("<3f")
    PARENT_ID_STRUCT = struct.Struct("<I")
    TREE_SPECIES_STRUCT = struct.Struct("<B")
    DATAPACKER_LEN = struct.Struct("<I")
    COLOR_ADAPTER = tmpls.Color4()
    PARTICLES_OLD = se.TypedBytesFixed(86, tmpls.PSBLOCK_TEMPLATE)
    SOUND_STRUCT = struct.Struct("<16sfBf")
    PRIM_PARAMS_STRUCT = struct.Struct("<BBHHBBBBbbbbbBbHHH")
    ATTACHMENT_STATE_ADAPTER = tmpls.AttachmentStateAdapter(None)

    @classmethod
    def read(cls, data: bytes) -> Dict:
        reader = SimpleStructReader("<", data)
        foo = reader.read_struct(cls.HEADER_STRUCT)
        full_id, local_id, pcode, state, crc, material, click_action, \
            scalex, scaley, scalez, posx, posy, posz, rotx, roty, rotz, \
            flags, owner_id = foo
        scale = Vector3(scalex, scaley, scalez)
        full_id = UUID(bytes=full_id)
        pcode = tmpls.PCode(pcode)
        if pcode == tmpls.PCode.AVATAR:
            state = tmpls.AgentState(state)
        elif pcode == tmpls.PCode.PRIMITIVE:
            state = cls.ATTACHMENT_STATE_ADAPTER.decode(state, None)
        pos = Vector3(posx, posy, posz)
        rot = Quaternion(rotx, roty, rotz)
        owner_id = UUID(bytes=owner_id)
        ang_vel = None
        if flags & tmpls.CompressedFlags.ANGULAR_VELOCITY.value:
            ang_vel = Vector3(*reader.read_struct(cls.ANGULAR_VELOCITY_STRUCT))
        parent_id = None
        if flags & tmpls.CompressedFlags.PARENT_ID.value:
            parent_id = reader.read_struct(cls.PARENT_ID_STRUCT)[0]
        tree_species = None
        if flags & tmpls.CompressedFlags.TREE.value:
            tree_species = reader.read_struct(cls.TREE_SPECIES_STRUCT)[0]
        scratchpad = None
        if flags & tmpls.CompressedFlags.SCRATCHPAD.value:
            scratchpad = reader.read_bytes(reader.read_struct(cls.DATAPACKER_LEN)[0])
        text = None
        text_color = None
        if flags & tmpls.CompressedFlags.TEXT.value:
            text = reader.read_bytes_null_term().decode("utf8")
            text_color = cls.COLOR_ADAPTER.decode(reader.read_bytes(4), ctx=None)
        media_url = None
        if flags & tmpls.CompressedFlags.MEDIA_URL.value:
            media_url = reader.read_bytes_null_term().decode("utf8")
        psblock = None
        if flags & tmpls.CompressedFlags.PARTICLES.value:
            psblock = reader.read(cls.PARTICLES_OLD)
        extra_params = reader.read(tmpls.EXTRA_PARAM_COLLECTION)
        sound, sound_gain, sound_flags, sound_radius = None, None, None, None
        if flags & tmpls.CompressedFlags.SOUND.value:
            sound, sound_gain, sound_flags, sound_radius = reader.read_struct(cls.SOUND_STRUCT)
            sound = UUID(bytes=sound)
            sound_flags = tmpls.SoundFlags(sound_flags)
        name_value = None
        if flags & tmpls.CompressedFlags.NAME_VALUES.value:
            name_value = reader.read(tmpls.NAMEVALUES_TERMINATED_TEMPLATE)
        path_curve, profile_curve, path_begin, path_end, path_scale_x, path_scale_y, \
            path_shear_x, path_shear_y, path_twist, path_twist_begin, path_radius_offset, \
            path_taper_x, path_taper_y, path_revolutions, path_skew, profile_begin, \
            profile_end, profile_hollow = reader.read_struct(cls.PRIM_PARAMS_STRUCT)
        texture_entry = reader.read(tmpls.DATA_PACKER_TE_TEMPLATE)
        texture_anim = None
        if flags & tmpls.CompressedFlags.TEXTURE_ANIM.value:
            texture_anim = reader.read(se.TypedByteArray(se.U32, tmpls.TA_TEMPLATE))
        psblock_new = None
        if flags & tmpls.CompressedFlags.PARTICLES_NEW.value:
            psblock_new = reader.read(tmpls.PSBLOCK_TEMPLATE)

        if len(reader):
            logging.warning(f"{len(reader)} bytes left at end of buffer for compressed {data!r}")

        return {
-            x: getattr(self, x) for x in dir(self)
-            if not isinstance(getattr(self.__class__, x, None), property) and
-            not callable(getattr(self, x)) and not x.startswith("_")
            "FullID": full_id,
            "ID": local_id,
            "PCode": pcode,
            "State": state,
            "CRC": crc,
            "Material": tmpls.MCode(material),
            "ClickAction": tmpls.ClickAction(click_action),
            "Scale": scale,
            "Position": pos,
            "Rotation": rot,
            "Flags": flags,
            "OwnerID": owner_id,
            "AngularVelocity": ang_vel,
            "ParentID": parent_id,
            "TreeSpecies": tree_species,
            "ScratchPad": scratchpad,
            "Text": text,
            "TextColor": text_color,
            "MediaURL": media_url,
            "PSBlock": psblock,
            "ExtraParams": extra_params,
            "Sound": sound,
            "SoundGain": sound_gain,
            "SoundFlags": sound_flags,
            "SoundRadius": sound_radius,
            "NameValue": name_value,
            "PathCurve": path_curve,
            "ProfileCurve": profile_curve,
            "PathBegin": path_begin,  # 0 to 1, quanta = 0.01
            "PathEnd": path_end,  # 0 to 1, quanta = 0.01
            "PathScaleX": path_scale_x,  # 0 to 1, quanta = 0.01
            "PathScaleY": path_scale_y,  # 0 to 1, quanta = 0.01
            "PathShearX": path_shear_x,  # -.5 to .5, quanta = 0.01
            "PathShearY": path_shear_y,  # -.5 to .5, quanta = 0.01
            "PathTwist": path_twist,  # -1 to 1, quanta = 0.01
            "PathTwistBegin": path_twist_begin,  # -1 to 1, quanta = 0.01
            "PathRadiusOffset": path_radius_offset,  # -1 to 1, quanta = 0.01
            "PathTaperX": path_taper_x,  # -1 to 1, quanta = 0.01
            "PathTaperY": path_taper_y,  # -1 to 1, quanta = 0.01
            "PathRevolutions": path_revolutions,  # 0 to 3, quanta = 0.015
            "PathSkew": path_skew,  # -1 to 1, quanta = 0.01
            "ProfileBegin": profile_begin,  # 0 to 1, quanta = 0.01
            "ProfileEnd": profile_end,  # 0 to 1, quanta = 0.01
            "ProfileHollow": profile_hollow,  # 0 to 1, quanta = 0.01
            "TextureEntry": texture_entry,
            "TextureAnim": texture_anim,
            "PSBlockNew": psblock_new,
        }
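# Illustrative sketch (not part of the diff): ObjectUpdateCompressed is a
# flag-gated wire format; an optional section is only present in the byte
# stream when its bit is set in the header's flags word, which is why read()
# above tests each CompressedFlags bit in wire order before consuming bytes.
# A hypothetical stdlib-only equivalent of one such read:
import struct

def read_optional_u32(buf: bytes, offset: int, flags: int, bit: int):
    """Return (value, new_offset); value is None when the flag bit is unset."""
    if not flags & bit:
        return None, offset
    (val,) = struct.unpack_from("<I", buf, offset)
    return val, offset + 4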
@@ -5,12 +5,12 @@ import enum
import math
import struct
import types
import typing
import weakref
from io import SEEK_CUR, SEEK_SET, SEEK_END, RawIOBase, BufferedIOBase
from typing import *

import lazy_object_proxy
import numpy as np

import hippolyzer.lib.base.llsd as llsd
import hippolyzer.lib.base.datatypes as dtypes
@@ -28,6 +28,14 @@ class _Unserializable:
        return False


+class MissingType:
+    """Simple sentinel type like dataclasses._MISSING_TYPE"""
+    pass
+
+
+MISSING = MissingType()


UNSERIALIZABLE = _Unserializable()
_T = TypeVar("_T")
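# Illustrative sketch (not part of the diff): a module-level sentinel makes
# "value absent" distinguishable from a stored None, the same trick the
# stdlib uses with dataclasses.MISSING. A hypothetical helper:
def get_or_default(mapping: dict, key: str, default_factory):
    val = mapping.get(key, MISSING)  # MISSING instance defined above
    if val is MISSING:  # identity check: exactly one sentinel instance exists
        return default_factory()
    return val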
@@ -289,7 +297,7 @@ class SerializableBase(abc.ABC):
    @classmethod
    def default_value(cls) -> Any:
        # None may be a valid default, so return MISSING as a sentinel val
-        return dataclasses.MISSING
+        return MISSING


class Adapter(SerializableBase, abc.ABC):
@@ -329,18 +337,18 @@ class ForwardSerializable(SerializableBase):
    def __init__(self, func: Callable[[], SERIALIZABLE_TYPE]):
        super().__init__()
        self._func = func
-        self._wrapped = dataclasses.MISSING
+        self._wrapped: Union[MissingType, SERIALIZABLE_TYPE] = MISSING

    def _ensure_evaled(self):
-        if self._wrapped is dataclasses.MISSING:
+        if self._wrapped is MISSING:
            self._wrapped = self._func()

    def __getattr__(self, attr):
        return getattr(self._wrapped, attr)

    def default_value(self) -> Any:
-        if self._wrapped is dataclasses.MISSING:
-            return dataclasses.MISSING
+        if self._wrapped is MISSING:
+            return MISSING
        return self._wrapped.default_value()

    def serialize(self, val, writer: BufferWriter, ctx: Optional[ParseContext]):
@@ -358,10 +366,10 @@ class Template(SerializableBase):
    def __init__(self, template_spec: Dict[str, SERIALIZABLE_TYPE], skip_missing=False):
        self._template_spec = template_spec
        self._skip_missing = skip_missing
-        self._size = dataclasses.MISSING
+        self._size = MISSING

    def calc_size(self):
-        if self._size is not dataclasses.MISSING:
+        if self._size is not MISSING:
            return self._size
        sum_bytes = 0
        for _, field_type in self._template_spec.items():
@@ -831,7 +839,7 @@ class QuantizedFloat(QuantizedFloatBase):
        super().__init__(prim_spec, zero_median=False)
        self.lower = lower
        self.upper = upper
        # We know the range in `QuantizedFloat` when it's constructed, so we can infer
        # whether or not we should round towards zero in __init__
        max_error = (upper - lower) * self.step_mag
        midpoint = (upper + lower) / 2.0
@@ -891,7 +899,23 @@ class TupleCoord(SerializableBase):
        return cls.COORD_CLS


-class QuantizedTupleCoord(TupleCoord):
+class EncodedTupleCoord(TupleCoord, abc.ABC):
+    _elem_specs: Sequence[SERIALIZABLE_TYPE]
+
+    def serialize(self, vals, writer: BufferWriter, ctx):
+        vals = self._vals_to_tuple(vals)
+        for spec, val in zip(self._elem_specs, vals):
+            writer.write(spec, val, ctx=ctx)
+
+    def deserialize(self, reader: Reader, ctx):
+        vals = (reader.read(spec, ctx=ctx) for spec in self._elem_specs)
+        val = self.COORD_CLS(*vals)
+        if self.need_pod(reader):
+            return tuple(val)
+        return val
+
+
+class QuantizedTupleCoord(EncodedTupleCoord):
    def __init__(self, lower=None, upper=None, component_scales=None):
        super().__init__()
        if component_scales:
@@ -907,17 +931,14 @@ class QuantizedTupleCoord(TupleCoord):
        )
        assert len(self._elem_specs) == self.NUM_ELEMS

-    def serialize(self, vals, writer: BufferWriter, ctx):
-        vals = self._vals_to_tuple(vals)
-        for spec, val in zip(self._elem_specs, vals):
-            writer.write(spec, val, ctx=ctx)
-
-    def deserialize(self, reader: Reader, ctx):
-        vals = (reader.read(spec, ctx=ctx) for spec in self._elem_specs)
-        val = self.COORD_CLS(*vals)
-        if self.need_pod(reader):
-            return tuple(val)
-        return val

+class FixedPointTupleCoord(EncodedTupleCoord):
+    def __init__(self, int_bits: int, frac_bits: int, signed: bool):
+        super().__init__()
+        self._elem_specs = tuple(
+            FixedPoint(self.ELEM_SPEC, int_bits, frac_bits, signed)
+            for _ in range(self.NUM_ELEMS)
+        )
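# Illustrative sketch (not part of the diff): a fixed-point field stores a
# float as an integer scaled by 2**frac_bits. Assuming the common unsigned
# layout, int_bits=8 / frac_bits=8 in a U16 round-trips like this:
def encode_fixed(value: float, frac_bits: int = 8) -> int:
    return round(value * (1 << frac_bits))  # 100.5 -> 25728

def decode_fixed(raw: int, frac_bits: int = 8) -> float:
    return raw / (1 << frac_bits)  # 25728 -> 100.5

assert decode_fixed(encode_fixed(100.5)) == 100.5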
class Vector3(TupleCoord):
@@ -993,6 +1014,12 @@ class Vector4U8(QuantizedTupleCoord):
    COORD_CLS = dtypes.Vector4


+class FixedPointVector3U16(FixedPointTupleCoord):
+    ELEM_SPEC = U16
+    NUM_ELEMS = 3
+    COORD_CLS = dtypes.Vector3


class OptionalPrefixed(SerializableBase):
    """Field prefixed by a U8 indicating whether or not it's present"""
    OPTIONAL = True
@@ -1092,15 +1119,6 @@ class IntEnum(Adapter):
        return lambda: self.enum_cls(0)


-def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> typing.Tuple[Union[str, int], ...]:
-    # Shove any bits not represented in the IntFlag into an int
-    left_over = val
-    for flag in iter(flag_cls):
-        left_over &= ~flag.value
-    extra = (int(left_over),) if left_over else ()
-    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra


class IntFlag(Adapter):
    def __init__(self, flag_cls: Type[enum.IntFlag],
                 flag_spec: Optional[SerializablePrimitive] = None):
@@ -1121,7 +1139,7 @@ class IntFlag(Adapter):

    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
        if pod:
-            return flags_to_pod(self.flag_cls, val)
+            return dtypes.flags_to_pod(self.flag_cls, val)
        return self.flag_cls(val)

    def default_value(self) -> Any:
@@ -1187,9 +1205,9 @@ class ContextMixin(Generic[_T]):
    def _choose_option(self, ctx: Optional[ParseContext]) -> _T:
        idx = self._fun(ctx)
        if idx not in self._options:
-            if dataclasses.MISSING not in self._options:
+            if MISSING not in self._options:
                raise KeyError(f"{idx!r} not found in {self._options!r}")
-            idx = dataclasses.MISSING
+            idx = MISSING
        return self._options[idx]


@@ -1330,6 +1348,12 @@ class TypedBytesBase(SerializableBase, abc.ABC):
        return self._spec.default_value()


+class TypedBytesGreedy(TypedBytesBase):
+    def __init__(self, spec, empty_is_none=False, check_trailing_bytes=True, lazy=False):
+        self._bytes_tmpl = BytesGreedy()
+        super().__init__(spec, empty_is_none, check_trailing_bytes, lazy=lazy)


class TypedByteArray(TypedBytesBase):
    def __init__(self, len_spec, spec, empty_is_none=False, check_trailing_bytes=True, lazy=False):
        self._bytes_tmpl = ByteArray(len_spec)
@@ -1427,7 +1451,7 @@ class StringEnumAdapter(Adapter):
class FixedPoint(SerializableBase):
    def __init__(self, ser_spec, int_bits, frac_bits, signed=False):
        # Should never be used due to how this handles signs :/
-        assert(not ser_spec.is_signed)
+        assert (not ser_spec.is_signed)

        self._ser_spec: SerializablePrimitive = ser_spec
        self._signed = signed
@@ -1437,7 +1461,7 @@ class FixedPoint(SerializableBase):
        self._min_val = ((1 << int_bits) * -1) if signed else 0
        self._max_val = 1 << int_bits

-        assert(required_bits == (ser_spec.calc_size() * 8))
+        assert (required_bits == (ser_spec.calc_size() * 8))

    def deserialize(self, reader: Reader, ctx):
        fixed_val = float(self._ser_spec.deserialize(reader, ctx))
@@ -1467,8 +1491,8 @@ def _make_undefined_raiser():
    return f


-def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=dataclasses.MISSING,
-                    default_factory=dataclasses.MISSING, init=True, repr=True,  # noqa
+def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default: Any = dataclasses.MISSING,
+                    default_factory: Any = dataclasses.MISSING, init=True, repr=True,  # noqa
                    hash=None, compare=True) -> dataclasses.Field:  # noqa
    enrich_factory = False
    # Lambda, need to defer evaluation of spec until it's actually used.
@@ -1489,7 +1513,7 @@ def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=datacla
        metadata={"spec": spec}, default=default, default_factory=default_factory, init=init,
        repr=repr, hash=hash, compare=compare
    )
-    # Need to stuff this on so it knows which field went unspecified.
+    # Need to stuff this on, so it knows which field went unspecified.
    if enrich_factory:
        default_factory.field = field
    return field
@@ -1501,6 +1525,9 @@ class DataclassAdapter(Adapter):
        self._data_cls = data_cls

    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
+        if isinstance(val, lazy_object_proxy.Proxy):
+            # Have to unwrap these or the dataclass check will fail
+            val = val.__wrapped__
        if dataclasses.is_dataclass(val):
            val = dataclasses.asdict(val)
        return val
@@ -1553,8 +1580,16 @@ def bitfield_field(bits: int, *, adapter: Optional[Adapter] = None, default=0, i


class BitfieldDataclass(DataclassAdapter):
-    def __init__(self, data_cls: Type,
-                 prim_spec: Optional[SerializablePrimitive] = None, shift: bool = True):
+    PRIM_SPEC: ClassVar[Optional[SerializablePrimitive]] = None
+
+    def __init__(self, data_cls: Optional[Type] = None,
+                 prim_spec: Optional[SerializablePrimitive] = None, shift: Optional[bool] = None):
        if not dataclasses.is_dataclass(data_cls):
            raise ValueError(f"{data_cls!r} is not a dataclass")
+        if prim_spec is None:
+            prim_spec = getattr(data_cls, 'PRIM_SPEC', None)
+        if shift is None:
+            shift = getattr(data_cls, 'SHIFT', True)
        super().__init__(data_cls, prim_spec)
        self._shift = shift
        self._bitfield_spec = self._build_bitfield(data_cls)
@@ -1584,7 +1619,9 @@ class BitfieldDataclass(DataclassAdapter):


class ExprAdapter(Adapter):
-    def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable, encode_func: Callable):
+    _ID = lambda x: x
+
+    def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable = _ID, encode_func: Callable = _ID):
        super().__init__(child_spec)
        self._decode_func = decode_func
        self._encode_func = encode_func
@@ -1613,7 +1650,7 @@ class BufferedLLSDBinaryParser(llsd.HippoLLSDBinaryParser):
            byte = self._getc()[0]
        except IndexError:
            byte = None
-        raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index+offset, byte))
+        raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index + offset, byte))

    def _getc(self, num=1):
        return self._buffer.read_bytes(num)
@@ -1633,16 +1670,77 @@ class BinaryLLSD(SerializableBase):
        writer.write_bytes(llsd.format_binary(val, with_header=False))


+class NumPyArray(Adapter):
+    """
+    A 2-dimensional, dynamic-length array of data from numpy. Greedy.
+
+    Unlike most other serializers, your endianness _must_ be specified in the dtype!
+    """
+    __slots__ = ['dtype', 'elems']
+
+    def __init__(self, child_spec: Optional[SERIALIZABLE_TYPE], dtype: np.dtype, elems: int):
+        super().__init__(child_spec)
+        self.dtype = dtype
+        self.elems = elems
+
+    def _pick_dtype(self, endian: str) -> np.dtype:
+        return self.dtype.newbyteorder('>') if endian != "<" else self.dtype
+
+    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
+        num_elems = len(val) // self.dtype.itemsize
+        num_ndims = num_elems // self.elems
+        buf_array = np.frombuffer(val, dtype=self.dtype, count=num_elems)
+        return buf_array.reshape((num_ndims, self.elems))
+
+    def encode(self, val, ctx: Optional[ParseContext]) -> Any:
+        val: np.ndarray = np.array(val, dtype=self.dtype).flatten()
+        return val.tobytes()
+
+
+class QuantizedNumPyArray(Adapter):
+    """Like QuantizedFloat. Only works correctly for unsigned types, no zero midpoint rounding!"""
+    def __init__(self, child_spec: NumPyArray, lower: float, upper: float):
+        super().__init__(child_spec)
+        self.dtype = child_spec.dtype
+        self.lower = lower
+        self.upper = upper
+        self.step_mag = 1.0 / ((2 ** (self.dtype.itemsize * 8)) - 1)
+
+    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
+        val = np.array(val, dtype=np.float64)
+        val = np.clip(val, self.lower, self.upper)
+        delta = self.upper - self.lower
+        if delta == 0.0:
+            return np.zeros(val.shape, dtype=self.dtype)
+
+        val -= self.lower
+        val /= delta
+        val /= self.step_mag
+        return np.rint(val).astype(self.dtype)
+
+    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
+        val = val.astype(np.float64)
+        val *= self.step_mag
+        val *= self.upper - self.lower
+        val += self.lower
+        return val
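# Illustrative sketch (not part of the diff): QuantizedNumPyArray maps floats
# in [lower, upper] onto the full integer range of the dtype. For uint8 with
# lower=0.0 and upper=1.0, step_mag is 1/255, so 0.5 encodes to
# round(0.5 * 255) = 128 and decodes back to 128/255, about 0.502; the
# round-trip error is bounded by one quantization step.
import numpy as np

step_mag = 1.0 / 255  # uint8
encoded = np.rint(np.array([0.5]) / step_mag).astype(np.uint8)  # [128]
decoded = encoded.astype(np.float64) * step_mag  # [0.50196...]
assert abs(decoded[0] - 0.5) < step_mag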
def subfield_serializer(msg_name, block_name, var_name):
    def f(orig_cls):
        global SUBFIELD_SERIALIZERS
        SUBFIELD_SERIALIZERS[(msg_name, block_name, var_name)] = orig_cls
        return orig_cls
    return f


+_ENUM_TYPE = TypeVar("_ENUM_TYPE", bound=Type[dtypes.IntEnum])
+_FLAG_TYPE = TypeVar("_FLAG_TYPE", bound=Type[dtypes.IntFlag])


def enum_field_serializer(msg_name, block_name, var_name):
-    def f(orig_cls):
+    def f(orig_cls: _ENUM_TYPE) -> _ENUM_TYPE:
+        if not issubclass(orig_cls, dtypes.IntEnum):
+            raise ValueError(f"{orig_cls} must be a subclass of Hippolyzer's IntEnum class")
        wrapper = subfield_serializer(msg_name, block_name, var_name)
        wrapper(IntEnumSubfieldSerializer(orig_cls))
        return orig_cls
@@ -1650,7 +1748,9 @@ def enum_field_serializer(msg_name, block_name, var_name):


def flag_field_serializer(msg_name, block_name, var_name):
-    def f(orig_cls):
+    def f(orig_cls: _FLAG_TYPE) -> _FLAG_TYPE:
+        if not issubclass(orig_cls, dtypes.IntFlag):
+            raise ValueError(f"{orig_cls!r} must be a subclass of Hippolyzer's IntFlag class")
        wrapper = subfield_serializer(msg_name, block_name, var_name)
        wrapper(IntFlagSubfieldSerializer(orig_cls))
        return orig_cls
@@ -1703,7 +1803,7 @@ class BaseSubfieldSerializer(abc.ABC):
        """Guess at which template a val might correspond to"""
        if dataclasses.is_dataclass(val):
            val = dataclasses.asdict(val)  # noqa
-        if isinstance(val, bytes):
+        if isinstance(val, (bytes, bytearray)):
            template_checker = cls._template_sizes_match
        elif isinstance(val, dict):
            template_checker = cls._template_keys_match
@@ -1824,7 +1924,7 @@ class IntEnumSubfieldSerializer(AdapterInstanceSubfieldSerializer):
        val = super().deserialize(ctx_obj, val, pod=pod)
        # Don't pretend we were able to deserialize this if we
        # had to fall through to the `int` case.
-        if pod and type(val) == int:
+        if pod and type(val) is int:
            return UNSERIALIZABLE
        return val

@@ -1839,7 +1939,6 @@ class IntFlagSubfieldSerializer(AdapterInstanceSubfieldSerializer):

def http_serializer(msg_name):
    def f(orig_cls):
-        global HTTP_SERIALIZERS
        HTTP_SERIALIZERS[msg_name] = orig_cls
        return orig_cls
    return f
@@ -19,81 +19,49 @@ along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""

from __future__ import annotations

import dataclasses
from typing import *


_T = TypeVar("_T")


class SettingDescriptor(Generic[_T]):
    __slots__ = ("name", "default")

    def __init__(self, default: Union[Callable[[], _T], _T]):
        self.default = default
        self.name: Optional[str] = None

    def __set_name__(self, owner: Settings, name: str):
        self.name = name

    def _make_default(self) -> _T:
        if callable(self.default):
            return self.default()
        return self.default

    def __get__(self, obj: Settings, owner: Optional[Type] = None) -> _T:
        val: Union[_T, dataclasses.MISSING] = obj.get_setting(self.name)
        if val is dataclasses.MISSING:
            val = self._make_default()
        return val

    def __set__(self, obj: Settings, value: _T) -> None:
        obj.set_setting(self.name, value)


class Settings:
-    def __init__(self, quiet_logging=False, spammy_logging=False, log_tests=True):
-        """ some lovely configurable settings
-
-        These are applied application wide, and can be
-        overridden at any time in a specific instance
-
-        quiet_logging overrides spammy_logging
-        """
-        self.quiet_logging = quiet_logging
-        self.spammy_logging = spammy_logging
-
-        # toggle handling udp packets
-        self.HANDLE_PACKETS = True
-        self.HANDLE_OUTGOING_PACKETS = False
-
-        # toggle parsing all/handled packets
-        self.ENABLE_DEFERRED_PACKET_PARSING = True
-
-        # ~~~~~~~~~~~~~~~~~~
-        # Logging behaviors
-        # ~~~~~~~~~~~~~~~~~~
-        # being a test tool, and an immature one at that,
-        # enable fine granularity in the logging, but
-        # make sure we can tone it down as well
-
-        self.LOG_VERBOSE = True
-        self.ENABLE_BYTES_TO_HEX_LOGGING = False
-        self.ENABLE_CAPS_LOGGING = True
-        self.ENABLE_CAPS_LLSD_LOGGING = False
-        self.ENABLE_EQ_LOGGING = True
-        self.ENABLE_UDP_LOGGING = True
-        self.ENABLE_OBJECT_LOGGING = True
-        self.LOG_SKIPPED_PACKETS = True
-        self.ENABLE_HOST_LOGGING = True
-        self.LOG_COROUTINE_SPAWNS = True
-        self.PROXY_LOGGING = False
-
-        # allow disabling logging of certain packets
-        self.DISABLE_SPAMMERS = True
-        self.UDP_SPAMMERS = ['PacketAck', 'AgentUpdate']
-
-        # toggle handling a region's event queue
-        self.ENABLE_REGION_EVENT_QUEUE = True
-
-        # how many seconds to wait between polling
-        # a region's event queue
-        self.REGION_EVENT_QUEUE_POLL_INTERVAL = 1
-
-        if self.spammy_logging:
-            self.ENABLE_BYTES_TO_HEX_LOGGING = True
-            self.ENABLE_CAPS_LLSD_LOGGING = True
-            self.DISABLE_SPAMMERS = False
-
-        # override the defaults
-        if self.quiet_logging:
-            self.LOG_VERBOSE = False
-            self.ENABLE_BYTES_TO_HEX_LOGGING = False
-            self.ENABLE_CAPS_LOGGING = False
-            self.ENABLE_CAPS_LLSD_LOGGING = False
-            self.ENABLE_EQ_LOGGING = False
-            self.ENABLE_UDP_LOGGING = False
-            self.LOG_SKIPPED_PACKETS = False
-            self.ENABLE_OBJECT_LOGGING = False
-            self.ENABLE_HOST_LOGGING = False
-            self.LOG_COROUTINE_SPAWNS = False
-            self.DISABLE_SPAMMERS = True
-
-        # ~~~~~~~~~~~~~~~~~~~~~~
-        # Test related settings
-        # ~~~~~~~~~~~~~~~~~~~~~~
-
-        if log_tests:
-            self.ENABLE_LOGGING_IN_TESTS = True
-        else:
-            self.ENABLE_LOGGING_IN_TESTS = False
+    ENABLE_DEFERRED_PACKET_PARSING: bool = SettingDescriptor(True)
+    ALLOW_UNKNOWN_MESSAGES: bool = SettingDescriptor(True)
+
+    def __init__(self):
+        self._settings: Dict[str, Any] = {}
+
+    def get_setting(self, name: str) -> Any:
+        return self._settings.get(name, dataclasses.MISSING)
+
+    def set_setting(self, name: str, val: Any):
+        self._settings[name] = val
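# Illustrative sketch (not part of the diff): SettingDescriptor stores writes
# in the instance's _settings dict and falls back to its default on reads,
# calling the default when it is a factory. A hypothetical subclass:
class MySettings(Settings):
    POLL_INTERVAL: float = SettingDescriptor(1.0)
    SPAM_FILTERS: list = SettingDescriptor(list)  # factory: fresh list per unset read

settings = MySettings()
assert settings.POLL_INTERVAL == 1.0  # default, nothing stored yet
settings.POLL_INTERVAL = 0.25  # routed through set_setting()
assert settings.POLL_INTERVAL == 0.25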
hippolyzer/lib/base/templates.py (new file, 2439 lines): file diff suppressed because it is too large
hippolyzer/lib/base/test_utils.py (new file, 45 lines):
@@ -0,0 +1,45 @@
import asyncio
from typing import Any, Optional, List, Tuple

from hippolyzer.lib.base.message.circuit import Circuit, ConnectionHolder
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, ADDR_TUPLE, UDPPacket


class MockTransport(AbstractUDPTransport):
    def __init__(self):
        super().__init__()
        self.packets: List[Tuple[bytes, Tuple[str, int]]] = []

    def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
        pass

    def abort(self) -> None:
        pass

    def close(self) -> None:
        pass

    def send_packet(self, packet: UDPPacket) -> None:
        self.packets.append((packet.data, packet.dst_addr))


class MockHandlingCircuit(Circuit):
    def __init__(self, handler: MessageHandler[Message, str]):
        super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
        self.handler = handler

    def _send_prepared_message(self, message: Message, transport=None):
        loop = asyncio.get_event_loop_policy().get_event_loop()
        loop.call_soon(self.handler.handle, message)


class MockConnectionHolder(ConnectionHolder):
    def __init__(self, circuit, message_handler):
        self.circuit = circuit
        self.message_handler = message_handler


async def soon(awaitable) -> Message:
    return await asyncio.wait_for(awaitable, timeout=1.0)
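# Illustrative sketch (not part of the diff): MockTransport records outbound
# packets so circuit code can be exercised without touching the network.
# Assumes Direction lives alongside UDPPacket in
# hippolyzer.lib.base.network.transport, as in the transport code earlier.
from hippolyzer.lib.base.network.transport import Direction, UDPPacket

def test_send_records_packet():
    transport = MockTransport()
    packet = UDPPacket(None, ("127.0.0.1", 9000), b"ping", Direction.OUT)
    transport.send_packet(packet)
    assert transport.packets == [(b"ping", ("127.0.0.1", 9000))]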
@@ -8,18 +8,17 @@ import dataclasses
from typing import *

from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.helpers import proxify
-from hippolyzer.lib.base.message.message import Block
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.templates import (
+from hippolyzer.lib.base.helpers import create_logged_task
+from hippolyzer.lib.base.message.message import Block, Message
+from hippolyzer.lib.base.message.circuit import ConnectionHolder
+from hippolyzer.lib.base.message.msgtypes import PacketFlags
+from hippolyzer.lib.base.templates import (
    TransferRequestParamsBase,
    TransferChannelType,
    TransferSourceType,
    TransferStatus,
)

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion

_TRANSFER_MESSAGES = {"TransferInfo", "TransferPacket", "TransferAbort"}

@@ -49,7 +48,7 @@ class Transfer:
    def cancelled(self) -> bool:
        return self._future.cancelled()

-    def is_our_message(self, message: ProxiedMessage):
+    def is_our_message(self, message: Message):
        if "TransferData" in message.blocks:
            transfer_block = message["TransferData"][0]
        else:
@@ -71,8 +70,15 @@ class Transfer:


class TransferManager:
-    def __init__(self, region: ProxiedRegion):
-        self._region: ProxiedRegion = proxify(region)
+    def __init__(
+        self,
+        connection_holder: ConnectionHolder,
+        agent_id: Optional[UUID] = None,
+        session_id: Optional[UUID] = None,
+    ):
+        self._connection_holder = connection_holder
+        self._agent_id = agent_id
+        self._session_id = session_id

    def request(
        self, *,
@@ -86,11 +92,11 @@ class TransferManager:
        params_dict = dataclasses.asdict(params)
        # Fill in any missing AgentID or SessionID attrs if the params type has them
        if params_dict.get("AgentID", dataclasses.MISSING) is None:
-            params.AgentID = self._region.session().agent_id
+            params.AgentID = self._agent_id
        if params_dict.get("SessionID", dataclasses.MISSING) is None:
-            params.SessionID = self._region.session().id
+            params.SessionID = self._session_id

-        self._region.circuit.send_message(ProxiedMessage(
+        self._connection_holder.circuit.send(Message(
            'TransferRequest',
            Block(
                'TransferInfo',
@@ -100,20 +106,21 @@ class TransferManager:
                Priority=priority,
                Params_=params,
            ),
+            flags=PacketFlags.RELIABLE,
        ))
        transfer = Transfer(transfer_id)
-        asyncio.create_task(self._pump_transfer_replies(transfer))
+        create_logged_task(self._pump_transfer_replies(transfer), "Transfer Pump")
        return transfer

    async def _pump_transfer_replies(self, transfer: Transfer):
        # Subscribe to messages related to our transfer while we're in this block
-        with self._region.message_handler.subscribe_async(
-            _TRANSFER_MESSAGES,
-            predicate=transfer.is_our_message
+        with self._connection_holder.message_handler.subscribe_async(
+            _TRANSFER_MESSAGES,
+            predicate=transfer.is_our_message,
        ) as get_msg:
            while not transfer.done():
                try:
-                    msg: ProxiedMessage = await asyncio.wait_for(get_msg(), 5.0)
+                    msg: Message = await asyncio.wait_for(get_msg(), 5.0)
                except TimeoutError as e:
                    transfer.set_exception(e)
                    return
@@ -128,18 +135,18 @@ class TransferManager:
                elif msg.name == "TransferAbort":
                    transfer.error_code = msg["TransferID"][0].deserialize_var("Result")
                    transfer.set_exception(
-                        ConnectionAbortedError(f"Unknown failure")
+                        ConnectionAbortedError("Unknown failure")
                    )

-    def _handle_transfer_packet(self, msg: ProxiedMessage, transfer: Transfer):
+    def _handle_transfer_packet(self, msg: Message, transfer: Transfer):
        transfer_block = msg["TransferData"][0]
        packet_id: int = transfer_block["Packet"]
        packet_data = transfer_block["Data"]
        transfer.chunks[packet_id] = packet_data
-        if transfer_block["Status"] == TransferStatus.DONE:
+        if transfer_block["Status"] == TransferStatus.DONE and not transfer.done():
            transfer.mark_done()

-    def _handle_transfer_info(self, msg: ProxiedMessage, transfer: Transfer):
+    def _handle_transfer_info(self, msg: Message, transfer: Transfer):
        transfer_block = msg["TransferInfo"][0]
        transfer.expected_size = transfer_block["Size"]
        # Don't re-set if we get a resend of packet 0