Compare commits

236 commits (ad2aca1803 through dbae2acf27)
@@ -9,4 +9,4 @@ exclude_lines =
     def __repr__
     raise AssertionError
     assert False
-    pass
+    ^\s*pass\b
.github/workflows/bundle_windows.yml (13 changes)

@@ -1,5 +1,3 @@
-# Have to manually unzip this (it gets double zipped) and add it
-# onto the release after it gets created. Don't want actions with repo write.
 name: Bundle Windows EXE

@@ -9,8 +7,12 @@ on:
     types:
       - created
+  workflow_dispatch:
+    inputs:
+      ref_name:
+        description: Name to use for the release
 env:
-  target_tag: ${{ github.ref_name }}
+  target_tag: ${{ github.ref_name || github.event.inputs.ref_name }}
+  sha: ${{ github.sha || github.event.inputs.ref_name }}

 jobs:

@@ -21,7 +23,7 @@ jobs:
       contents: write
     strategy:
       matrix:
-        python-version: [3.9]
+        python-version: ["3.11"]

     steps:
     - uses: actions/checkout@v2

@@ -51,10 +53,11 @@ jobs:
     - name: Upload the artifact
       uses: actions/upload-artifact@v2
       with:
-        name: hippolyzer-windows-${{ github.sha }}
+        name: hippolyzer-windows-${{ env.sha }}
         path: ./hippolyzer-windows-${{ env.target_tag }}.zip

     - uses: ncipollo/release-action@v1.10.0
+      if: github.event_name != 'workflow_dispatch'
       with:
         artifacts: hippolyzer-windows-${{ env.target_tag }}.zip
         tag: ${{ env.target_tag }}
.github/workflows/pypi_publish.yml (2 changes)

@@ -19,7 +19,7 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: 3.9
+        python-version: "3.10"

     - name: Install dependencies
       run: |
.github/workflows/pytest.yml (13 changes)

@@ -1,6 +1,12 @@
 name: Run Python Tests

-on: [push, pull_request]
+on:
+  push:
+    paths-ignore:
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - '*.md'

 jobs:
   build:

@@ -8,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.10"]
+        python-version: ["3.10", "3.12"]

     steps:
     - uses: actions/checkout@v2

@@ -20,10 +26,11 @@ jobs:

     - name: Install dependencies
       run: |
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
         pip install -r requirements.txt
         pip install -r requirements-test.txt
         sudo apt-get install libopenjp2-7
+        pip install -e .
     - name: Run Flake8
       run: |
         flake8 .
README.md (39 changes)

@@ -27,7 +27,7 @@ with low-level SL details. See the [Local Animation addon example](https://githu

 ### From Source

-* Python 3.8 or above is **required**. If you're unable to upgrade your system Python package due to
+* Python 3.10 or above is **required**. If you're unable to upgrade your system Python package due to
   being on a stable distro, you can use [pyenv](https://github.com/pyenv/pyenv) to create
   a self-contained Python install with the appropriate version.
 * [Create a clean Python 3 virtualenv](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment)

@@ -48,8 +48,7 @@ A proxy is provided with both a CLI and Qt-based interface. The proxy applicatio
 custom SOCKS 5 UDP proxy, as well as an HTTP proxy based on [mitmproxy](https://mitmproxy.org/).

 Multiple clients are supported at a time, and UDP messages may be injected in either
-direction. The proxy UI was inspired by the Message Log and Message Builder as present in
-the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
+direction.

 ### Proxy Setup

@@ -83,27 +82,9 @@ SOCKS 5 works correctly on these platforms, so you can just configure it through
   the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm`.
 * Log in!

-##### Firestorm
-
-The proxy selection dialog in the most recent Firestorm release is non-functional, as
-https://bitbucket.org/lindenlab/viewer/commits/454c7f4543688126b2fa5c0560710f5a1733702e was not pulled in.
-
-As a workaround, you can go to `Debug -> Show Debug Settings` and enter the following values:
-
-| Name                | Value     |
-|---------------------|-----------|
-| HttpProxyType       | Web       |
-| BrowserProxyAddress | 127.0.0.1 |
-| BrowserProxyEnabled | TRUE      |
-| BrowserProxyPort    | 9062      |
-| Socks5ProxyEnabled  | TRUE      |
-| Socks5ProxyHost     | 127.0.0.1 |
-| Socks5ProxyPort     | 9061      |
-
-Or, if you're on Linux, you can also use [LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy).
-
-Connections from the in-viewer browser will likely _not_ be run through Hippolyzer when using either of
-these workarounds.
+Or, if you're on Linux, you can instead use [LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy)
+to launch your viewer, which will configure everything for you. Note that connections from the in-viewer browser will
+likely _not_ be run through Hippolyzer when using LinHippoAutoProxy.

 ### Filtering

@@ -344,7 +325,7 @@ The REPL is fully async aware and allows awaiting events without blocking:

 ```python
 >>> from hippolyzer.lib.client.object_manager import ObjectUpdateType
->>> evt = await session.objects.events.wait_for((ObjectUpdateType.OBJECT_UPDATE,), timeout=2.0)
+>>> evt = await session.objects.events.wait_for((ObjectUpdateType.UPDATE,), timeout=2.0)
 >>> evt.updated
 {'Position'}
 ```

@@ -421,9 +402,13 @@ above is your only option.

 ### Should I use this library to make an SL client in Python?

-No. If you just want to write a client in Python, you should instead look at using
+Probably not. If you just want to write a client in Python, you should instead look at using
 [libremetaverse](https://github.com/cinderblocks/libremetaverse/) via pythonnet.
-I removed the client-related code inherited from PyOGP because libremetaverse's was simply better.
+I removed the client-related code inherited from PyOGP because libremetaverse's was simply better
+for general use.
+
+<https://github.com/CasperTech/node-metaverse/> also looks like a good, modern wrapper if you
+prefer TypeScript.
+
+There is, however, a very low-level `HippoClient` class provided for testing, but it's unlikely
+to be what you want for writing a general-purpose bot.
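The `ObjectUpdateType.UPDATE` rename in the REPL example above also shows up in the event-subscription API used by the `PixelArtistAddon` hunk later in this diff. Below is a hedged sketch of that streaming variant; the way events are pulled from `get_events` is an assumption based on the context-manager usage shown in that hunk, not something this diff spells out:

```python
# Sketch only: consuming a stream of object updates, mirroring the
# subscribe_async usage in the PixelArtistAddon hunk below. That
# `get_events` is awaited as a callable is an assumption.
from hippolyzer.lib.client.object_manager import ObjectUpdateType


async def print_moves(session, count: int):
    with session.objects.events.subscribe_async(
            (ObjectUpdateType.UPDATE,),  # renamed from OBJECT_UPDATE in this changeset
            predicate=lambda e: "Position" in e.updated,
    ) as get_events:
        for _ in range(count):
            evt = await get_events()
            print(evt.object.LocalID, evt.updated)
```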
@@ -114,7 +114,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
             region.objects.request_missing_objects()

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if self.blueish_model is None:
             return
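The trailing `msg: Optional[Message]` parameter added above is the same `handle_object_updated` signature change applied to every addon in this changeset (see `MeshUploadInterceptingAddon` and `ObjectUpdateBlameAddon` below). A minimal sketch of an addon implementing the new hook; the body, and the guess about when `msg` is `None`, are assumptions rather than anything this diff states:

```python
from typing import Optional, Set

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class UpdateWatcherAddon(BaseAddon):
    def handle_object_updated(self, session: Session, region: ProxiedRegion,
                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
        # msg presumably carries the message that produced this update, when
        # there was one; treat it as optional since the type hint allows None.
        if msg is not None and "Position" in updated_props:
            print(f"Object {obj.LocalID} moved (via {msg.name})")


addons = [UpdateWatcherAddon()]
```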
@@ -6,7 +6,7 @@ from hippolyzer.lib.proxy.sessions import Session
 def handle_lludp_message(session: Session, region: ProxiedRegion, message: Message):
     # addon_ctx will persist across addon reloads, use for storing data that
     # needs to survive across calls to this function
-    ctx = session.addon_ctx
+    ctx = session.addon_ctx[__name__]
     if message.name == "ChatFromViewer":
         chat = message["ChatData"]["Message"]
         if chat == "COUNT":
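Both this example and the spongecase example later in the diff move from the shared `session.addon_ctx` dict to `session.addon_ctx[__name__]`, keying the persistent context by addon module so separate addons can't clobber each other's keys. A small sketch of the pattern; whether the container auto-creates the per-module dict is an assumption:

```python
# Sketch of the per-module addon context, following the changed examples.
# That addon_ctx[__name__] yields a ready-to-use dict is an assumption here.
def handle_lludp_message(session, region, message):
    ctx = session.addon_ctx[__name__]  # this module's own persistent storage
    ctx.setdefault("chat_count", 0)    # survives addon reloads, per the comment above
    if message.name == "ChatFromViewer":
        ctx["chat_count"] += 1
```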
@@ -4,8 +4,13 @@ Helper for making deformer anims. This could have a GUI I guess.
 import dataclasses
 from typing import *

+import numpy as np
+import transformations
+
 from hippolyzer.lib.base.datatypes import Vector3, Quaternion, UUID
 from hippolyzer.lib.base.llanim import Joint, Animation, PosKeyframe, RotKeyframe
+from hippolyzer.lib.base.mesh import MeshAsset, SegmentHeaderDict, SkinSegmentDict, LLMeshSerializer
+from hippolyzer.lib.base.serialization import BufferWriter
 from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon, SessionProperty
+from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.commands import handle_command, Parameter

@@ -45,6 +50,58 @@ def build_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
     return anim.to_bytes()


+def build_mesh_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
+    skin_seg = SkinSegmentDict(
+        joint_names=[],
+        bind_shape_matrix=identity_mat4(),
+        inverse_bind_matrix=[],
+        alt_inverse_bind_matrix=[],
+        pelvis_offset=0.0,
+        lock_scale_if_joint_position=False
+    )
+    for joint_name, joint in joints.items():
+        # We can only represent joint translations, ignore this joint if it doesn't have any.
+        if not joint.pos:
+            continue
+        skin_seg['joint_names'].append(joint_name)
+        # Inverse bind matrix isn't actually used, so we can just give it a placeholder value of the
+        # identity mat4. This might break things in weird ways because the matrix isn't actually sensible.
+        skin_seg['inverse_bind_matrix'].append(identity_mat4())
+        # Create a flattened mat4 that only has a translation component of our joint pos
+        # The viewer ignores any other component of these matrices so no point putting shear
+        # or perspective or whatever :)
+        joint_mat4 = pos_to_mat4(joint.pos)
+        # Ask the viewer to override this joint's usual parent-relative position with our matrix
+        skin_seg['alt_inverse_bind_matrix'].append(joint_mat4)
+
+    # Make a dummy mesh and shove our skin segment onto it. None of the tris are rigged, so the
+    # viewer will freak out and refuse to display the tri, only the joint translations will be used.
+    # Supposedly a mesh with a `skin` segment but no weights on the material should just result in an
+    # effectively unrigged material, but that's not the case. Oh well.
+    mesh = MeshAsset.make_triangle()
+    mesh.header['skin'] = SegmentHeaderDict(offset=0, size=0)
+    mesh.segments['skin'] = skin_seg
+
+    writer = BufferWriter("!")
+    writer.write(LLMeshSerializer(), mesh)
+    return writer.copy_buffer()
+
+
+def identity_mat4() -> List[float]:
+    """
+    Return an "Identity" mat4
+
+    Effectively represents a transform of no rot, no translation, no shear, no perspective
+    and scaling by 1.0 on every axis.
+    """
+    return list(np.identity(4).flatten('F'))
+
+
+def pos_to_mat4(pos: Vector3) -> List[float]:
+    """Convert a position Vector3 to a Translation Mat4"""
+    return list(transformations.compose_matrix(translate=tuple(pos)).flatten('F'))
+
+
 class DeformerAddon(BaseAddon):
     deform_joints: Dict[str, DeformerJoint] = SessionProperty(dict)

@@ -95,7 +152,7 @@ class DeformerAddon(BaseAddon):
         local_anim.LocalAnimAddon.apply_local_anim(session, region, "deformer_addon", anim_data)

     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # An object in-world can also tell the client how to deform itself via
         # RLV-style commands.

@@ -103,9 +160,9 @@ class DeformerAddon(BaseAddon):
         if param != "force":
             return

-        if cmd == "stop_deforming":
+        if behaviour == "stop_deforming":
             self.deform_joints.clear()
-        elif cmd == "deform_joints":
+        elif behaviour == "deform_joints":
             self.deform_joints.clear()
             for joint_data in options:
                 joint_split = joint_data.split("|")

@@ -118,5 +175,41 @@ class DeformerAddon(BaseAddon):
             self._reapply_deformer(session, region)
             return True

+    @handle_command()
+    async def save_deformer_as_mesh(self, _session: Session, _region: ProxiedRegion):
+        """
+        Export the deformer as a crafted rigged mesh rather than an animation
+
+        Mesh deformers have the advantage that they don't cause your joints to "stick"
+        like animations do when using animations with pos keyframes.
+        """
+        filename = await AddonManager.UI.save_file(filter_str="LL Mesh (*.llmesh)")
+        if not filename:
+            return
+        with open(filename, "wb") as f:
+            f.write(build_mesh_deformer(self.deform_joints))
+
+    @handle_command()
+    async def upload_deformer_as_mesh(self, _session: Session, region: ProxiedRegion):
+        """Same as save_deformer_as_mesh, but uploads the mesh directly to SL."""
+        mesh_bytes = build_mesh_deformer(self.deform_joints)
+        try:
+            # Send off mesh to calculate upload cost
+            upload_token = await region.asset_uploader.initiate_mesh_upload("deformer", mesh_bytes)
+        except Exception as e:
+            show_message(e)
+            raise
+
+        if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
+            return
+
+        # Do the actual upload
+        try:
+            await region.asset_uploader.complete_upload(upload_token)
+        except Exception as e:
+            show_message(e)
+            raise


 addons = [DeformerAddon()]
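A quick worked example of the column-major (`'F'`) flattening the new `identity_mat4`/`pos_to_mat4` helpers above rely on. The matrix below is what a pure translation by `(1, 2, 3)` looks like; that this 16-float layout is the one the skin segment expects is inferred from the helpers, not stated in the diff:

```python
import numpy as np

# Build a pure-translation mat4 by hand, equivalent to what pos_to_mat4
# produces via transformations.compose_matrix(translate=(1.0, 2.0, 3.0)):
# [[1, 0, 0, 1],
#  [0, 1, 0, 2],
#  [0, 0, 1, 3],
#  [0, 0, 0, 1]]
mat = np.identity(4)
mat[:3, 3] = (1.0, 2.0, 3.0)

# Column-major flattening walks column by column, so the identity columns
# come first and the translation lands in the final four slots:
print(list(mat.flatten('F')))
# [1.0, 0.0, 0.0, 0.0,  0.0, 1.0, 0.0, 0.0,  0.0, 0.0, 1.0, 0.0,  1.0, 2.0, 3.0, 1.0]
```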
@@ -17,7 +17,7 @@ from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.inventory import InventoryModel, InventoryObject
 from hippolyzer.lib.base.message.message import Message, Block
-from hippolyzer.lib.base.templates import XferFilePath
+from hippolyzer.lib.base.templates import XferFilePath, AssetType
 from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.webapp_cap_addon import WebAppCapAddon

@@ -64,7 +64,7 @@ async def get_task_inventory():
             InventoryObject(
                 name="Contents",
                 parent_id=UUID.ZERO,
-                type="category",
+                type=AssetType.CATEGORY,
                 obj_id=obj_id
             ).to_llsd()
         ],
addon_examples/leap_example.py (new file, 50 lines)

@@ -0,0 +1,50 @@
+"""
+Example of how to control a viewer over LEAP
+
+Must launch the viewer with `outleap-agent` LEAP script.
+See https://github.com/SaladDais/outleap/ for more info on LEAP / outleap.
+"""
+
+import outleap
+from outleap.scripts.inspector import LEAPInspectorGUI
+
+from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon, show_message
+from hippolyzer.lib.proxy.commands import handle_command
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session, SessionManager
+
+
+# Path found using `outleap-inspector`
+FPS_PATH = outleap.UIPath("/main_view/menu_stack/status_bar_container/status/time_and_media_bg/FPSText")
+
+
+class LEAPExampleAddon(BaseAddon):
+    async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
+        # You can do things as soon as the LEAP client connects, like if you want to automate
+        # login or whatever.
+        viewer_control_api = outleap.LLViewerControlAPI(leap_client)
+        # Ask for a config value and print it in the viewer logs
+        print(await viewer_control_api.get("Global", "StatsPilotFile"))
+
+    @handle_command()
+    async def show_ui_inspector(self, session: Session, _region: ProxiedRegion):
+        """Spawn a GUI for inspecting the UI state"""
+        if not session.leap_client:
+            show_message("No LEAP client connected?")
+            return
+        LEAPInspectorGUI(session.leap_client).show()
+
+    @handle_command()
+    async def say_fps(self, session: Session, _region: ProxiedRegion):
+        """Say your current FPS in chat"""
+        if not session.leap_client:
+            show_message("No LEAP client connected?")
+            return
+
+        window_api = outleap.LLWindowAPI(session.leap_client)
+        fps = (await window_api.get_info(path=FPS_PATH))['value']
+
+        send_chat(f"LEAP says I'm running at {fps} FPS!")
+
+
+addons = [LEAPExampleAddon()]
@@ -20,6 +20,7 @@ bulk upload, like changing priority or removing a joint.
 """

 import asyncio
+import logging
 import pathlib
 from abc import abstractmethod
 from typing import *

@@ -106,19 +107,22 @@ class LocalAnimAddon(BaseAddon):
             if not anim_id:
                 continue
             # is playing right now, check if there's a newer version
-            self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
+            try:
+                self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
+            except Exception:
+                logging.exception("Exploded while replaying animation")
         await asyncio.sleep(1.0)

     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # We only handle commands
         if param != "force":
             return

-        if cmd == "stop_local_anim":
+        if behaviour == "stop_local_anim":
             self.apply_local_anim(session, region, options[0], new_data=None)
             return True
-        elif cmd == "start_local_anim":
+        elif behaviour == "start_local_anim":
             self.apply_local_anim_from_file(session, region, options[0])
             return True

@@ -175,7 +179,6 @@ class LocalAnimAddon(BaseAddon):
         if only_if_changed and old_mtime == mtime:
             return

-        cls.local_anim_mtimes[anim_name] = mtime
         # file might not even exist anymore if mtime is `None`,
         # anim will automatically stop if that happens.
         if mtime:

@@ -187,6 +190,7 @@ class LocalAnimAddon(BaseAddon):
             with open(anim_path, "rb") as f:
                 anim_data = f.read()
             anim_data = cls._mangle_anim(anim_data)
+            cls.local_anim_mtimes[anim_name] = mtime
         else:
             print(f"Unknown anim {anim_name!r}")
         cls.apply_local_anim(session, region, anim_name, new_data=anim_data)
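Both this addon and the deformer above now name the first string argument of `handle_rlv_command` `behaviour` rather than `cmd`, matching RLV terminology. The diff doesn't show how the incoming RLV command string gets split up, but from the checks in these handlers, triggering a local anim from a scripted object has to produce values equivalent to the following (illustrative only):

```python
# Illustrative only: the argument values the start_local_anim branch above
# expects. How they're parsed out of the raw RLV command string is not
# shown in this diff.
behaviour = "start_local_anim"  # selects the branch in handle_rlv_command
options = ["my_anim"]           # options[0] names the animation to (re)load
param = "force"                 # the handlers bail early on anything else
```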
@@ -230,7 +230,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         show_message("Mangled upload request")

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if obj.LocalID not in self.local_mesh_target_locals:
             return
         if "Name" not in updated_props or obj.Name is None:
@@ -10,6 +10,7 @@ before you start tracking can help too.
 from typing import *

 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.objects import Object
 from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message, SessionProperty

@@ -20,7 +21,7 @@ from hippolyzer.lib.proxy.sessions import Session

 class ObjectUpdateBlameAddon(BaseAddon):
     update_blame_counter: Counter[UUID] = SessionProperty(Counter)
-    track_update_blame: bool = SessionProperty(False)
+    should_track_update_blame: bool = SessionProperty(False)

     @handle_command()
     async def precache_objects(self, _session: Session, region: ProxiedRegion):

@@ -38,11 +39,11 @@ class ObjectUpdateBlameAddon(BaseAddon):

     @handle_command()
     async def track_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = True
+        self.should_track_update_blame = True

     @handle_command()
     async def untrack_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = False
+        self.should_track_update_blame = False

     @handle_command()
     async def clear_update_blame(self, _session: Session, _region: ProxiedRegion):

@@ -57,8 +58,8 @@ class ObjectUpdateBlameAddon(BaseAddon):
             print(f"{obj_id} ({name!r}): {count}")

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
-        if not self.track_update_blame:
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
+        if not self.should_track_update_blame:
             return
         if region != session.main_region:
             return
addon_examples/packet_stats.py (new file, 21 lines)

@@ -0,0 +1,21 @@
+import collections
+
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
+from hippolyzer.lib.proxy.commands import handle_command
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session
+
+
+class PacketStatsAddon(BaseAddon):
+    packet_stats: collections.Counter = GlobalProperty(collections.Counter)
+
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
+        self.packet_stats[message.name] += 1
+
+    @handle_command()
+    async def print_packet_stats(self, _session: Session, _region: ProxiedRegion):
+        print(self.packet_stats.most_common(10))
+
+
+addons = [PacketStatsAddon()]
@@ -72,14 +72,13 @@ class PixelArtistAddon(BaseAddon):
         # Watch for any newly created prims, this is basically what the viewer does to find
         # prims that it just created with the build tool.
         with session.objects.events.subscribe_async(
-                (ObjectUpdateType.OBJECT_UPDATE,),
+                (ObjectUpdateType.UPDATE,),
                 predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated
         ) as get_events:
             # Create a pool of prims to use for building the pixel art
             for _ in range(needed_prims):
-                # TODO: We don't track the land group or user's active group, so
-                #  "anyone can build" must be on for rezzing to work.
-                group_id = UUID()
+                # TODO: Can't get land group atm, just tries to rez with the user's active group
+                group_id = session.active_group
                 region.circuit.send(Message(
                     'ObjectAdd',
                     Block('AgentData', AgentID=session.agent_id, SessionID=session.id, GroupID=group_id),
addon_examples/puppetry_example.py (new file, 111 lines)

@@ -0,0 +1,111 @@
+"""
+Control a puppetry-enabled viewer and make your neck spin like crazy
+
+It currently requires a custom rebased Firestorm with puppetry applied on top,
+and patches applied on top to make startup LEAP scripts be treated as puppetry modules.
+Basically, you probably don't want to use this yet. But hey, Puppetry is still only
+on the beta grid anyway.
+"""
+import asyncio
+import enum
+import logging
+import math
+from typing import *
+
+import outleap
+
+from hippolyzer.lib.base.datatypes import Quaternion
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
+from hippolyzer.lib.proxy.sessions import Session
+
+LOG = logging.getLogger(__name__)
+
+
+class BodyPartMask(enum.IntFlag):
+    """Which joints to send the viewer as part of "move" puppetry command"""
+    HEAD = 1 << 0
+    FACE = 1 << 1
+    LHAND = 1 << 2
+    RHAND = 1 << 3
+    FINGERS = 1 << 4
+
+
+def register_puppetry_command(func: Callable[[dict], Awaitable[None]]):
+    """Register a method as handling inbound puppetry commands from the viewer"""
+    func._puppetry_command = True
+    return func
+
+
+class PuppetryExampleAddon(BaseAddon):
+    server_skeleton: Dict[str, Dict[str, Any]] = SessionProperty(dict)
+    camera_num: int = SessionProperty(0)
+    parts_active: BodyPartMask = SessionProperty(lambda: BodyPartMask(0x1F))
+    puppetry_api: Optional[outleap.LLPuppetryAPI] = SessionProperty(None)
+    leap_client: Optional[outleap.LEAPClient] = SessionProperty(None)
+
+    def handle_session_init(self, session: Session):
+        if not session.leap_client:
+            return
+        self.puppetry_api = outleap.LLPuppetryAPI(session.leap_client)
+        self.leap_client = session.leap_client
+        self._schedule_task(self._serve())
+        self._schedule_task(self._exorcist(session))
+
+    @register_puppetry_command
+    async def enable_parts(self, args: dict):
+        if (new_mask := args.get("parts_mask")) is not None:
+            self.parts_active = BodyPartMask(new_mask)
+
+    @register_puppetry_command
+    async def set_camera(self, args: dict):
+        if (camera_num := args.get("camera_num")) is not None:
+            self.camera_num = camera_num
+
+    @register_puppetry_command
+    async def stop(self, _args: dict):
+        LOG.info("Viewer asked us to stop puppetry")
+
+    @register_puppetry_command
+    async def log(self, _args: dict):
+        # Intentionally ignored, we don't care about things the viewer
+        # asked us to log
+        pass
+
+    @register_puppetry_command
+    async def set_skeleton(self, args: dict):
+        # Don't really care about what the viewer thinks the view of the skeleton is.
+        # Just log store it.
+        self.server_skeleton = args
+
+    async def _serve(self):
+        """Handle inbound puppetry commands from viewer in a loop"""
+        async with self.leap_client.listen_scoped("puppetry.controller") as listener:
+            while True:
+                msg = await listener.get()
+                cmd = msg["command"]
+                handler = getattr(self, cmd, None)
+                if handler is None or not hasattr(handler, "_puppetry_command"):
+                    LOG.warning(f"Unknown puppetry command {cmd!r}: {msg!r}")
+                    continue
+                await handler(msg.get("args", {}))
+
+    async def _exorcist(self, session):
+        """Do the Linda Blair thing with your neck"""
+        spin_rad = 0.0
+        while True:
+            await asyncio.sleep(0.05)
+            if not session.main_region:
+                continue
+            # Wrap spin_rad around if necessary
+            while spin_rad > math.pi:
+                spin_rad -= math.pi * 2
+
+            # LEAP wants rot as a quaternion with just the imaginary parts.
+            neck_rot = Quaternion.from_euler(0, 0, spin_rad).data(3)
+            self.puppetry_api.move({
+                "mNeck": {"no_constraint": True, "local_rot": neck_rot},
+            })
+            spin_rad += math.pi / 25
+
+
+addons = [PuppetryExampleAddon()]
@@ -13,7 +13,7 @@ def _to_spongecase(val):


 def handle_lludp_message(session: Session, _region: ProxiedRegion, message: Message):
-    ctx = session.addon_ctx
+    ctx = session.addon_ctx[__name__]
     ctx.setdefault("spongecase", False)
     if message.name == "ChatFromViewer":
         chat = message["ChatData"]["Message"]
@@ -2,21 +2,17 @@
 Example of how to upload assets, assumes assets are already encoded
 in the appropriate format.

-/524 upload <asset type>
+/524 upload_asset <asset type>
 """
-import pprint
 from pathlib import Path
 from typing import *

-import aiohttp
-
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.message.message import Block, Message
+from hippolyzer.lib.base.mesh import LLMeshSerializer
+from hippolyzer.lib.base.serialization import BufferReader
 from hippolyzer.lib.base.templates import AssetType
 from hippolyzer.lib.proxy.addons import AddonManager
-from hippolyzer.lib.proxy.addon_utils import ais_item_to_inventory_data, show_message, BaseAddon
+from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon
 from hippolyzer.lib.proxy.commands import handle_command, Parameter
-from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session

@@ -29,7 +25,6 @@ class UploaderAddon(BaseAddon):
     async def upload_asset(self, _session: Session, region: ProxiedRegion,
                            asset_type: AssetType, flags: Optional[int] = None):
         """Upload a raw asset with optional flags"""
-        inv_type = asset_type.inventory_type
         file = await AddonManager.UI.open_file()
         if not file:
             return

@@ -42,67 +37,32 @@ class UploaderAddon(BaseAddon):
         with open(file, "rb") as f:
             file_body = f.read()

-        params = {
-            "asset_type": asset_type.human_name,
-            "description": "(No Description)",
-            "everyone_mask": 0,
-            "group_mask": 0,
-            "folder_id": UUID(),  # Puts it in the default folder, I guess. Undocumented.
-            "inventory_type": inv_type.human_name,
-            "name": name,
-            "next_owner_mask": 581632,
-        }
-        if flags is not None:
-            params['flags'] = flags
+        try:
+            if asset_type == AssetType.MESH:
+                # Kicking off a mesh upload works a little differently internally
+                # Half-parse the mesh so that we can figure out how many faces it has
+                reader = BufferReader("!", file_body)
+                mesh = reader.read(LLMeshSerializer(parse_segment_contents=False))
+                upload_token = await region.asset_uploader.initiate_mesh_upload(
+                    name, mesh, flags=flags
+                )
+            else:
+                upload_token = await region.asset_uploader.initiate_asset_upload(
+                    name, asset_type, file_body, flags=flags,
+                )
+        except Exception as e:
+            show_message(e)
+            raise

-        caps = region.caps_client
-        async with aiohttp.ClientSession() as sess:
-            async with caps.post('NewFileAgentInventory', llsd=params, session=sess) as resp:
-                parsed = await resp.read_llsd()
-                if "uploader" not in parsed:
-                    show_message(f"Upload error!: {parsed!r}")
-                    return
-                print("Got upload URL, uploading...")
+        if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
+            return

-            async with caps.post(parsed["uploader"], data=file_body, session=sess) as resp:
-                upload_parsed = await resp.read_llsd()
-
-                if "new_inventory_item" not in upload_parsed:
-                    show_message(f"Got weird upload resp: {pprint.pformat(upload_parsed)}")
-                    return
-
-                await self._force_inv_update(region, upload_parsed['new_inventory_item'])
-
-    @handle_command(item_id=UUID)
-    async def force_inv_update(self, _session: Session, region: ProxiedRegion, item_id: UUID):
-        """Force an inventory update for a given item id"""
-        await self._force_inv_update(region, item_id)
-
-    async def _force_inv_update(self, region: ProxiedRegion, item_id: UUID):
-        session = region.session()
-        ais_req_data = {
-            "items": [
-                {
-                    "owner_id": session.agent_id,
-                    "item_id": item_id,
-                }
-            ]
-        }
-        async with region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
-            ais_item = (await resp.read_llsd())["items"][0]
-
-        message = Message(
-            "UpdateCreateInventoryItem",
-            Block(
-                "AgentData",
-                AgentID=session.agent_id,
-                SimApproved=1,
-                TransactionID=UUID.random(),
-            ),
-            ais_item_to_inventory_data(ais_item),
-            direction=Direction.IN
-        )
-        region.circuit.send(message)
+        # Do the actual upload
+        try:
+            await region.asset_uploader.complete_upload(upload_token)
+        except Exception as e:
+            show_message(e)
+            raise


 addons = [UploaderAddon()]
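The uploader example now goes through the same two-phase `asset_uploader` flow as the deformer addon earlier in this diff: initiate to get a token (and its `linden_cost`), then complete. A condensed sketch of just that flow, using only the calls that appear in this changeset; the surrounding function is hypothetical:

```python
# Condensed from the examples in this diff: the two-phase asset upload flow.
# `region` is assumed to be a ProxiedRegion, as in the addon examples.
async def upload_bytes(region, name: str, asset_type, body: bytes):
    # Phase 1: ask the sim to price the upload; yields a token with the cost
    upload_token = await region.asset_uploader.initiate_asset_upload(
        name, asset_type, body, flags=None,
    )
    print(f"Upload will cost {upload_token.linden_cost}L")
    # Phase 2: actually spend the L$ and complete the upload
    await region.asset_uploader.complete_upload(upload_token)
```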
client_examples/hello_client.py (new file, 53 lines)

@@ -0,0 +1,53 @@
+"""
+A simple client that just says hello to people
+"""
+
+import asyncio
+import pprint
+from contextlib import aclosing
+import os
+
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.templates import ChatType, ChatSourceType
+from hippolyzer.lib.client.hippo_client import HippoClient
+
+
+async def amain():
+    client = HippoClient()
+
+    async def _respond_to_chat(message: Message):
+        if message["ChatData"]["SourceID"] == client.session.agent_id:
+            return
+        if message["ChatData"]["SourceType"] != ChatSourceType.AGENT:
+            return
+        if "hello" not in message["ChatData"]["Message"].lower():
+            return
+        await client.send_chat(f'Hello {message["ChatData"]["FromName"]}!', chat_type=ChatType.SHOUT)
+
+    async with aclosing(client):
+        await client.login(
+            username=os.environ["HIPPO_USERNAME"],
+            password=os.environ["HIPPO_PASSWORD"],
+            start_location=os.environ.get("HIPPO_START_LOCATION", "last"),
+        )
+        print("I'm here")
+
+        # Wait until we have details about parcels and print them
+        await client.main_region.parcel_manager.parcels_downloaded.wait()
+        pprint.pprint(client.main_region.parcel_manager.parcels)
+
+        await client.send_chat("Hello World!", chat_type=ChatType.SHOUT)
+        client.session.message_handler.subscribe("ChatFromSimulator", _respond_to_chat)
+        # Example of how to work with caps
+        async with client.main_caps_client.get("SimulatorFeatures") as features_resp:
+            print("Features:", await features_resp.read_llsd())
+
+        while True:
+            try:
+                await asyncio.sleep(0.001)
+            except (KeyboardInterrupt, asyncio.CancelledError):
+                await client.send_chat("Goodbye World!", chat_type=ChatType.SHOUT)
+                return
+
+if __name__ == "__main__":
+    asyncio.run(amain())
@@ -191,7 +191,7 @@
        </size>
       </property>
       <property name="styleSheet">
-       <string notr="true">color: rgb(80, 0, 0)</string>
+       <string notr="true"/>
      </property>
      <property name="tabChangesFocus">
       <bool>true</bool>
@@ -9,13 +9,14 @@ from typing import Optional

 import mitmproxy.ctx
 import mitmproxy.exceptions
+import outleap

 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
 from hippolyzer.lib.proxy.ca_utils import setup_ca
 from hippolyzer.lib.proxy.commands import handle_command
-from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
+from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
 from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
 from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
 from hippolyzer.lib.base.message.message import Message

@@ -76,6 +77,15 @@ class SelectionManagerAddon(BaseAddon):
             selected.task_item = parsed["item-id"]


+class AgentUpdaterAddon(BaseAddon):
+    def handle_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
+        if event['message'] != 'AgentGroupDataUpdate':
+            return
+        session.groups.clear()
+        for group in event['body']['GroupData']:
+            session.groups.add(group['GroupID'])
+
+
 class REPLAddon(BaseAddon):
     @handle_command()
     async def spawn_repl(self, session: Session, region: ProxiedRegion):

@@ -84,12 +94,12 @@ class REPLAddon(BaseAddon):
         AddonManager.spawn_repl()


-def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext):
+def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext, ssl_insecure=False):
     mitm_loop = asyncio.new_event_loop()
     asyncio.set_event_loop(mitm_loop)

     async def mitmproxy_loop():
-        mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
+        mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context, ssl_insecure=ssl_insecure)
         gc.freeze()
         await mitmproxy_master.run()

@@ -97,11 +107,12 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo


 def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
-                extra_addon_paths: Optional[list] = None, proxy_host=None):
+                extra_addon_paths: Optional[list] = None, proxy_host=None, ssl_insecure=False):
     extra_addons = extra_addons or []
     extra_addon_paths = extra_addon_paths or []
     extra_addons.append(SelectionManagerAddon())
     extra_addons.append(REPLAddon())
+    extra_addons.append(AgentUpdaterAddon())

     root_log = logging.getLogger()
     root_log.addHandler(logging.StreamHandler())

@@ -112,6 +123,7 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =

     udp_proxy_port = session_manager.settings.SOCKS_PROXY_PORT
     http_proxy_port = session_manager.settings.HTTP_PROXY_PORT
+    leap_port = session_manager.settings.LEAP_PORT
     if proxy_host is None:
         proxy_host = session_manager.settings.PROXY_BIND_ADDR

@@ -121,17 +133,13 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     # TODO: argparse
     if len(sys.argv) == 3:
         if sys.argv[1] == "--setup-ca":
-            try:
-                mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
-            except mitmproxy.exceptions.MitmproxyException:
-                # Proxy already running, create the master so we don't try to bind to a port
-                mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
+            mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
             setup_ca(sys.argv[2], mitmproxy_master)
             return sys.exit(0)

     http_proc = multiprocessing.Process(
         target=run_http_proxy_process,
-        args=(proxy_host, http_proxy_port, flow_context),
+        args=(proxy_host, http_proxy_port, flow_context, ssl_insecure),
         daemon=True,
     )
     http_proc.start()

@@ -143,6 +151,10 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     coro = asyncio.start_server(server.handle_connection, proxy_host, udp_proxy_port)
     async_server = loop.run_until_complete(coro)

+    leap_server = outleap.LEAPBridgeServer(session_manager.leap_client_connected)
+    coro = asyncio.start_server(leap_server.handle_connection, proxy_host, leap_port)
+    async_leap_server = loop.run_until_complete(coro)
+
     event_manager = MITMProxyEventManager(session_manager, flow_context)
     loop.create_task(event_manager.run())

@@ -169,6 +181,8 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
         # Close the server
         print("Closing SOCKS server")
         async_server.close()
+        print("Shutting down LEAP server")
+        async_leap_server.close()
         print("Shutting down addons")
         AddonManager.shutdown()
         print("Waiting for SOCKS server to close")
@@ -24,7 +24,7 @@ from hippolyzer.apps.model import MessageLogModel, MessageLogHeader, RegionListM
 from hippolyzer.apps.proxy import start_proxy
 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename
+from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename, create_logged_task
 from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
 from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.message.message_formatting import (

@@ -39,10 +39,11 @@ from hippolyzer.lib.base.settings import SettingDescriptor
 from hippolyzer.lib.base.ui_helpers import loadUi
 import hippolyzer.lib.base.serialization as se
 from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
+from hippolyzer.lib.client.state import BaseClientSessionManager
 from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
 from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
 from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
-from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
+from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
 from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
     import_log_entries, export_log_entries
 from hippolyzer.lib.proxy.region import ProxiedRegion

@@ -71,6 +72,7 @@ class GUISessionManager(SessionManager, QtCore.QObject):
     regionRemoved = QtCore.Signal(ProxiedRegion)

     def __init__(self, settings):
+        BaseClientSessionManager.__init__(self)
         SessionManager.__init__(self, settings)
         QtCore.QObject.__init__(self)
         self.all_regions = []

@@ -231,7 +233,8 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         "AvatarRenderInfo FirestormBridge ObjectAnimation ParcelDwellRequest ParcelAccessListRequest " \
         "ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
         "ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
-        "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
+        "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
+        "AgentFOV GenericStreamingMessage".split(" ")
     DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"

     textRequest: QtWidgets.QTextEdit

@@ -274,9 +277,11 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)

         self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
+        self.actionProxySSLInsecure.setChecked(self.settings.SSL_INSECURE)
         self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
         self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
         self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
+        self.actionProxySSLInsecure.triggered.connect(self._setProxySSLInsecure)
         self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
         self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
         self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)

@@ -457,7 +462,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         if clicked_btn is not yes_btn:
             return

-        master = create_proxy_master("127.0.0.1", -1, HTTPFlowContext())
+        master = create_http_proxy("127.0.0.1", -1, HTTPFlowContext())
         dirs = setup_ca_everywhere(master)

         msg = QtWidgets.QMessageBox()

@@ -473,6 +478,12 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         msg.setText("Remote accessibility setting changes will take effect on next run")
         msg.exec()

+    def _setProxySSLInsecure(self, checked: bool):
+        self.sessionManager.settings.SSL_INSECURE = checked
+        msg = QtWidgets.QMessageBox()
+        msg.setText("SSL security setting changes will take effect on next run")
+        msg.exec()
+
     def _setUseViewerObjectCache(self, checked: bool):
         self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked

@@ -565,7 +576,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         message_names = sorted(x.name for x in self.templateDict)

         for message_name in message_names:
-            if self.templateDict[message_name].msg_trust:
+            if self.templateDict[message_name].trusted:
                 self.comboTrusted.addItem(message_name)
             else:
                 self.comboUntrusted.addItem(message_name)

@@ -815,7 +826,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
             # enough for the full response to pass through the proxy
             await resp.read()

-        asyncio.create_task(_send_request())
+        create_logged_task(_send_request(), "Send HTTP Request")


 class AddonDialog(QtWidgets.QDialog):

@@ -936,6 +947,7 @@ def gui_main():
         session_manager=window.sessionManager,
         extra_addon_paths=window.getAddonList(),
         proxy_host=http_host,
+        ssl_insecure=settings.SSL_INSECURE,
     )
@@ -193,7 +193,7 @@
        </size>
       </property>
       <property name="styleSheet">
-       <string notr="true">color: rgb(80, 0, 0)</string>
+       <string notr="true"/>
      </property>
      <property name="tabChangesFocus">
       <bool>true</bool>

@@ -213,7 +213,7 @@
    </widget>
    <widget class="QPlainTextEdit" name="textResponse">
     <property name="styleSheet">
-     <string notr="true">color: rgb(0, 0, 80)</string>
+     <string notr="true"/>
     </property>
     <property name="tabChangesFocus">
      <bool>true</bool>

@@ -245,7 +245,7 @@
     <x>0</x>
     <y>0</y>
     <width>700</width>
-    <height>22</height>
+    <height>29</height>
    </rect>
   </property>
   <widget class="QMenu" name="menuFile">

@@ -268,6 +268,7 @@
    <addaction name="actionProxyRemotelyAccessible"/>
    <addaction name="actionUseViewerObjectCache"/>
    <addaction name="actionRequestMissingObjects"/>
+   <addaction name="actionProxySSLInsecure"/>
   </widget>
   <addaction name="menuFile"/>
  </widget>

@@ -342,6 +343,17 @@
    <string>Export Log Entries</string>
   </property>
  </action>
+ <action name="actionProxySSLInsecure">
+  <property name="checkable">
+   <bool>true</bool>
+  </property>
+  <property name="text">
+   <string>Allow Insecure SSL Connections</string>
+  </property>
+  <property name="toolTip">
+   <string>Allow invalid SSL certificates from upstream connections</string>
+  </property>
+ </action>
 </widget>
 <resources/>
 <connections/>
@@ -11,12 +11,11 @@
|
||||
# * * Collada tooling sucks and even LL is moving away from it
|
||||
# * * Ensuring LLMesh->Collada and LLMesh->GLTF conversion don't differ semantically is easy via assimp.
|
||||
|
||||
import collections
|
||||
import logging
|
||||
import os.path
|
||||
import secrets
|
||||
import statistics
|
||||
import sys
|
||||
from typing import Dict, List, Iterable, Optional
|
||||
from typing import Dict, Optional
|
||||
|
||||
import collada
|
||||
import collada.source
|
||||
@@ -27,11 +26,22 @@ import transformations
|
||||
|
||||
from hippolyzer.lib.base.helpers import get_resource_filename
|
||||
from hippolyzer.lib.base.serialization import BufferReader
|
||||
from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict
|
||||
from hippolyzer.lib.base.mesh import (
|
||||
LLMeshSerializer,
|
||||
MeshAsset,
|
||||
positions_from_domain,
|
||||
SkinSegmentDict,
|
||||
llsd_to_mat4,
|
||||
)
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
DIR = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
|
||||
def mat4_to_collada(mat: np.ndarray) -> np.ndarray:
|
||||
return mat.flatten(order='C')
|
||||
|
||||
|
||||
def mesh_to_collada(ll_mesh: MeshAsset, include_skin=True) -> collada.Collada:
|
||||
dae = collada.Collada()
|
||||
axis = collada.asset.UP_AXIS.Z_UP
|
||||
@@ -52,7 +62,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
|
||||
skin_seg = ll_mesh.segments.get('skin')
|
||||
bind_shape_matrix = None
|
||||
if include_skin and skin_seg:
|
||||
bind_shape_matrix = np.array(skin_seg["bind_shape_matrix"]).reshape((4, 4))
|
||||
bind_shape_matrix = llsd_to_mat4(skin_seg["bind_shape_matrix"])
|
||||
should_skin = True
|
||||
# Transform from the skin will be applied on the controller, not the node
|
||||
node_transform = np.identity(4)
|
||||
@@ -85,7 +95,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
|
||||
reflective=0.0,
|
||||
shadingtype="blinn",
|
||||
shininess=0.0,
|
||||
diffuse=(0.0, 0.0, 0.0),
|
||||
diffuse=(1.0, 1.0, 1.0),
|
||||
)
|
||||
mat = collada.material.Material(f"material{sub_uniq}", f"material{sub_uniq}", effect)
|
||||
|
||||
@@ -119,9 +129,8 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
|
||||
accessor.set('source', f"#{accessor.get('source')}")
|
||||
|
||||
flattened_bind_poses = []
|
||||
# LLMesh matrices are row-major, convert to col-major for Collada.
|
||||
for bind_pose in skin_seg['inverse_bind_matrix']:
|
||||
flattened_bind_poses.append(np.array(bind_pose).reshape((4, 4)).flatten('F'))
|
||||
flattened_bind_poses.append(mat4_to_collada(llsd_to_mat4(bind_pose)))
|
||||
flattened_bind_poses = np.array(flattened_bind_poses)
|
||||
inv_bind_source = _create_mat4_source(f"bind-poses{sub_uniq}", flattened_bind_poses, "TRANSFORM")
|
||||
|
||||
@@ -142,7 +151,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
|
||||
# in SL, with their own distinct sets of weights and vertex data.
|
||||
controller_node = E.controller(
|
||||
E.skin(
|
||||
E.bind_shape_matrix(' '.join(str(x) for x in bind_shape_matrix.flatten('F'))),
|
||||
E.bind_shape_matrix(' '.join(str(x) for x in mat4_to_collada(bind_shape_matrix))),
|
||||
joints_source.xmlnode,
|
||||
inv_bind_source.xmlnode,
|
||||
weights_source.xmlnode,
|
||||
@@ -173,11 +182,13 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
|
||||
node = collada.scene.Node(
|
||||
node_name,
|
||||
children=geom_nodes,
|
||||
transforms=[collada.scene.MatrixTransform(np.array(node_transform.flatten('F')))],
|
||||
transforms=[collada.scene.MatrixTransform(mat4_to_collada(node_transform))],
|
||||
)
|
||||
if should_skin:
|
||||
# We need a skeleton per _mesh asset_ because you could have incongruous skeletons
|
||||
# within the same linkset.
|
||||
# TODO: can we maintain some kind of skeleton cache, where if this skeleton has no conflicts
|
||||
# with another skeleton in the cache, we just use that skeleton and add any additional joints?
|
||||
skel_root = load_skeleton_nodes()
|
||||
transform_skeleton(skel_root, dae, skin_seg)
|
||||
skel = collada.scene.Node.load(dae, skel_root, {})
|
||||
@@ -199,7 +210,6 @@ def load_skeleton_nodes() -> etree.ElementBase:
 def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_seg: SkinSegmentDict,
                        include_unreferenced_bones=False):
     """Update skeleton XML nodes to account for joint translations in the mesh"""
-    # TODO: Use translation component only.
     joint_nodes: Dict[str, collada.scene.Node] = {}
     for skel_node in skel_root.iter():
         # xpath is loathsome so this is easier.
@@ -208,7 +218,8 @@ def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_
         joint_nodes[skel_node.get('name')] = collada.scene.Node.load(dae, skel_node, {})
     for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
         joint_node = joint_nodes[joint_name]
-        joint_node.matrix = np.array(matrix).reshape((4, 4)).flatten('F')
+        joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
+        joint_node.matrix = mat4_to_collada(transformations.compose_matrix(translate=joint_decomp[3]))
        # Update the underlying XML element with the new transform matrix
        joint_node.save()
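Example (illustrative, not part of the change set): the new lines above use Gohlke's transformations module to keep only the translation component of a joint matrix, discarding any scale and rotation. A minimal sketch of that decompose/compose dance, assuming only that module and numpy:

import numpy as np
import transformations

# A joint matrix with scale, rotation, and translation baked in.
mat = transformations.compose_matrix(
    scale=(2.0, 2.0, 2.0), angles=(0.0, 0.0, 1.0), translate=(1.0, 2.0, 3.0))
# decompose_matrix() -> (scale, shear, angles, translate, perspective); index 3 is translation.
translate_only = transformations.compose_matrix(translate=transformations.decompose_matrix(mat)[3])
assert np.allclose(translate_only[:3, 3], (1.0, 2.0, 3.0))
assert np.allclose(translate_only[:3, :3], np.identity(3))  # scale / rotation discarded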
@@ -249,48 +260,61 @@ def _create_mat4_source(name: str, data: np.ndarray, semantic: str):
     return source


-def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
+def fix_weird_bind_matrices(skin_seg: SkinSegmentDict) -> None:
     """
-    Fix weird-looking bind matrices to have normal scaling
+    Fix weird-looking bind matrices to have sensible scaling and rotations

     Not sure why these even happen (weird mesh authoring programs?)
-    Sometimes get enormous inverse bind matrices (each component 10k+) and tiny
+    Sometimes we get enormous inverse bind matrices (each component 10k+) and tiny
     bind shape matrix components. This detects inverse bind shape matrices
     with weird scales and tries to set them to what they "should" be without
     the weird inverted scaling.
     """
-    axis_counters = [collections.Counter() for _ in range(3)]
-    for joint_inv in skin_seg['inverse_bind_matrix']:
-        joint_mat = np.array(joint_inv).reshape((4, 4))
-        joint_scale = transformations.decompose_matrix(joint_mat)[0]
-        for axis_counter, axis_val in zip(axis_counters, joint_scale):
-            axis_counter[axis_val] += 1
-    most_common_inv_scale = []
-    for axis_counter in axis_counters:
-        most_common_inv_scale.append(axis_counter.most_common(1)[0][0])
-
-    if abs(1.0 - statistics.fmean(most_common_inv_scale)) > 1.0:
-        # The magnitude of the scales in the inverse bind matrices look very strange.
-        # The bind matrix itself is probably messed up as well, try to fix it.
-        skin_seg['bind_shape_matrix'] = fix_llsd_matrix_scale(skin_seg['bind_shape_matrix'], most_common_inv_scale)
-        if joint_positions := skin_seg.get('alt_inverse_bind_matrix', None):
-            fix_matrix_list_scale(joint_positions, most_common_inv_scale)
-        rev_scale = tuple(1.0 / x for x in most_common_inv_scale)
-        fix_matrix_list_scale(skin_seg['inverse_bind_matrix'], rev_scale)
+    # Sometimes we get mesh assets that have the vertex data naturally in y-up orientation,
+    # and get re-oriented to z-up not through the bind shape matrix, but through the
+    # transforms in the inverse bind matrices!
+    #
+    # Blender, for one, does not like this very much, and generally won't generate mesh
+    # assets like this, as explained here https://developer.blender.org/T38660.
+    # In vanilla Blender, these mesh assets will show up scaled and rotated _only_ according
+    # to the bind shape matrix, which may end up with the model 25 meters tall and sitting
+    # on its side.
+    #
+    # https://avalab.org/avastar/292/knowledge/compare-workbench/, while somewhat outdated,
+    # has some information on rest pose vs default pose and scaling that I believe is relevant.
+    # https://github.com/KhronosGroup/glTF-Blender-IO/issues/994 as well.
+    #
+    # While trying to figure out what was going on, I searched for something like
+    # "inverse bind matrix scale collada", "bind pose scale blender", etc. Pretty much every
+    # result was either a bug filed by, or a question asked by the creator of Avastar, or an SL user.
+    # I think that says a lot about how annoying it is to author mesh for SL in particular.
+    #
+    # I spent a good month or so tearing my hair out over this wondering how these values could
+    # even be possible. I wasn't sure how I should write mesh import code if I don't understand
+    # how to interpret existing data, or how it even ended up the way it did. Turns out I wasn't
+    # misinterpreting the data, the data really is just weird.
+    #
+    # I'd also had the idea that you could sniff which body a given rigged asset was meant
+    # for by doing trivial matching on the inverse bind matrices, but obviously that isn't true!
+    #
+    # Basically:
+    # 1) Maya is evil and generates evil, this evil bleeds into SL's assets through transforms.
+    # 2) Blender is also evil, but in a manner that doesn't agree with Maya's evil.
+    # 3) Collada was a valiant effort, but is evil in practice. Seemingly simple Collada
+    #    files are interpreted completely differently by Blender, Maya, and sometimes SL.
+    # 4) Those three evils collude to make an interop nightmare for everyone like "oh my rigger
+    #    rigs using Maya and now my model is huge and all my normals are fucked on reimport"
+    # 5) Yes, there's still good reasons to be using Avastar in 2022 even though nobody authoring
+    #    rigged mesh for any other use has to use something similar.

+    if not skin_seg['joint_names']:
+        return

-def fix_matrix_list_scale(source: List[List[float]], scale_fixup: Iterable[float]):
-    for i, alt_inv_matrix in enumerate(source):
-        source[i] = fix_llsd_matrix_scale(alt_inv_matrix, scale_fixup)
-
-
-def fix_llsd_matrix_scale(source: List[float], scale_fixup: Iterable[float]):
-    matrix = np.array(source).reshape((4, 4))
-    decomposed = list(transformations.decompose_matrix(matrix))
-    # Need to handle both the scale and translation matrices
-    for idx in (0, 3):
-        decomposed[idx] = tuple(x * y for x, y in zip(decomposed[idx], scale_fixup))
-    return list(transformations.compose_matrix(*decomposed).flatten('C'))
+    # TODO: calculate the correct inverse bind matrix scale & rotations from avatar_skeleton.xml
+    #  definitions. If the rotation and scale factors are the same across all inverse bind matrices then
+    #  they can be moved over to the bind shape matrix to keep Blender happy.
+    #  Maybe add a scaled / rotated empty as a parent for the armature instead?
+    return


 def main():
@@ -29,6 +29,7 @@ import math
 from typing import *

 import recordclass
+import transformations

 logger = getLogger('hippolyzer.lib.base.datatypes')

@@ -38,12 +39,13 @@ class _IterableStub:
     __iter__: Callable


-class TupleCoord(recordclass.datatuple, _IterableStub):  # type: ignore
-    __options__ = {
-        "fast_new": False,
-    }
+RAD_TO_DEG = 180 / math.pi
+
+
+class TupleCoord(recordclass.RecordClass, _IterableStub):
     def __init__(self, *args):
         # Only to help typing, doesn't actually do anything.
         # All the important stuff happens in `__new__()`
         pass

     @classmethod
@@ -220,6 +222,15 @@ class Quaternion(TupleCoord):
             )
         return super().__mul__(other)

+    @classmethod
+    def from_transformations(cls, coord) -> Quaternion:
+        """Convert to W (S) last form"""
+        return cls(coord[1], coord[2], coord[3], coord[0])
+
+    def to_transformations(self) -> Tuple[float, float, float, float]:
+        """Convert to W (S) first form for use with the transformations lib"""
+        return self.W, self.X, self.Y, self.Z
+
     @classmethod
     def from_euler(cls, roll, pitch, yaw, degrees=False):
         if degrees:
@@ -241,6 +252,9 @@ class Quaternion(TupleCoord):

         return cls(X=x, Y=y, Z=z, W=w)

+    def to_euler(self) -> Vector3:
+        return Vector3(*transformations.euler_from_quaternion(self.to_transformations()))
+
     def data(self, wanted_components=None):
         if wanted_components == 3:
             return self.X, self.Y, self.Z
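Example (illustrative, not part of the change set): the two helpers added above exist purely to bridge component ordering. The transformations library wants quaternions as (W, X, Y, Z) while the SL-style Quaternion stores W last; a round trip is lossless since no arithmetic happens:

import math

from hippolyzer.lib.base.datatypes import Quaternion

q = Quaternion.from_euler(0.0, 0.0, math.pi / 2)
wxyz = q.to_transformations()               # -> (W, X, Y, Z)
assert Quaternion.from_transformations(wxyz) == q
print(q.to_euler())                         # ~ Vector3(0.0, 0.0, pi / 2)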
@@ -290,6 +304,9 @@ class JankStringyBytes(bytes):
     def __str__(self):
         return self.rstrip(b"\x00").decode("utf8", errors="replace")

+    def __bool__(self):
+        return not (super().__eq__(b"") or super().__eq__(b"\x00"))
+
     def __eq__(self, other):
         if isinstance(other, str):
             return str(self) == other
@@ -303,6 +320,36 @@ class JankStringyBytes(bytes):
             return item in str(self)
         return item in bytes(self)

+    def __add__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(bytes(self) + other)
+        return str(self) + other
+
+    def __radd__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(other + bytes(self))
+        return other + str(self)
+
+    def lower(self):
+        return str(self).lower()
+
+    def upper(self):
+        return str(self).upper()
+
+    def startswith(self, __prefix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__prefix, str):
+            return str(self).startswith(__prefix)
+        return super().startswith(__prefix)
+
+    def endswith(self, __prefix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__prefix, str):
+            return str(self).endswith(__prefix)
+        return super().endswith(__prefix)
+
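Example (illustrative, not part of the change set): a quick sketch of the str/bytes duality these methods give, assuming the bytes-vs-bytes comparisons not shown in this hunk fall back to plain bytes equality:

from hippolyzer.lib.base.datatypes import JankStringyBytes

jank = JankStringyBytes(b"hello\x00")
assert jank == "hello"                      # compares as str, trailing NUL stripped
assert jank == b"hello\x00"                 # assumed plain bytes fallback
assert not JankStringyBytes(b"\x00")        # NUL-only is falsy per __bool__
assert jank + b"!" == b"hello\x00!"         # bytes concat stays bytes-ish
assert jank.startswith("he") and jank.startswith(b"he")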
 class RawBytes(bytes):
     __slots__ = ()
@@ -351,7 +398,7 @@ def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int
     return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra


-class TaggedUnion(recordclass.datatuple):  # type: ignore
+class TaggedUnion(recordclass.RecordClass):
     tag: Any
     value: Any

@@ -359,5 +406,5 @@ class TaggedUnion(recordclass.datatuple):  # type: ignore
 __all__ = [
     "Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
     "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
-    "IntEnum", "IntFlag", "flags_to_pod", "Pretty"
+    "IntEnum", "IntFlag", "flags_to_pod", "Pretty", "RAD_TO_DEG"
 ]
@@ -18,17 +18,20 @@ You should have received a copy of the GNU Lesser General Public License
 along with this program; if not, write to the Free Software Foundation,
 Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
+import asyncio
+import logging

-from logging import getLogger
+from hippolyzer.lib.base.helpers import create_logged_task

-logger = getLogger('utilities.events')
+LOG = logging.getLogger(__name__)


 class Event:
     """ an object containing data which will be passed out to all subscribers """

-    def __init__(self):
+    def __init__(self, name=None):
         self.subscribers = []
+        self.name = name

     def subscribe(self, handler, *args, one_shot=False, predicate=None, **kwargs):
         """ establish the subscribers (handlers) to this event """
@@ -38,7 +41,8 @@ class Event:

         return self

-    def _handler_key(self, handler):
+    @staticmethod
+    def _handler_key(handler):
         return handler[:3]

     def unsubscribe(self, handler, *args, **kwargs):
@@ -52,24 +56,37 @@ class Event:
             raise ValueError(f"Handler {handler!r} is not subscribed to this event.")
         return self

+    def _create_async_wrapper(self, handler, args, inner_args, kwargs):
+        # Note that unsubscription may be delayed due to asyncio scheduling :)
+        async def _run_handler_wrapper():
+            unsubscribe = await handler(args, *inner_args, **kwargs)
+            if unsubscribe:
+                _ = self.unsubscribe(handler, *inner_args, **kwargs)
+        return _run_handler_wrapper
+
     def notify(self, args):
-        for handler in self.subscribers[:]:
-            instance, inner_args, kwargs, one_shot, predicate = handler
+        for subscriber in self.subscribers[:]:
+            handler, inner_args, kwargs, one_shot, predicate = subscriber
             if predicate and not predicate(args):
                 continue
             if one_shot:
-                self.unsubscribe(instance, *inner_args, **kwargs)
-            if instance(args, *inner_args, **kwargs):
-                self.unsubscribe(instance, *inner_args, **kwargs)
+                self.unsubscribe(handler, *inner_args, **kwargs)
+            if asyncio.iscoroutinefunction(handler):
+                create_logged_task(self._create_async_wrapper(handler, args, inner_args, kwargs)(), self.name, LOG)
+            else:
+                try:
+                    if handler(args, *inner_args, **kwargs) and not one_shot:
+                        self.unsubscribe(handler, *inner_args, **kwargs)
+                except:
+                    # One handler failing shouldn't prevent notification of other handlers.
+                    LOG.exception(f"Failed in handler for {self.name}")

-    def get_subscriber_count(self):
+    def __len__(self):
         return len(self.subscribers)

     def clear_subscribers(self):
         self.subscribers.clear()
         return self

     __iadd__ = subscribe
     __isub__ = unsubscribe
     __call__ = notify
-    __len__ = get_subscriber_count
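Example (illustrative, not part of the change set): a sketch of the reworked Event API. Sync handlers fire inline; per the new notify(), coroutine handlers are scheduled through create_logged_task() so their exceptions get logged instead of silently dropped:

event = Event(name="object_updates")

def on_any(payload):
    print("got", payload)       # returning a truthy value would unsubscribe

def on_two(payload):
    print("only id == 2:", payload)

event.subscribe(on_any)
event.subscribe(on_two, one_shot=True, predicate=lambda p: p["id"] == 2)
assert len(event) == 2          # __len__ counts subscribers now

event.notify({"id": 2})         # on_two fires once, then auto-unsubscribes
event({"id": 1})                # __call__ is notify; only on_any fires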
@@ -176,7 +176,7 @@ class MessageTemplateNotFound(MessageSystemError):
         self.template = template

     def __str__(self):
-        return "No message template found, context: '%s'" % self.context
+        return "No message template found for %s, context: '%s'" % (self.template, self.context)


 class MessageTemplateParsingError(MessageSystemError):
528	hippolyzer/lib/base/gltftools.py	Normal file
@@ -0,0 +1,528 @@
"""
|
||||
WIP LLMesh -> glTF converter, for testing eventual glTF -> LLMesh conversion logic.
|
||||
"""
|
||||
# TODO:
|
||||
# * Simple tests
|
||||
# * Round-tripping skinning data from Blender-compatible glTF back to LLMesh (maybe through rig retargeting?)
|
||||
# * Panda3D-glTF viewer for LLMesh? The glTFs seem to work fine in Panda3D-glTF's `gltf-viewer`.
|
||||
# * Check if skew and projection components of transform matrices are ignored in practice as the spec requires.
|
||||
# I suppose this would render some real assets impossible to represent with glTF.
|
||||
|
||||
import dataclasses
|
||||
import math
|
||||
import pprint
|
||||
import sys
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import *
|
||||
|
||||
import gltflib
|
||||
import numpy as np
|
||||
import transformations
|
||||
|
||||
from hippolyzer.lib.base.datatypes import Vector3
|
||||
from hippolyzer.lib.base.mesh import (
|
||||
LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict, VertexWeight, llsd_to_mat4
|
||||
)
|
||||
from hippolyzer.lib.base.mesh_skeleton import AVATAR_SKELETON
|
||||
from hippolyzer.lib.base.serialization import BufferReader
|
||||
|
||||
|
||||
class IdentityList(list):
    """
    List, but does index() by object identity, not equality

    GLTF references objects by their index within some list, but we prefer to pass around
    actual object references internally. If we don't do this, then when we try and get
    a GLTF reference to a given object via `.index()` then we could end up actually getting
    a reference to some other object that just happens to be equal. This was causing issues
    with all primitives ending up with the same material, due to the default material's value
    being the same across all primitives.
    """
    def index(self, value, start: Optional[int] = None, stop: Optional[int] = None) -> int:
        view = self[start:stop]
        for i, x in enumerate(view):
            if x is value:
                if start:
                    return i + start
                return i
        raise ValueError(value)
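Example (illustrative, not part of the file): why identity-based index() matters here. Two equal-but-distinct default materials would alias under plain list.index(), but not under IdentityList.index():

a = {"name": None}
b = {"name": None}          # equal to `a`, but a distinct object
items = IdentityList([a, b])
assert items.index(b) == 1  # identity match finds the right one
assert [a, b].index(b) == 0 # plain list: equality match finds `a` first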
def sl_to_gltf_coords(coords):
    """
    SL (X, Y, Z) -> GL (X, Z, Y), as GLTF commandeth

    Note that this will only work when reordering axes, flipping an axis is more complicated.
    """
    return coords[0], coords[2], coords[1], *coords[3:]


def sl_to_gltf_uv(uv):
    """Flip the V coordinate of a UV to match glTF convention"""
    return [uv[0], -uv[1]]
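Doctest-style illustration (not part of the file) of the two small converters above:

assert sl_to_gltf_coords((1.0, 2.0, 3.0)) == (1.0, 3.0, 2.0)  # Y and Z swap
assert sl_to_gltf_coords((1, 2, 3, 4)) == (1, 3, 2, 4)        # extra components pass through
assert sl_to_gltf_uv([0.25, 0.75]) == [0.25, -0.75]           # V flips sign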
def sl_mat4_to_gltf(mat: np.ndarray) -> List[float]:
    """
    Convert an SL Mat4 to the glTF coordinate system

    This should only be done immediately before storing the matrix in a glTF structure!
    """
    # TODO: This is probably not correct. We definitely need to flip Z but there's
    #  probably a better way to do it.
    decomp = [sl_to_gltf_coords(x) for x in transformations.decompose_matrix(mat)]
    trans = decomp[3]
    decomp[3] = (trans[0], trans[1], -trans[2])
    return list(transformations.compose_matrix(*decomp).flatten(order='F'))


# Mat3 to convert points from SL coordinate space to GLTF coordinate space
POINT_TO_GLTF_MAT = transformations.compose_matrix(angles=(-(math.pi / 2), 0, 0))[:3, :3]
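Quick numeric check (illustrative, not part of the file): SL is Z-up and glTF is Y-up, so the -90° rotation about X should map +Z to +Y and +Y to -Z:

import numpy as np

assert np.allclose(POINT_TO_GLTF_MAT @ np.array([0.0, 0.0, 1.0]), [0.0, 1.0, 0.0])
assert np.allclose(POINT_TO_GLTF_MAT @ np.array([0.0, 1.0, 0.0]), [0.0, 0.0, -1.0])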
def sl_vec3_array_to_gltf(vec_list: np.ndarray) -> np.ndarray:
    new_array = []
    for x in vec_list:
        new_array.append(POINT_TO_GLTF_MAT.dot(x))
    return np.array(new_array)


def sl_weights_to_gltf(sl_weights: List[List[VertexWeight]]) -> Tuple[np.ndarray, np.ndarray]:
    """Convert SL Weights to separate JOINTS_0 and WEIGHTS_0 vec4 arrays"""
    joints = np.zeros((len(sl_weights), 4), dtype=np.uint8)
    weights = np.zeros((len(sl_weights), 4), dtype=np.float32)

    for i, vert_weights in enumerate(sl_weights):
        # We need to re-normalize these since the quantization can mess them up
        collected_weights = []
        for j, vert_weight in enumerate(vert_weights):
            joints[i, j] = vert_weight.joint_idx
            collected_weights.append(vert_weight.weight)
        weight_sum = sum(collected_weights)
        if weight_sum:
            for j, weight in enumerate(collected_weights):
                weights[i, j] = weight / weight_sum

    return joints, weights
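Example (illustrative; assumes VertexWeight takes joint_idx/weight keyword arguments, which isn't shown in this file): a vertex whose quantized weights sum to 0.8 comes out renormalized to 1.0, padded to glTF's fixed four influences:

from hippolyzer.lib.base.mesh import VertexWeight

sl_weights = [[VertexWeight(joint_idx=1, weight=0.6), VertexWeight(joint_idx=4, weight=0.2)]]
joints, weights = sl_weights_to_gltf(sl_weights)
assert list(joints[0]) == [1, 4, 0, 0]
assert abs(float(weights[0].sum()) - 1.0) < 1e-6  # renormalized to sum to 1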
def normalize_vec3(a):
    norm = np.linalg.norm(a)
    if norm == 0:
        return a
    return a / norm


def apply_bind_shape_matrix(bind_shape_matrix: np.ndarray, verts: np.ndarray, norms: np.ndarray) \
        -> Tuple[np.ndarray, np.ndarray]:
    """
    Apply the bind shape matrix to the mesh data

    glTF expects all verts and normals to be in armature-local space so that mesh data can be shared
    between differently-oriented armatures. Or something.
    https://github.com/KhronosGroup/glTF-Blender-IO/issues/566#issuecomment-523119339

    glTF also doesn't have a concept of a "bind shape matrix" like Collada does
    per its skinning docs, so we have to mix it into the mesh data manually.
    See https://github.com/KhronosGroup/glTF-Tutorials/blob/master/gltfTutorial/gltfTutorial_020_Skins.md
    """
    scale, _, angles, translation, _ = transformations.decompose_matrix(bind_shape_matrix)
    scale_mat = transformations.compose_matrix(scale=scale)[:3, :3]
    rot_mat = transformations.euler_matrix(*angles)[:3, :3]
    rot_scale_mat = scale_mat @ np.linalg.inv(rot_mat)

    # Apply the SRT transform to each vert
    verts = (verts @ rot_scale_mat) + translation

    # Our scale is unlikely to be uniform, so we have to fix up our normals as well.
    # https://paroj.github.io/gltut/Illumination/Tut09%20Normal%20Transformation.html
    inv_transpose_mat = np.transpose(np.linalg.inv(bind_shape_matrix)[:3, :3])
    new_norms = [normalize_vec3(inv_transpose_mat @ norm) for norm in norms]

    return verts, np.array(new_norms)
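Numeric sanity check (illustrative, not part of the file) of why the inverse-transpose is used for normals: under non-uniform scale, a normal transformed like a point stops being perpendicular to the surface, while the inverse-transpose keeps it perpendicular:

import numpy as np
import transformations

m = transformations.compose_matrix(scale=(2.0, 1.0, 1.0))
tangent = np.array([1.0, 1.0, 0.0])      # lies in the surface
normal = np.array([1.0, -1.0, 0.0])      # perpendicular to the tangent
it = np.transpose(np.linalg.inv(m)[:3, :3])
new_tangent = m[:3, :3] @ tangent
assert abs(np.dot(it @ normal, new_tangent)) < 1e-9        # still perpendicular
assert abs(np.dot(m[:3, :3] @ normal, new_tangent)) > 0.1  # naive transform is not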
@dataclasses.dataclass
class JointContext:
    node: gltflib.Node
    # Original matrix for the bone, may have custom translation, but otherwise the same.
    orig_matrix: np.ndarray
    # xform that must be applied to inverse bind matrices to account for the changed bone
    fixup_matrix: np.ndarray


JOINT_CONTEXT_DICT = Dict[str, JointContext]


class GLTFBuilder:
    def __init__(self, blender_compatibility=False):
        self.scene = gltflib.Scene(nodes=IdentityList())
        self.model = gltflib.GLTFModel(
            asset=gltflib.Asset(version="2.0"),
            accessors=IdentityList(),
            nodes=IdentityList(),
            materials=IdentityList(),
            buffers=IdentityList(),
            bufferViews=IdentityList(),
            meshes=IdentityList(),
            skins=IdentityList(),
            scenes=IdentityList((self.scene,)),
            extensionsUsed=["KHR_materials_specular"],
            scene=0,
        )
        self.gltf = gltflib.GLTF(
            model=self.model,
            resources=IdentityList(),
        )
        self.blender_compatibility = blender_compatibility

    def add_nodes_from_llmesh(self, mesh: MeshAsset, name: str, mesh_transform: Optional[np.ndarray] = None):
        """Build a glTF version of a mesh asset, appending it and its armature to the scene root"""
        # TODO: mesh data instancing?
        #  consider https://github.com/KhronosGroup/glTF-Blender-IO/issues/1634.
        if mesh_transform is None:
            mesh_transform = np.identity(4)

        skin_seg: Optional[SkinSegmentDict] = mesh.segments.get('skin')
        skin = None
        if skin_seg:
            mesh_transform = llsd_to_mat4(skin_seg['bind_shape_matrix'])
            joint_ctxs = self.add_joints(skin_seg)

            # Give our armature a root node and parent the pelvis to it
            armature_node = self.add_node("Armature")
            self.scene.nodes.append(self.model.nodes.index(armature_node))
            armature_node.children.append(self.model.nodes.index(joint_ctxs['mPelvis'].node))
            skin = self.add_skin("Armature", joint_ctxs, skin_seg)
            skin.skeleton = self.model.nodes.index(armature_node)

        primitives = []
        # Just the high LOD for now
        for submesh in mesh.segments['high_lod']:
            verts = np.array(positions_from_domain(submesh['Position'], submesh['PositionDomain']))
            norms = np.array(submesh['Normal'])
            tris = np.array(submesh['TriangleList'])
            joints = np.array([])
            weights = np.array([])
            range_uv = np.array([])
            if "TexCoord0" in submesh:
                range_uv = np.array(positions_from_domain(submesh['TexCoord0'], submesh['TexCoord0Domain']))
            if 'Weights' in submesh:
                joints, weights = sl_weights_to_gltf(submesh['Weights'])

            if skin:
                # Convert verts and norms to armature-local space
                verts, norms = apply_bind_shape_matrix(mesh_transform, verts, norms)

            primitives.append(self.add_primitive(
                tris=tris,
                positions=verts,
                normals=norms,
                uvs=range_uv,
                joints=joints,
                weights=weights,
            ))

        mesh_node = self.add_node(
            name,
            self.add_mesh(name, primitives),
            transform=mesh_transform,
        )
        if skin:
            # Node translation isn't relevant, we're going to use the bind matrices.
            # If you pull this into Blender you may want to untick "Guess Original Bind Pose",
            # it guesses that based on the inverse bind matrices which may have Maya poisoning.
            # TODO: Maybe we could automatically undo that by comparing expected bone scale and rot
            #  to scale and rot in the inverse bind matrices, and applying fixups to the
            #  bind shape matrix and inverse bind matrices?
            mesh_node.matrix = None
            mesh_node.skin = self.model.skins.index(skin)

        self.scene.nodes.append(self.model.nodes.index(mesh_node))
    def add_node(
            self,
            name: str,
            mesh: Optional[gltflib.Mesh] = None,
            transform: Optional[np.ndarray] = None,
    ) -> gltflib.Node:
        node = gltflib.Node(
            name=name,
            mesh=self.model.meshes.index(mesh) if mesh else None,
            matrix=sl_mat4_to_gltf(transform) if transform is not None else None,
            children=[],
        )
        self.model.nodes.append(node)
        return node

    def add_mesh(
            self,
            name: str,
            primitives: List[gltflib.Primitive],
    ) -> gltflib.Mesh:
        for i, prim in enumerate(primitives):
            # Give the materials a name relating to what "face" they belong to
            self.model.materials[prim.material].name = f"{name}.{i:03}"
        mesh = gltflib.Mesh(name=name, primitives=primitives)
        self.model.meshes.append(mesh)
        return mesh

    def add_primitive(
            self,
            tris: np.ndarray,
            positions: np.ndarray,
            normals: np.ndarray,
            uvs: np.ndarray,
            weights: np.ndarray,
            joints: np.ndarray,
    ) -> gltflib.Primitive:
        # Make a Material for the primitive. Materials pretty much _are_ the primitives in
        # LLMesh, so just make them both in one go. We need a unique material for each primitive.
        material = gltflib.Material(
            pbrMetallicRoughness=gltflib.PBRMetallicRoughness(
                baseColorFactor=[1.0, 1.0, 1.0, 1.0],
                metallicFactor=0.0,
                roughnessFactor=0.0,
            ),
            extensions={
                "KHR_materials_specular": {
                    "specularFactor": 0.0,
                    "specularColorFactor": [0, 0, 0]
                },
            }
        )
        self.model.materials.append(material)

        attributes = gltflib.Attributes(
            POSITION=self.maybe_add_vec_array(sl_vec3_array_to_gltf(positions), gltflib.AccessorType.VEC3),
            NORMAL=self.maybe_add_vec_array(sl_vec3_array_to_gltf(normals), gltflib.AccessorType.VEC3),
            TEXCOORD_0=self.maybe_add_vec_array(np.array([sl_to_gltf_uv(uv) for uv in uvs]), gltflib.AccessorType.VEC2),
            JOINTS_0=self.maybe_add_vec_array(joints, gltflib.AccessorType.VEC4, gltflib.ComponentType.UNSIGNED_BYTE),
            WEIGHTS_0=self.maybe_add_vec_array(weights, gltflib.AccessorType.VEC4),
        )

        return gltflib.Primitive(
            attributes=attributes,
            indices=self.model.accessors.index(self.add_scalars(tris)),
            material=self.model.materials.index(material),
            mode=gltflib.PrimitiveMode.TRIANGLES,
        )

    def add_scalars(self, scalars: np.ndarray) -> gltflib.Accessor:
        """
        Add a potentially multidimensional array of scalars, returning the accessor

        Generally only used for triangle indices
        """
        scalar_bytes = scalars.astype(np.uint32).flatten().tobytes()
        buffer_view = self.add_buffer_view(scalar_bytes, None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=gltflib.ComponentType.UNSIGNED_INT,
            count=scalars.size,  # use the flattened size!
            type=gltflib.AccessorType.SCALAR.value,  # type: ignore
            min=[int(scalars.min())],  # type: ignore
            max=[int(scalars.max())],  # type: ignore
        )
        self.model.accessors.append(accessor)
        return accessor

    def maybe_add_vec_array(
            self,
            vecs: np.ndarray,
            vec_type: gltflib.AccessorType,
            component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT,
    ) -> Optional[int]:
        if not vecs.size:
            return None
        accessor = self.add_vec_array(vecs, vec_type, component_type)
        return self.model.accessors.index(accessor)

    def add_vec_array(
            self,
            vecs: np.ndarray,
            vec_type: gltflib.AccessorType,
            component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT
    ) -> gltflib.Accessor:
        """
        Add a two-dimensional array of vecs (positions, normals, weights, UVs) returning the accessor

        Vec type may be a vec2, vec3, or a vec4.
        """
        # Pretty much all of these are float32 except the ones that aren't
        dtype = np.float32
        if component_type == gltflib.ComponentType.UNSIGNED_BYTE:
            dtype = np.uint8
        vec_data = vecs.astype(dtype).tobytes()
        buffer_view = self.add_buffer_view(vec_data, target=None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=component_type,
            count=len(vecs),
            type=vec_type.value,  # type: ignore
            min=vecs.min(axis=0).tolist(),  # type: ignore
            max=vecs.max(axis=0).tolist(),  # type: ignore
        )
        self.model.accessors.append(accessor)
        return accessor

    def add_buffer_view(self, data: bytes, target: Optional[gltflib.BufferTarget]) -> gltflib.BufferView:
        """Create a buffer view and associated buffer and resource for a blob of data"""
        resource = gltflib.FileResource(filename=f"res-{uuid.uuid4()}.bin", data=data)
        self.gltf.resources.append(resource)

        buffer = gltflib.Buffer(uri=resource.filename, byteLength=len(resource.data))
        self.model.buffers.append(buffer)

        buffer_view = gltflib.BufferView(
            buffer=self.model.buffers.index(buffer),
            byteLength=buffer.byteLength,
            byteOffset=0,
            target=target
        )
        self.model.bufferViews.append(buffer_view)
        return buffer_view
    def add_joints(self, skin: SkinSegmentDict) -> JOINT_CONTEXT_DICT:
        # There may be some joints not present in the mesh that we need to add to reach the mPelvis root
        required_joints = set()
        for joint_name in skin['joint_names']:
            joint_node = AVATAR_SKELETON[joint_name]
            required_joints.add(joint_node)
            required_joints.update(joint_node.ancestors)

        # If this is present, it may override the joint positions from the skeleton definition
        if 'alt_inverse_bind_matrix' in skin:
            joint_overrides = dict(zip(skin['joint_names'], skin['alt_inverse_bind_matrix']))
        else:
            joint_overrides = {}

        built_joints: JOINT_CONTEXT_DICT = {}
        for joint in required_joints:
            joint_matrix = joint.matrix

            # Do we have a joint position override that would affect joint_matrix?
            override = joint_overrides.get(joint.name)
            if override:
                decomp = list(transformations.decompose_matrix(joint_matrix))
                # We specifically only want the translation from the override!
                translation = transformations.translation_from_matrix(llsd_to_mat4(override))
                # Only do it if the difference is over 0.1mm though
                if Vector3.dist(Vector3(*translation), joint.translation) > 0.0001:
                    decomp[3] = translation
                    joint_matrix = transformations.compose_matrix(*decomp)

            # Do we need to mess with the bone's matrices to make Blender cooperate?
            orig_matrix = joint_matrix
            fixup_matrix = np.identity(4)
            if self.blender_compatibility:
                joint_matrix, fixup_matrix = self._fix_blender_joint(joint_matrix)

            # TODO: populate "extras" here with the metadata the Blender collada stuff uses to store
            #  "bind_mat" and "rest_mat" so we can go back to our original matrices when exporting
            #  from blender to .dae!
            gltf_joint = self.add_node(joint.name, transform=joint_matrix)

            # Store the node along with any fixups we may need to apply to the bind matrices later
            built_joints[joint.name] = JointContext(gltf_joint, orig_matrix, fixup_matrix)

        # Add each joint to the child list of their respective parent
        for joint_name, joint_ctx in built_joints.items():
            if parent := AVATAR_SKELETON[joint_name].parent:
                built_joints[parent().name].node.children.append(self.model.nodes.index(joint_ctx.node))
        return built_joints

    def _fix_blender_joint(self, joint_matrix: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """
        Split a joint matrix into a joint matrix and fixup matrix

        If we don't account for weird scaling on the collision volumes, then
        Blender freaks out. This is an issue in blender where it doesn't
        apply the inverse bind matrices relative to the scale and rotation of
        the bones themselves, as it should per the glTF spec. Blender's glTF loader
        tries to recover from this by applying certain transforms as a pose, but
        the damage has been done by that point. Nobody else really runs into
        this because they have the good sense to not use some nightmare abomination
        rig with scaling and rotation on the skeleton like SL does.

        Blender will _only_ correctly handle the translation component of the joint,
        any other transforms need to be mixed into the inverse bind matrices themselves.
        There's no internal concept of bone scale or rot in Blender right now.

        Should investigate an Avastar-style approach of optionally retargeting
        to a Blender-compatible rig with translation-only bones, and modify
        the bind matrices to accommodate. The glTF importer supports metadata through
        the "extras" fields, so we can potentially abuse the "bind_mat" metadata field
        that Blender already uses for the "Keep Bind Info" Collada import / export hack.

        For context:
        * https://github.com/KhronosGroup/glTF-Blender-IO/issues/1305
        * https://developer.blender.org/T38660 (these are Collada, but still relevant)
        * https://developer.blender.org/T29246
        * https://developer.blender.org/T50412
        * https://developer.blender.org/T53620 (FBX but still relevant)
        """
        scale, shear, angles, translate, projection = transformations.decompose_matrix(joint_matrix)
        joint_matrix = transformations.compose_matrix(translate=translate)
        fixup_matrix = transformations.compose_matrix(scale=scale, angles=angles)
        return joint_matrix, fixup_matrix
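Example (illustrative, not part of the file): the split above is lossless. Since compose_matrix() applies translation last, the translation-only joint matrix times the scale/rotation fixup reassembles the original bone matrix (assuming no shear or perspective components):

import numpy as np
import transformations

orig = transformations.compose_matrix(
    scale=(1.0, 1.5, 2.0), angles=(0.1, 0.2, 0.3), translate=(0.0, 0.5, 1.0))
joint, fixup = GLTFBuilder(blender_compatibility=True)._fix_blender_joint(orig)
assert np.allclose(joint @ fixup, orig)  # T @ (R @ S) == original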
    def add_skin(self, name: str, joint_nodes: JOINT_CONTEXT_DICT, skin_seg: SkinSegmentDict) -> gltflib.Skin:
        joints_arr = []
        for joint_name in skin_seg['joint_names']:
            joint_ctx = joint_nodes[joint_name]
            joints_arr.append(self.model.nodes.index(joint_ctx.node))

        inv_binds = []
        for joint_name, inv_bind in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
            joint_ctx = joint_nodes[joint_name]
            inv_bind = joint_ctx.fixup_matrix @ llsd_to_mat4(inv_bind)
            inv_binds.append(sl_mat4_to_gltf(inv_bind))
        inv_binds_data = np.array(inv_binds, dtype=np.float32).tobytes()
        buffer_view = self.add_buffer_view(inv_binds_data, target=None)
        accessor = gltflib.Accessor(
            bufferView=self.model.bufferViews.index(buffer_view),
            componentType=gltflib.ComponentType.FLOAT,
            count=len(inv_binds),
            type=gltflib.AccessorType.MAT4.value,  # type: ignore
        )
        self.model.accessors.append(accessor)
        accessor_idx = self.model.accessors.index(accessor)

        skin = gltflib.Skin(name=name, joints=joints_arr, inverseBindMatrices=accessor_idx)
        self.model.skins.append(skin)
        return skin

    def finalize(self):
        """Clean up the mesh to pass the glTF smell test, should be done last"""
        def _nullify_empty_lists(dc):
            for field in dataclasses.fields(dc):
                # Empty lists should be replaced with None
                if getattr(dc, field.name) == []:
                    setattr(dc, field.name, None)

        for node in self.model.nodes:
            _nullify_empty_lists(node)
        _nullify_empty_lists(self.model)
        return self.gltf


def main():
    # Take an llmesh file as an argument and spit out basename-converted.gltf
    with open(sys.argv[1], "rb") as f:
        reader = BufferReader("<", f.read())

    filename = Path(sys.argv[1]).stem
    mesh: MeshAsset = reader.read(LLMeshSerializer(parse_segment_contents=True))

    builder = GLTFBuilder(blender_compatibility=True)
    builder.add_nodes_from_llmesh(mesh, filename)
    gltf = builder.finalize()

    pprint.pprint(gltf.model)
    gltf.export_glb(sys.argv[1].rsplit(".", 1)[0] + "-converted.gltf")


if __name__ == "__main__":
    main()
@@ -1,7 +1,9 @@
 from __future__ import annotations

+import asyncio
 import codecs
+import functools
 import logging
 import os

 import lazy_object_proxy
@@ -132,6 +134,13 @@ def proxify(obj: Union[Callable[[], _T], weakref.ReferenceType, _T]) -> _T:
     return obj


+class BiDiDict(Generic[_T]):
+    """Dictionary for bidirectional lookups"""
+    def __init__(self, values: Dict[_T, _T]):
+        self.forward = {**values}
+        self.backward = {value: key for (key, value) in values.items()}
+
+
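Example (illustrative, not part of the change set) of a bidirectional lookup:

colors = BiDiDict({"red": 0xFF0000, "blue": 0x0000FF})
assert colors.forward["red"] == 0xFF0000
assert colors.backward[0x0000FF] == "blue"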
 def bytes_unescape(val: bytes) -> bytes:
     # Only in CPython. bytes -> bytes with escape decoding.
     # https://stackoverflow.com/a/23151714
@@ -147,7 +156,7 @@ def get_resource_filename(resource_filename: str):
     return pkg_resources.resource_filename("hippolyzer", resource_filename)


-def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[_T, None, None]:
+def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[Sequence[_T], None, None]:
     while chunkable:
         yield chunkable[:chunk_size]
         chunkable = chunkable[chunk_size:]
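Example (illustrative, not part of the change set): the corrected return annotation reflects that each yielded value is a slice, not a single element, and any Sequence works:

assert list(to_chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]
assert list(to_chunks(b"abcdef", 4)) == [b"abcd", b"ef"]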
@@ -158,3 +167,31 @@ def get_mtime(path):
         return os.stat(path).st_mtime
     except:
         return None
+
+
+def fut_logger(name: str, logger: logging.Logger, fut: asyncio.Future, *args) -> None:
+    """Callback suitable for exception logging in `Future.add_done_callback()`"""
+    if not fut.cancelled() and fut.exception():
+        if isinstance(fut.exception(), asyncio.CancelledError):
+            # Don't really care if the task was just cancelled
+            return
+        logger.exception(f"Failed in task for {name}", exc_info=fut.exception())
+
+
+def add_future_logger(
+        fut: asyncio.Future,
+        name: Optional[str] = None,
+        logger: Optional[logging.Logger] = None,
+):
+    """Add a logger to Futures that will never be directly `await`ed, logging exceptions"""
+    fut.add_done_callback(functools.partial(fut_logger, name, logger or logging.getLogger()))
+
+
+def create_logged_task(
+        coro: Coroutine,
+        name: Optional[str] = None,
+        logger: Optional[logging.Logger] = None,
+) -> asyncio.Task:
+    task = asyncio.create_task(coro, name=name)
+    add_future_logger(task, name, logger)
+    return task
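Example (illustrative, not part of the change set): create_logged_task() is for fire-and-forget tasks nobody will await. Accessing fut.exception() in the done callback both logs the failure and marks the exception as retrieved, avoiding "Task exception was never retrieved" noise at shutdown:

import asyncio
import logging

async def flaky():
    raise RuntimeError("oops")

async def demo():
    create_logged_task(flaky(), "flaky-task", logging.getLogger("demo"))
    await asyncio.sleep(0)  # let the task run; we never await it directly

asyncio.run(demo())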
@@ -3,13 +3,21 @@ Parse the horrible legacy inventory-related format.

 It's typically only used for object contents now.
 """
+# TODO: Maybe handle CRC calculation? Does anything care about that?
+#  I don't think anything in the viewer actually looks at the result
+#  of the CRC check for UDP stuff.

 from __future__ import annotations

+import abc
+import asyncio
 import dataclasses
 import datetime as dt
+import inspect
+import logging
+import secrets
 import struct
 import typing
 import weakref
 from io import StringIO
 from typing import *
@@ -22,12 +30,15 @@ from hippolyzer.lib.base.legacy_schema import (
     SchemaFieldSerializer,
     SchemaHexInt,
     SchemaInt,
+    SchemaLLSD,
     SchemaMultilineStr,
     SchemaParsingError,
     SchemaStr,
     SchemaUUID,
     schema_field,
 )
 from hippolyzer.lib.base.message.message import Block
 from hippolyzer.lib.base.templates import SaleType, InventoryType, LookupIntEnum, AssetType, FolderType

 MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")
+LOG = logging.getLogger(__name__)
@@ -37,12 +48,42 @@ _T = TypeVar("_T")
 class SchemaFlagField(SchemaHexInt):
     """Like a hex int, but must be serialized as bytes in LLSD due to being a U32"""
     @classmethod
-    def from_llsd(cls, val: Any) -> int:
-        return struct.unpack("!I", val)[0]
+    def from_llsd(cls, val: Any, flavor: str) -> int:
+        # Sometimes values in S32 range will just come through normally
+        if isinstance(val, int):
+            return val
+
+        if flavor == "legacy":
+            return struct.unpack("!I", val)[0]
+        return val

     @classmethod
-    def to_llsd(cls, val: int) -> Any:
-        return struct.pack("!I", val)
+    def to_llsd(cls, val: int, flavor: str) -> Any:
+        if flavor == "legacy":
+            return struct.pack("!I", val)
+        return val


+class SchemaEnumField(SchemaStr, Generic[_T]):
+    def __init__(self, enum_cls: Type[LookupIntEnum]):
+        super().__init__()
+        self._enum_cls = enum_cls
+
+    def deserialize(self, val: str) -> _T:
+        return self._enum_cls.from_lookup_name(val)
+
+    def serialize(self, val: _T) -> str:
+        return self._enum_cls(val).to_lookup_name()
+
+    def from_llsd(self, val: Union[str, int], flavor: str) -> _T:
+        if flavor == "legacy":
+            return self.deserialize(val)
+        return self._enum_cls(val)
+
+    def to_llsd(self, val: _T, flavor: str) -> Union[int, str]:
+        if flavor == "legacy":
+            return self.serialize(val)
+        return int(val)
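Example (illustrative, not part of the change set): the flavor parameter threads a single field definition through both serialization dialects. The legacy text schema wants symbolic names, while AIS-style LLSD wants raw ints:

field = SchemaEnumField(SaleType)
assert field.to_llsd(SaleType.NOT, "legacy") == SaleType.NOT.to_lookup_name()
assert field.to_llsd(SaleType.NOT, "ais") == int(SaleType.NOT)
assert field.from_llsd(int(SaleType.NOT), "ais") == SaleType.NOT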
 def _yield_schema_tokens(reader: StringIO):
@@ -98,10 +139,14 @@ class InventoryBase(SchemaBase):
             if not spec:
                 LOG.warning(f"Internal key {key!r}")
                 continue

+            spec_cls = spec
+            if not inspect.isclass(spec_cls):
+                spec_cls = spec_cls.__class__
             # some kind of nested structure like sale_info
-            if issubclass(spec, SchemaBase):
+            if issubclass(spec_cls, SchemaBase):
                 obj_dict[key] = spec.from_reader(reader)
-            elif issubclass(spec, SchemaFieldSerializer):
+            elif issubclass(spec_cls, SchemaFieldSerializer):
                 obj_dict[key] = spec.deserialize(val)
             else:
                 raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
@@ -110,29 +155,46 @@ class InventoryBase(SchemaBase):
         return cls._obj_from_dict(obj_dict)

     def to_writer(self, writer: StringIO):
-        writer.write(f"\t{self.SCHEMA_NAME}\t0\n")
+        writer.write(f"\t{self.SCHEMA_NAME}")
+        if self.SCHEMA_NAME == "permissions":
+            writer.write(" 0\n")
+        else:
+            writer.write("\t0\n")
         writer.write("\t{\n")
-        for field_name, field in self._get_fields_dict().items():
+
+        # Make sure the ID field always comes first, if there is one.
+        fields_dict = {}
+        if hasattr(self, "ID_ATTR"):
+            fields_dict = {getattr(self, "ID_ATTR"): None}
+        # update()ing will put all fields that aren't yet in the dict after the ID attr.
+        fields_dict.update(self._get_fields_dict())
+
+        for field_name, field in fields_dict.items():
             spec = field.metadata.get("spec")
             # Not meant to be serialized
             if not spec:
                 continue

-            val = getattr(self, field_name)
-            if val is None:
+            if field.metadata.get("llsd_only"):
                 continue

+            val = getattr(self, field_name)
+            if val is None and not field.metadata.get("include_none"):
+                continue
+
+            spec_cls = spec
+            if not inspect.isclass(spec_cls):
+                spec_cls = spec_cls.__class__
             # Some kind of nested structure like sale_info
             if isinstance(val, SchemaBase):
                 val.to_writer(writer)
-            elif issubclass(spec, SchemaFieldSerializer):
+            elif issubclass(spec_cls, SchemaFieldSerializer):
                 writer.write(f"\t\t{field_name}\t{spec.serialize(val)}\n")
             else:
                 raise ValueError(f"Bad inventory spec {spec!r}")
         writer.write("\t}\n")
-class InventoryDifferences(typing.NamedTuple):
+class InventoryDifferences(NamedTuple):
     changed: List[InventoryNodeBase]
     removed: List[InventoryNodeBase]

@@ -141,6 +203,7 @@ class InventoryModel(InventoryBase):
     def __init__(self):
         self.nodes: Dict[UUID, InventoryNodeBase] = {}
         self.root: Optional[InventoryContainerBase] = None
+        self.any_dirty = asyncio.Event()

     @classmethod
     def from_reader(cls, reader: StringIO, read_header=False) -> InventoryModel:
@@ -163,20 +226,15 @@ class InventoryModel(InventoryBase):
         return model

     @classmethod
-    def from_llsd(cls, llsd_val: List[Dict]) -> InventoryModel:
+    def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> InventoryModel:
         model = cls()
         for obj_dict in llsd_val:
-            if InventoryCategory.ID_ATTR in obj_dict:
-                if (obj := InventoryCategory.from_llsd(obj_dict)) is not None:
-                    model.add(obj)
-            elif InventoryObject.ID_ATTR in obj_dict:
-                if (obj := InventoryObject.from_llsd(obj_dict)) is not None:
-                    model.add(obj)
-            elif InventoryItem.ID_ATTR in obj_dict:
-                if (obj := InventoryItem.from_llsd(obj_dict)) is not None:
-                    model.add(obj)
+            for inv_type in INVENTORY_TYPES:
+                if inv_type.ID_ATTR in obj_dict:
+                    if (obj := inv_type.from_llsd(obj_dict, flavor)) is not None:
+                        model.add(obj)
+                    break
             else:
                 LOG.warning(f"Unknown object type {obj_dict!r}")
         return model

     @property
@@ -190,6 +248,12 @@ class InventoryModel(InventoryBase):
             if isinstance(node, InventoryContainerBase):
                 yield node

+    @property
+    def dirty_categories(self) -> Iterable[InventoryCategory]:
+        for node in self.nodes.values():
+            if isinstance(node, InventoryCategory) and node.version == InventoryCategory.VERSION_NONE:
+                yield node
+
     @property
     def all_items(self) -> Iterable[InventoryItem]:
         for node in self.nodes.values():
@@ -205,8 +269,8 @@ class InventoryModel(InventoryBase):
         for node in self.ordered_nodes:
             node.to_writer(writer)

-    def to_llsd(self):
-        return list(node.to_llsd() for node in self.ordered_nodes)
+    def to_llsd(self, flavor: str = "legacy"):
+        return list(node.to_llsd(flavor) for node in self.ordered_nodes)

     def add(self, node: InventoryNodeBase):
         if node.node_id in self.nodes:
@@ -217,14 +281,37 @@ class InventoryModel(InventoryBase):
         if node.parent_id == UUID.ZERO:
             self.root = node
         node.model = weakref.proxy(self)
         return node

-    def unlink(self, node: InventoryNodeBase) -> Sequence[InventoryNodeBase]:
+    def update(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
+        """Update an existing node, optionally only updating specific fields"""
+        if node.node_id not in self.nodes:
+            raise KeyError(f"{node.node_id} not in the inventory model")
+
+        orig_node = self.nodes[node.node_id]
+        if node.__class__ != orig_node.__class__:
+            raise ValueError(f"Tried to update {orig_node!r} from non-matching {node!r}")
+
+        if not update_fields:
+            # Update everything but the model parameter
+            update_fields = node.get_field_names()
+        for field_name in update_fields:
+            setattr(orig_node, field_name, getattr(node, field_name))
+        return orig_node
+
+    def upsert(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
+        """Add or update a node"""
+        if node.node_id in self.nodes:
+            return self.update(node, update_fields)
+        return self.add(node)
+
+    def unlink(self, node: InventoryNodeBase, single_only: bool = False) -> Sequence[InventoryNodeBase]:
         """Unlink a node and its descendants from the tree, returning the removed nodes"""
         assert node.model == self
         if node == self.root:
             self.root = None
         unlinked = [node]
-        if isinstance(node, InventoryContainerBase):
+        if isinstance(node, InventoryContainerBase) and not single_only:
             for child in node.children:
                 unlinked.extend(self.unlink(child))
         self.nodes.pop(node.node_id, None)
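Example (illustrative, not part of the change set; hypothetical minimal construction, a real InventoryItem carries its full field set) of how upsert() dispatches between add() and update():

model = InventoryModel()
item = InventoryItem(item_id=UUID.random(), parent_id=UUID.ZERO, name="hat")
model.upsert(item)                                   # unknown ID -> add()
newer = InventoryItem(item_id=item.item_id, parent_id=UUID.ZERO, name="cap")
node = model.upsert(newer, update_fields=("name",))  # known ID -> update(), name only
assert node is model[item.item_id] and node.name == "cap"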
@@ -257,6 +344,19 @@ class InventoryModel(InventoryBase):
             removed=removed_in_other,
         )

+    def flag_if_dirty(self):
+        if any(self.dirty_categories):
+            self.any_dirty.set()
+
+    def __getitem__(self, item: UUID) -> InventoryNodeBase:
+        return self.nodes[item]
+
+    def __contains__(self, item: UUID):
+        return item in self.nodes
+
+    def get(self, item: UUID) -> Optional[InventoryNodeBase]:
+        return self.nodes.get(item)
+

 @dataclasses.dataclass
 class InventoryPermissions(InventoryBase):
@@ -271,27 +371,41 @@ class InventoryPermissions(InventoryBase):
     owner_id: UUID = schema_field(SchemaUUID)
     last_owner_id: UUID = schema_field(SchemaUUID)
     group_id: UUID = schema_field(SchemaUUID)
+    # Nothing actually cares about this, but it could be there.
+    # It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
+    is_owner_group: Optional[int] = schema_field(SchemaInt, default=None, llsd_only=True)


 @dataclasses.dataclass
 class InventorySaleInfo(InventoryBase):
     SCHEMA_NAME: ClassVar[str] = "sale_info"

-    sale_type: str = schema_field(SchemaStr)
+    sale_type: SaleType = schema_field(SchemaEnumField(SaleType))
     sale_price: int = schema_field(SchemaInt)
+class _HasName(abc.ABC):
+    """
+    Only exists so that we can assert that all subclasses should have this without forcing
+    a particular serialization order, as would happen if this was present on InventoryNodeBase.
+    """
+    name: str
+
+
 @dataclasses.dataclass
-class InventoryNodeBase(InventoryBase):
+class InventoryNodeBase(InventoryBase, _HasName):
     ID_ATTR: ClassVar[str]

     parent_id: Optional[UUID] = schema_field(SchemaUUID)

     model: Optional[InventoryModel] = dataclasses.field(
         default=None, init=False, hash=False, compare=False, repr=False
     )

+    @classmethod
+    def get_field_names(cls) -> Set[str]:
+        return set(cls._get_fields_dict().keys()) - {"model"}
+
     @property
     def node_id(self) -> UUID:
         return getattr(self, self.ID_ATTR)
@@ -328,8 +442,7 @@ class InventoryNodeBase(InventoryBase):

 @dataclasses.dataclass
 class InventoryContainerBase(InventoryNodeBase):
-    type: str = schema_field(SchemaStr)
-    name: str = schema_field(SchemaMultilineStr)
+    type: AssetType = schema_field(SchemaEnumField(AssetType))

     @property
     def children(self) -> Sequence[InventoryNodeBase]:
@@ -358,8 +471,8 @@ class InventoryContainerBase(InventoryNodeBase):
             name=name,
             cat_id=UUID.random(),
             parent_id=self.node_id,
-            type="category",
-            pref_type="-1",
+            type=AssetType.CATEGORY,
+            pref_type=FolderType.NONE,
             owner_id=getattr(self, 'owner_id', UUID.ZERO),
             version=1,
         )
@@ -376,6 +489,8 @@ class InventoryObject(InventoryContainerBase):
     ID_ATTR: ClassVar[str] = "obj_id"

     obj_id: UUID = schema_field(SchemaUUID)
+    name: str = schema_field(SchemaMultilineStr)
+    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)

     __hash__ = InventoryNodeBase.__hash__
@@ -383,12 +498,61 @@ class InventoryObject(InventoryContainerBase):
 @dataclasses.dataclass
 class InventoryCategory(InventoryContainerBase):
     ID_ATTR: ClassVar[str] = "cat_id"
+    # AIS calls this something else...
+    ID_ATTR_AIS: ClassVar[str] = "category_id"
     SCHEMA_NAME: ClassVar[str] = "inv_category"
+    VERSION_NONE: ClassVar[int] = -1

     cat_id: UUID = schema_field(SchemaUUID)
-    pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
-    owner_id: UUID = schema_field(SchemaUUID)
-    version: int = schema_field(SchemaInt)
+    pref_type: FolderType = schema_field(SchemaEnumField(FolderType), llsd_name="preferred_type")
+    name: str = schema_field(SchemaMultilineStr)
+    owner_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
+    version: int = schema_field(SchemaInt, default=VERSION_NONE, llsd_only=True)
+    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)
+
+    def to_folder_data(self) -> Block:
+        return Block(
+            "FolderData",
+            FolderID=self.cat_id,
+            ParentID=self.parent_id,
+            CallbackID=0,
+            Type=self.pref_type,
+            Name=self.name,
+        )
+
+    @classmethod
+    def from_folder_data(cls, block: Block):
+        return cls(
+            cat_id=block["FolderID"],
+            parent_id=block["ParentID"],
+            pref_type=block["Type"],
+            name=block["Name"],
+            type=AssetType.CATEGORY,
+        )
+
+    @classmethod
+    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
+        if flavor == "ais" and "type" not in inv_dict:
+            inv_dict = inv_dict.copy()
+            inv_dict["type"] = AssetType.CATEGORY
+        return super().from_llsd(inv_dict, flavor)
+
+    def to_llsd(self, flavor: str = "legacy"):
+        payload = super().to_llsd(flavor)
+        if flavor == "ais":
+            # AIS already knows the inventory type is category
+            payload.pop("type", None)
+        return payload
+
+    @classmethod
+    def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
+        fields = super()._get_fields_dict(llsd_flavor)
+        if llsd_flavor == "ais":
+            # These have different names though
+            fields["type_default"] = fields.pop("preferred_type")
+            fields["agent_id"] = fields.pop("owner_id")
+            fields["category_id"] = fields.pop("cat_id")
+        return fields

     __hash__ = InventoryNodeBase.__hash__
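Example (illustrative, not part of the change set; assumes the base to_llsd() emits one key per schema field) of the flavor-dependent category serialization:

cat = InventoryCategory(
    cat_id=UUID.random(), parent_id=UUID.ZERO, name="Objects",
    type=AssetType.CATEGORY, pref_type=FolderType.NONE, owner_id=UUID.ZERO)
ais = cat.to_llsd("ais")
assert "type" not in ais   # AIS already knows categories are categories
# The AIS flavor also renames keys: preferred_type -> type_default,
# owner_id -> agent_id, cat_id -> category_id (see _get_fields_dict above).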
@@ -399,16 +563,17 @@ class InventoryItem(InventoryNodeBase):
     ID_ATTR: ClassVar[str] = "item_id"

     item_id: UUID = schema_field(SchemaUUID)
-    type: str = schema_field(SchemaStr)
-    inv_type: str = schema_field(SchemaStr)
-    flags: int = schema_field(SchemaFlagField)
-    name: str = schema_field(SchemaMultilineStr)
-    desc: str = schema_field(SchemaMultilineStr)
-    creation_date: dt.datetime = schema_field(SchemaDate, llsd_name="created_at")
     permissions: InventoryPermissions = schema_field(InventoryPermissions)
-    sale_info: InventorySaleInfo = schema_field(InventorySaleInfo)
     asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
     shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
+    type: Optional[AssetType] = schema_field(SchemaEnumField(AssetType), default=None)
+    inv_type: Optional[InventoryType] = schema_field(SchemaEnumField(InventoryType), default=None)
+    flags: Optional[int] = schema_field(SchemaFlagField, default=None)
+    sale_info: Optional[InventorySaleInfo] = schema_field(InventorySaleInfo, default=None)
+    name: Optional[str] = schema_field(SchemaMultilineStr, default=None)
+    desc: Optional[str] = schema_field(SchemaMultilineStr, default=None)
+    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
+    creation_date: Optional[dt.datetime] = schema_field(SchemaDate, llsd_name="created_at", default=None)

     __hash__ = InventoryNodeBase.__hash__
@@ -417,3 +582,112 @@ class InventoryItem(InventoryNodeBase):
         if self.asset_id is not None:
             return self.asset_id
         return self.shadow_id ^ MAGIC_ID
+
+    def to_inventory_data(self) -> Block:
+        return Block(
+            "InventoryData",
+            ItemID=self.item_id,
+            FolderID=self.parent_id,
+            CallbackID=0,
+            CreatorID=self.permissions.creator_id,
+            OwnerID=self.permissions.owner_id,
+            GroupID=self.permissions.group_id,
+            BaseMask=self.permissions.base_mask,
+            OwnerMask=self.permissions.owner_mask,
+            GroupMask=self.permissions.group_mask,
+            EveryoneMask=self.permissions.everyone_mask,
+            NextOwnerMask=self.permissions.next_owner_mask,
+            GroupOwned=self.permissions.owner_id == UUID.ZERO and self.permissions.group_id != UUID.ZERO,
+            AssetID=self.true_asset_id,
+            Type=self.type,
+            InvType=self.inv_type,
+            Flags=self.flags,
+            SaleType=self.sale_info.sale_type,
+            SalePrice=self.sale_info.sale_price,
+            Name=self.name,
+            Description=self.desc,
+            CreationDate=SchemaDate.to_llsd(self.creation_date, "legacy"),
+            # Meaningless here
+            CRC=secrets.randbits(32),
+        )
+
+    @classmethod
+    def from_inventory_data(cls, block: Block):
+        return cls(
+            item_id=block["ItemID"],
+            # Might be under one of two names
+            parent_id=block.get("ParentID", block["FolderID"]),
+            permissions=InventoryPermissions(
+                creator_id=block["CreatorID"],
+                owner_id=block["OwnerID"],
+                # Unknown, not sent in this schema
+                last_owner_id=block.get("LastOwnerID", UUID.ZERO),
+                group_id=block["GroupID"],
+                base_mask=block["BaseMask"],
+                owner_mask=block["OwnerMask"],
+                group_mask=block["GroupMask"],
+                everyone_mask=block["EveryoneMask"],
+                next_owner_mask=block["NextOwnerMask"],
+            ),
+            # May be missing in UpdateInventoryItem
+            asset_id=block.get("AssetID"),
+            type=AssetType(block["Type"]),
+            inv_type=InventoryType(block["InvType"]),
+            flags=block["Flags"],
+            sale_info=InventorySaleInfo(
+                sale_type=SaleType(block["SaleType"]),
+                sale_price=block["SalePrice"],
+            ),
+            name=block["Name"],
+            desc=block["Description"],
+            creation_date=block["CreationDate"],
+        )
+
+    def to_llsd(self, flavor: str = "legacy"):
+        val = super().to_llsd(flavor=flavor)
+        if flavor == "ais":
+            # There's little chance this differs from owner ID, just place it.
+            val["agent_id"] = val["permissions"]["owner_id"]
+            if val["type"] == AssetType.LINK:
+                # For link items, there is no asset, only a linked ID.
+                val["linked_id"] = val.pop("asset_id")
+                # These don't exist either
+                val.pop("permissions", None)
+                val.pop("sale_info", None)
+        return val
+
+    @classmethod
+    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
+        if flavor == "ais" and "linked_id" in inv_dict:
+            # Links get represented differently than other items for whatever reason.
+            # This is incredibly annoying, under *NIX there's nothing really special about symlinks.
+            inv_dict = inv_dict.copy()
+            # Fill this in since it needs to be there
+            if "permissions" not in inv_dict:
+                inv_dict["permissions"] = InventoryPermissions(
+                    base_mask=0xFFffFFff,
+                    owner_mask=0xFFffFFff,
+                    group_mask=0xFFffFFff,
+                    everyone_mask=0,
+                    next_owner_mask=0xFFffFFff,
+                    creator_id=UUID.ZERO,
+                    owner_id=UUID.ZERO,
+                    last_owner_id=UUID.ZERO,
+                    group_id=UUID.ZERO,
+                ).to_llsd("ais")
+            if "sale_info" not in inv_dict:
+                inv_dict["sale_info"] = InventorySaleInfo(
+                    sale_type=SaleType.NOT,
+                    sale_price=0,
+                ).to_llsd("ais")
+            if "type" not in inv_dict:
+                inv_dict["type"] = AssetType.LINK
+
+            # In the context of symlinks, asset id means linked item ID.
|
||||
# This is also how indra stores symlinks. Why the asymmetry in AIS if none of the
|
||||
# consumers actually want it? Who knows.
|
||||
inv_dict["asset_id"] = inv_dict.pop("linked_id")
|
||||
return super().from_llsd(inv_dict, flavor)
|
||||
|
||||
|
||||
INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)
|
||||
|
||||
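To make the AIS link special-casing above concrete, here is a minimal sketch. It is not part of this changeset; the UUIDs are invented, and it assumes InventoryItem and AssetType are importable from the modules being patched here.

# Sketch only: round-tripping an AIS-flavor link item.
ais_payload = {
    "item_id": "11111111-1111-4111-8111-111111111111",
    "parent_id": "22222222-2222-4222-8222-222222222222",
    # AIS sends links with "linked_id" and omits type/permissions/sale_info
    "linked_id": "33333333-3333-4333-8333-333333333333",
    "name": "Link to something",
    "desc": "",
    "created_at": 0,
}
item = InventoryItem.from_llsd(ais_payload, "ais")
# from_llsd() filled in type=AssetType.LINK, permissive permissions and a
# no-sale sale_info, then renamed linked_id -> asset_id.
back = item.to_llsd("ais")
# to_llsd() reverses that: asset_id -> linked_id, permissions/sale_info dropped.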
@@ -9,11 +9,14 @@ import abc
import calendar
import dataclasses
import datetime as dt
import inspect
import logging
import re
from io import StringIO
from typing import *

import hippolyzer.lib.base.llsd as llsd

from hippolyzer.lib.base.datatypes import UUID

LOG = logging.getLogger(__name__)
@@ -32,11 +35,11 @@ class SchemaFieldSerializer(abc.ABC, Generic[_T]):
        pass

    @classmethod
    def from_llsd(cls, val: Any) -> _T:
    def from_llsd(cls, val: Any, flavor: str) -> _T:
        return val

    @classmethod
    def to_llsd(cls, val: _T) -> Any:
    def to_llsd(cls, val: _T, flavor: str) -> Any:
        return val


@@ -50,11 +53,11 @@ class SchemaDate(SchemaFieldSerializer[dt.datetime]):
        return str(calendar.timegm(val.utctimetuple()))

    @classmethod
    def from_llsd(cls, val: Any) -> dt.datetime:
    def from_llsd(cls, val: Any, flavor: str) -> dt.datetime:
        return dt.datetime.utcfromtimestamp(val)

    @classmethod
    def to_llsd(cls, val: dt.datetime):
    def to_llsd(cls, val: dt.datetime, flavor: str):
        return calendar.timegm(val.utctimetuple())


@@ -101,6 +104,13 @@ class SchemaStr(SchemaFieldSerializer[str]):


class SchemaUUID(SchemaFieldSerializer[UUID]):
    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> UUID:
        # FetchInventory2 will return a string, but we want a UUID. It's not an issue
        # for us to return a UUID later there because it'll just cast to string if
        # that's what it wants
        return UUID(val)

    @classmethod
    def deserialize(cls, val: str) -> UUID:
        return UUID(val)
@@ -110,12 +120,28 @@ class SchemaUUID(SchemaFieldSerializer[UUID]):
        return str(val)


def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None) -> dataclasses.Field:  # noqa
class SchemaLLSD(SchemaFieldSerializer[_T]):
    """Arbitrary LLSD embedded in a field"""
    @classmethod
    def deserialize(cls, val: str) -> _T:
        return llsd.parse_xml(val.partition("|")[0].encode("utf8"))

    @classmethod
    def serialize(cls, val: _T) -> str:
        # Don't include the XML header
        return llsd.format_xml(val).split(b">", 1)[1].decode("utf8") + "\n|"


_SCHEMA_SPEC = Union[Type[Union["SchemaBase", SchemaFieldSerializer]], SchemaFieldSerializer]


def schema_field(spec: _SCHEMA_SPEC, *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False,
                 include_none=False) -> dataclasses.Field:  # noqa
    """Describe a field in the inventory schema and the shape of its value"""
    return dataclasses.field(
        metadata={"spec": spec, "llsd_name": llsd_name}, default=default,
        init=init, repr=repr, hash=hash, compare=compare,
    return dataclasses.field(  # noqa
        metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only, "include_none": include_none},
        default=default, init=init, repr=repr, hash=hash, compare=compare,
    )


@@ -138,11 +164,11 @@ def parse_schema_line(line: str):
@dataclasses.dataclass
class SchemaBase(abc.ABC):
    @classmethod
    def _get_fields_dict(cls, llsd=False):
    def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
        fields_dict = {}
        for field in dataclasses.fields(cls):
            field_name = field.name
            if llsd:
            if llsd_flavor:
                field_name = field.metadata.get("llsd_name") or field_name
            fields_dict[field_name] = field
        return fields_dict
@@ -161,27 +187,39 @@ class SchemaBase(abc.ABC):
        return cls.from_str(data.decode("utf8"))

    @classmethod
    def from_llsd(cls, inv_dict: Dict):
        fields = cls._get_fields_dict(llsd=True)
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
        fields = cls._get_fields_dict(llsd_flavor=flavor)
        obj_dict = {}
        for key, val in inv_dict.items():
            if key in fields:
                field: dataclasses.Field = fields[key]
                key = field.name
                spec = field.metadata.get("spec")
                # Not a real key, an internal var on our dataclass
                if not spec:
                    LOG.warning(f"Internal key {key!r}")
                    continue
                # some kind of nested structure like sale_info
                if issubclass(spec, SchemaBase):
                    obj_dict[key] = spec.from_llsd(val)
                elif issubclass(spec, SchemaFieldSerializer):
                    obj_dict[key] = spec.from_llsd(val)
        try:
            for key, val in inv_dict.items():
                if key in fields:
                    field: dataclasses.Field = fields[key]
                    key = field.name
                    spec = field.metadata.get("spec")
                    # Not a real key, an internal var on our dataclass
                    if not spec:
                        LOG.warning(f"Internal key {key!r}")
                        continue

                    spec_cls = spec
                    if not inspect.isclass(spec_cls):
                        spec_cls = spec_cls.__class__

                    # some kind of nested structure like sale_info
                    if issubclass(spec_cls, SchemaBase):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    elif issubclass(spec_cls, SchemaFieldSerializer):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    else:
                        raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
                else:
                    raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
            else:
                LOG.warning(f"Unknown key {key!r}")
                    if flavor != "ais":
                        # AIS has a number of different fields that are irrelevant depending on
                        # what exactly sent the payload
                        LOG.warning(f"Unknown key {key!r}")
        except:
            LOG.error(f"Failed to parse inventory schema: {inv_dict!r}")
            raise
        return cls._obj_from_dict(obj_dict)

    def to_bytes(self) -> bytes:
@@ -193,9 +231,9 @@ class SchemaBase(abc.ABC):
        writer.seek(0)
        return writer.read()

    def to_llsd(self):
    def to_llsd(self, flavor: str = "legacy"):
        obj_dict = {}
        for field_name, field in self._get_fields_dict(llsd=True).items():
        for field_name, field in self._get_fields_dict(llsd_flavor=flavor).items():
            spec = field.metadata.get("spec")
            # Not meant to be serialized
            if not spec:
@@ -205,11 +243,15 @@ class SchemaBase(abc.ABC):
            if val is None:
                continue

            spec_cls = spec
            if not inspect.isclass(spec_cls):
                spec_cls = spec_cls.__class__

            # Some kind of nested structure like sale_info
            if isinstance(val, SchemaBase):
                val = val.to_llsd()
            elif issubclass(spec, SchemaFieldSerializer):
                val = spec.to_llsd(val)
                val = val.to_llsd(flavor)
            elif issubclass(spec_cls, SchemaFieldSerializer):
                val = spec.to_llsd(val, flavor)
            else:
                raise ValueError(f"Bad inventory spec {spec!r}")
            obj_dict[field_name] = val
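A rough illustration of how schema_field() and the flavor-aware hooks compose. This is not from the changeset: the field names are invented, and it assumes _obj_from_dict() builds the dataclass from the collected kwargs (as its use above implies) and that no other abstract hooks need overriding.

import dataclasses

@dataclasses.dataclass
class ToyNode(SchemaBase):  # hypothetical schema, for illustration only
    node_id: UUID = schema_field(SchemaUUID)
    created: dt.datetime = schema_field(SchemaDate, llsd_name="created_at")

toy = ToyNode.from_llsd(
    {"node_id": "11111111-1111-4111-8111-111111111111", "created_at": 0}
)  # legacy flavor by default
as_llsd = toy.to_llsd()  # back to {"node_id": ..., "created_at": 0}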
@@ -1,20 +1,27 @@
import calendar
import datetime
import struct
import typing
import uuid
import zlib

from llbase.llsd import *
from llsd import *
# So we can directly reference the original wrapper funcs where necessary
import llbase.llsd
import llsd as base_llsd
from llsd.base import is_string, is_unicode

from hippolyzer.lib.base.datatypes import *


class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
class HippoLLSDBaseFormatter(base_llsd.base.LLSDBaseFormatter):
    UUID: callable
    ARRAY: callable
    BINARY: callable

    def __init__(self):
        super().__init__()
        self.type_map[UUID] = self.UUID
        self.type_map[JankStringyBytes] = self.BINARY
        self.type_map[Vector2] = self.TUPLECOORD
        self.type_map[Vector3] = self.TUPLECOORD
        self.type_map[Vector4] = self.TUPLECOORD
@@ -24,44 +31,125 @@ class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
        return self.ARRAY(v.data())


class HippoLLSDXMLFormatter(llbase.llsd.LLSDXMLFormatter, HippoLLSDBaseFormatter):
class HippoLLSDXMLFormatter(base_llsd.serde_xml.LLSDXMLFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
        super().__init__()


class HippoLLSDXMLPrettyFormatter(llbase.llsd.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
        super().__init__()


def format_pretty_xml(val: typing.Any):
def format_pretty_xml(val: typing.Any) -> bytes:
    return HippoLLSDXMLPrettyFormatter().format(val)


def format_xml(val: typing.Any):
def format_xml(val: typing.Any) -> bytes:
    return HippoLLSDXMLFormatter().format(val)


class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBaseFormatter):
class HippoLLSDNotationFormatter(base_llsd.serde_notation.LLSDNotationFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
        super().__init__()

    def STRING(self, v):
        # llbase's notation LLSD encoder isn't suitable for generating line-delimited
        # LLSD because the string formatter leaves \n unencoded, unlike indra's llcommon.
        # Add our own escaping rule.
        return super().STRING(v).replace(b"\n", b"\\n")

def format_notation(val: typing.Any):

def format_notation(val: typing.Any) -> bytes:
    return HippoLLSDNotationFormatter().format(val)


def format_binary(val: typing.Any, with_header=True):
    val = llbase.llsd.format_binary(val)
    if not with_header:
        return val.split(b"\n", 1)[1]
def format_binary(val: typing.Any, with_header=True) -> bytes:
    val = _format_binary_recurse(val)
    if with_header:
        return b'<?llsd/binary?>\n' + val
    return val


class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
# This is copied almost wholesale from https://bitbucket.org/lindenlab/llbase/src/master/llbase/llsd.py
# With a few minor changes to make serialization round-trip correctly. It's evil.
def _format_binary_recurse(something) -> bytes:
    """Binary formatter workhorse."""
    def _format_list(list_something):
        array_builder = [b'[' + struct.pack('!i', len(list_something))]
        for item in list_something:
            array_builder.append(_format_binary_recurse(item))
        array_builder.append(b']')
        return b''.join(array_builder)

    if something is None:
        return b'!'
    elif isinstance(something, LLSD):
        return _format_binary_recurse(something.thing)
    elif isinstance(something, bool):
        if something:
            return b'1'
        else:
            return b'0'
    elif isinstance(something, int):
        try:
            return b'i' + struct.pack('!i', something)
        except (OverflowError, struct.error) as exc:
            raise LLSDSerializationError(str(exc), something)
    elif isinstance(something, float):
        try:
            return b'r' + struct.pack('!d', something)
        except SystemError as exc:
            raise LLSDSerializationError(str(exc), something)
    elif isinstance(something, uuid.UUID):
        return b'u' + something.bytes
    elif isinstance(something, (binary, JankStringyBytes)):
        return b'b' + struct.pack('!i', len(something)) + something
    elif is_string(something):
        if is_unicode(something):
            something = something.encode("utf8")
        return b's' + struct.pack('!i', len(something)) + something
    elif isinstance(something, uri):
        return b'l' + struct.pack('!i', len(something)) + something.encode("utf8")
    elif isinstance(something, datetime.datetime):
        return b'd' + struct.pack('<d', something.timestamp())
    elif isinstance(something, datetime.date):
        seconds_since_epoch = calendar.timegm(something.timetuple())
        return b'd' + struct.pack('<d', seconds_since_epoch)
    elif isinstance(something, (list, tuple)):
        return _format_list(something)
    elif isinstance(something, dict):
        map_builder = [b'{' + struct.pack('!i', len(something))]
        for key, value in something.items():
            if isinstance(key, str):
                key = key.encode("utf8")
            map_builder.append(b'k' + struct.pack('!i', len(key)) + key)
            map_builder.append(_format_binary_recurse(value))
        map_builder.append(b'}')
        return b''.join(map_builder)
    else:
        try:
            return _format_list(list(something))
        except TypeError:
            raise LLSDSerializationError(
                "Cannot serialize unknown type: %s (%s)" %
                (type(something), something))


class HippoLLSDBinaryParser(base_llsd.serde_binary.LLSDBinaryParser):
    def __init__(self):
        super().__init__()
        self._dispatch[ord('u')] = lambda: UUID(bytes=self._getc(16))
        self._dispatch[ord('d')] = self._parse_date

    def _parse_date(self):
        seconds = struct.unpack("<d", self._getc(8))[0]
        try:
            return datetime.datetime.fromtimestamp(seconds, tz=datetime.timezone.utc)
        except OverflowError as exc:
            # A garbage seconds value can cause utcfromtimestamp() to raise
            # OverflowError: timestamp out of range for platform time_t
            self._error(exc, -8)

    def _parse_string(self):
        # LLSD's C++ API lets you stuff binary in a string field even though it's only
@@ -74,22 +162,26 @@ class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
        return bytes_val


# Python uses one, C++ uses the other, and everyone's unhappy.
_BINARY_HEADERS = (b'<? LLSD/Binary ?>', b'<?llsd/binary?>')


def parse_binary(data: bytes):
    if data.startswith(b'<?llsd/binary?>'):
    if any(data.startswith(x) for x in _BINARY_HEADERS):
        data = data.split(b'\n', 1)[1]
    return HippoLLSDBinaryParser().parse(data)


def parse_xml(data: bytes):
    return llbase.llsd.parse_xml(data)
    return base_llsd.parse_xml(data)


def parse_notation(data: bytes):
    return llbase.llsd.parse_notation(data)
    return base_llsd.parse_notation(data)


def zip_llsd(val: typing.Any):
    return zlib.compress(format_binary(val, with_header=False))
    return zlib.compress(format_binary(val, with_header=False), level=zlib.Z_BEST_COMPRESSION)


def unzip_llsd(data: bytes):
@@ -101,13 +193,13 @@ def parse(data: bytes):
    # content-type is usually nonsense.
    try:
        data = data.lstrip()
        if data.startswith(b'<?llsd/binary?>'):
        if any(data.startswith(x) for x in _BINARY_HEADERS):
            return parse_binary(data)
        elif data.startswith(b'<'):
            return parse_xml(data)
        else:
            return parse_notation(data)
    except KeyError as e:
        raise llbase.llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
        raise base_llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
    except TypeError as e:
        raise llbase.llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))
        raise base_llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))
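For reference, a sketch (not from this changeset) of the round trips these helpers are meant to provide:

doc = {"pos": [1.0, 2.0, 3.0], "name": "prim"}
blob = format_binary(doc)        # b'<?llsd/binary?>\n' + binary payload
parsed = parse_binary(blob)      # either header spelling is accepted
packed = zip_llsd(doc)           # headerless binary run through zlib
unpacked = unzip_llsd(packed)    # parsed and unpacked should both equal doc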
@@ -11,21 +11,75 @@ from typing import *
import zlib
from copy import deepcopy

import numpy as np
import recordclass

from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.datatypes import Vector3, Vector2, UUID, TupleCoord
from hippolyzer.lib.base.llsd import zip_llsd, unzip_llsd
from hippolyzer.lib.base.serialization import ParseContext

LOG = logging.getLogger(__name__)


def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
    return np.array(mat).reshape((4, 4), order='F')


def mat4_to_llsd(mat: np.ndarray) -> List[float]:
    return list(mat.flatten(order='F'))


@dataclasses.dataclass
class MeshAsset:
    header: MeshHeaderDict = dataclasses.field(default_factory=dict)
    segments: MeshSegmentDict = dataclasses.field(default_factory=dict)
    raw_segments: Dict[str, bytes] = dataclasses.field(default_factory=dict)

    @classmethod
    def make_triangle(cls) -> MeshAsset:
        """Make an asset representing an un-rigged single-sided mesh triangle"""
        inst = cls()
        inst.header = {
            "version": 1,
            "high_lod": {"offset": 0, "size": 0},
            "physics_mesh": {"offset": 0, "size": 0},
            "physics_convex": {"offset": 0, "size": 0},
        }
        base_lod: LODSegmentDict = {
            'Normal': [
                Vector3(-0.0, -0.0, -1.0),
                Vector3(-0.0, -0.0, -1.0),
                Vector3(-0.0, -0.0, -1.0)
            ],
            'PositionDomain': {'Max': [0.5, 0.5, 0.0], 'Min': [-0.5, -0.5, 0.0]},
            'Position': [
                Vector3(0.0, 0.0, 0.0),
                Vector3(1.0, 0.0, 0.0),
                Vector3(0.5, 1.0, 0.0)
            ],
            'TexCoord0Domain': {'Max': [1.0, 1.0], 'Min': [0.0, 0.0]},
            'TexCoord0': [
                Vector2(0.0, 0.0),
                Vector2(1.0, 0.0),
                Vector2(0.5, 1.0)
            ],
            'TriangleList': [[0, 1, 2]],
        }
        inst.segments['physics_mesh'] = [deepcopy(base_lod)]
        inst.segments['high_lod'] = [deepcopy(base_lod)]
        convex_segment: PhysicsConvexSegmentDict = {
            'BoundingVerts': [
                Vector3(-0.0, 1.0, -1.0),
                Vector3(-1.0, -1.0, -1.0),
                Vector3(1.0, -1.0, -1.0)
            ],
            'Max': [0.5, 0.5, 0.0],
            'Min': [-0.5, -0.5, 0.0]
        }
        inst.segments['physics_convex'] = convex_segment
        return inst

    def iter_lods(self) -> Generator[List[LODSegmentDict], None, None]:
        for lod_name, lod_val in self.segments.items():
            if lod_name.endswith("_lod"):
@@ -124,7 +178,7 @@ class DomainDict(TypedDict):
    Min: List[float]


class VertexWeight(recordclass.datatuple):  # type: ignore
class VertexWeight(recordclass.RecordClass):
    """Vertex weight for a specific joint on a specific vertex"""
    # index of the joint within the joint_names list in the skin segment
    joint_idx: int
@@ -135,20 +189,26 @@ class VertexWeight(recordclass.datatuple):  # type: ignore
class SkinSegmentDict(TypedDict, total=False):
    """Rigging information"""
    joint_names: List[str]
    # model -> world transform matrix for model
    # model -> world transform mat4 for model
    bind_shape_matrix: List[float]
    # world -> joint local transform matrices
    # world -> joint local transform mat4s
    inverse_bind_matrix: List[List[float]]
    # offset matrices for joints, translation-only.
    # Not sure what these are relative to, base joint or model <0,0,0>.
    # Transform mat4s for the joint nodes themselves.
    # The matrices may have scale or other components, but only the
    # translation component will be used by the viewer.
    # All translations are relative to the joint's parent.
    alt_inverse_bind_matrix: List[List[float]]
    lock_scale_if_joint_position: bool
    pelvis_offset: float


class PhysicsConvexSegmentDict(DomainDict, total=False):
    """Data for convex hull collisions, populated by the client"""
    # Min / Max domain vals are inline, unlike for LODs
    """
    Data for convex hull collisions, populated by the client

    Min / Max pos domain vals are inline, unlike for LODs, so this inherits from DomainDict
    """
    # Indices into the Positions list
    HullList: List[int]
    # -1.0 - 1.0, dequantized from binary field of U16s
    Positions: List[Vector3]
@@ -158,13 +218,13 @@ class PhysicsConvexSegmentDict(DomainDict, total=False):

class PhysicsHavokSegmentDict(TypedDict, total=False):
    """Cached data for Havok collisions, populated by sim and not used by client."""
    HullMassProps: MassPropsDict
    MOPP: MOPPDict
    MeshDecompMassProps: MassPropsDict
    HullMassProps: HavokMassPropsDict
    MOPP: HavokMOPPDict
    MeshDecompMassProps: HavokMassPropsDict
    WeldingData: bytes


class MassPropsDict(TypedDict, total=False):
class HavokMassPropsDict(TypedDict, total=False):
    # Vec, center of mass
    CoM: List[float]
    # 9 floats, Mat3?
@@ -173,7 +233,7 @@ class MassPropsDict(TypedDict, total=False):
    volume: float


class MOPPDict(TypedDict, total=False):
class HavokMOPPDict(TypedDict, total=False):
    """Memory Optimized Partial Polytope"""
    BuildType: int
    MoppData: bytes
@@ -205,7 +265,6 @@ def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):

class VertexWeights(se.SerializableBase):
    """Serializer for a list of joint weights on a single vertex"""
    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
    INFLUENCE_LIMIT = 4
    INFLUENCE_TERM = 0xFF

@@ -216,18 +275,30 @@ class VertexWeights(se.SerializableBase):
        for val in vals:
            joint_idx, influence = val
            writer.write(se.U8, joint_idx)
            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
            writer.write(se.U16, round(influence * 0xFFff), ctx=ctx)
        if len(vals) != cls.INFLUENCE_LIMIT:
            writer.write(se.U8, cls.INFLUENCE_TERM)

    @classmethod
    def deserialize(cls, reader: se.Reader, ctx=None):
        # NOTE: normally you'd want to do something like arrange this into a nicely
        # aligned byte array with zero padding so that you could vectorize the decoding.
        # In cases where having a vertex with no weights is semantically equivalent to
        # having a vertex _with_ weights of a value of 0.0 that's fine. This isn't the case
        # in LL's implementation of mesh:
        #
        # https://bitbucket.org/lindenlab/viewer/src/d31a83fb946c49a38376ea3b312b5380d0c8c065/indra/llmath/llvolume.cpp#lines-2560:2628
        #
        # Consider the difference between handling of b"\x00\x00\x00\xFF" and b"\xFF" with the above logic.
        # To simplify round-tripping while preserving those semantics, we don't do a vectorized decode.
        # I had a vectorized numpy version, but those requirements made everything a bit of a mess.
        influence_list = []
        for _ in range(cls.INFLUENCE_LIMIT):
            joint_idx = reader.read(se.U8)
            joint_idx = reader.read_bytes(1)[0]
            if joint_idx == cls.INFLUENCE_TERM:
                break
            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
            weight = reader.read(se.U16, ctx=ctx) / 0xFFff
            influence_list.append(VertexWeight(joint_idx, weight))
        return influence_list


@@ -262,16 +333,46 @@ class SegmentSerializer:
        return new_segment


class VecListAdapter(se.Adapter):
    def __init__(self, child_spec: se.SERIALIZABLE_TYPE, vec_type: Type):
        super().__init__(child_spec)
        self.vec_type = vec_type

    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
        return val

    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
        new_vals = []
        for elem in val:
            new_vals.append(self.vec_type(*elem))
        return new_vals


LE_U16: np.dtype = np.dtype(np.uint16).newbyteorder('<')  # noqa


LOD_SEGMENT_SERIALIZER = SegmentSerializer({
    # 16-bit indices to the verts making up the tri. Imposes a 16-bit
    # upper limit on verts in any given material in the mesh.
    "TriangleList": se.Collection(None, se.Collection(3, se.U16)),
    "TriangleList": se.ExprAdapter(
        se.NumPyArray(se.BytesGreedy(), LE_U16, 3),
        decode_func=lambda x: x.tolist(),
    ),
    # These are used to interpolate between values in their respective domains
    # Each position represents a single vert.
    "Position": se.Collection(None, se.Vector3U16(0.0, 1.0)),
    "TexCoord0": se.Collection(None, se.Vector2U16(0.0, 1.0)),
    # Normals have a static domain between -1 and 1, so just use that.
    "Normal": se.Collection(None, se.Vector3U16(-1.0, 1.0)),
    "Position": VecListAdapter(
        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), 0.0, 1.0),
        Vector3,
    ),
    "TexCoord0": VecListAdapter(
        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 2), 0.0, 1.0),
        Vector2,
    ),
    # Normals have a static domain between -1 and 1, so we just use that rather than 0.0 - 1.0.
    "Normal": VecListAdapter(
        se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), -1.0, 1.0),
        Vector3,
    ),
    "Weights": se.Collection(None, VertexWeights)
})
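A sketch of the dequantization the LOD serializer above round-trips: each component is a little-endian U16 scaled linearly into the segment's domain. This is illustrative, not code from the changeset; the scalar lo/hi stand in for the per-axis Min/Max lists in PositionDomain.

import numpy as np

def dequantize(raw: bytes, lo: float, hi: float, elems: int) -> np.ndarray:
    u16 = np.frombuffer(raw, dtype=np.dtype(np.uint16).newbyteorder('<'))
    norm = u16.reshape((-1, elems)).astype(np.float64) / 0xFFff  # 0.0 - 1.0
    return norm * (hi - lo) + lo  # mapped onto the domain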
hippolyzer/lib/base/mesh_skeleton.py (new file, 121 lines)
@@ -0,0 +1,121 @@
from __future__ import annotations

import dataclasses
import weakref
from typing import *

import transformations
from lxml import etree

from hippolyzer.lib.base.datatypes import Vector3, RAD_TO_DEG
from hippolyzer.lib.base.helpers import get_resource_filename


MAYBE_JOINT_REF = Optional[Callable[[], "JointNode"]]
SKELETON_REF = Optional[Callable[[], "Skeleton"]]


@dataclasses.dataclass
class JointNode:
    name: str
    parent: MAYBE_JOINT_REF
    skeleton: SKELETON_REF
    translation: Vector3
    pivot: Vector3  # pivot point for the joint, generally the same as translation
    rotation: Vector3  # Euler rotation in degrees
    scale: Vector3
    type: str  # bone or collision_volume

    def __hash__(self):
        return hash((self.name, self.type))

    @property
    def matrix(self):
        return transformations.compose_matrix(
            scale=tuple(self.scale),
            angles=tuple(self.rotation / RAD_TO_DEG),
            translate=tuple(self.translation),
        )

    @property
    def index(self) -> int:
        bone_idx = 0
        for node in self.skeleton().joint_dict.values():
            if node.type != "bone":
                continue
            if self is node:
                return bone_idx
            bone_idx += 1
        raise KeyError(f"{self.name!r} doesn't exist in skeleton")

    @property
    def ancestors(self) -> Sequence[JointNode]:
        joint_node = self
        ancestors = []
        while joint_node.parent:
            joint_node = joint_node.parent()
            ancestors.append(joint_node)
        return ancestors

    @property
    def children(self) -> Sequence[JointNode]:
        children = []
        for node in self.skeleton().joint_dict.values():
            if node.parent and node.parent() == self:
                children.append(node)
        return children

    @property
    def descendents(self) -> Set[JointNode]:
        descendents = set()
        ancestors = {self}
        last_ancestors = set()
        while last_ancestors != ancestors:
            # Copy, otherwise the fixpoint check above would compare the set to itself
            last_ancestors = set(ancestors)
            for node in self.skeleton().joint_dict.values():
                if node.parent and node.parent() in ancestors:
                    ancestors.add(node)
                    descendents.add(node)
        return descendents


class Skeleton:
    def __init__(self, root_node: etree.ElementBase):
        self.joint_dict: Dict[str, JointNode] = {}
        self._parse_node_children(root_node, None)

    def __getitem__(self, item: str) -> JointNode:
        return self.joint_dict[item]

    def _parse_node_children(self, node: etree.ElementBase, parent: MAYBE_JOINT_REF):
        name = node.get('name')
        joint = JointNode(
            name=name,
            parent=parent,
            skeleton=weakref.ref(self),
            translation=_get_vec_attr(node, "pos", Vector3()),
            pivot=_get_vec_attr(node, "pivot", Vector3()),
            rotation=_get_vec_attr(node, "rot", Vector3()),
            scale=_get_vec_attr(node, "scale", Vector3(1, 1, 1)),
            type=node.tag,
        )
        self.joint_dict[name] = joint
        for child in node.iterchildren():
            self._parse_node_children(child, weakref.ref(joint))


def _get_vec_attr(node, attr_name: str, default: Vector3) -> Vector3:
    attr_val = node.get(attr_name, None)
    if not attr_val:
        return default
    return Vector3(*(float(x) for x in attr_val.split(" ") if x))


def load_avatar_skeleton() -> Skeleton:
    skel_path = get_resource_filename("lib/base/data/avatar_skeleton.xml")
    with open(skel_path, 'r') as f:
        skel_root = etree.fromstring(f.read())
    return Skeleton(skel_root.getchildren()[0])


AVATAR_SKELETON = load_avatar_skeleton()
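A usage sketch (not part of the new file), assuming "mPelvis" is the root bone name in avatar_skeleton.xml and thus the first "bone"-typed node:

pelvis = AVATAR_SKELETON["mPelvis"]
bone_number = pelvis.index                       # 0 for the first bone
child_names = [c.name for c in pelvis.children]  # direct children only
pelvis_mat = pelvis.matrix                       # 4x4 from scale/rot/translation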
@@ -6,6 +6,7 @@ import copy
import dataclasses
import datetime as dt
import logging
from collections import deque
from typing import *
from typing import Optional

@@ -25,16 +26,23 @@ class ReliableResendInfo:


class Circuit:
    def __init__(self, near_host: Optional[ADDR_TUPLE], far_host: ADDR_TUPLE, transport):
    def __init__(
            self,
            near_host: Optional[ADDR_TUPLE],
            far_host: ADDR_TUPLE,
            transport: Optional[AbstractUDPTransport] = None,
    ):
        self.near_host: Optional[ADDR_TUPLE] = near_host
        self.host: ADDR_TUPLE = far_host
        self.is_alive = True
        self.transport: Optional[AbstractUDPTransport] = transport
        self.transport = transport
        self.serializer = UDPMessageSerializer()
        self.last_packet_at = dt.datetime.now()
        self.packet_id_base = 0
        self.unacked_reliable: Dict[Tuple[Direction, int], ReliableResendInfo] = {}
        self.resend_every: float = 3.0
        # Reliable messages that we've already seen and handled, for resend suppression
        self.seen_reliable: deque[int] = deque(maxlen=1_000)

    def _send_prepared_message(self, message: Message, transport=None):
        try:
@@ -44,6 +52,11 @@ class Circuit:
            raise
        return self.send_datagram(serialized, message.direction, transport=transport)

    def disconnect(self):
        self.packet_id_base = 0
        self.unacked_reliable.clear()
        self.is_alive = False

    def send_datagram(self, data: bytes, direction: Direction, transport=None):
        self.last_packet_at = dt.datetime.now()
        src_addr, dst_addr = self.host, self.near_host
@@ -66,6 +79,7 @@ class Circuit:
        # If it was queued, it's not anymore
        message.queued = False
        message.finalized = True
        return True

    def send(self, message: Message, transport=None) -> UDPPacket:
        if self.prepare_message(message):
@@ -77,9 +91,6 @@ class Circuit:
        )
        return self._send_prepared_message(message, transport)

    # Temporary alias
    send_message = send

    def send_reliable(self, message: Message, transport=None) -> asyncio.Future:
        """send() wrapper that always sends reliably and allows `await`ing ACK receipt"""
        if not message.synthetic:
@@ -123,6 +134,13 @@ class Circuit:
        message.direction = direction
        self.send(message)

    def track_reliable(self, packet_id: int) -> bool:
        """Tracks a reliable packet, returning if it's a new message"""
        if packet_id in self.seen_reliable:
            return False
        self.seen_reliable.append(packet_id)
        return True

    def __repr__(self):
        return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)
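A sketch of how an incoming-packet handler might use the new suppression helper; `process` is a hypothetical downstream callback, not something defined in this changeset:

def on_incoming(circuit: Circuit, message: Message) -> None:
    if message.send_flags & PacketFlags.RELIABLE:
        if not circuit.track_reliable(message.packet_id):
            return  # duplicate resend of something we already handled
    process(message)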
(File diff suppressed because it is too large)
@@ -29,7 +29,10 @@ from hippolyzer.lib.base.message.msgtypes import MsgType

PACKER = Callable[[Any], bytes]
UNPACKER = Callable[[bytes], Any]
LLSD_PACKER = Callable[[Any], Any]
LLSD_UNPACKER = Callable[[Any], Any]
SPEC = Tuple[UNPACKER, PACKER]
LLSD_SPEC = Tuple[LLSD_UNPACKER, LLSD_PACKER]


def _pack_string(pack_string):
@@ -64,6 +67,21 @@ def _make_tuplecoord_spec(typ: Type[TupleCoord], struct_fmt: str,
    return lambda x: typ(*struct_obj.unpack(x)), _packer


def _make_llsd_tuplecoord_spec(typ: Type[TupleCoord], needed_elems: Optional[int] = None):
    if needed_elems is None:
        # Number of elems needed matches the number in the coord type
        def _packer(x):
            return list(x)
    else:
        # Special case, we only want to pack some of the components.
        # Mostly for Quaternion since we don't actually need to send W.
        def _packer(x):
            if isinstance(x, TupleCoord):
                x = x.data()
            return list(x.data(needed_elems))
    return lambda x: typ(*x), _packer


def _unpack_specs(cls):
    cls.UNPACKERS = {k: v[0] for (k, v) in cls.SPECS.items()}
    cls.PACKERS = {k: v[1] for (k, v) in cls.SPECS.items()}
@@ -78,7 +96,7 @@ class TemplateDataPacker:
    MsgType.MVT_S8: _make_struct_spec('b'),
    MsgType.MVT_U8: _make_struct_spec('B'),
    MsgType.MVT_BOOL: _make_struct_spec('B'),
    MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: x.bytes),
    MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: UUID(x).bytes),
    MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
    MsgType.MVT_IP_PORT: _make_struct_spec('!H'),
    MsgType.MVT_U16: _make_struct_spec('<H'),
@@ -110,10 +128,15 @@ class TemplateDataPacker:
class LLSDDataPacker(TemplateDataPacker):
    # Some template var types aren't directly representable in LLSD, so they
    # get encoded to binary fields.
    SPECS = {
    SPECS: Dict[MsgType, LLSD_SPEC] = {
        MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
        # LLSD ints are technically bound to S32 range.
        MsgType.MVT_U32: _make_struct_spec('!I'),
        MsgType.MVT_U64: _make_struct_spec('!Q'),
        MsgType.MVT_S64: _make_struct_spec('!q'),
        # These are arrays in LLSD, we need to turn them into coords.
        MsgType.MVT_LLVector3: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector3d: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector4: _make_llsd_tuplecoord_spec(Vector4),
        MsgType.MVT_LLQuaternion: _make_llsd_tuplecoord_spec(Quaternion, needed_elems=3)
    }
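A sketch of why MVT_U32 gets a binary spec here: LLSD integers are bound to S32 range, so the full U32 range travels as 4 big-endian bytes instead. This assumes _make_struct_spec() unwraps the single struct element on the unpack side, as its other uses imply.

unpack_u32, pack_u32 = LLSDDataPacker.SPECS[MsgType.MVT_U32]
pack_u32(0xFFFFFFFF)             # -> b'\xff\xff\xff\xff'
unpack_u32(b'\xff\xff\xff\xff')  # -> 4294967295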
@@ -75,8 +75,8 @@ class Block:
        for var_name, val in kwargs.items():
            self[var_name] = val

    def get_variable(self, var_name):
        return self.vars.get(var_name)
    def get(self, var_name, default: Optional[VAR_TYPE] = None) -> Optional[VAR_TYPE]:
        return self.vars.get(var_name, default)

    def __contains__(self, item):
        return item in self.vars
@@ -188,7 +188,7 @@ class MsgBlockList(List["Block"]):
class Message:
    __slots__ = ("name", "send_flags", "packet_id", "acks", "body_boundaries", "queued",
                 "offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized",
                 "direction", "meta", "synthetic", "dropped", "sender")
                 "direction", "meta", "synthetic", "dropped", "sender", "unknown_message")

    def __init__(self, name, *args, packet_id=None, flags=0, acks=None, direction=None):
        # TODO: Do this on a timer or something.
@@ -200,6 +200,7 @@ class Message:

        self.acks = acks if acks is not None else tuple()
        self.body_boundaries = (-1, -1)
        self.unknown_message = False
        self.offset = 0
        self.raw_extra = b""
        self.direction: Direction = direction if direction is not None else Direction.OUT
@@ -222,7 +223,7 @@ class Message:
    def add_blocks(self, block_list):
        # can have a list of blocks if it is multiple or variable
        for block in block_list:
            if type(block) == list:
            if type(block) is list:
                for bl in block:
                    self.add_block(bl)
            else:
@@ -288,7 +289,7 @@ class Message:

    def ensure_parsed(self):
        # This is a little magic, think about whether we want this.
        if self.raw_body and self.deserializer():
        if self.raw_body and self.deserializer and self.deserializer():
            self.deserializer().parse_message_body(self)

    def to_dict(self, extended=False):
@@ -341,6 +342,21 @@ class Message:
        msg.acks = dict_val['acks']
        return msg

    @classmethod
    def from_eq_event(cls, event) -> Message:
        # If this isn't a templated message (like some EQ-only events are),
        # then we wrap it in a synthetic `Message` so that the API for handling
        # both EQ-only and templated message events can be the same. Ick.
        msg = cls(event["message"])
        if isinstance(event["body"], dict):
            msg.add_block(Block("EventData", **event["body"]))
        else:
            # Shouldn't be any events that have anything other than a dict
            # as a body, but just to be sure...
            msg.add_block(Block("EventData", Data=event["body"]))
        msg.synthetic = True
        return msg

    def invalidate_caches(self):
        # Don't have any caches if we haven't even parsed
        if self.raw_body:
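A sketch of the event dict shape from_eq_event() expects; the message name and body contents are just examples:

event = {"message": "SomeEQOnlyEvent", "body": {"Info": {"Foo": 1}}}
msg = Message.from_eq_event(event)
# msg.name == "SomeEQOnlyEvent", msg.synthetic is True, and the body dict
# became vars on a single "EventData" block.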
@@ -20,7 +20,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from logging import getLogger

from llbase import llsd
import llsd

from hippolyzer.lib.base.message.data import msg_details
@@ -31,7 +31,8 @@ _T = TypeVar("_T")
_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
MESSAGE_NAMES = Iterable[_K]
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]]` apparently?
MESSAGE_NAMES = Iterable[Union[_K, str]]


class MessageHandler(Generic[_T, _K]):
@@ -41,12 +42,11 @@ class MessageHandler(Generic[_T, _K]):

    def register(self, message_name: _K) -> Event:
        LOG.debug('Creating a monitor for %s' % message_name)
        return self.handlers.setdefault(message_name, Event())
        return self.handlers.setdefault(message_name, Event(message_name))

    def subscribe(self, message_name: _K, handler: MESSAGE_HANDLER) -> Event:
    def subscribe(self, message_name: Union[_K, Literal["*"]], handler: MESSAGE_HANDLER):
        notifier = self.register(message_name)
        notifier.subscribe(handler)
        return notifier

    def _subscribe_all(self, message_names: MESSAGE_NAMES, handler: MESSAGE_HANDLER,
                       predicate: Optional[PREDICATE] = None) -> List[Event]:
@@ -145,7 +145,7 @@ class MessageHandler(Generic[_T, _K]):
        # Always try to call wildcard handlers
        self._handle_type('*', message)

    def _handle_type(self, name: _K, message: _T):
    def _handle_type(self, name: Union[_K, Literal["*"]], message: _T):
        handler = self.handlers.get(name)
        if not handler:
            return
@@ -47,7 +47,6 @@ class MsgBlockType:
    MBT_SINGLE = 0
    MBT_MULTIPLE = 1
    MBT_VARIABLE = 2
    MBT_String_List = ['Single', 'Multiple', 'Variable']


class PacketFlags(enum.IntFlag):
@@ -55,6 +54,8 @@ class PacketFlags(enum.IntFlag):
    RELIABLE = 0x40
    RESENT = 0x20
    ACK = 0x10
    # Not a real flag, just used for display.
    EQ = 1 << 10


# frequency for messages
@@ -62,28 +63,23 @@ class PacketFlags(enum.IntFlag):
# = '\xFF\xFF'
# = '\xFF'
# = ''
class MsgFrequency:
    FIXED_FREQUENCY_MESSAGE = -1  # marking it
    LOW_FREQUENCY_MESSAGE = 4
    MEDIUM_FREQUENCY_MESSAGE = 2
    HIGH_FREQUENCY_MESSAGE = 1
class MsgFrequency(enum.IntEnum):
    FIXED = -1  # marking it
    LOW = 4
    MEDIUM = 2
    HIGH = 1


class MsgTrust:
    LL_NOTRUST = 0
    LL_TRUSTED = 1
class MsgEncoding(enum.IntEnum):
    UNENCODED = 0
    ZEROCODED = 1


class MsgEncoding:
    LL_UNENCODED = 0
    LL_ZEROCODED = 1


class MsgDeprecation:
    LL_DEPRECATED = 0
    LL_UDPDEPRECATED = 1
    LL_UDPBLACKLISTED = 2
    LL_NOTDEPRECATED = 3
class MsgDeprecation(enum.IntEnum):
    DEPRECATED = 0
    UDPDEPRECATED = 1
    UDPBLACKLISTED = 2
    NOTDEPRECATED = 3


# message variable types
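A sketch of the EQ pseudo-flag's placement: it sits above the 8-bit wire flags, so it can be OR'd in for display without colliding with the real ones.

flags = PacketFlags.RELIABLE | PacketFlags.EQ
(flags & 0xFF) == PacketFlags.RELIABLE  # True; EQ never reaches the wire byte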
@@ -21,7 +21,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import typing

from .msgtypes import MsgType, MsgBlockType
from .msgtypes import MsgType, MsgBlockType, MsgFrequency
from ..datatypes import UUID


@@ -37,7 +37,7 @@ class MessageTemplateVariable:
        return f"{self.__class__.__name__}(name={self.name!r}, tp={self.type!r}, size={self.size!r})"

    @property
    def probably_binary(self):
    def probably_binary(self) -> bool:
        if self._probably_binary is not None:
            return self._probably_binary

@@ -49,7 +49,7 @@ class MessageTemplateVariable:
        return self._probably_binary

    @property
    def probably_text(self):
    def probably_text(self) -> bool:
        if self._probably_text is not None:
            return self._probably_text

@@ -97,49 +97,36 @@ class MessageTemplateBlock:
        self.block_type: MsgBlockType = MsgBlockType.MBT_SINGLE
        self.number = 0

    def add_variable(self, var):
    def add_variable(self, var: MessageTemplateVariable):
        self.variable_map[var.name] = var
        self.variables.append(var)

    def get_variable(self, name):
    def get_variable(self, name) -> MessageTemplateVariable:
        return self.variable_map[name]


class MessageTemplate(object):
    frequency_strings = {-1: 'fixed', 1: 'high', 2: 'medium', 4: 'low'}  # strings for printout
    deprecation_strings = ["Deprecated", "UDPDeprecated", "UDPBlackListed", "NotDeprecated"]  # using _as_string methods
    encoding_strings = ["Unencoded", "Zerocoded"]  # etc
    trusted_strings = ["Trusted", "NotTrusted"]  # etc LDE 24oct2008

class MessageTemplate:
    def __init__(self, name):
        self.blocks: typing.List[MessageTemplateBlock] = []
        self.block_map: typing.Dict[str, MessageTemplateBlock] = {}

        # this is the function or object that will handle this type of message
        self.received_count = 0

        self.name = name
        self.frequency = None
        self.msg_num = 0
        self.msg_freq_num_bytes = None
        self.msg_trust = None
        self.msg_deprecation = None
        self.msg_encoding = None
        self.frequency: typing.Optional[MsgFrequency] = None
        self.num = 0
        # Frequency + msg num as bytes
        self.freq_num_bytes = None
        self.trusted = False
        self.deprecation = None
        self.encoding = None

    def add_block(self, block):
    def add_block(self, block: MessageTemplateBlock):
        self.block_map[block.name] = block
        self.blocks.append(block)

    def get_block(self, name):
    def get_block(self, name) -> MessageTemplateBlock:
        return self.block_map[name]

    def get_msg_freq_num_len(self):
        if self.frequency == -1:
        if self.frequency == MsgFrequency.FIXED:
            return 4
        return self.frequency

    def get_frequency_as_string(self):
        return MessageTemplate.frequency_strings[self.frequency]

    def get_deprecation_as_string(self):
        return MessageTemplate.deprecation_strings[self.msg_deprecation]
@@ -43,7 +43,7 @@ class TemplateDictionary:

        self.template_list: typing.List[MessageTemplate] = []
        # maps name to template
        self.message_templates = {}
        self.message_templates: typing.Dict[str, MessageTemplate] = {}

        # maps (freq,num) to template
        self.message_dict = {}
@@ -68,32 +68,32 @@ class TemplateDictionary:

        # do a mapping of type to a string for easier reference
        frequency_str = ''
        if template.frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
        if template.frequency == MsgFrequency.FIXED:
            frequency_str = "Fixed"
        elif template.frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.LOW:
            frequency_str = "Low"
        elif template.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.MEDIUM:
            frequency_str = "Medium"
        elif template.frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.HIGH:
            frequency_str = "High"

        self.message_dict[(frequency_str,
                           template.msg_num)] = template
                           template.num)] = template

    def build_message_ids(self):
        for template in list(self.message_templates.values()):
            frequency = template.frequency
            num_bytes = None
            if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
            if frequency == MsgFrequency.FIXED:
                # have to do this because Fixed messages are stored as a long in the template
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.msg_num)
            elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
                num_bytes = b'\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
                num_bytes = struct.pack("B", template.msg_num)
            template.msg_freq_num_bytes = num_bytes
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.LOW:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.num)
            elif frequency == MsgFrequency.MEDIUM:
                num_bytes = b'\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.HIGH:
                num_bytes = struct.pack("B", template.num)
            template.freq_num_bytes = num_bytes

    def get_template_by_name(self, template_name) -> typing.Optional[MessageTemplate]:
        return self.message_templates.get(template_name)
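For reference, the prefixes build_message_ids() produces for a hypothetical message number 42 in each frequency band (42 is 0x2A, i.e. b'*'):

import struct

struct.pack("B", 42)                    # High:   b'*'
b'\xff' + struct.pack("B", 42)          # Medium: b'\xff*'
b'\xff\xff' + struct.pack("!H", 42)     # Low:    b'\xff\xff\x00*'
b'\xff\xff\xff' + struct.pack("B", 42)  # Fixed:  b'\xff\xff\xff*'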
@@ -22,7 +22,7 @@ import struct
import re

from . import template
from .msgtypes import MsgFrequency, MsgTrust, MsgEncoding
from .msgtypes import MsgFrequency, MsgEncoding
from .msgtypes import MsgDeprecation, MsgBlockType, MsgType
from ..exc import MessageTemplateParsingError, MessageTemplateNotFound

@@ -112,67 +112,69 @@ class MessageTemplateParser:
        frequency = None
        freq_str = match.group(2)
        if freq_str == 'Low':
            frequency = MsgFrequency.LOW_FREQUENCY_MESSAGE
            frequency = MsgFrequency.LOW
        elif freq_str == 'Medium':
            frequency = MsgFrequency.MEDIUM_FREQUENCY_MESSAGE
            frequency = MsgFrequency.MEDIUM
        elif freq_str == 'High':
            frequency = MsgFrequency.HIGH_FREQUENCY_MESSAGE
            frequency = MsgFrequency.HIGH
        elif freq_str == 'Fixed':
            frequency = MsgFrequency.FIXED_FREQUENCY_MESSAGE
            frequency = MsgFrequency.FIXED

        new_template.frequency = frequency

        msg_num = int(match.group(3), 0)
        if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
        if frequency == MsgFrequency.FIXED:
            # have to do this because Fixed messages are stored as a long in the template
            msg_num &= 0xff
            msg_num_bytes = struct.pack('!BBBB', 0xff, 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.LOW:
            msg_num_bytes = struct.pack('!BBH', 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.MEDIUM:
            msg_num_bytes = struct.pack('!BB', 0xff, msg_num)
        elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.HIGH:
            msg_num_bytes = struct.pack('!B', msg_num)
        else:
            raise Exception("don't know about frequency %s" % frequency)

        new_template.msg_num = msg_num
        new_template.msg_freq_num_bytes = msg_num_bytes
        new_template.num = msg_num
        new_template.freq_num_bytes = msg_num_bytes

        msg_trust = None
        msg_trust_str = match.group(4)
        if msg_trust_str == 'Trusted':
            msg_trust = MsgTrust.LL_TRUSTED
            msg_trust = True
        elif msg_trust_str == 'NotTrusted':
            msg_trust = MsgTrust.LL_NOTRUST
            msg_trust = False
        else:
            raise ValueError(f"Invalid trust {msg_trust_str}")

        new_template.msg_trust = msg_trust
        new_template.trusted = msg_trust

        msg_encoding = None
        msg_encoding_str = match.group(5)
        if msg_encoding_str == 'Unencoded':
            msg_encoding = MsgEncoding.LL_UNENCODED
            msg_encoding = MsgEncoding.UNENCODED
        elif msg_encoding_str == 'Zerocoded':
            msg_encoding = MsgEncoding.LL_ZEROCODED
            msg_encoding = MsgEncoding.ZEROCODED
        else:
            raise ValueError(f"Invalid encoding {msg_encoding_str}")

        new_template.msg_encoding = msg_encoding
        new_template.encoding = msg_encoding

        msg_dep = None
        msg_dep_str = match.group(7)
        if msg_dep_str:
            if msg_dep_str == 'Deprecated':
                msg_dep = MsgDeprecation.LL_DEPRECATED
                msg_dep = MsgDeprecation.DEPRECATED
            elif msg_dep_str == 'UDPDeprecated':
                msg_dep = MsgDeprecation.LL_UDPDEPRECATED
                msg_dep = MsgDeprecation.UDPDEPRECATED
            elif msg_dep_str == 'UDPBlackListed':
                msg_dep = MsgDeprecation.LL_UDPBLACKLISTED
                msg_dep = MsgDeprecation.UDPBLACKLISTED
            elif msg_dep_str == 'NotDeprecated':
                msg_dep = MsgDeprecation.LL_NOTDEPRECATED
                msg_dep = MsgDeprecation.NOTDEPRECATED
        else:
            msg_dep = MsgDeprecation.LL_NOTDEPRECATED
            msg_dep = MsgDeprecation.NOTDEPRECATED
        if msg_dep is None:
            raise MessageTemplateParsingError("Unknown msg_dep field %s" % match.group(0))
        new_template.msg_deprecation = msg_dep
        new_template.deprecation = msg_dep

        return new_template
@@ -126,8 +126,14 @@ class UDPMessageDeserializer:
|
||||
frequency, num = _parse_msg_num(reader)
|
||||
current_template = self.template_dict.get_template_by_pair(frequency, num)
|
||||
if current_template is None:
|
||||
raise exc.MessageTemplateNotFound("deserializing data")
|
||||
msg.name = current_template.name
|
||||
if self.settings.ALLOW_UNKNOWN_MESSAGES:
|
||||
LOG.warning(f"Unknown message type {frequency}:{num}")
|
||||
msg.unknown_message = True
|
||||
msg.name = "UnknownMessage:%d" % num
|
||||
else:
|
||||
raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
|
||||
else:
|
||||
msg.name = current_template.name
|
||||
|
||||
# extra field, see note regarding msg.offset
|
||||
msg.raw_extra = reader.read_bytes(msg.offset)
|
||||
@@ -143,6 +149,12 @@ class UDPMessageDeserializer:
|
||||
# Already parsed if we don't have a raw body
|
||||
if not raw_body:
|
||||
return
|
||||
|
||||
if msg.unknown_message:
|
||||
# We can't parse this, we don't know anything about it
|
||||
msg.deserializer = None
|
||||
return
|
||||
|
||||
msg.raw_body = None
|
||||
msg.deserializer = None
|
||||
|
||||
@@ -157,7 +169,6 @@ class UDPMessageDeserializer:
|
||||
reader.seek(current_template.get_msg_freq_num_len() + msg.offset)
|
||||
|
||||
for tmpl_block in current_template.blocks:
|
||||
LOG.debug("Parsing %s:%s" % (msg.name, tmpl_block.name))
|
||||
# EOF?
|
||||
if not len(reader):
|
||||
# Seems like even some "Single" blocks are optional?
|
||||
@@ -180,7 +191,6 @@ class UDPMessageDeserializer:
|
||||
|
||||
for i in range(repeat_count):
|
||||
current_block = Block(tmpl_block.name)
|
||||
LOG.debug("Adding block %s" % current_block.name)
|
||||
msg.add_block(current_block)
|
||||
|
||||
for tmpl_variable in tmpl_block.variables:
|
||||
@@ -222,11 +232,17 @@ class UDPMessageDeserializer:
|
||||
if tmpl_variable.probably_binary:
|
||||
return unpacked_data
|
||||
# Truncated strings need to be treated carefully
|
||||
if tmpl_variable.probably_text and unpacked_data.endswith(b"\x00"):
|
||||
try:
|
||||
return unpacked_data.decode("utf8").rstrip("\x00")
|
||||
except UnicodeDecodeError:
|
||||
return JankStringyBytes(unpacked_data)
|
||||
if tmpl_variable.probably_text:
|
||||
# If it has a null terminator, let's try to decode it first.
|
||||
# We don't want to do this if there isn't one, because that may change
|
||||
# the meaning of the data.
|
||||
if unpacked_data.endswith(b"\x00"):
|
||||
try:
|
||||
return unpacked_data.decode("utf8").rstrip("\x00")
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
# Failed, return jank stringy bytes
|
||||
return JankStringyBytes(unpacked_data)
|
||||
elif tmpl_variable.type in {MsgType.MVT_FIXED, MsgType.MVT_VARIABLE}:
|
||||
# No idea if this should be bytes or a string... make an object that's sort of both.
|
||||
return JankStringyBytes(unpacked_data)
|
||||
|
||||
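A minimal standalone sketch of the decode-or-fallback policy the hunk above lands on, with a trivial stand-in for hippolyzer's JankStringyBytes wrapper:

    class JankStringyBytes(bytes):
        """Stand-in for the bytes-that-also-acts-stringy wrapper."""

    def decode_maybe_text(unpacked_data: bytes):
        # Only attempt decoding when the buffer ends in a null terminator;
        # stripping one that isn't there could change the meaning of binary data.
        if unpacked_data.endswith(b"\x00"):
            try:
                return unpacked_data.decode("utf8").rstrip("\x00")
            except UnicodeDecodeError:
                pass
        return JankStringyBytes(unpacked_data)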
@@ -45,7 +45,7 @@ class UDPMessageSerializer:

def serialize(self, msg: Message):
current_template = self.template_dict.get_template_by_name(msg.name)
if current_template is None:
if current_template is None and msg.raw_body is None:
raise exc.MessageSerializationError("message name", "invalid message name")

# Header and trailers are all big-endian
@@ -69,7 +69,7 @@ class UDPMessageSerializer:
# frequency and message number. The template stores it because it doesn't
# change per template.
body_writer = se.BufferWriter("<")
body_writer.write_bytes(current_template.msg_freq_num_bytes)
body_writer.write_bytes(current_template.freq_num_bytes)
body_writer.write_bytes(msg.extra)

# We're going to pop off keys as we go, so shallow copy the dict.

@@ -82,8 +82,9 @@ CAPS_DICT = Union[


class CapsClient:
def __init__(self, caps: Optional[CAPS_DICT] = None):
def __init__(self, caps: Optional[CAPS_DICT] = None, session: Optional[aiohttp.ClientSession] = None) -> None:
self._caps = caps
self._session = session

def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
return cap_or_url, headers, proxy, ssl
@@ -117,6 +118,7 @@ class CapsClient:
session_owned = False
# Use an existing session if we have one to take advantage of connection pooling
# otherwise create one
session = session or self._session
if session is None:
session_owned = True
session = aiohttp.ClientSession(
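A hypothetical usage sketch of the new constructor argument (the import path and cap name here are assumptions, not taken from the diff): sharing one ClientSession across clients reuses its connection pool, while a client constructed without one creates and owns a throwaway session per request.

    import aiohttp

    from hippolyzer.lib.base.network.caps_client import CapsClient  # module path assumed

    async def shared_session_example(caps):
        async with aiohttp.ClientSession() as session:
            client = CapsClient(caps, session=session)
            async with client.post("SomeCapability", llsd={}) as resp:  # cap name made up
                resp.raise_for_status()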
@@ -46,6 +46,9 @@ class UDPPacket:
return self.dst_addr
return self.src_addr

def __repr__(self):
return f"<{self.__class__.__name__} src_addr={self.src_addr!r} dst_addr={self.dst_addr!r} data={self.data!r}>"


class AbstractUDPTransport(abc.ABC):
__slots__ = ()

@@ -35,12 +35,7 @@ import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as tmpls


class Object(recordclass.datatuple): # type: ignore
__options__ = {
"use_weakref": True,
}
__weakref__: Any

class Object(recordclass.RecordClass, use_weakref=True): # type: ignore
LocalID: Optional[int] = None
State: Optional[int] = None
FullID: Optional[UUID] = None
@@ -199,6 +194,28 @@ class Object(recordclass.datatuple): # type: ignore
del val["Parent"]
return val

@property
def Ancestors(self) -> List[Object]:
obj = self
ancestors = []
while obj.Parent:
obj = obj.Parent
ancestors.append(obj)
return ancestors

@property
def Descendents(self) -> List[Object]:
new_children = [self]
descendents = []
while new_children:
to_check = new_children[:]
new_children.clear()
for obj in to_check:
for child in obj.Children:
new_children.append(child)
descendents.append(child)
return descendents

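The Descendents property above is a level-order (breadth-first) walk of the child tree. An equivalent standalone sketch of the same traversal, assuming only that objects expose a Children list:

    from collections import deque

    def descendents(root) -> list:
        # Visit children level by level, exactly like the property above.
        out, queue = [], deque([root])
        while queue:
            for child in queue.popleft().Children:
                out.append(child)
                queue.append(child)
        return out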
def handle_to_gridxy(handle: int) -> Tuple[int, int]:
return (handle >> 32) // 256, (handle & 0xFFffFFff) // 256

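A worked example of the packing this helper reverses: a region handle stores global meter coordinates as (x_meters << 32) | y_meters, and each grid cell is a 256m-square region. Coordinates chosen arbitrarily:

    handle = (256000 << 32) | 256000
    assert ((handle >> 32) // 256, (handle & 0xFFFFFFFF) // 256) == (1000, 1000)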
@@ -10,6 +10,7 @@ from io import SEEK_CUR, SEEK_SET, SEEK_END, RawIOBase, BufferedIOBase
from typing import *

import lazy_object_proxy
import numpy as np

import hippolyzer.lib.base.llsd as llsd
import hippolyzer.lib.base.datatypes as dtypes
@@ -27,6 +28,14 @@ class _Unserializable:
return False


class MissingType:
"""Simple sentinel type like dataclasses._MISSING_TYPE"""
pass


MISSING = MissingType()


UNSERIALIZABLE = _Unserializable()
_T = TypeVar("_T")

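A sentinel like MISSING is compared by identity, which is what lets callers distinguish "no value supplied" from legitimately falsy values such as 0 or None. A minimal sketch of the pattern:

    class MissingType:
        pass

    MISSING = MissingType()

    def value_or(value, fallback):
        # `is` comparison: only the one MISSING instance triggers the fallback
        return fallback if value is MISSING else value

    assert value_or(MISSING, 42) == 42
    assert value_or(None, 42) is None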
@@ -288,7 +297,7 @@ class SerializableBase(abc.ABC):
@classmethod
def default_value(cls) -> Any:
# None may be a valid default, so return MISSING as a sentinel val
return dataclasses.MISSING
return MISSING


class Adapter(SerializableBase, abc.ABC):
@@ -328,18 +337,18 @@ class ForwardSerializable(SerializableBase):
def __init__(self, func: Callable[[], SERIALIZABLE_TYPE]):
super().__init__()
self._func = func
self._wrapped = dataclasses.MISSING
self._wrapped: Union[MissingType, SERIALIZABLE_TYPE] = MISSING

def _ensure_evaled(self):
if self._wrapped is dataclasses.MISSING:
if self._wrapped is MISSING:
self._wrapped = self._func()

def __getattr__(self, attr):
return getattr(self._wrapped, attr)

def default_value(self) -> Any:
if self._wrapped is dataclasses.MISSING:
return dataclasses.MISSING
if self._wrapped is MISSING:
return MISSING
return self._wrapped.default_value()

def serialize(self, val, writer: BufferWriter, ctx: Optional[ParseContext]):
@@ -357,10 +366,10 @@ class Template(SerializableBase):
def __init__(self, template_spec: Dict[str, SERIALIZABLE_TYPE], skip_missing=False):
self._template_spec = template_spec
self._skip_missing = skip_missing
self._size = dataclasses.MISSING
self._size = MISSING

def calc_size(self):
if self._size is not dataclasses.MISSING:
if self._size is not MISSING:
return self._size
sum_bytes = 0
for _, field_type in self._template_spec.items():
@@ -830,7 +839,7 @@ class QuantizedFloat(QuantizedFloatBase):
super().__init__(prim_spec, zero_median=False)
self.lower = lower
self.upper = upper
# We know the range in `QuantizedFloat` when it's constructed, so we can infer
# whether or not we should round towards zero in __init__
max_error = (upper - lower) * self.step_mag
midpoint = (upper + lower) / 2.0
@@ -1196,9 +1205,9 @@ class ContextMixin(Generic[_T]):
def _choose_option(self, ctx: Optional[ParseContext]) -> _T:
idx = self._fun(ctx)
if idx not in self._options:
if dataclasses.MISSING not in self._options:
if MISSING not in self._options:
raise KeyError(f"{idx!r} not found in {self._options!r}")
idx = dataclasses.MISSING
idx = MISSING
return self._options[idx]


@@ -1442,7 +1451,7 @@ class StringEnumAdapter(Adapter):
class FixedPoint(SerializableBase):
def __init__(self, ser_spec, int_bits, frac_bits, signed=False):
# Should never be used due to how this handles signs :/
assert(not ser_spec.is_signed)
assert (not ser_spec.is_signed)

self._ser_spec: SerializablePrimitive = ser_spec
self._signed = signed
@@ -1452,7 +1461,7 @@ class FixedPoint(SerializableBase):
self._min_val = ((1 << int_bits) * -1) if signed else 0
self._max_val = 1 << int_bits

assert(required_bits == (ser_spec.calc_size() * 8))
assert (required_bits == (ser_spec.calc_size() * 8))

def deserialize(self, reader: Reader, ctx):
fixed_val = float(self._ser_spec.deserialize(reader, ctx))
@@ -1482,8 +1491,8 @@ def _make_undefined_raiser():
return f


def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=dataclasses.MISSING,
default_factory=dataclasses.MISSING, init=True, repr=True, # noqa
def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default: Any = dataclasses.MISSING,
default_factory: Any = dataclasses.MISSING, init=True, repr=True, # noqa
hash=None, compare=True) -> dataclasses.Field: # noqa
enrich_factory = False
# Lambda, need to defer evaluation of spec until it's actually used.
@@ -1504,7 +1513,7 @@ def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=datacla
metadata={"spec": spec}, default=default, default_factory=default_factory, init=init,
repr=repr, hash=hash, compare=compare
)
# Need to stuff this on so it knows which field went unspecified.
# Need to stuff this on, so it knows which field went unspecified.
if enrich_factory:
default_factory.field = field
return field
@@ -1571,8 +1580,16 @@ def bitfield_field(bits: int, *, adapter: Optional[Adapter] = None, default=0, i


class BitfieldDataclass(DataclassAdapter):
def __init__(self, data_cls: Type,
prim_spec: Optional[SerializablePrimitive] = None, shift: bool = True):
PRIM_SPEC: ClassVar[Optional[SerializablePrimitive]] = None

def __init__(self, data_cls: Optional[Type] = None,
prim_spec: Optional[SerializablePrimitive] = None, shift: Optional[bool] = None):
if not dataclasses.is_dataclass(data_cls):
raise ValueError(f"{data_cls!r} is not a dataclass")
if prim_spec is None:
prim_spec = getattr(data_cls, 'PRIM_SPEC', None)
if shift is None:
shift = getattr(data_cls, 'SHIFT', True)
super().__init__(data_cls, prim_spec)
self._shift = shift
self._bitfield_spec = self._build_bitfield(data_cls)
@@ -1602,7 +1619,9 @@ class BitfieldDataclass(DataclassAdapter):


class ExprAdapter(Adapter):
def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable, encode_func: Callable):
_ID = lambda x: x

def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable = _ID, encode_func: Callable = _ID):
super().__init__(child_spec)
self._decode_func = decode_func
self._encode_func = encode_func
@@ -1651,9 +1670,64 @@ class BinaryLLSD(SerializableBase):
writer.write_bytes(llsd.format_binary(val, with_header=False))


class NumPyArray(Adapter):
"""
A 2-dimensional, dynamic-length array of data from numpy. Greedy.

Unlike most other serializers, your endianness _must_ be specified in the dtype!
"""
__slots__ = ['dtype', 'elems']

def __init__(self, child_spec: Optional[SERIALIZABLE_TYPE], dtype: np.dtype, elems: int):
super().__init__(child_spec)
self.dtype = dtype
self.elems = elems

def _pick_dtype(self, endian: str) -> np.dtype:
return self.dtype.newbyteorder('>') if endian != "<" else self.dtype

def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
num_elems = len(val) // self.dtype.itemsize
num_ndims = num_elems // self.elems
buf_array = np.frombuffer(val, dtype=self.dtype, count=num_elems)
return buf_array.reshape((num_ndims, self.elems))

def encode(self, val, ctx: Optional[ParseContext]) -> Any:
val: np.ndarray = np.array(val, dtype=self.dtype).flatten()
return val.tobytes()

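A small round-trip sketch of the decode logic above, assuming little-endian U16 pairs (values arbitrary):

    import numpy as np

    dtype = np.dtype("<u2")
    elems = 2
    buf = np.array([[1, 2], [3, 4]], dtype=dtype).tobytes()
    num_elems = len(buf) // dtype.itemsize
    decoded = np.frombuffer(buf, dtype=dtype, count=num_elems).reshape((num_elems // elems, elems))
    assert decoded.tolist() == [[1, 2], [3, 4]]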
class QuantizedNumPyArray(Adapter):
"""Like QuantizedFloat. Only works correctly for unsigned types, no zero midpoint rounding!"""
def __init__(self, child_spec: NumPyArray, lower: float, upper: float):
super().__init__(child_spec)
self.dtype = child_spec.dtype
self.lower = lower
self.upper = upper
self.step_mag = 1.0 / ((2 ** (self.dtype.itemsize * 8)) - 1)

def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
val = np.array(val, dtype=np.float64)
val = np.clip(val, self.lower, self.upper)
delta = self.upper - self.lower
if delta == 0.0:
return np.zeros(val.shape, dtype=self.dtype)

val -= self.lower
val /= delta
val /= self.step_mag
return np.rint(val).astype(self.dtype)

def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
val = val.astype(np.float64)
val *= self.step_mag
val *= self.upper - self.lower
val += self.lower
return val


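The quantization math above round-trips to within one step. A worked sketch with a U8-sized range (numbers arbitrary):

    import numpy as np

    lower, upper = 0.0, 1.0
    step = 1.0 / (2 ** 8 - 1)  # U8: one byte per element
    vals = np.array([0.0, 0.25, 1.0])
    encoded = np.rint((np.clip(vals, lower, upper) - lower) / (upper - lower) / step).astype(np.uint8)
    decoded = encoded.astype(np.float64) * step * (upper - lower) + lower
    assert np.allclose(decoded, vals, atol=step)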
def subfield_serializer(msg_name, block_name, var_name):
def f(orig_cls):
global SUBFIELD_SERIALIZERS
SUBFIELD_SERIALIZERS[(msg_name, block_name, var_name)] = orig_cls
return orig_cls
return f
@@ -1850,7 +1924,7 @@ class IntEnumSubfieldSerializer(AdapterInstanceSubfieldSerializer):
val = super().deserialize(ctx_obj, val, pod=pod)
# Don't pretend we were able to deserialize this if we
# had to fall through to the `int` case.
if pod and type(val) == int:
if pod and type(val) is int:
return UNSERIALIZABLE
return val

@@ -1865,7 +1939,6 @@ class IntFlagSubfieldSerializer(AdapterInstanceSubfieldSerializer):

def http_serializer(msg_name):
def f(orig_cls):
global HTTP_SERIALIZERS
HTTP_SERIALIZERS[msg_name] = orig_cls
return orig_cls
return f

@@ -55,6 +55,7 @@ class SettingDescriptor(Generic[_T]):

class Settings:
ENABLE_DEFERRED_PACKET_PARSING: bool = SettingDescriptor(True)
ALLOW_UNKNOWN_MESSAGES: bool = SettingDescriptor(True)

def __init__(self):
self._settings: Dict[str, Any] = {}

@@ -4,23 +4,52 @@ Serialization templates for structures used in LLUDP and HTTP bodies.

import abc
import collections
import copy
import dataclasses
import datetime
import enum
import importlib
import logging
import math
import zlib
from typing import *

import numpy as np

import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.helpers import BiDiDict
from hippolyzer.lib.base.namevalue import NameValuesSerializer
from hippolyzer.lib.base.serialization import ParseContext

try:
importlib.reload(se) # type: ignore
except:
logging.exception("Failed to reload serialization lib")

class LookupIntEnum(IntEnum):
"""
Used for enums that have legacy string names, may be used in the legacy schema

Generally this is the string returned by `LLWhateverType::lookup()` in indra
"""
@abc.abstractmethod
def to_lookup_name(self) -> str:
raise NotImplementedError()

@classmethod
def from_lookup_name(cls, legacy_name: str):
raise NotImplementedError()


_ASSET_TYPE_BIDI: BiDiDict[str] = BiDiDict({
"animation": "animatn",
"callingcard": "callcard",
"lsl_text": "lsltext",
"lsl_bytecode": "lslbyte",
"texture_tga": "txtr_tga",
"image_tga": "img_tga",
"image_jpeg": "jpg",
"sound_wav": "snd_wav",
"folder_link": "link_f",
"unknown": "invalid",
"none": "-1",
})


@se.enum_field_serializer("RequestXfer", "XferID", "VFileType")
@@ -31,7 +60,7 @@ except:
@se.enum_field_serializer("RezObject", "InventoryData", "Type")
@se.enum_field_serializer("RezScript", "InventoryBlock", "Type")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "Type")
class AssetType(IntEnum):
class AssetType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -52,7 +81,7 @@ class AssetType(IntEnum):
GESTURE = 21
SIMSTATE = 22
LINK = 24
LINK_FOLDER = 25
FOLDER_LINK = 25
MARKETPLACE_FOLDER = 26
WIDGET = 40
PERSON = 45
@@ -64,19 +93,18 @@ class AssetType(IntEnum):
RESERVED_5 = 54
RESERVED_6 = 55
SETTINGS = 56
MATERIAL = 57
UNKNOWN = 255
NONE = -1

@property
def human_name(self):
def to_lookup_name(self) -> str:
lower = self.name.lower()
return {
"animation": "animatn",
"callingcard": "callcard",
"texture_tga": "txtr_tga",
"image_tga": "img_tga",
"sound_wav": "snd_wav",
}.get(lower, lower)
return _ASSET_TYPE_BIDI.forward.get(lower, lower)

@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _ASSET_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]

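A minimal stand-in for the BiDiDict pattern used above: `forward` maps the enum's lowercase name to its legacy lookup name, `backward` inverts the mapping, and names without an alias pass through unchanged:

    class BiDiDict:
        def __init__(self, mapping: dict):
            self.forward = dict(mapping)
            self.backward = {v: k for k, v in mapping.items()}

    _bidi = BiDiDict({"animation": "animatn"})
    assert _bidi.forward.get("animation", "animation") == "animatn"
    assert _bidi.backward.get("animatn", "animatn") == "animation"
    assert _bidi.forward.get("texture", "texture") == "texture"  # no alias, unchanged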
@property
def inventory_type(self):
@@ -101,15 +129,23 @@ class AssetType(IntEnum):
AssetType.PERSON: InventoryType.PERSON,
AssetType.MESH: InventoryType.MESH,
AssetType.SETTINGS: InventoryType.SETTINGS,
AssetType.MATERIAL: InventoryType.MATERIAL,
}.get(self, AssetType.NONE)


_INV_TYPE_BIDI: BiDiDict[str] = BiDiDict({
"callingcard": "callcard",
"attachment": "attach",
"none": "-1",
})


@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "InvType")
@se.enum_field_serializer("CreateInventoryItem", "InventoryBlock", "InvType")
@se.enum_field_serializer("RezObject", "InventoryData", "InvType")
@se.enum_field_serializer("RezScript", "InventoryBlock", "InvType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "InvType")
class InventoryType(IntEnum):
class InventoryType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -135,18 +171,41 @@ class InventoryType(IntEnum):
WIDGET = 23
PERSON = 24
SETTINGS = 25
MATERIAL = 26
UNKNOWN = 255
NONE = -1

@property
def human_name(self):
def to_lookup_name(self) -> str:
lower = self.name.lower()
return {
"callingcard": "callcard",
}.get(lower, lower)
return _INV_TYPE_BIDI.forward.get(lower, lower)

@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _INV_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]


class FolderType(IntEnum):
_FOLDER_TYPE_BIDI: BiDiDict[str] = BiDiDict({
"callingcard": "callcard",
"lsl_text": "lsltext",
"animation": "animatn",
"snapshot_category": "snapshot",
"lost_and_found": "lstndfnd",
"ensemble_start": "ensemble",
"ensemble_end": "ensemble",
"current_outfit": "current",
"my_outfits": "my_otfts",
"basic_root": "basic_rt",
"marketplace_listings": "merchant",
"marketplace_stock": "stock",
"marketplace_version": "version",
"my_suitcase": "suitcase",
"root_inventory": "root_inv",
"none": "-1",
})


class FolderType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -165,6 +224,7 @@ class FolderType(IntEnum):
ANIMATION = 20
GESTURE = 21
FAVORITE = 23
# The "ensemble" values aren't used, no idea what they were for.
ENSEMBLE_START = 26
ENSEMBLE_END = 45
# This range is reserved for special clothing folder types.
@@ -181,14 +241,24 @@ class FolderType(IntEnum):
# Note: We actually *never* create folders with that type. This is used for icon override only.
MARKETPLACE_VERSION = 55
SETTINGS = 56
# Firestorm folders, may not actually exist
FIRESTORM = 57
PHOENIX = 58
RLV = 59
MATERIAL = 57
# Firestorm folders, may not actually exist in legacy schema
FIRESTORM = 58
PHOENIX = 59
RLV = 60
# Opensim folders
MY_SUITCASE = 100
NONE = -1

def to_lookup_name(self) -> str:
lower = self.name.lower()
return _FOLDER_TYPE_BIDI.forward.get(lower, lower)

@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _FOLDER_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]


@se.enum_field_serializer("AgentIsNowWearing", "WearableData", "WearableType")
@se.enum_field_serializer("AgentWearablesUpdate", "WearableData", "WearableType")
@@ -248,6 +318,9 @@ class Permissions(IntFlag):
RESERVED = 1 << 31


_SALE_TYPE_LEGACY_NAMES = ("not", "orig", "copy", "cntn")


@se.enum_field_serializer("ObjectSaleInfo", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectProperties", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectPropertiesFamily", "ObjectData", "SaleType")
@@ -256,12 +329,19 @@ class Permissions(IntFlag):
@se.enum_field_serializer("RezObject", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "SaleType")
class SaleInfo(IntEnum):
class SaleType(LookupIntEnum):
NOT = 0
ORIGINAL = 1
COPY = 2
CONTENTS = 3

@classmethod
def from_lookup_name(cls, legacy_name: str):
return cls(_SALE_TYPE_LEGACY_NAMES.index(legacy_name))

def to_lookup_name(self) -> str:
return _SALE_TYPE_LEGACY_NAMES[int(self.value)]


@se.flag_field_serializer("ParcelInfoReply", "Data", "Flags")
class ParcelInfoFlags(IntFlag):
@@ -280,6 +360,7 @@ class ParcelInfoFlags(IntFlag):
class MapImageFlags(IntFlag):
# No clue, honestly. I guess there's potentially different image types you could request.
LAYER = 1 << 1
RETURN_NONEXISTENT = 0x10000


@se.enum_field_serializer("MapBlockReply", "Data", "Access")
@@ -359,10 +440,10 @@ class PermissionType(IntEnum):
@se.enum_field_serializer("TransferRequest", "TransferInfo", "SourceType")
class TransferSourceType(IntEnum):
UNKNOWN = 0
FILE = enum.auto()
ASSET = enum.auto()
SIM_INV_ITEM = enum.auto()
SIM_ESTATE = enum.auto()
FILE = 1
ASSET = 2
SIM_INV_ITEM = 3
SIM_ESTATE = 4


class EstateAssetType(IntEnum):
@@ -425,15 +506,15 @@ class TransferParamsSerializer(se.EnumSwitchedSubfieldSerializer):
@se.enum_field_serializer("TransferInfo", "TransferInfo", "ChannelType")
class TransferChannelType(IntEnum):
UNKNOWN = 0
MISC = enum.auto()
ASSET = enum.auto()
MISC = 1
ASSET = 2


@se.enum_field_serializer("TransferInfo", "TransferInfo", "TargetType")
class TransferTargetType(IntEnum):
UNKNOWN = 0
FILE = enum.auto()
VFILE = enum.auto()
FILE = 1
VFILE = 2


@se.enum_field_serializer("TransferInfo", "TransferInfo", "Status")
@@ -540,45 +621,45 @@ class SendXferPacketIDSerializer(se.AdapterSubfieldSerializer):
@se.enum_field_serializer("ViewerEffect", "Effect", "Type")
class ViewerEffectType(IntEnum):
TEXT = 0
ICON = enum.auto()
CONNECTOR = enum.auto()
FLEXIBLE_OBJECT = enum.auto()
ANIMAL_CONTROLS = enum.auto()
LOCAL_ANIMATION_OBJECT = enum.auto()
CLOTH = enum.auto()
EFFECT_BEAM = enum.auto()
EFFECT_GLOW = enum.auto()
EFFECT_POINT = enum.auto()
EFFECT_TRAIL = enum.auto()
EFFECT_SPHERE = enum.auto()
EFFECT_SPIRAL = enum.auto()
EFFECT_EDIT = enum.auto()
EFFECT_LOOKAT = enum.auto()
EFFECT_POINTAT = enum.auto()
EFFECT_VOICE_VISUALIZER = enum.auto()
NAME_TAG = enum.auto()
EFFECT_BLOB = enum.auto()
ICON = 1
CONNECTOR = 2
FLEXIBLE_OBJECT = 3
ANIMAL_CONTROLS = 4
LOCAL_ANIMATION_OBJECT = 5
CLOTH = 6
EFFECT_BEAM = 7
EFFECT_GLOW = 8
EFFECT_POINT = 9
EFFECT_TRAIL = 10
EFFECT_SPHERE = 11
EFFECT_SPIRAL = 12
EFFECT_EDIT = 13
EFFECT_LOOKAT = 14
EFFECT_POINTAT = 15
EFFECT_VOICE_VISUALIZER = 16
NAME_TAG = 17
EFFECT_BLOB = 18


class LookAtTarget(IntEnum):
NONE = 0
IDLE = enum.auto()
AUTO_LISTEN = enum.auto()
FREELOOK = enum.auto()
RESPOND = enum.auto()
HOVER = enum.auto()
CONVERSATION = enum.auto()
SELECT = enum.auto()
FOCUS = enum.auto()
MOUSELOOK = enum.auto()
CLEAR = enum.auto()
IDLE = 1
AUTO_LISTEN = 2
FREELOOK = 3
RESPOND = 4
HOVER = 5
CONVERSATION = 6
SELECT = 7
FOCUS = 8
MOUSELOOK = 9
CLEAR = 10


class PointAtTarget(IntEnum):
NONE = 0
SELECT = enum.auto()
GRAB = enum.auto()
CLEAR = enum.auto()
SELECT = 1
GRAB = 2
CLEAR = 3


@se.subfield_serializer("ViewerEffect", "Effect", "TypeData")
@@ -943,7 +1024,7 @@ class ObjectStateAdapter(se.ContextAdapter):
PCode.AVATAR: se.IntFlag(AgentState),
PCode.PRIMITIVE: AttachmentStateAdapter(None),
# Other cases are probably just a number (tree species ID or something.)
dataclasses.MISSING: se.IdentityAdapter(),
se.MISSING: se.IdentityAdapter(),
}
)

@@ -1146,9 +1227,15 @@ class TEExceptionField(se.SerializableBase):
return dict


_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]
_TE_DICT = Dict[_TE_FIELD_KEY, _T]


def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
default_factory=dataclasses.MISSING, default=dataclasses.MISSING):
if default_factory is not dataclasses.MISSING:
default_factory: Union[se.MissingType, Callable[[], _T]] = se.MISSING,
default: Union[se.MissingType, _T] = se.MISSING):
if default_factory is not se.MISSING:
new_default_factory = lambda: {None: default_factory()}
elif default is not None:
new_default_factory = lambda: {None: default}
@@ -1160,9 +1247,6 @@ def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
)


_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]

# If this seems weird it's because it is. TE offsets are S16s with `0` as the actual 0
# point, and LL divides by `0x7FFF` to convert back to float. Negative S16s can
# actually go to -0x8000 due to two's complement, creating a larger range for negatives.
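A quick sketch of that asymmetric mapping, using the divisor from the comment above:

    def te_s16_to_float(v: int) -> float:
        return v / 0x7FFF

    assert te_s16_to_float(0x7FFF) == 1.0
    assert te_s16_to_float(-0x8000) < -1.0  # negatives slightly overshoot the nominal range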
@@ -1221,22 +1305,22 @@ MAX_TES = 45

@dataclasses.dataclass
class TextureEntryCollection:
Textures: Dict[_TE_FIELD_KEY, UUID] = _te_field(
Textures: _TE_DICT[UUID] = _te_field(
# Plywood texture
se.UUID, first=True, default=UUID('89556747-24cb-43ed-920b-47caed15465f'))
# Bytes are inverted so fully opaque white is \x00\x00\x00\x00
Color: Dict[_TE_FIELD_KEY, bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
ScalesS: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
ScalesT: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
OffsetsS: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
OffsetsT: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
Rotation: Dict[_TE_FIELD_KEY, float] = _te_field(PackedTERotation(), default=0.0)
BasicMaterials: Dict[_TE_FIELD_KEY, "BasicMaterials"] = _te_field(
Color: _TE_DICT[bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
ScalesS: _TE_DICT[float] = _te_field(se.F32, default=1.0)
ScalesT: _TE_DICT[float] = _te_field(se.F32, default=1.0)
OffsetsS: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
OffsetsT: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
Rotation: _TE_DICT[float] = _te_field(PackedTERotation(), default=0.0)
BasicMaterials: _TE_DICT["BasicMaterials"] = _te_field(
BUMP_SHINY_FULLBRIGHT, default_factory=BasicMaterials,
)
MediaFlags: Dict[_TE_FIELD_KEY, "MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
Glow: Dict[_TE_FIELD_KEY, float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
Materials: Dict[_TE_FIELD_KEY, UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)
MediaFlags: _TE_DICT["MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
Glow: _TE_DICT[float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
Materials: _TE_DICT[UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)

def unwrap(self):
"""Return `self` regardless of whether this is lazy wrapped object or not"""
@@ -1253,7 +1337,7 @@ class TextureEntryCollection:
vals = getattr(self, key)
# First, give all faces the default value for this key
for te in as_dicts:
te[key] = vals[None]
te[key] = copy.copy(vals[None])
# Walk over the exception cases and replace the default value
for face_nums, val in vals.items():
# Default case already handled
@@ -1262,7 +1346,7 @@ class TextureEntryCollection:
for face_num in face_nums:
if face_num >= num_faces:
raise ValueError(f"Bad value for num_faces? {face_num} >= {num_faces}")
as_dicts[face_num][key] = val
as_dicts[face_num][key] = copy.copy(val)
return [TextureEntry(**x) for x in as_dicts]

@classmethod
@@ -1462,6 +1546,8 @@ class ExtraParamType(IntEnum):
RESERVED = 0x50
MESH = 0x60
EXTENDED_MESH = 0x70
RENDER_MATERIAL = 0x80
REFLECTION_PROBE = 0x90


class ExtendedMeshFlags(IntFlag):
@@ -1484,6 +1570,13 @@ class SculptTypeData:
Mirror: bool = se.bitfield_field(bits=1, adapter=se.BoolAdapter())


class ReflectionProbeFlags(IntFlag):
# use a box influence volume
BOX_VOLUME = 0x1
# render dynamic objects (avatars) into this Reflection Probe
DYNAMIC = 0x2


EXTRA_PARAM_TEMPLATES = {
ExtraParamType.FLEXIBLE: se.Template({
"Tension": se.BitField(se.U8, {"Tension": 6, "Softness1": 2}),
@@ -1515,6 +1608,15 @@ EXTRA_PARAM_TEMPLATES = {
ExtraParamType.EXTENDED_MESH: se.Template({
"Flags": se.IntFlag(ExtendedMeshFlags, se.U32),
}),
ExtraParamType.RENDER_MATERIAL: se.Collection(se.U8, se.Template({
"TEIdx": se.U8,
"TEID": se.UUID,
})),
ExtraParamType.REFLECTION_PROBE: se.Template({
"Ambiance": se.F32,
"ClipDistance": se.F32,
"Flags": se.IntFlag(ReflectionProbeFlags, se.U8),
}),
}


@@ -1720,9 +1822,20 @@ class ChatSourceType(IntEnum):
UNKNOWN = 3


@dataclasses.dataclass
class ThrottleData:
resend: float = se.dataclass_field(se.F32)
land: float = se.dataclass_field(se.F32)
wind: float = se.dataclass_field(se.F32)
cloud: float = se.dataclass_field(se.F32)
task: float = se.dataclass_field(se.F32)
texture: float = se.dataclass_field(se.F32)
asset: float = se.dataclass_field(se.F32)


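The dataclass gives the seven F32 throttle rates names, but the wire format is still just seven packed floats in this order (little-endian here, matching the "<" BufferWriter used for LLUDP bodies; the rates are made up):

    import struct

    rates = (1500.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0)
    blob = struct.pack("<7f", *rates)
    assert len(blob) == 28
    assert struct.unpack("<7f", blob)[4] == rates[4]  # the "task" channel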
@se.subfield_serializer("AgentThrottle", "Throttle", "Throttles")
|
||||
class AgentThrottlesSerializer(se.SimpleSubfieldSerializer):
|
||||
TEMPLATE = se.Collection(None, se.F32)
|
||||
TEMPLATE = se.Dataclass(ThrottleData)
|
||||
|
||||
|
||||
@se.subfield_serializer("ObjectUpdate", "ObjectData", "NameValue")
|
||||
@@ -1733,28 +1846,28 @@ class NameValueSerializer(se.SimpleSubfieldSerializer):
|
||||
@se.enum_field_serializer("SetFollowCamProperties", "CameraProperty", "Type")
|
||||
class CameraPropertyType(IntEnum):
|
||||
PITCH = 0
|
||||
FOCUS_OFFSET = enum.auto()
|
||||
FOCUS_OFFSET_X = enum.auto()
|
||||
FOCUS_OFFSET_Y = enum.auto()
|
||||
FOCUS_OFFSET_Z = enum.auto()
|
||||
POSITION_LAG = enum.auto()
|
||||
FOCUS_LAG = enum.auto()
|
||||
DISTANCE = enum.auto()
|
||||
BEHINDNESS_ANGLE = enum.auto()
|
||||
BEHINDNESS_LAG = enum.auto()
|
||||
POSITION_THRESHOLD = enum.auto()
|
||||
FOCUS_THRESHOLD = enum.auto()
|
||||
ACTIVE = enum.auto()
|
||||
POSITION = enum.auto()
|
||||
POSITION_X = enum.auto()
|
||||
POSITION_Y = enum.auto()
|
||||
POSITION_Z = enum.auto()
|
||||
FOCUS = enum.auto()
|
||||
FOCUS_X = enum.auto()
|
||||
FOCUS_Y = enum.auto()
|
||||
FOCUS_Z = enum.auto()
|
||||
POSITION_LOCKED = enum.auto()
|
||||
FOCUS_LOCKED = enum.auto()
|
||||
FOCUS_OFFSET = 1
|
||||
FOCUS_OFFSET_X = 2
|
||||
FOCUS_OFFSET_Y = 3
|
||||
FOCUS_OFFSET_Z = 4
|
||||
POSITION_LAG = 5
|
||||
FOCUS_LAG = 6
|
||||
DISTANCE = 7
|
||||
BEHINDNESS_ANGLE = 8
|
||||
BEHINDNESS_LAG = 9
|
||||
POSITION_THRESHOLD = 10
|
||||
FOCUS_THRESHOLD = 11
|
||||
ACTIVE = 12
|
||||
POSITION = 13
|
||||
POSITION_X = 14
|
||||
POSITION_Y = 15
|
||||
POSITION_Z = 16
|
||||
FOCUS = 17
|
||||
FOCUS_X = 18
|
||||
FOCUS_Y = 19
|
||||
FOCUS_Z = 20
|
||||
POSITION_LOCKED = 21
|
||||
FOCUS_LOCKED = 22
|
||||
|
||||
|
||||
@se.enum_field_serializer("DeRezObject", "AgentBlock", "Destination")
|
||||
@@ -1858,8 +1971,10 @@ class AvatarPropertiesFlags(IntFlag):
|
||||
|
||||
|
||||
@se.flag_field_serializer("AvatarGroupsReply", "GroupData", "GroupPowers")
|
||||
@se.flag_field_serializer("AvatarGroupDataUpdate", "GroupData", "GroupPowers")
|
||||
@se.flag_field_serializer("AvatarDataUpdate", "AgentDataData", "GroupPowers")
|
||||
@se.flag_field_serializer("AgentGroupDataUpdate", "GroupData", "GroupPowers")
|
||||
@se.flag_field_serializer("AgentDataUpdate", "AgentData", "GroupPowers")
|
||||
@se.flag_field_serializer("GroupProfileReply", "GroupData", "PowersMask")
|
||||
@se.flag_field_serializer("GroupRoleDataReply", "RoleData", "Powers")
|
||||
class GroupPowerFlags(IntFlag):
|
||||
MEMBER_INVITE = 1 << 1 # Invite member
|
||||
MEMBER_EJECT = 1 << 2 # Eject member from group
|
||||
@@ -1869,30 +1984,33 @@ class GroupPowerFlags(IntFlag):
|
||||
# Roles
|
||||
ROLE_CREATE = 1 << 4 # Create new roles
|
||||
ROLE_DELETE = 1 << 5 # Delete roles
|
||||
ROLE_PROPERTIES = 1 << 6 # Change Role Names, Titles, and Descriptions (Of roles the user is in, only, or any role in group?)
|
||||
ROLE_PROPERTIES = 1 << 6 # Change Role Names, Titles, and Descriptions
|
||||
ROLE_ASSIGN_MEMBER_LIMITED = 1 << 7 # Assign Member to a Role that the assigner is in
|
||||
ROLE_ASSIGN_MEMBER = 1 << 8 # Assign Member to Role
|
||||
ROLE_REMOVE_MEMBER = 1 << 9 # Remove Member from Role
|
||||
ROLE_CHANGE_ACTIONS = 1 << 10 # Change actions a role can perform
|
||||
|
||||
# Group Identity
|
||||
GROUP_CHANGE_IDENTITY = 1 << 11 # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', all 'Show Member In Group Profile' checkboxes
|
||||
GROUP_CHANGE_IDENTITY = 1 << 11 # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', etc.
|
||||
|
||||
# Parcel Management
|
||||
LAND_DEED = 1 << 12 # Deed Land and Buy Land for Group
|
||||
LAND_RELEASE = 1 << 13 # Release Land (to Gov. Linden)
|
||||
LAND_SET_SALE_INFO = 1 << 14 # Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
|
||||
# Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
|
||||
LAND_SET_SALE_INFO = 1 << 14
|
||||
LAND_DIVIDE_JOIN = 1 << 15 # Divide and Join Parcels
|
||||
|
||||
# Parcel Identity
|
||||
LAND_FIND_PLACES = 1 << 17 # Toggle "Show in Find Places" and Set Category.
|
||||
LAND_CHANGE_IDENTITY = 1 << 18 # Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
|
||||
# Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
|
||||
LAND_CHANGE_IDENTITY = 1 << 18
|
||||
LAND_SET_LANDING_POINT = 1 << 19 # Set Landing Point
|
||||
|
||||
# Parcel Settings
|
||||
LAND_CHANGE_MEDIA = 1 << 20 # Change Media Settings
|
||||
LAND_EDIT = 1 << 21 # Toggle Edit Land
|
||||
LAND_OPTIONS = 1 << 22 # Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
|
||||
# Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
|
||||
LAND_OPTIONS = 1 << 22
|
||||
|
||||
# Parcel Powers
|
||||
LAND_ALLOW_EDIT_LAND = 1 << 23 # Bypass Edit Land Restriction
|
||||
@@ -1946,6 +2064,15 @@ class GroupPowerFlags(IntFlag):
|
||||
GROUP_BAN_ACCESS = 1 << 51 # Allows access to ban / un-ban agents from a group.
|
||||
|
||||
|
||||
@se.flag_field_serializer("GrantUserRights", "Rights", "RelatedRights")
|
||||
@se.flag_field_serializer("ChangeUserRights", "Rights", "RelatedRights")
|
||||
class UserRelatedRights(IntFlag):
|
||||
"""See lluserrelations.h for definitions"""
|
||||
ONLINE_STATUS = 1
|
||||
MAP_LOCATION = 1 << 1
|
||||
MODIFY_OBJECTS = 1 << 2
|
||||
|
||||
|
||||
@se.flag_field_serializer("RequestObjectPropertiesFamily", "ObjectData", "RequestFlags")
|
||||
@se.flag_field_serializer("ObjectPropertiesFamily", "ObjectData", "RequestFlags")
|
||||
class ObjectPropertiesFamilyRequestFlags(IntFlag):
|
||||
@@ -1997,6 +2124,226 @@ class ModifyLandAction(IntEnum):
|
||||
REVERT = 5
|
||||
|
||||
|
||||
@se.flag_field_serializer("RevokePermissions", "Data", "ObjectPermissions")
|
||||
@se.flag_field_serializer("ScriptQuestion", "Data", "Questions")
|
||||
@se.flag_field_serializer("ScriptAnswerYes", "Data", "Questions")
|
||||
class ScriptPermissions(IntFlag):
|
||||
# "1" itself seems to be unused?
|
||||
TAKE_MONEY = 1 << 1
|
||||
TAKE_CONTROLS = 1 << 2
|
||||
# Doesn't seem to be used?
|
||||
REMAP_CONTROLS = 1 << 3
|
||||
TRIGGER_ANIMATIONS = 1 << 4
|
||||
ATTACH = 1 << 5
|
||||
# Doesn't seem to be used?
|
||||
RELEASE_OWNERSHIP = 1 << 6
|
||||
CHANGE_LINKS = 1 << 7
|
||||
# Object joints don't exist anymore
|
||||
CHANGE_JOINTS = 1 << 8
|
||||
# Change its own permissions? Doesn't seem to be used.
|
||||
CHANGE_PERMISSIONS = 1 << 9
|
||||
TRACK_CAMERA = 1 << 10
|
||||
CONTROL_CAMERA = 1 << 11
|
||||
TELEPORT = 1 << 12
|
||||
JOIN_EXPERIENCE = 1 << 13
|
||||
MANAGE_ESTATE_ACCESS = 1 << 14
|
||||
ANIMATION_OVERRIDE = 1 << 15
|
||||
RETURN_OBJECTS = 1 << 16
|
||||
FORCE_SIT = 1 << 17
|
||||
CHANGE_ENVIRONMENT = 1 << 18
|
||||
|
||||
|
||||
@se.flag_field_serializer("ParcelProperties", "ParcelData", "ParcelFlags")
|
||||
class ParcelFlags(IntFlag):
|
||||
ALLOW_FLY = 1 << 0 # Can start flying
|
||||
ALLOW_OTHER_SCRIPTS = 1 << 1 # Scripts by others can run.
|
||||
FOR_SALE = 1 << 2 # Can buy this land
|
||||
FOR_SALE_OBJECTS = 1 << 7 # Can buy all objects on this land
|
||||
ALLOW_LANDMARK = 1 << 3 # Always true/deprecated
|
||||
ALLOW_TERRAFORM = 1 << 4
|
||||
ALLOW_DAMAGE = 1 << 5
|
||||
CREATE_OBJECTS = 1 << 6
|
||||
# 7 is moved above
|
||||
USE_ACCESS_GROUP = 1 << 8
|
||||
USE_ACCESS_LIST = 1 << 9
|
||||
USE_BAN_LIST = 1 << 10
|
||||
USE_PASS_LIST = 1 << 11
|
||||
SHOW_DIRECTORY = 1 << 12
|
||||
ALLOW_DEED_TO_GROUP = 1 << 13
|
||||
CONTRIBUTE_WITH_DEED = 1 << 14
|
||||
SOUND_LOCAL = 1 << 15 # Hear sounds in this parcel only
|
||||
SELL_PARCEL_OBJECTS = 1 << 16 # Objects on land are included as part of the land when the land is sold
|
||||
ALLOW_PUBLISH = 1 << 17 # Allow publishing of parcel information on the web
|
||||
MATURE_PUBLISH = 1 << 18 # The information on this parcel is mature
|
||||
URL_WEB_PAGE = 1 << 19 # The "media URL" is an HTML page
|
||||
URL_RAW_HTML = 1 << 20 # The "media URL" is a raw HTML string like <H1>Foo</H1>
|
||||
RESTRICT_PUSHOBJECT = 1 << 21 # Restrict push object to either on agent or on scripts owned by parcel owner
|
||||
DENY_ANONYMOUS = 1 << 22 # Deny all non identified/transacted accounts
|
||||
# DENY_IDENTIFIED = 1 << 23 # Deny identified accounts
|
||||
# DENY_TRANSACTED = 1 << 24 # Deny transacted accounts
ALLOW_GROUP_SCRIPTS = 1 << 25 # Allow scripts owned by group
CREATE_GROUP_OBJECTS = 1 << 26 # Allow object creation by group members or objects
ALLOW_ALL_OBJECT_ENTRY = 1 << 27 # Allow all objects to enter a parcel
ALLOW_GROUP_OBJECT_ENTRY = 1 << 28 # Only allow group (and owner) objects to enter the parcel
ALLOW_VOICE_CHAT = 1 << 29 # Allow residents to use voice chat on this parcel
USE_ESTATE_VOICE_CHAN = 1 << 30
DENY_AGEUNVERIFIED = 1 << 31 # Prevent residents who aren't age-verified


@se.enum_field_serializer("UpdateMuteListEntry", "MuteData", "MuteType")
class MuteType(IntEnum):
BY_NAME = 0
AGENT = 1
OBJECT = 2
GROUP = 3
# Voice, presumably.
EXTERNAL = 4


@se.flag_field_serializer("UpdateMuteListEntry", "MuteData", "MuteFlags")
class MuteFlags(IntFlag):
# For backwards compatibility (since any mute list entries that were created before the flags existed
# will have a flags field of 0), some flags are "inverted".
# Note that it's possible, through flags, to completely disable an entry in the mute list.
# The code should detect this case and remove the mute list entry instead.
TEXT_CHAT = 1 << 0
VOICE_CHAT = 1 << 1
PARTICLES = 1 << 2
OBJECT_SOUNDS = 1 << 3

@property
def DEFAULT(self):
return 0x0

@property
def ALL(self):
return 0xF

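A sketch of the inverted semantics described in the comments above: a set bit exempts that channel from the mute, so a flags field of 0 mutes everything and 0xF mutes nothing (at which point the entry should be removed):

    TEXT_CHAT, VOICE_CHAT, PARTICLES, OBJECT_SOUNDS = 1, 2, 4, 8
    ALL = 0xF
    entry_flags = TEXT_CHAT | VOICE_CHAT  # text and voice are NOT muted
    muted_channels = ALL & ~entry_flags   # particles and object sounds still are
    assert muted_channels == PARTICLES | OBJECT_SOUNDS
    assert entry_flags != ALL  # entry still mutes something, keep it in the list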
class DateAdapter(se.Adapter):
def __init__(self, multiplier: int = 1):
super(DateAdapter, self).__init__(None)
self._multiplier = multiplier

def decode(self, val: Any, ctx: Optional[se.ParseContext], pod: bool = False) -> Any:
return datetime.datetime.fromtimestamp(val / self._multiplier).isoformat()

def encode(self, val: Any, ctx: Optional[se.ParseContext]) -> Any:
return int(datetime.datetime.fromisoformat(val).timestamp() * self._multiplier)

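A round-trip sketch of the multiplier handling, matching the microsecond-resolution DateAdapter(1_000_000) used for CreationDate below (timestamp arbitrary):

    import datetime

    multiplier = 1_000_000
    wire_val = 1_600_000_000 * multiplier
    iso = datetime.datetime.fromtimestamp(wire_val / multiplier).isoformat()
    assert int(datetime.datetime.fromisoformat(iso).timestamp() * multiplier) == wire_val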
@se.enum_field_serializer("MeanCollisionAlert", "MeanCollision", "Type")
|
||||
class MeanCollisionType(IntEnum):
|
||||
INVALID = 0
|
||||
BUMP = enum.auto()
|
||||
LLPUSHOBJECT = enum.auto()
|
||||
SELECTED_OBJECT_COLLIDE = enum.auto()
|
||||
SCRIPTED_OBJECT_COLLIDE = enum.auto()
|
||||
PHYSICAL_OBJECT_COLLIDE = enum.auto()
|
||||
|
||||
|
||||
@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
|
||||
class CreationDateSerializer(se.AdapterSubfieldSerializer):
|
||||
ADAPTER = DateAdapter(1_000_000)
|
||||
ORIG_INLINE = True
|
||||
|
||||
|
||||
@se.subfield_serializer("MeanCollisionAlert", "MeanCollision", "Time")
|
||||
@se.subfield_serializer("ParcelProperties", "ParcelData", "ClaimDate")
|
||||
class DateSerializer(se.AdapterSubfieldSerializer):
|
||||
ADAPTER = DateAdapter()
|
||||
ORIG_INLINE = True
|
||||
|
||||
|
||||
class ParcelGridType(IntEnum):
|
||||
PUBLIC = 0x00
|
||||
OWNED = 0x01 # Presumably non-linden owned land
|
||||
GROUP = 0x02
|
||||
SELF = 0x03
|
||||
FOR_SALE = 0x04
|
||||
AUCTION = 0x05
|
||||
|
||||
|
||||
class ParcelGridFlags(IntFlag):
|
||||
UNUSED = 0x8
|
||||
HIDDEN_AVS = 0x10
|
||||
SOUND_LOCAL = 0x20
|
||||
WEST_LINE = 0x40
|
||||
SOUTH_LINE = 0x80
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class ParcelGridInfo(se.BitfieldDataclass):
|
||||
PRIM_SPEC: ClassVar[se.SerializablePrimitive] = se.U8
|
||||
SHIFT: ClassVar[bool] = False
|
||||
|
||||
Type: Union[ParcelGridType, int] = se.bitfield_field(bits=3, adapter=se.IntEnum(ParcelGridType))
|
||||
Flags: ParcelGridFlags = se.bitfield_field(bits=5, adapter=se.IntFlag(ParcelGridFlags))
|
||||
|
||||
|
||||
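Each overlay byte packs a 3-bit grid type in the low bits and 5 bits of flags; with SHIFT disabled, the flag values above are their absolute bit positions in the byte. A sketch:

    raw = 0x43  # WEST_LINE (0x40) | SELF (0x03)
    grid_type = raw & 0b111
    flags = raw & ~0b111
    assert grid_type == 0x03 and flags == 0x40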
@se.subfield_serializer("ParcelOverlay", "ParcelData", "Data")
|
||||
class ParcelOverlaySerializer(se.SimpleSubfieldSerializer):
|
||||
TEMPLATE = se.Collection(None, se.BitfieldDataclass(ParcelGridInfo))
|
||||
|
||||
|
||||
class BitmapAdapter(se.Adapter):
|
||||
def __init__(self, shape: Tuple[int, int]):
|
||||
super().__init__(None)
|
||||
self._shape = shape
|
||||
|
||||
def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
|
||||
if val and isinstance(val[0], bytes):
|
||||
return b''.join(val)
|
||||
return np.packbits(np.array(val, dtype=np.uint8).flatten(), bitorder="little").tobytes()
|
||||
|
||||
def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
|
||||
if pod:
|
||||
return [val[i:i + (self._shape[1] // 8)] for i in range(0, len(val), (self._shape[1] // 8))]
|
||||
parcel_bitmap = np.frombuffer(val, dtype=np.uint8)
|
||||
# This is a boolean array where each bit says whether the parcel occupies that grid.
|
||||
return np.unpackbits(parcel_bitmap, bitorder="little").reshape(self._shape)
|
||||
|
||||
|
||||
@se.subfield_serializer("ParcelProperties", "ParcelData", "Bitmap")
|
||||
class ParcelPropertiesBitmapSerializer(se.AdapterSubfieldSerializer):
|
||||
"""Bitmap that describes which grids a parcel occupies"""
|
||||
ADAPTER = BitmapAdapter((256 // 4, 256 // 4))
|
||||
|
||||
|
||||
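A round-trip sketch of the 64x64 occupancy bitmap (512 bytes on the wire, little-endian bit order):

    import numpy as np

    shape = (64, 64)
    grid = np.zeros(shape, dtype=np.uint8)
    grid[0, :4] = 1  # parcel occupies the first four grids of the first row
    packed = np.packbits(grid.flatten(), bitorder="little").tobytes()
    assert len(packed) == 64 * 64 // 8
    unpacked = np.unpackbits(np.frombuffer(packed, dtype=np.uint8), bitorder="little").reshape(shape)
    assert (unpacked == grid).all()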
@se.enum_field_serializer("ParcelProperties", "ParcelData", "LandingType")
|
||||
class LandingType(IntEnum):
|
||||
NONE = 0
LANDING_POINT = 1
DIRECT = 2


@se.enum_field_serializer("ParcelProperties", "ParcelData", "Status")
class LandOwnershipStatus(IntEnum):
LEASED = 0
LEASE_PENDING = 1
ABANDONED = 2
NONE = -1


@se.enum_field_serializer("ParcelProperties", "ParcelData", "Category")
class LandCategory(IntEnum):
NONE = 0
LINDEN = enum.auto()
ADULT = enum.auto()
ARTS = enum.auto()
BUSINESS = enum.auto()
EDUCATIONAL = enum.auto()
GAMING = enum.auto()
HANGOUT = enum.auto()
NEWCOMER = enum.auto()
PARK = enum.auto()
RESIDENTIAL = enum.auto()
SHOPPING = enum.auto()
STAGE = enum.auto()
OTHER = enum.auto()
ANY = -1


@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
@classmethod
@@ -2027,3 +2374,69 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
# 15 bit window size, gzip wrapped
deser["navmesh_data"] = zlib.decompress(deser["navmesh_data"], wbits=15 | 32)
return deser


# Beta puppetry stuff, subject to change!


class PuppetryEventMask(IntFlag):
POSITION = 1 << 0
POSITION_IN_PARENT_FRAME = 1 << 1
ROTATION = 1 << 2
ROTATION_IN_PARENT_FRAME = 1 << 3
SCALE = 1 << 4
DISABLE_CONSTRAINT = 1 << 7


class PuppetryOption(se.OptionalFlagged):
def __init__(self, flag_val, spec):
super().__init__("mask", se.IntFlag(PuppetryEventMask, se.U8), flag_val, spec)


# Range to use for puppetry's quantized floats when converting to<->from U16
LL_PELVIS_OFFSET_RANGE = (-5.0, 5.0)


@dataclasses.dataclass
class PuppetryJointData:
# Where does this number come from? `avatar_skeleton.xml`?
joint_id: int = se.dataclass_field(se.S16)
# Determines which fields will follow
mask: PuppetryEventMask = se.dataclass_field(se.IntFlag(PuppetryEventMask, se.U8))
rotation: Optional[Quaternion] = se.dataclass_field(
# These are very odd scales for a quantized quaternion, but that's what they are.
PuppetryOption(PuppetryEventMask.ROTATION, se.PackedQuat(se.Vector3U16(*LL_PELVIS_OFFSET_RANGE))),
)
position: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.POSITION, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)
scale: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.SCALE, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)


@dataclasses.dataclass
class PuppetryEventData:
time: int = se.dataclass_field(se.S32)
# Must be set manually due to below issue
num_joints: int = se.dataclass_field(se.U16)
# This field is packed in the least helpful way possible. The length field
# is in between the collection count and the collection data, but the length
# field essentially only tells you how many bytes until the end of the buffer
# proper, which you already know from msgsystem. Why is this here?
joints: List[PuppetryJointData] = se.dataclass_field(se.TypedByteArray(
se.U32,
# Just treat contents as a greedy collection, tries to keep reading until EOF
se.Collection(None, se.Dataclass(PuppetryJointData)),
))

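A sketch of how the mask byte gates which optional fields follow each joint record (flag values from PuppetryEventMask above):

    POSITION, ROTATION, SCALE = 1 << 0, 1 << 2, 1 << 4
    mask = POSITION | ROTATION
    present = [name for flag, name in ((POSITION, "position"), (ROTATION, "rotation"), (SCALE, "scale")) if mask & flag]
    assert present == ["position", "rotation"]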
@se.subfield_serializer("AgentAnimation", "PhysicalAvatarEventList", "TypeData")
|
||||
@se.subfield_serializer("AvatarAnimation", "PhysicalAvatarEventList", "TypeData")
|
||||
class PuppetryEventDataSerializer(se.SimpleSubfieldSerializer):
|
||||
# You can have multiple joint events packed in, one right after the other, implicitly.
# They may _or may not_ be split into separate PhysicalAvatarEventList blocks?
|
||||
# This doesn't seem to be handled specifically in the decoder, is this a
|
||||
# serialization bug in the viewer?
|
||||
TEMPLATE = se.Collection(None, se.Dataclass(PuppetryEventData))
|
||||
EMPTY_IS_NONE = True
|
||||
|
||||
45
hippolyzer/lib/base/test_utils.py
Normal file
45
hippolyzer/lib/base/test_utils.py
Normal file
@@ -0,0 +1,45 @@
|
||||
import asyncio
|
||||
from typing import Any, Optional, List, Tuple
|
||||
|
||||
from hippolyzer.lib.base.message.circuit import Circuit, ConnectionHolder
|
||||
from hippolyzer.lib.base.message.message import Message
|
||||
from hippolyzer.lib.base.message.message_handler import MessageHandler
|
||||
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, ADDR_TUPLE, UDPPacket
|
||||
|
||||
|
||||
class MockTransport(AbstractUDPTransport):
|
||||
def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
|
||||
pass
|
||||
|
||||
def abort(self) -> None:
|
||||
pass
|
||||
|
||||
def close(self) -> None:
|
||||
pass
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.packets: List[Tuple[bytes, Tuple[str, int]]] = []
|
||||
|
||||
def send_packet(self, packet: UDPPacket) -> None:
|
||||
self.packets.append((packet.data, packet.dst_addr))
|
||||
|
||||
|
||||
class MockHandlingCircuit(Circuit):
|
||||
def __init__(self, handler: MessageHandler[Message, str]):
|
||||
super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
|
||||
self.handler = handler
|
||||
|
||||
def _send_prepared_message(self, message: Message, transport=None):
|
||||
loop = asyncio.get_event_loop_policy().get_event_loop()
|
||||
loop.call_soon(self.handler.handle, message)
|
||||
|
||||
|
||||
class MockConnectionHolder(ConnectionHolder):
|
||||
def __init__(self, circuit, message_handler):
|
||||
self.circuit = circuit
|
||||
self.message_handler = message_handler
|
||||
|
||||
|
||||
async def soon(awaitable) -> Message:
|
||||
return await asyncio.wait_for(awaitable, timeout=1.0)
|
||||
@@ -8,6 +8,7 @@ import dataclasses
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.helpers import create_logged_task
|
||||
from hippolyzer.lib.base.message.message import Block, Message
|
||||
from hippolyzer.lib.base.message.circuit import ConnectionHolder
|
||||
from hippolyzer.lib.base.message.msgtypes import PacketFlags
|
||||
@@ -108,7 +109,7 @@ class TransferManager:
|
||||
flags=PacketFlags.RELIABLE,
|
||||
))
|
||||
transfer = Transfer(transfer_id)
|
||||
asyncio.create_task(self._pump_transfer_replies(transfer))
|
||||
create_logged_task(self._pump_transfer_replies(transfer), "Transfer Pump")
|
||||
return transfer
|
||||
|
||||
async def _pump_transfer_replies(self, transfer: Transfer):
|
||||
|
||||
@@ -5,6 +5,7 @@ Body parts and linden clothing layers
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import enum
|
||||
import logging
|
||||
from io import StringIO
|
||||
from typing import *
|
||||
@@ -21,6 +22,60 @@ LOG = logging.getLogger(__name__)
|
||||
_T = TypeVar("_T")
|
||||
|
||||
WEARABLE_VERSION = "LLWearable version 22"
|
||||
DEFAULT_WEARABLE_TEX = UUID("c228d1cf-4b5d-4ba8-84f4-899a0796aa97")
|
||||
|
||||
|
||||
class AvatarTEIndex(enum.IntEnum):
|
||||
"""From llavatarappearancedefines.h"""
|
||||
HEAD_BODYPAINT = 0
|
||||
UPPER_SHIRT = enum.auto()
|
||||
LOWER_PANTS = enum.auto()
|
||||
EYES_IRIS = enum.auto()
|
||||
HAIR = enum.auto()
|
||||
UPPER_BODYPAINT = enum.auto()
|
||||
LOWER_BODYPAINT = enum.auto()
|
||||
LOWER_SHOES = enum.auto()
|
||||
HEAD_BAKED = enum.auto()
|
||||
UPPER_BAKED = enum.auto()
|
||||
LOWER_BAKED = enum.auto()
|
||||
EYES_BAKED = enum.auto()
|
||||
LOWER_SOCKS = enum.auto()
|
||||
UPPER_JACKET = enum.auto()
|
||||
LOWER_JACKET = enum.auto()
|
||||
UPPER_GLOVES = enum.auto()
|
||||
UPPER_UNDERSHIRT = enum.auto()
|
||||
LOWER_UNDERPANTS = enum.auto()
|
||||
SKIRT = enum.auto()
|
||||
SKIRT_BAKED = enum.auto()
|
||||
HAIR_BAKED = enum.auto()
|
||||
LOWER_ALPHA = enum.auto()
|
||||
UPPER_ALPHA = enum.auto()
|
||||
HEAD_ALPHA = enum.auto()
|
||||
EYES_ALPHA = enum.auto()
|
||||
HAIR_ALPHA = enum.auto()
|
||||
HEAD_TATTOO = enum.auto()
|
||||
UPPER_TATTOO = enum.auto()
|
||||
LOWER_TATTOO = enum.auto()
|
||||
HEAD_UNIVERSAL_TATTOO = enum.auto()
|
||||
UPPER_UNIVERSAL_TATTOO = enum.auto()
|
||||
LOWER_UNIVERSAL_TATTOO = enum.auto()
|
||||
SKIRT_TATTOO = enum.auto()
|
||||
HAIR_TATTOO = enum.auto()
|
||||
EYES_TATTOO = enum.auto()
|
||||
LEFT_ARM_TATTOO = enum.auto()
|
||||
LEFT_LEG_TATTOO = enum.auto()
|
||||
AUX1_TATTOO = enum.auto()
|
||||
AUX2_TATTOO = enum.auto()
|
||||
AUX3_TATTOO = enum.auto()
|
||||
LEFTARM_BAKED = enum.auto()
|
||||
LEFTLEG_BAKED = enum.auto()
|
||||
AUX1_BAKED = enum.auto()
|
||||
AUX2_BAKED = enum.auto()
|
||||
AUX3_BAKED = enum.auto()
|
||||
|
||||
@property
|
||||
def is_baked(self) -> bool:
|
||||
return self.name.endswith("_BAKED")
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
@@ -35,9 +90,8 @@ class VisualParam:
|
||||
|
||||
|
||||
class VisualParams(List[VisualParam]):
|
||||
def __init__(self):
|
||||
def __init__(self, lad_path):
|
||||
super().__init__()
|
||||
lad_path = get_resource_filename("lib/base/data/avatar_lad.xml")
|
||||
with open(lad_path, "rb") as f:
|
||||
doc = parse_etree(f)
|
||||
for param in doc.findall(".//param"):
|
||||
@@ -59,8 +113,11 @@ class VisualParams(List[VisualParam]):
|
||||
def by_wearable(self, wearable: str) -> List[VisualParam]:
|
||||
return [x for x in self if x.wearable == wearable]
|
||||
|
||||
def by_id(self, vparam_id: int) -> VisualParam:
|
||||
return [x for x in self if x.id == vparam_id][0]
|
||||
|
||||
VISUAL_PARAMS = VisualParams()
|
||||
|
||||
VISUAL_PARAMS = VisualParams(get_resource_filename("lib/base/data/avatar_lad.xml"))
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
|
||||
@@ -9,6 +9,7 @@ import random
from typing import *

from hippolyzer.lib.base.datatypes import UUID, RawBytes
+from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.base.message.data_packer import TemplateDataPacker
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.msgtypes import MsgType, PacketFlags
@@ -125,7 +126,7 @@ class XferManager:
            direction=direction,
        ))
        xfer = Xfer(xfer_id, direction=direction, turbo=turbo)
-       asyncio.create_task(self._pump_xfer_replies(xfer))
+       create_logged_task(self._pump_xfer_replies(xfer), "Xfer Pump")
        return xfer

    async def _pump_xfer_replies(self, xfer: Xfer):
@@ -269,12 +270,13 @@ class XferManager:
        xfer.xfer_id = request_msg["XferID"]["ID"]

        packet_id = 0
-       # TODO: No resend yet. If it's lost, it's lost.
        while xfer.chunks:
            chunk = xfer.chunks.pop(packet_id)
            # EOF if there are no chunks left
            packet_val = XferPacket(PacketID=packet_id, IsEOF=not bool(xfer.chunks))
-           self._connection_holder.circuit.send(Message(
+           # We just send reliably since I don't care to implement the Xfer-specific
+           # resend-on-unacked nastiness
+           _ = self._connection_holder.circuit.send_reliable(Message(
                "SendXferPacket",
                Block("XferID", ID=xfer.xfer_id, Packet_=packet_val),
                Block("DataPacket", Data=chunk),
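Presumably `send_reliable()` returns a future that resolves once the packet is ACKed, so assigning it to `_` is a deliberate fire-and-forget: the circuit's generic retry machinery stands in for Xfer's own resend protocol. A hedged sketch of the two styles:

    _ = circuit.send_reliable(msg)      # retried until ACKed, but we don't wait on it
    await circuit.send_reliable(msg)    # block until the sim ACKs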
hippolyzer/lib/client/asset_uploader.py (Normal file, 127 lines)
@@ -0,0 +1,127 @@
from typing import NamedTuple, Union, Optional, List

import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.mesh import MeshAsset, LLMeshSerializer
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.client.state import BaseClientRegion


class UploadError(Exception):
    pass


class UploadToken(NamedTuple):
    linden_cost: int
    uploader_url: str
    payload: bytes


class MeshUploadDetails(NamedTuple):
    mesh_bytes: bytes
    num_faces: int


class AssetUploader:
    def __init__(self, region: BaseClientRegion):
        self._region = region

    async def initiate_asset_upload(self, name: str, asset_type: AssetType,
                                    body: bytes, flags: Optional[int] = None) -> UploadToken:
        payload = {
            "asset_type": asset_type.to_lookup_name(),
            "description": "(No Description)",
            "everyone_mask": 0,
            "group_mask": 0,
            "folder_id": UUID.ZERO,  # Puts it in the default folder, I guess. Undocumented.
            "inventory_type": asset_type.inventory_type.to_lookup_name(),
            "name": name,
            "next_owner_mask": 581632,
        }
        if flags is not None:
            payload['flags'] = flags
        resp_payload = await self._make_newfileagentinventory_req(payload)

        return UploadToken(resp_payload["upload_price"], resp_payload["uploader"], body)

    async def _make_newfileagentinventory_req(self, payload: dict):
        async with self._region.caps_client.post("NewFileAgentInventory", llsd=payload) as resp:
            resp.raise_for_status()
            resp_payload = await resp.read_llsd()
        # Need to sniff the resp payload for this because SL sends a 200 status code on error
        if "error" in resp_payload:
            raise UploadError(resp_payload)
        return resp_payload

    async def complete_upload(self, token: UploadToken) -> dict:
        async with self._region.caps_client.post(token.uploader_url, data=token.payload) as resp:
            resp.raise_for_status()
            resp_payload = await resp.read_llsd()
        # The actual upload endpoints return 200 on error, have to sniff the payload to figure
        # out if it actually failed...
        if "error" in resp_payload:
            raise UploadError(resp_payload)
        await self._handle_upload_complete(resp_payload)
        return resp_payload

    async def _handle_upload_complete(self, resp_payload: dict):
        """
        Generic hook called when any asset upload completes.

        Could trigger an AIS fetch to send the viewer details about the item we just created,
        assuming we were in proxy context.
        """
        pass

    # The mesh upload flow is a little special, so it gets its own method
    async def initiate_mesh_upload(self, name: str, mesh: Union[MeshUploadDetails, MeshAsset],
                                   flags: Optional[int] = None) -> UploadToken:
        if isinstance(mesh, MeshAsset):
            writer = se.BufferWriter("!")
            writer.write(LLMeshSerializer(), mesh)
            mesh = MeshUploadDetails(writer.copy_buffer(), len(mesh.segments['high_lod']))

        asset_resources = self._build_asset_resources(name, [mesh])
        payload = {
            'asset_resources': asset_resources,
            'asset_type': 'mesh',
            'description': '(No Description)',
            'everyone_mask': 0,
            'folder_id': UUID.ZERO,
            'group_mask': 0,
            'inventory_type': 'object',
            'name': name,
            'next_owner_mask': 581632,
            'texture_folder_id': UUID.ZERO
        }
        if flags is not None:
            payload['flags'] = flags
        resp_payload = await self._make_newfileagentinventory_req(payload)

        upload_body = llsd.format_xml(asset_resources)
        return UploadToken(resp_payload["upload_price"], resp_payload["uploader"], upload_body)

    def _build_asset_resources(self, name: str, meshes: List[MeshUploadDetails]) -> dict:
        instances = []
        for mesh in meshes:
            instances.append({
                'face_list': [{
                    'diffuse_color': [1.0, 1.0, 1.0, 1.0],
                    'fullbright': False
                }] * mesh.num_faces,
                'material': 3,
                'mesh': 0,
                'mesh_name': name,
                'physics_shape_type': 2,
                'position': [0.0, 0.0, 0.0],
                'rotation': [0.7071067690849304, 0.0, 0.0, 0.7071067690849304],
                'scale': [1.0, 1.0, 1.0]
            })

        return {
            'instance_list': instances,
            'mesh_list': [mesh.mesh_bytes for mesh in meshes],
            'metric': 'MUT_Unspecified',
            'texture_list': []
        }
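The upload flow is two round trips: POST to the `NewFileAgentInventory` cap to learn the L$ price and get a one-shot uploader URL, then POST the asset body there to pay and commit. A hedged usage sketch (assumes `region` is a connected `BaseClientRegion`; the `AssetType.NOTECARD` member and the `new_inventory_item` response key match SL's upload caps but aren't shown in this diff):

    uploader = AssetUploader(region)
    token = await uploader.initiate_asset_upload("my notecard", AssetType.NOTECARD, notecard_bytes)
    print(f"Upload will cost L${token.linden_cost}")
    resp = await uploader.complete_upload(token)
    new_item_id = resp["new_inventory_item"]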
hippolyzer/lib/client/hippo_client.py (Normal file, 778 lines)
@@ -0,0 +1,778 @@
from __future__ import annotations

import asyncio
import hashlib
from importlib.metadata import version
import logging
import uuid
import weakref
import xmlrpc.client
from typing import *

import aiohttp
import multidict

from hippolyzer.lib.base.datatypes import Vector3, StringEnum
from hippolyzer.lib.base.helpers import proxify, get_resource_filename, create_logged_task
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_dot_xml import MessageDotXML
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.caps_client import CapsClient, CAPS_DICT
from hippolyzer.lib.base.network.transport import ADDR_TUPLE, Direction, SocketUDPTransport, AbstractUDPTransport
from hippolyzer.lib.base.settings import Settings, SettingDescriptor
from hippolyzer.lib.base.templates import RegionHandshakeReplyFlags, ChatType, ThrottleData
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.parcel_manager import ParcelManager
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion, BaseClientSessionManager


LOG = logging.getLogger(__name__)


class StartLocation(StringEnum):
    LAST = "last"
    HOME = "home"


class ClientSettings(Settings):
    SSL_VERIFY: bool = SettingDescriptor(False)
    """Off by default for now, the cert validation is a big mess due to LL using an internal CA."""
    SSL_CERT_PATH: str = SettingDescriptor(get_resource_filename("lib/base/network/data/ca-bundle.crt"))
    USER_AGENT: str = SettingDescriptor(f"Hippolyzer/v{version('hippolyzer')}")
    SEND_AGENT_UPDATES: bool = SettingDescriptor(True)
    """Generally you want to send these, lots of things will break if you don't send at least one."""
    AUTO_REQUEST_PARCELS: bool = SettingDescriptor(True)
    """Automatically request all parcel details when connecting to a region"""
    AUTO_REQUEST_MATERIALS: bool = SettingDescriptor(True)
    """Automatically request all materials when connecting to a region"""


class HippoCapsClient(CapsClient):
    def __init__(
            self,
            settings: ClientSettings,
            caps: Optional[CAPS_DICT] = None,
            session: Optional[aiohttp.ClientSession] = None,
    ) -> None:
        super().__init__(caps, session)
        self._settings = settings

    def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
        headers["User-Agent"] = self._settings.USER_AGENT
        return cap_or_url, headers, proxy, self._settings.SSL_VERIFY
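Because these are `SettingDescriptor`s on a `Settings` subclass, per-client overrides look like plain attribute assignment on the client's `settings` object. Illustrative (assumes the `HippoClient` defined later in this file):

    client = HippoClient()
    client.settings.SSL_VERIFY = True            # opt back into cert validation
    client.settings.SEND_AGENT_UPDATES = False   # e.g. for a purely passive bot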
class HippoClientProtocol(asyncio.DatagramProtocol):
    def __init__(self, session: HippoClientSession):
        self.session = proxify(session)
        self.message_xml = MessageDotXML()
        self.deserializer = UDPMessageDeserializer(
            settings=self.session.session_manager.settings,
        )

    def datagram_received(self, data, source_addr: ADDR_TUPLE):
        region = self.session.region_by_circuit_addr(source_addr)
        if not region:
            logging.warning("Received packet from invalid address %s", source_addr)
            return

        message = self.deserializer.deserialize(data)
        message.direction = Direction.IN
        message.sender = source_addr

        if not self.message_xml.validate_udp_msg(message.name):
            LOG.warning(
                f"Received {message.name!r} over UDP, when it should come over the event queue. Discarding."
            )
            raise PermissionError(f"UDPBanned message {message.name}")

        region.circuit.collect_acks(message)

        should_handle = True
        if message.reliable:
            # This is a bit crap. We send an ACK immediately through a PacketAck.
            # This is pretty wasteful, we should batch them up and send them on a timer.
            # We should ACK even if it's a resend of something we've already handled, maybe
            # they never got the ACK.
            region.circuit.send_acks((message.packet_id,))
            should_handle = region.circuit.track_reliable(message.packet_id)

        try:
            if should_handle:
                self.session.message_handler.handle(message)
        except:
            LOG.exception("Failed in session message handler")
        if should_handle:
            region.message_handler.handle(message)
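The comment above concedes that one PacketAck per reliable packet is wasteful; the conventional fix is to queue ACKs and flush them on a short timer (or piggyback them on outgoing packets). A minimal sketch of the timer variant, assuming only the `send_acks()` API shown above:

    import asyncio

    class AckBatcher:
        # Sketch: collect reliable packet IDs, flush them as one PacketAck every `interval` seconds.
        def __init__(self, circuit, interval: float = 0.5):
            self._circuit = circuit
            self._pending: list = []
            self._task = asyncio.create_task(self._flush_loop(interval))

        def ack(self, packet_id: int):
            self._pending.append(packet_id)

        async def _flush_loop(self, interval: float):
            while True:
                await asyncio.sleep(interval)
                if self._pending:
                    self._circuit.send_acks(tuple(self._pending))
                    self._pending.clear()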
class HippoClientRegion(BaseClientRegion):
    def __init__(self, circuit_addr, seed_cap: Optional[str], session: HippoClientSession, handle=None):
        super().__init__()
        self.caps = multidict.MultiDict()
        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
        self.circuit_addr = circuit_addr
        self.handle = handle
        if seed_cap:
            self.caps["Seed"] = seed_cap
        self.session: Callable[[], HippoClientSession] = weakref.ref(session)
        self.caps_client = HippoCapsClient(session.session_manager.settings, self.caps, session.http_session)
        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
        self.asset_uploader = AssetUploader(proxify(self))
        self.parcel_manager = ParcelManager(proxify(self))
        self.objects = ClientObjectManager(self)
        self._llsd_serializer = LLSDMessageSerializer()
        self._eq_task: Optional[asyncio.Task] = None
        self.connected: asyncio.Future = asyncio.Future()

        self.message_handler.subscribe("StartPingCheck", self._handle_ping_check)

    def update_caps(self, caps: Mapping[str, str]) -> None:
        self.caps.update(caps)

    @property
    def cap_urls(self) -> multidict.MultiDict:
        return self.caps.copy()

    async def connect(self, main_region: bool = False):
        # Disconnect first if we're already connected
        if self.circuit and self.circuit.is_alive:
            self.disconnect()
        if self.connected.done():
            self.connected = asyncio.Future()

        try:
            # TODO: What happens if a circuit code is invalid, again? Does it just refuse to ACK?
            await self.circuit.send_reliable(
                Message(
                    "UseCircuitCode",
                    Block(
                        "CircuitCode",
                        Code=self.session().circuit_code,
                        SessionID=self.session().id,
                        ID=self.session().agent_id,
                    ),
                )
            )
            self.circuit.is_alive = True

            # Clear out any old caps urls except the seed URL, we're about to fetch new caps.
            seed_url = self.caps["Seed"]
            self.caps.clear()
            self.caps["Seed"] = seed_url

            # Kick this off and await it later
            seed_resp_fut = self.caps_client.post("Seed", llsd=list(self.session().session_manager.SUPPORTED_CAPS))

            # Register first so we can handle it even if the ack happens after the message is sent
            region_handshake_fut = self.message_handler.wait_for(("RegionHandshake",))

            # If we're connecting to the main region, it won't even send us a RegionHandshake until we
            # first send a CompleteAgentMovement.
            if main_region:
                await self.complete_agent_movement()

            self.name = str((await region_handshake_fut)["RegionInfo"][0]["SimName"])
            self.session().objects.track_region_objects(self.handle)
            await self.circuit.send_reliable(
                Message(
                    "RegionHandshakeReply",
                    Block("AgentData", AgentID=self.session().agent_id, SessionID=self.session().id),
                    Block(
                        "RegionInfo",
                        Flags=(
                            RegionHandshakeReplyFlags.SUPPORTS_SELF_APPEARANCE
                            | RegionHandshakeReplyFlags.VOCACHE_CULLING_ENABLED
                        )
                    )
                )
            )
            await self.circuit.send_reliable(
                Message(
                    "AgentThrottle",
                    Block(
                        "AgentData",
                        AgentID=self.session().agent_id,
                        SessionID=self.session().id,
                        CircuitCode=self.session().circuit_code,
                    ),
                    Block(
                        "Throttle",
                        GenCounter=0,
                        # Reasonable defaults, I guess
                        Throttles_=ThrottleData(
                            resend=207360.0,
                            land=165376.0,
                            wind=33075.19921875,
                            cloud=33075.19921875,
                            task=682700.75,
                            texture=682700.75,
                            asset=269312.0
                        ),
                    )
                )
            )
            if self.session().session_manager.settings.SEND_AGENT_UPDATES:
                # Usually we want to send at least one, since lots of messages will never be sent by the sim
                # until we send at least one AgentUpdate. For example, ParcelOverlay and LayerData.
                await self.circuit.send_reliable(
                    Message(
                        "AgentUpdate",
                        Block(
                            'AgentData',
                            AgentID=self.session().agent_id,
                            SessionID=self.session().id,
                            # Don't really care about the other fields.
                            fill_missing=True,
                        )
                    )
                )

            async with seed_resp_fut as seed_resp:
                seed_resp.raise_for_status()
                self.update_caps(await seed_resp.read_llsd())

            self._eq_task = create_logged_task(self._poll_event_queue(), "EQ Poll")

            settings = self.session().session_manager.settings
            if settings.AUTO_REQUEST_PARCELS:
                _ = create_logged_task(self.parcel_manager.request_dirty_parcels(), "Parcel Request")
            if settings.AUTO_REQUEST_MATERIALS:
                _ = create_logged_task(self.objects.request_all_materials(), "Request All Materials")

        except Exception as e:
            # Let consumers who were `await`ing the connected signal know there was an error
            if not self.connected.done():
                self.connected.set_exception(e)
            raise

        self.connected.set_result(None)
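`connected` is deliberately a Future rather than a flag: consumers can `await` readiness without polling, and a failed `connect()` propagates its exception to all of them (the teleport handler later in this file leans on this via `asyncio.wait_for(...connected, 30)`). Illustrative:

    await region.connect(main_region=True)
    # elsewhere:
    await region.connected  # resolves when connected, raises if connect() failed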
    def disconnect(self) -> None:
        """Simulator has gone away, disconnect. Should be synchronous"""
        if self._eq_task is not None:
            self._eq_task.cancel()
            self._eq_task = None
        self.circuit.disconnect()
        self.objects.clear()
        if self.connected.done():
            self.connected = asyncio.Future()
        # TODO: cancel XFers and Transfers and whatnot

    async def complete_agent_movement(self) -> None:
        await self.circuit.send_reliable(
            Message(
                "CompleteAgentMovement",
                Block(
                    "AgentData",
                    AgentID=self.session().agent_id,
                    SessionID=self.session().id,
                    CircuitCode=self.session().circuit_code
                ),
            )
        )
        self.session().main_region = self

    async def _poll_event_queue(self):
        ack: Optional[int] = None
        while True:
            payload = {"ack": ack, "done": False}
            try:
                async with self.caps_client.post("EventQueueGet", llsd=payload) as resp:
                    if resp.status != 200:
                        await asyncio.sleep(0.1)
                        continue
                    polled = await resp.read_llsd()
                    for event in polled["events"]:
                        if self._llsd_serializer.can_handle(event["message"]):
                            msg = self._llsd_serializer.deserialize(event)
                        else:
                            msg = Message.from_eq_event(event)
                        msg.sender = self.circuit_addr
                        msg.direction = Direction.IN
                        self.session().message_handler.handle(msg)
                        self.message_handler.handle(msg)
                    ack = polled["id"]
                    await asyncio.sleep(0.001)
            except aiohttp.client_exceptions.ServerDisconnectedError:
                # This is expected to happen during long-polling, just pick up again where we left off.
                await asyncio.sleep(0.001)

    async def _handle_ping_check(self, message: Message):
        self.circuit.send(
            Message(
                "CompletePingCheck",
                Block("PingID", PingID=message["PingID"]["PingID"]),
            )
        )
class HippoClientSession(BaseClientSession):
    """Represents a client's view of a remote session"""
    REGION_CLS = HippoClientRegion

    region_by_handle: Callable[[int], Optional[HippoClientRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[HippoClientRegion]]
    regions: List[HippoClientRegion]
    session_manager: HippoClient
    main_region: Optional[HippoClientRegion]

    def __init__(self, id, secure_session_id, agent_id, circuit_code, session_manager: Optional[HippoClient] = None,
                 login_data=None):
        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager, login_data=login_data)
        self.http_session = session_manager.http_session
        self.objects = ClientWorldObjectManager(proxify(self), session_manager.settings, None)
        self.inventory_manager = InventoryManager(proxify(self))
        self.transport: Optional[SocketUDPTransport] = None
        self.protocol: Optional[HippoClientProtocol] = None
        self.message_handler.take_by_default = False

        for msg_name in ("DisableSimulator", "CloseCircuit"):
            self.message_handler.subscribe(msg_name, lambda msg: self.unregister_region(msg.sender))
        for msg_name in ("TeleportFinish", "CrossedRegion", "EstablishAgentCommunication"):
            self.message_handler.subscribe(msg_name, self._handle_register_region_message)

    def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
                        handle: Optional[int] = None) -> HippoClientRegion:
        return super().register_region(circuit_addr, seed_url, handle)  # type:ignore

    def unregister_region(self, circuit_addr: ADDR_TUPLE) -> None:
        for i, region in enumerate(self.regions):
            if region.circuit_addr == circuit_addr:
                self.regions[i].disconnect()
                del self.regions[i]
                return
        raise KeyError(f"No such region for {circuit_addr!r}")

    def open_circuit(self, circuit_addr: ADDR_TUPLE):
        for region in self.regions:
            if region.circuit_addr == circuit_addr:
                valid_circuit = False
                if not region.circuit or not region.circuit.is_alive:
                    region.circuit = Circuit(("127.0.0.1", 0), circuit_addr, self.transport)
                    region.circuit.is_alive = False
                    valid_circuit = True
                if region.circuit and region.circuit.is_alive:
                    # Whatever, already open
                    logging.debug("Tried to re-open circuit for %r" % (circuit_addr,))
                    valid_circuit = True
                return valid_circuit
        return False

    def _handle_register_region_message(self, msg: Message):
        # Handle events that inform us about new regions
        sim_addr, sim_handle, sim_seed = None, None, None
        moving_to_region = False
        # Sim is asking us to talk to a neighbour
        if msg.name == "EstablishAgentCommunication":
            ip_split = msg["EventData"]["sim-ip-and-port"].split(":")
            sim_addr = (ip_split[0], int(ip_split[1]))
            sim_seed = msg["EventData"]["seed-capability"]
        # We teleported or crossed a region, opening comms to the new sim
        elif msg.name in ("TeleportFinish", "CrossedRegion"):
            sim_block = msg.get_block("RegionData", msg.get_block("Info"))[0]
            sim_addr = (sim_block["SimIP"], sim_block["SimPort"])
            sim_handle = sim_block["RegionHandle"]
            sim_seed = sim_block["SeedCapability"]
            moving_to_region = True
        # Sim telling us about a neighbour
        # elif msg.name == "EnableSimulator":
        #     sim_block = msg["SimulatorInfo"][0]
        #     sim_addr = (sim_block["IP"], sim_block["Port"])
        #     sim_handle = sim_block["Handle"]
        # TODO: EnableSimulator is a little weird. It creates a region and establishes a
        #  circuit, but with no seed cap. The viewer will send UseCircuitCode and all that,
        #  but it's totally workable to just wait for an EstablishAgentCommunication to do that,
        #  since that's when the region actually shows up. I guess EnableSimulator just gives the
        #  viewer some lead time to set up the circuit before the region is actually shown through
        #  EstablishAgentCommunication? Either way, messing around with regions that don't have seed
        #  caps is annoying, so let's just not do it.

        # Register a region if this message was telling us about a new one
        if sim_addr is not None:
            region = self.register_region(sim_addr, handle=sim_handle, seed_url=sim_seed)
            # We can't actually connect without a sim seed, mind you; when we receive an EnableSimulator
            # we have to wait for the EstablishAgentCommunication to actually connect.
            need_connect = (region.circuit and region.circuit.is_alive) or moving_to_region
            self.open_circuit(sim_addr)
            if need_connect:
                create_logged_task(region.connect(main_region=moving_to_region), "Region Connect")
            elif moving_to_region:
                # No need to connect, but we do need to complete agent movement.
                create_logged_task(region.complete_agent_movement(), "CompleteAgentMovement")
class HippoClient(BaseClientSessionManager):
    """A simple client, only connects to one region at a time currently."""

    SUPPORTED_CAPS: Set[str] = {
        "AbuseCategories",
        "AcceptFriendship",
        "AcceptGroupInvite",
        "AgentPreferences",
        "AgentProfile",
        "AgentState",
        "AttachmentResources",
        "AvatarPickerSearch",
        "AvatarRenderInfo",
        "CharacterProperties",
        "ChatSessionRequest",
        "CopyInventoryFromNotecard",
        "CreateInventoryCategory",
        "DeclineFriendship",
        "DeclineGroupInvite",
        "DispatchRegionInfo",
        "DirectDelivery",
        "EnvironmentSettings",
        "EstateAccess",
        "DispatchOpenRegionSettings",
        "EstateChangeInfo",
        "EventQueueGet",
        "ExtEnvironment",
        "FetchLib2",
        "FetchLibDescendents2",
        "FetchInventory2",
        "FetchInventoryDescendents2",
        "IncrementCOFVersion",
        "InventoryAPIv3",
        "LibraryAPIv3",
        "InterestList",
        "InventoryThumbnailUpload",
        "GetDisplayNames",
        "GetExperiences",
        "AgentExperiences",
        "FindExperienceByName",
        "GetExperienceInfo",
        "GetAdminExperiences",
        "GetCreatorExperiences",
        "ExperiencePreferences",
        "GroupExperiences",
        "UpdateExperience",
        "IsExperienceAdmin",
        "IsExperienceContributor",
        "RegionExperiences",
        "ExperienceQuery",
        "GetMesh",
        "GetMesh2",
        "GetMetadata",
        "GetObjectCost",
        "GetObjectPhysicsData",
        "GetTexture",
        "GroupAPIv1",
        "GroupMemberData",
        "GroupProposalBallot",
        "HomeLocation",
        "LandResources",
        "LSLSyntax",
        "MapLayer",
        "MapLayerGod",
        "MeshUploadFlag",
        "NavMeshGenerationStatus",
        "NewFileAgentInventory",
        "ObjectAnimation",
        "ObjectMedia",
        "ObjectMediaNavigate",
        "ObjectNavMeshProperties",
        "ParcelPropertiesUpdate",
        "ParcelVoiceInfoRequest",
        "ProductInfoRequest",
        "ProvisionVoiceAccountRequest",
        "ReadOfflineMsgs",
        "RegionObjects",
        "RemoteParcelRequest",
        "RenderMaterials",
        "RequestTextureDownload",
        "ResourceCostSelected",
        "RetrieveNavMeshSrc",
        "SearchStatRequest",
        "SearchStatTracking",
        "SendPostcard",
        "SendUserReport",
        "SendUserReportWithScreenshot",
        "ServerReleaseNotes",
        "SetDisplayName",
        "SimConsoleAsync",
        "SimulatorFeatures",
        "StartGroupProposal",
        "TerrainNavMeshProperties",
        "TextureStats",
        "UntrustedSimulatorMessage",
        "UpdateAgentInformation",
        "UpdateAgentLanguage",
        "UpdateAvatarAppearance",
        "UpdateGestureAgentInventory",
        "UpdateGestureTaskInventory",
        "UpdateNotecardAgentInventory",
        "UpdateNotecardTaskInventory",
        "UpdateScriptAgent",
        "UpdateScriptTask",
        "UpdateSettingsAgentInventory",
        "UpdateSettingsTaskInventory",
        "UploadAgentProfileImage",
        "UploadBakedTexture",
        "UserInfo",
        "ViewerAsset",
        "ViewerBenefits",
        "ViewerMetrics",
        "ViewerStartAuction",
        "ViewerStats",
    }

    DEFAULT_OPTIONS = {
        "inventory-root",
        "inventory-skeleton",
        "inventory-lib-root",
        "inventory-lib-owner",
        "inventory-skel-lib",
        "initial-outfit",
        "gestures",
        "display_names",
        "event_notifications",
        "classified_categories",
        "adult_compliant",
        "buddy-list",
        "newuser-config",
        "ui-config",
        "advanced-mode",
        "max-agent-groups",
        "map-server-url",
        "voice-config",
        "tutorial_setting",
        "login-flags",
        "global-textures",
        # Not an official option, just so this can be tracked.
        "pyogp-client",
    }

    DEFAULT_LOGIN_URI = "https://login.agni.lindenlab.com/cgi-bin/login.cgi"

    def __init__(self, options: Optional[Set[str]] = None):
        self._username: Optional[str] = None
        self._password: Optional[str] = None
        self._mac = uuid.getnode()
        self._options = options if options is not None else self.DEFAULT_OPTIONS
        self.http_session: Optional[aiohttp.ClientSession] = aiohttp.ClientSession(trust_env=True)
        self.session: Optional[HippoClientSession] = None
        self.settings = ClientSettings()
        self._resend_task: Optional[asyncio.Task] = None

    @property
    def main_region(self) -> Optional[HippoClientRegion]:
        if not self.session:
            return None
        return self.session.main_region

    @property
    def main_circuit(self) -> Optional[Circuit]:
        if not self.main_region:
            return None
        return self.main_region.circuit

    @property
    def main_caps_client(self) -> Optional[CapsClient]:
        if not self.main_region:
            return None
        return self.main_region.caps_client

    async def aclose(self):
        try:
            self.logout()
        finally:
            if self.http_session:
                await self.http_session.close()
                self.http_session = None

    def __del__(self):
        # Make sure we don't leak resources if someone was lazy.
        try:
            self.logout()
        finally:
            if self.http_session:
                try:
                    asyncio.create_task(self.http_session.close())
                except:
                    pass
                self.http_session = None

    async def _create_transport(self) -> Tuple[AbstractUDPTransport, HippoClientProtocol]:
        loop = asyncio.get_event_loop_policy().get_event_loop()
        transport, protocol = await loop.create_datagram_endpoint(
            lambda: HippoClientProtocol(self.session),
            local_addr=('0.0.0.0', 0))
        transport = SocketUDPTransport(transport)
        return transport, protocol
    async def login(
            self,
            username: str,
            password: str,
            login_uri: Optional[str] = None,
            agree_to_tos: bool = False,
            start_location: Union[StartLocation, str, None] = StartLocation.LAST,
            connect: bool = True,
    ):
        if self.session:
            raise RuntimeError("Already logged in!")

        if not login_uri:
            login_uri = self.DEFAULT_LOGIN_URI

        if start_location is None:
            start_location = StartLocation.LAST

        # This isn't a symbolic start location and isn't a URI, must be a sim name.
        if start_location not in iter(StartLocation) and not start_location.startswith("uri:"):
            start_location = f"uri:{start_location}&128&128&128"

        split_username = username.split(" ")
        if len(split_username) < 2:
            first_name = split_username[0]
            last_name = "Resident"
        else:
            first_name, last_name = split_username

        payload = {
            "address_size": 64,
            "agree_to_tos": int(agree_to_tos),
            "channel": "Hippolyzer",
            "extended_errors": 1,
            "first": first_name,
            "last": last_name,
            "host_id": "",
            "id0": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
            "mac": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
            "mfa_hash": "",
            "passwd": "$1$" + hashlib.md5(str(password).encode("ascii")).hexdigest(),
            # TODO: actually get these
            "platform": "lnx",
            "platform_string": "Linux 6.6",
            # TODO: What is this?
            "platform_version": "2.38.0",
            "read_critical": 0,
            "start": str(start_location),
            "token": "",
            "version": version("hippolyzer"),
            "options": list(self._options),
        }
        async with self.http_session.post(
                login_uri,
                data=xmlrpc.client.dumps((payload,), "login_to_simulator"),
                headers={"Content-Type": "text/xml", "User-Agent": self.settings.USER_AGENT},
                ssl=self.settings.SSL_VERIFY,
        ) as resp:
            resp.raise_for_status()
            login_data = xmlrpc.client.loads((await resp.read()).decode("utf8"))[0][0]
        self.session = HippoClientSession.from_login_data(login_data, self)

        self.session.transport, self.session.protocol = await self._create_transport()
        self._resend_task = create_logged_task(self._attempt_resends(), "Circuit Resend")
        self.session.message_handler.subscribe("AgentDataUpdate", self._handle_agent_data_update)
        self.session.message_handler.subscribe("AgentGroupDataUpdate", self._handle_agent_group_data_update)

        assert self.session.open_circuit(self.session.regions[-1].circuit_addr)
        if connect:
            region = self.session.regions[-1]
            await region.connect(main_region=True)
    def logout(self):
        if not self.session:
            return
        if self._resend_task:
            self._resend_task.cancel()
            self._resend_task = None

        if self.main_circuit and self.main_circuit.is_alive:
            # Don't need to send reliably, there's a good chance the server won't ACK anyway.
            self.main_circuit.send(
                Message(
                    "LogoutRequest",
                    Block("AgentData", AgentID=self.session.agent_id, SessionID=self.session.id),
                )
            )
        session = self.session
        self.session = None
        for region in session.regions:
            region.disconnect()
        session.transport.close()

    def send_chat(self, message: Union[bytes, str], channel: int = 0, chat_type=ChatType.NORMAL) -> asyncio.Future:
        return self.main_circuit.send_reliable(Message(
            "ChatFromViewer",
            Block("AgentData", SessionID=self.session.id, AgentID=self.session.agent_id),
            Block("ChatData", Message=message, Channel=channel, Type=chat_type),
        ))

    def teleport(self, region_handle: int, local_pos=Vector3(0, 0, 0)) -> asyncio.Future:
        """Synchronously requests a teleport, returning a Future for teleport completion"""
        teleport_fut = asyncio.Future()

        # Send request synchronously, await asynchronously.
        send_fut = self.main_circuit.send_reliable(
            Message(
                'TeleportLocationRequest',
                Block('AgentData', AgentID=self.session.agent_id, SessionID=self.session.id),
                Block('Info', RegionHandle=region_handle, Position=local_pos, fill_missing=True),
            )
        )

        async def _handle_teleport():
            # Subscribe first, we may receive an event before we receive the packet ACK.
            with self.session.message_handler.subscribe_async(
                    ("TeleportLocal", "TeleportFailed", "TeleportFinish"),
            ) as get_tp_done_msg:
                try:
                    await send_fut
                except Exception as e:
                    # Pass along error if we failed to send reliably.
                    teleport_fut.set_exception(e)
                    return

                # Wait for a message that says we're done with the teleport
                msg = await get_tp_done_msg()
                if msg.name == "TeleportFailed":
                    teleport_fut.set_exception(RuntimeError("Failed to teleport"))
                elif msg.name == "TeleportLocal":
                    # Within the sim, nothing else we need to do
                    teleport_fut.set_result(None)
                elif msg.name == "TeleportFinish":
                    # Non-local TP, wait until we receive the AgentMovementComplete to
                    # set the finished signal.

                    # Region should be registered by this point, wait for it to connect
                    try:
                        # just fail if it takes longer than 30 seconds for the handshake to complete
                        await asyncio.wait_for(self.session.region_by_handle(region_handle).connected, 30)
                    except Exception as e:
                        teleport_fut.set_exception(e)
                        return
                    teleport_fut.set_result(None)

        create_logged_task(_handle_teleport(), "Teleport")

        return teleport_fut

    async def _attempt_resends(self):
        while True:
            if self.session is None:
                break
            for region in self.session.regions:
                if not region.circuit.is_alive:
                    continue
                region.circuit.resend_unacked()
            await asyncio.sleep(0.5)

    def _handle_agent_data_update(self, msg: Message):
        self.session.active_group = msg["AgentData"]["ActiveGroupID"]

    def _handle_agent_group_data_update(self, msg: Message):
        self.session.groups.clear()
        for block in msg["GroupData"]:
            self.session.groups.add(block["GroupID"])
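End to end, a minimal scripted session with this client looks something like the following hedged sketch (credentials and chat text are placeholders):

    import asyncio

    async def main():
        client = HippoClient()
        try:
            await client.login("Example Resident", "hunter2", start_location=StartLocation.HOME)
            await client.send_chat("Hello from Hippolyzer!")  # resolves once the sim ACKs
        finally:
            await client.aclose()

    asyncio.run(main())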
hippolyzer/lib/client/inventory_manager.py (Normal file, 208 lines)
@@ -0,0 +1,208 @@
from __future__ import annotations

import gzip
import itertools
import logging
from pathlib import Path
from typing import Union, List, Tuple, Set

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import AssetType, FolderType
from hippolyzer.lib.client.state import BaseClientSession


LOG = logging.getLogger(__name__)


class InventoryManager:
    def __init__(self, session: BaseClientSession):
        self._session = session
        self.model: InventoryModel = InventoryModel()
        self._load_skeleton()
        self._session.message_handler.subscribe("BulkUpdateInventory", self._handle_bulk_update_inventory)
        self._session.message_handler.subscribe("UpdateCreateInventoryItem", self._handle_update_create_inventory_item)
        self._session.message_handler.subscribe("RemoveInventoryItem", self._handle_remove_inventory_item)
        self._session.message_handler.subscribe("MoveInventoryItem", self._handle_move_inventory_item)

    def _load_skeleton(self):
        assert not self.model.nodes
        skel_cats: List[dict] = self._session.login_data.get('inventory-skeleton', [])
        for skel_cat in skel_cats:
            self.model.add(InventoryCategory(
                name=skel_cat["name"],
                cat_id=UUID(skel_cat["folder_id"]),
                parent_id=UUID(skel_cat["parent_id"]),
                # Don't use the version from the skeleton, this flags the inventory as needing
                # completion from the inventory cache. This matches indra's behavior.
                version=InventoryCategory.VERSION_NONE,
                type=AssetType.CATEGORY,
                pref_type=FolderType(skel_cat.get("type_default", FolderType.NONE)),
                owner_id=self._session.agent_id,
            ))

    def load_cache(self, path: Union[str, Path]):
        # Per indra, rough flow for loading inv on login is:
        # 1. Look at inventory skeleton from login response
        # 2. Pre-populate model with categories from the skeleton, including their versions
        # 3. Read the inventory cache, tracking categories and items separately
        # 4. Walk the list of categories in our cache. If the cat exists in the skeleton and the versions
        #    match, then we may load the category and its descendants from cache.
        # 5. Any categories in the skeleton but not in the cache, or those with mismatched versions must be fetched.
        #    The viewer does this by setting the local version of the cats to -1 and forcing a descendent fetch
        #    over AIS.
        #
        # By the time you call this function, you should have already loaded the inventory skeleton
        # into the model and set its inventory category versions to VERSION_NONE.

        skel_cats: List[dict] = self._session.login_data['inventory-skeleton']
        # UUID -> version map for inventory skeleton
        skel_versions = {UUID(cat["folder_id"]): cat["version"] for cat in skel_cats}
        LOG.info(f"Parsing inv cache at {path}")
        cached_categories, cached_items = self._parse_cache(path)
        LOG.info(f"Done parsing inv cache at {path}")
        loaded_cat_ids: Set[UUID] = set()

        for cached_cat in cached_categories:
            existing_cat: InventoryCategory = self.model.get(cached_cat.cat_id)  # noqa
            # Don't clobber an existing cat unless it just has a placeholder version,
            # maybe from loading the skeleton?
            if existing_cat and existing_cat.version != InventoryCategory.VERSION_NONE:
                continue
            # Cached cat isn't the same as what the inv server says it should be, can't use it.
            if cached_cat.version != skel_versions.get(cached_cat.cat_id):
                continue
            # Update any existing category in-place, or add if not present
            self.model.upsert(cached_cat)
            # Any items in this category in our cache file are usable and should be added
            loaded_cat_ids.add(cached_cat.cat_id)

        for cached_item in cached_items:
            # The skeleton doesn't have any items, so if we run into any items they should be exactly the
            # same as what we're trying to add. No point clobbering.
            if cached_item.item_id in self.model:
                continue
            # The parent category didn't have a cache hit against the inventory skeleton, can't add!
            # We don't even know if this item would be in the current version of its parent cat!
            if cached_item.parent_id not in loaded_cat_ids:
                continue
            self.model.add(cached_item)

        self.model.flag_if_dirty()

    def _parse_cache(self, path: Union[str, Path]) -> Tuple[List[InventoryCategory], List[InventoryItem]]:
        """Warning, may be incredibly slow due to llsd.parse_notation() behavior"""
        categories: List[InventoryCategory] = []
        items: List[InventoryItem] = []
        # Parse our cached items and categories out of the compressed inventory cache
        first_line = True
        with gzip.open(path, "rb") as f:
            # Line-delimited LLSD notation!
            for line in f.readlines():
                # TODO: Parsing of invcache is dominated by `parse_notation()`. It's stupidly inefficient.
                node_llsd = llsd.parse_notation(line)
                if first_line:
                    # First line is the file header
                    first_line = False
                    if node_llsd['inv_cache_version'] not in (2, 3):
                        raise ValueError(f"Unknown cache version: {node_llsd!r}")
                    continue

                if InventoryCategory.ID_ATTR in node_llsd:
                    if (cat_node := InventoryCategory.from_llsd(node_llsd)) is not None:
                        categories.append(cat_node)
                elif InventoryItem.ID_ATTR in node_llsd:
                    if (item_node := InventoryItem.from_llsd(node_llsd)) is not None:
                        items.append(item_node)
                else:
                    LOG.warning(f"Unknown node type in inv cache: {node_llsd!r}")
        return categories, items
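A hedged usage sketch of the cache flow described above; the cache file's location varies by viewer, so the filename here is purely illustrative:

    mgr = InventoryManager(session)   # seeds the model from the login skeleton, versions = VERSION_NONE
    mgr.load_cache("inv_cache.gz")    # fills in categories whose cached versions match the skeleton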
    def _handle_bulk_update_inventory(self, msg: Message):
        any_cats = False
        for folder_block in msg["FolderData"]:
            if folder_block["FolderID"] == UUID.ZERO:
                continue
            any_cats = True
            self.model.upsert(
                InventoryCategory.from_folder_data(folder_block),
                # Don't clobber version, we only want to fetch the folder if it's new
                # and hasn't just moved.
                update_fields={"parent_id", "name", "pref_type"},
            )
        for item_block in msg["ItemData"]:
            if item_block["ItemID"] == UUID.ZERO:
                continue
            self.model.upsert(InventoryItem.from_inventory_data(item_block))

        if any_cats:
            self.model.flag_if_dirty()

    def _validate_recipient(self, recipient: UUID):
        if self._session.agent_id != recipient:
            raise ValueError(f"AgentID Mismatch {self._session.agent_id} != {recipient}")

    def _handle_update_create_inventory_item(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for inventory_block in msg["InventoryData"]:
            self.model.upsert(InventoryItem.from_inventory_data(inventory_block))

    def _handle_remove_inventory_item(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for inventory_block in msg["InventoryData"]:
            node = self.model.get(inventory_block["ItemID"])
            if node:
                self.model.unlink(node)

    def _handle_remove_inventory_folder(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for folder_block in msg["FolderData"]:
            node = self.model.get(folder_block["FolderID"])
            if node:
                self.model.unlink(node)

    def _handle_move_inventory_item(self, msg: Message):
        for inventory_block in msg["InventoryData"]:
            node = self.model.get(inventory_block["ItemID"])
            if not node:
                LOG.warning(f"Missing inventory item {inventory_block['ItemID']}")
                continue
            if inventory_block["NewName"]:
                node.name = str(inventory_block["NewName"])
            node.parent_id = inventory_block['FolderID']

    def process_aisv3_response(self, payload: dict):
        if "name" in payload:
            # Just a rough guess. Assume this response is updating something if there's
            # a "name" key.
            if InventoryCategory.ID_ATTR_AIS in payload:
                if (cat_node := InventoryCategory.from_llsd(payload, flavor="ais")) is not None:
                    self.model.upsert(cat_node)
            elif InventoryItem.ID_ATTR in payload:
                if (item_node := InventoryItem.from_llsd(payload, flavor="ais")) is not None:
                    self.model.upsert(item_node)
            else:
                LOG.warning(f"Unknown node type in AIS payload: {payload!r}")

        # Parse the embedded stuff
        embedded_dict = payload.get("_embedded", {})
        for category_llsd in embedded_dict.get("categories", {}).values():
            self.model.upsert(InventoryCategory.from_llsd(category_llsd, flavor="ais"))
        for item_llsd in embedded_dict.get("items", {}).values():
            self.model.upsert(InventoryItem.from_llsd(item_llsd, flavor="ais"))
        for link_llsd in embedded_dict.get("links", {}).values():
            self.model.upsert(InventoryItem.from_llsd(link_llsd, flavor="ais"))

        # Get rid of anything we were asked to
        for node_id in itertools.chain(
                payload.get("_broken_links_removed", ()),
                payload.get("_removed_items", ()),
                payload.get("_category_items_removed", ()),
                payload.get("_categories_removed", ()),
        ):
            node = self.model.get(node_id)
            if node:
                # Presumably this list is exhaustive, so don't unlink children.
                self.model.unlink(node, single_only=True)
@@ -27,16 +27,20 @@ from hippolyzer.lib.base.objects import (
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer
from hippolyzer.lib.base import llsd

if TYPE_CHECKING:
    from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession


LOG = logging.getLogger(__name__)
OBJECT_OR_LOCAL = Union[Object, int]
MATERIAL_MAP_TYPE = Dict[UUID, dict]


class ObjectUpdateType(enum.IntEnum):
-   OBJECT_UPDATE = enum.auto()
+   UPDATE = enum.auto()
    PROPERTIES = enum.auto()
    FAMILY = enum.auto()
    COSTS = enum.auto()
@@ -48,12 +52,13 @@ class ClientObjectManager:
    Object manager for a specific region
    """

-   __slots__ = ("_region", "_world_objects", "state")
+   __slots__ = ("_region", "_world_objects", "state", "__weakref__", "_requesting_all_mats_lock")

    def __init__(self, region: BaseClientRegion):
        self._region: BaseClientRegion = proxify(region)
        self._world_objects: ClientWorldObjectManager = proxify(region.session().objects)
        self.state: RegionObjectsState = RegionObjectsState()
+       self._requesting_all_mats_lock = asyncio.Lock()

    def __len__(self):
        return len(self.state.localid_lookup)
@@ -71,7 +76,7 @@ class ClientObjectManager:
        if self._region.handle is not None:
            # We're tracked by the world object manager, tell it to untrack
            # any objects that we owned
-           self._world_objects.clear_region_objects(self._region.handle)
+           self._world_objects.untrack_region_objects(self._region.handle)

    def lookup_localid(self, localid: int) -> Optional[Object]:
        return self.state.lookup_localid(localid)
@@ -161,9 +166,56 @@ class ClientObjectManager:

        futures = []
        for local_id in local_ids:
-           futures.append(self.state.register_future(local_id, ObjectUpdateType.OBJECT_UPDATE))
+           futures.append(self.state.register_future(local_id, ObjectUpdateType.UPDATE))
        return futures

+   async def request_all_materials(self) -> MATERIAL_MAP_TYPE:
+       """
+       Request all materials within the sim
+
+       Sigh, yes, this is best practice per indra :(
+       """
+       if self._requesting_all_mats_lock.locked():
+           # We're already requesting all materials, wait until the lock is free
+           # and just return what was returned.
+           async with self._requesting_all_mats_lock:
+               return self.state.materials
+
+       async with self._requesting_all_mats_lock:
+           async with self._region.caps_client.get("RenderMaterials") as resp:
+               resp.raise_for_status()
+               # Clear out all previous materials, this is a complete response.
+               self.state.materials.clear()
+               self._process_materials_response(await resp.read())
+           return self.state.materials
+
+   async def request_materials(self, material_ids: Sequence[UUID]) -> MATERIAL_MAP_TYPE:
+       if self._requesting_all_mats_lock.locked():
+           # Just wait for the in-flight request for all materials to complete
+           # if we have one in flight.
+           async with self._requesting_all_mats_lock:
+               # Wait for the lock to be released
+               pass
+
+       not_found = set(x for x in material_ids if (x not in self.state.materials))
+       if not_found:
+           # Request any materials we don't already have, if there were any
+           data = {"Zipped": llsd.zip_llsd([x.bytes for x in material_ids])}
+           async with self._region.caps_client.post("RenderMaterials", data=data) as resp:
+               resp.raise_for_status()
+               self._process_materials_response(await resp.read())
+
+       # build up a dict of just the requested mats
+       mats = {}
+       for mat_id in material_ids:
+           mats[mat_id] = self.state.materials[mat_id]
+       return mats
+
+   def _process_materials_response(self, response: bytes):
+       entries = llsd.unzip_llsd(llsd.parse_xml(response)["Zipped"])
+       for entry in entries:
+           self.state.materials[UUID(bytes=entry["ID"])] = entry["Material"]
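The lock doubles as an in-flight marker: the full-sim fetch holds it, and concurrent callers just wait on it and read the shared per-region cache rather than issuing duplicate requests. Illustrative call sites (assumes a connected region):

    all_mats = await region.objects.request_all_materials()        # also run automatically on connect
    mats = await region.objects.request_materials([material_id])   # only cache misses hit the cap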
class ObjectEvent:
    __slots__ = ("object", "updated", "update_type")
@@ -238,12 +290,14 @@ class ClientWorldObjectManager:
        if self._get_region_manager(handle) is None:
            self._region_managers[handle] = proxify(self._session.region_by_handle(handle).objects)

-   def clear_region_objects(self, handle: int):
+   def untrack_region_objects(self, handle: int):
        """Handle signal that a region object manager was just cleared"""
        # Make sure they're gone from our lookup table
        for obj in tuple(self._fullid_lookup.values()):
            if obj.RegionHandle == handle:
                del self._fullid_lookup[obj.FullID]
+       if handle in self._region_managers:
+           del self._region_managers[handle]
        self._rebuild_avatar_objects()

    def _get_region_manager(self, handle: int) -> Optional[ClientObjectManager]:
@@ -288,16 +342,17 @@ class ClientWorldObjectManager:
            obj = obj.Parent

    def clear(self):
+       for handle in tuple(self._region_managers.keys()):
+           self.untrack_region_objects(handle)
        self._avatars.clear()
        for region_mgr in self._region_managers.values():
            region_mgr.clear()
        if self._fullid_lookup:
            LOG.warning(f"Had {len(self._fullid_lookup)} objects not tied to a region manager!")
        self._fullid_lookup.clear()
        self._rebuild_avatar_objects()
-       self._region_managers.clear()

-   def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType):
+   def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType,
+                               msg: Optional[Message]):
        old_parent_id = obj.ParentID
        new_parent_id = new_properties.get("ParentID", obj.ParentID)
        old_local_id = obj.LocalID
@@ -340,23 +395,23 @@ class ClientWorldObjectManager:
            LOG.warning(f"Tried to move object {obj!r} to unknown region {new_region_handle}")

        if obj.PCode == PCode.AVATAR:
-           # `Avatar` instances are handled separately. Update all Avatar objects so
-           # we can deal with the RegionHandle change.
+           # `Avatar` instances are handled separately. Update all Avatar objects,
+           # so we can deal with the RegionHandle change.
            self._rebuild_avatar_objects()
        elif new_parent_id != old_parent_id:
            # Parent ID changed, but we're in the same region
            new_region_state.handle_object_reparented(obj, old_parent_id=old_parent_id)

        if actually_updated_props and new_region_state is not None:
-           self._run_object_update_hooks(obj, actually_updated_props, update_type)
+           self._run_object_update_hooks(obj, actually_updated_props, update_type, msg)

-   def _track_new_object(self, region: RegionObjectsState, obj: Object):
+   def _track_new_object(self, region: RegionObjectsState, obj: Object, msg: Message):
        region.track_object(obj)
        self._fullid_lookup[obj.FullID] = obj
        if obj.PCode == PCode.AVATAR:
            self._avatar_objects[obj.FullID] = obj
            self._rebuild_avatar_objects()
-       self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE)
+       self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.UPDATE, msg)

    def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
        obj = region_state.lookup_localid(local_id)
@@ -408,11 +463,11 @@ class ClientWorldObjectManager:
            # our view of the world then we want to move it to this region.
            obj = self.lookup_fullid(object_data["FullID"])
            if obj:
-               self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
+               self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state is None:
                    continue
-               self._track_new_object(region_state, Object(**object_data))
+               self._track_new_object(region_state, Object(**object_data), msg)
        msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _handle_terse_object_update(self, msg: Message):
@@ -432,7 +487,7 @@ class ClientWorldObjectManager:
                # Need the Object as context because decoding state requires PCode.
                state_deserializer = ObjectStateSerializer.deserialize
                object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
-               self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
+               self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state:
                    region_state.missing_locals.add(object_data["LocalID"])
@@ -460,7 +515,7 @@ class ClientWorldObjectManager:
                self._update_existing_object(obj, {
                    "UpdateFlags": update_flags,
                    "RegionHandle": handle,
-               }, ObjectUpdateType.OBJECT_UPDATE)
+               }, ObjectUpdateType.UPDATE, msg)
                continue

            cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
@@ -468,7 +523,7 @@ class ClientWorldObjectManager:
                cached_obj = normalize_object_update_compressed_data(cached_obj_data)
                cached_obj["UpdateFlags"] = update_flags
                cached_obj["RegionHandle"] = handle
-               self._track_new_object(region_state, Object(**cached_obj))
+               self._track_new_object(region_state, Object(**cached_obj), msg)
                continue

            # Don't know about it and wasn't cached.
@@ -499,11 +554,11 @@ class ClientWorldObjectManager:
            LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
            obj = self.lookup_fullid(object_data["FullID"])
            if obj:
-               self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
+               self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state is None:
                    continue
-               self._track_new_object(region_state, Object(**object_data))
+               self._track_new_object(region_state, Object(**object_data), msg)
        msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
def _handle_object_properties_generic(self, packet: Message):
|
||||
@@ -516,7 +571,7 @@ class ClientWorldObjectManager:
|
||||
obj = self.lookup_fullid(block["ObjectID"])
|
||||
if obj:
|
||||
seen_locals.append(obj.LocalID)
|
||||
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES)
|
||||
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES, packet)
|
||||
else:
|
||||
LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}")
|
||||
packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
@@ -563,11 +618,16 @@ class ClientWorldObjectManager:
|
||||
LOG.debug(f"Received ObjectCost for unknown {object_id}")
|
||||
continue
|
||||
obj.ObjectCosts.update(object_costs)
|
||||
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS)
|
||||
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS, None)
|
||||
|
||||
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
|
||||
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
|
||||
msg: Optional[Message]):
|
||||
region_state = self._get_region_state(obj.RegionHandle)
|
||||
region_state.resolve_futures(obj, update_type)
|
||||
if region_state:
|
||||
region_state.resolve_futures(obj, update_type)
|
||||
else:
|
||||
LOG.warning(f"{obj} not tied to a region state")
|
||||
|
||||
if obj.PCode == PCode.AVATAR and "NameValue" in updated_props:
|
||||
if obj.NameValue:
|
||||
self.name_cache.update(obj.FullID, obj.NameValue.to_dict())
|
||||
@@ -644,13 +704,14 @@ class RegionObjectsState:
|
||||
|
||||
__slots__ = (
|
||||
"handle", "missing_locals", "_orphans", "localid_lookup", "coarse_locations",
|
||||
"_object_futures"
|
||||
"_object_futures", "materials"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
self.missing_locals = set()
|
||||
self.localid_lookup: Dict[int, Object] = {}
|
||||
self.coarse_locations: Dict[UUID, Vector3] = {}
|
||||
self.materials: MATERIAL_MAP_TYPE = {}
|
||||
self._object_futures: Dict[Tuple[int, int], List[asyncio.Future]] = {}
|
||||
self._orphans: Dict[int, List[int]] = collections.defaultdict(list)
|
||||
|
||||
@@ -663,6 +724,7 @@ class RegionObjectsState:
|
||||
self.coarse_locations.clear()
|
||||
self.missing_locals.clear()
|
||||
self.localid_lookup.clear()
|
||||
self.materials.clear()
|
||||
|
||||
def lookup_localid(self, localid: int) -> Optional[Object]:
|
||||
return self.localid_lookup.get(localid)
|
||||
@@ -756,7 +818,8 @@ class RegionObjectsState:
|
||||
def handle_object_reparented(self, obj: Object, old_parent_id: int):
|
||||
"""Recreate any links to ancestor Objects for obj due to parent changes"""
|
||||
self._unparent_object(obj, old_parent_id)
|
||||
self._parent_object(obj, insert_at_head=True)
|
||||
# Avatars get sent to the _end_ of the child list when reparented
|
||||
self._parent_object(obj, insert_at_head=obj.PCode != PCode.AVATAR)
|
||||
|
||||
def collect_orphans(self, parent_localid: int) -> Sequence[int]:
|
||||
"""Take ownership of any orphan IDs belonging to parent_localid"""
|
||||
|
||||
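One behavioral detail worth calling out in the reparenting change above: non-avatar children are now inserted at the head of the parent's child list, while avatars are appended to the end. A standalone sketch of that ordering rule (plain Python, not the real Object API):

```python
# Illustrative only: mirrors the insert_at_head=obj.PCode != PCode.AVATAR logic.
AVATAR = "avatar"

def parent_object(children: list, child: str, pcode: str) -> None:
    if pcode == AVATAR:
        children.append(child)  # avatars go to the end of the child list
    else:
        children.insert(0, child)  # everything else goes to the head

kids: list = []
parent_object(kids, "prim_a", "prim")
parent_object(kids, "av_1", AVATAR)
parent_object(kids, "prim_b", "prim")
assert kids == ["prim_b", "prim_a", "av_1"]
```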
hippolyzer/lib/client/parcel_manager.py (new file, +251)
@@ -0,0 +1,251 @@
+import asyncio
+import dataclasses
+import logging
+from typing import *
+
+import numpy as np
+
+from hippolyzer.lib.base.datatypes import UUID, Vector3, Vector2
+from hippolyzer.lib.base.message.message import Message, Block
+from hippolyzer.lib.base.templates import ParcelGridFlags, ParcelFlags
+from hippolyzer.lib.client.state import BaseClientRegion
+
+
+LOG = logging.getLogger(__name__)
+
+
+@dataclasses.dataclass
+class Parcel:
+    local_id: int
+    name: str
+    flags: ParcelFlags
+    group_id: UUID
+    # TODO: More properties
+
+
+class ParcelManager:
+    # We expect to receive this number of ParcelOverlay messages
+    NUM_CHUNKS = 4
+    # No, we don't support varregion or whatever.
+    REGION_SIZE = 256
+    # Basically, the minimum parcel size is 4 on either axis so each "point" in the
+    # ParcelOverlay represents an area this size
+    GRID_STEP = 4
+    GRIDS_PER_EDGE = REGION_SIZE // GRID_STEP
+
+    def __init__(self, region: BaseClientRegion):
+        # dimensions are south to north, west to east
+        self.overlay = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint8)
+        # 1-indexed parcel list index
+        self.parcel_indices = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint16)
+        self.parcels: List[Optional[Parcel]] = []
+        self.overlay_chunks: List[Optional[bytes]] = [None] * self.NUM_CHUNKS
+        self.overlay_complete = asyncio.Event()
+        self.parcels_downloaded = asyncio.Event()
+        self._parcels_dirty: bool = True
+        self._region = region
+        self._next_seq = 1
+        self._region.message_handler.subscribe("ParcelOverlay", self._handle_parcel_overlay)
+
+    def _handle_parcel_overlay(self, message: Message):
+        self.add_overlay_chunk(message["ParcelData"]["Data"], message["ParcelData"]["SequenceID"])
+
+    def add_overlay_chunk(self, chunk: bytes, chunk_num: int) -> bool:
+        self.overlay_chunks[chunk_num] = chunk
+        # Still have some pending chunks, don't try to parse this yet
+        if not all(self.overlay_chunks):
+            return False
+
+        new_overlay_data = b"".join(self.overlay_chunks)
+        self.overlay_chunks = [None] * self.NUM_CHUNKS
+        self._parcels_dirty = False
+        if new_overlay_data != self.overlay.data[:]:
+            # If the raw data doesn't match, then we have to parse again
+            new_data = np.frombuffer(new_overlay_data, dtype=np.uint8).reshape(self.overlay.shape)
+            np.copyto(self.overlay, new_data)
+            self._parse_overlay()
+            # We could optimize this by just marking specific squares dirty
+            # if the parcel indices have changed between parses, but I don't care
+            # to do that.
+            self._parcels_dirty = True
+            self.parcels_downloaded.clear()
+        if not self.overlay_complete.is_set():
+            self.overlay_complete.set()
+        return True
+
+    @classmethod
+    def _pos_to_grid_coords(cls, pos: Vector3) -> Tuple[int, int]:
+        return round(pos.Y // cls.GRID_STEP), round(pos.X // cls.GRID_STEP)
+
+    def _parse_overlay(self):
+        # Zero out all parcel indices
+        self.parcel_indices[:, :] = 0
+        next_parcel_idx = 1
+        for y in range(0, self.GRIDS_PER_EDGE):
+            for x in range(0, self.GRIDS_PER_EDGE):
+                # We already have a parcel index for this grid, continue
+                if self.parcel_indices[y, x]:
+                    continue
+
+                # Fill all adjacent grids with this parcel index
+                self._flood_fill_parcel_index(y, x, next_parcel_idx)
+                # SL doesn't allow disjoint grids to be part of the same parcel, so
+                # whatever grid we find next without a parcel index must be a new parcel
+                next_parcel_idx += 1
+
+        # Should have found at least one parcel
+        assert next_parcel_idx >= 2
+
+        # Have a different number of parcels now, we can't use the existing parcel objects
+        # because it's unlikely that just parcel boundaries have changed.
+        if len(self.parcels) != next_parcel_idx - 1:
+            # We don't know about any of these parcels yet, fill with none
+            self.parcels = [None] * (next_parcel_idx - 1)
+
+    def _flood_fill_parcel_index(self, start_y, start_x, parcel_idx):
+        """Flood fill all neighboring grids with the parcel index, being mindful of parcel boundaries"""
+        # We know the start grid is assigned to this parcel index
+        self.parcel_indices[start_y, start_x] = parcel_idx
+        # Queue of grids to test the neighbors of, start with the start grid.
+        neighbor_test_queue: List[Tuple[int, int]] = [(start_y, start_x)]
+
+        while neighbor_test_queue:
+            to_test = neighbor_test_queue.pop(0)
+            test_grid = self.overlay[to_test]
+
+            for direction in ((-1, 0), (1, 0), (0, -1), (0, 1)):
+                new_pos = to_test[0] + direction[0], to_test[1] + direction[1]
+
+                if any(x < 0 or x >= self.GRIDS_PER_EDGE for x in new_pos):
+                    # Outside bounds
+                    continue
+                if self.parcel_indices[new_pos]:
+                    # Already set, skip
+                    continue
+
+                if direction[0] == -1 and test_grid & ParcelGridFlags.SOUTH_LINE:
+                    # Test grid is already on a south line, can't go south.
+                    continue
+                if direction[1] == -1 and test_grid & ParcelGridFlags.WEST_LINE:
+                    # Test grid is already on a west line, can't go west.
+                    continue
+
+                grid = self.overlay[new_pos]
+
+                if direction[0] == 1 and grid & ParcelGridFlags.SOUTH_LINE:
+                    # Hit a south line going north, this is outside the current parcel
+                    continue
+                if direction[1] == 1 and grid & ParcelGridFlags.WEST_LINE:
+                    # Hit a west line going east, this is outside the current parcel
+                    continue
+                # This grid is within the current parcel, set the parcel index
+                self.parcel_indices[new_pos] = parcel_idx
+                # Append the grid to the neighbour testing queue
+                neighbor_test_queue.append(new_pos)
+
+    async def request_dirty_parcels(self) -> Tuple[Parcel, ...]:
+        if self._parcels_dirty:
+            return await self.request_all_parcels()
+        return tuple(self.parcels)
+
+    async def request_all_parcels(self) -> Tuple[Parcel, ...]:
+        await self.overlay_complete.wait()
+        # Because of how we build up the parcel index map, it's safe for us to
+        # do this instead of keeping track of seen IDs in a set or similar
+        last_seen_parcel_index = 0
+        futs = []
+        for y in range(0, self.GRIDS_PER_EDGE):
+            for x in range(0, self.GRIDS_PER_EDGE):
+                parcel_index = self.parcel_indices[y, x]
+                assert parcel_index != 0
+                if parcel_index <= last_seen_parcel_index:
+                    continue
+                assert parcel_index == last_seen_parcel_index + 1
+                last_seen_parcel_index = parcel_index
+                # Request a position within the parcel
+                futs.append(self.request_parcel_properties(
+                    Vector2(x * self.GRID_STEP + 1.0, y * self.GRID_STEP + 1.0)
+                ))

+        # Wait for all parcel properties to come in
+        await asyncio.gather(*futs)
+        self.parcels_downloaded.set()
+        self._parcels_dirty = False
+        return tuple(self.parcels)
+
+    async def request_parcel_properties(self, pos: Vector2) -> Parcel:
+        await self.overlay_complete.wait()
+        seq_id = self._next_seq
+        # Register a wait on a ParcelProperties matching this seq
+        parcel_props_fut = self._region.message_handler.wait_for(
+            ("ParcelProperties",),
+            predicate=lambda msg: msg["ParcelData"]["SequenceID"] == seq_id,
+            timeout=10.0,
+        )
+        # We don't care about when we receive an ack, we only care about when we receive the parcel props
+        _ = self._region.circuit.send_reliable(Message(
+            "ParcelPropertiesRequest",
+            Block("AgentData", AgentID=self._region.session().agent_id, SessionID=self._region.session().id),
+            Block(
+                "ParcelData",
+                SequenceID=seq_id,
+                West=pos.X,
+                East=pos.X,
+                North=pos.Y,
+                South=pos.Y,
+                # What does this even mean?
+                SnapSelection=0,
+            ),
+        ))
+        self._next_seq += 1
+
+        return self._process_parcel_properties(await parcel_props_fut, pos)
+
+    def _process_parcel_properties(self, parcel_props: Message, pos: Optional[Vector2] = None) -> Parcel:
+        data_block = parcel_props["ParcelData"][0]
+        grid_coord = None
+        # Parcel indices are one-indexed, convert to zero-indexed.
+        if pos is not None:
+            # We have a pos, figure out where in the grid we should look for the parcel index
+            grid_coord = self._pos_to_grid_coords(pos)
+        else:
+            # Need to look at the parcel bitmap to figure out a valid grid coord.
+            # This is a boolean array where each bit says whether the parcel occupies that grid.
+            parcel_bitmap = data_block.deserialize_var("Bitmap")
+
+            for y in range(self.GRIDS_PER_EDGE):
+                for x in range(self.GRIDS_PER_EDGE):
+                    if parcel_bitmap[y, x]:
+                        # This is the first grid the parcel occupies per the bitmap
+                        grid_coord = y, x
+                        break
+                if grid_coord:
+                    break
+
+        parcel = Parcel(
+            local_id=data_block["LocalID"],
+            name=data_block["Name"],
+            flags=ParcelFlags(data_block["ParcelFlags"]),
+            group_id=data_block["GroupID"],
+            # Parcel UUID isn't in this response :/
+        )
+
+        # I guess the bitmap _could_ be empty, but probably not.
+        if grid_coord is not None:
+            parcel_idx = self.parcel_indices[grid_coord] - 1
+            if len(self.parcels) > parcel_idx >= 0:
+                # Okay, parcels list is sane, place the parcel in there.
+                self.parcels[parcel_idx] = parcel
+            else:
+                LOG.warning(f"Received ParcelProperties with incomplete overlay for {grid_coord!r}")
+
+        return parcel
+
+    async def get_parcel_at(self, pos: Vector2, request_if_missing: bool = True) -> Optional[Parcel]:
+        grid_coord = self._pos_to_grid_coords(pos)
+        parcel = None
+        if parcel_idx := self.parcel_indices[grid_coord]:
+            parcel = self.parcels[parcel_idx - 1]
+        if request_if_missing and parcel is None:
+            return await self.request_parcel_properties(pos)
+        return parcel
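Rough usage sketch for the new ParcelManager. It assumes `parcel_manager` was constructed against a connected `BaseClientRegion`; only `request_dirty_parcels()` and `get_parcel_at()` are real API here, the wiring is hypothetical:

```python
async def dump_parcels(parcel_manager: ParcelManager) -> None:
    # Blocks until all 4 ParcelOverlay chunks have arrived, then fetches
    # ParcelProperties once per distinct parcel index in the overlay.
    for parcel in await parcel_manager.request_dirty_parcels():
        print(parcel.local_id, parcel.name, parcel.flags)

    # Point lookups go through the 64x64 grid index; positions are in
    # region-local meters, so this is the 4x4m square containing <32, 128>.
    parcel = await parcel_manager.get_parcel_at(Vector2(32.0, 128.0))
```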
hippolyzer/lib/client/rlv.py (new file, +51)
@@ -0,0 +1,51 @@
+from typing import NamedTuple, List, Sequence
+
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.templates import ChatType
+
+
+class RLVCommand(NamedTuple):
+    behaviour: str
+    param: str
+    options: List[str]
+
+
+class RLVParser:
+    @staticmethod
+    def is_rlv_message(msg: Message) -> bool:
+        chat: str = msg["ChatData"]["Message"]
+        chat_type: int = msg["ChatData"]["ChatType"]
+        return chat and chat.startswith("@") and chat_type == ChatType.OWNER
+
+    @staticmethod
+    def parse_chat(chat: str) -> List[RLVCommand]:
+        assert chat.startswith("@")
+        chat = chat.lstrip("@")
+        commands = []
+        for command_str in chat.split(","):
+            if not command_str:
+                continue
+            # RLV-style command, `<cmd>(:<option1>;<option2>)?(=<param>)?`
+            # Roughly (?<behaviour>[^:=]+)(:(?<option>[^=]*))?=(?<param>\w+)
+            options, _, param = command_str.partition("=")
+            behaviour, _, options = options.partition(":")
+            # TODO: Not always correct, commands can specify their own parsing for the option field
+            #  maybe special-case these?
+            options = options.split(";") if options else []
+            commands.append(RLVCommand(behaviour, param, options))
+        return commands
+
+    @staticmethod
+    def format_chat(commands: Sequence[RLVCommand]) -> str:
+        assert commands
+        chat = ""
+        for command in commands:
+            if chat:
+                chat += ","
+
+            chat += command.behaviour
+            if command.options:
+                chat += ":" + ";".join(command.options)
+            if command.param:
+                chat += "=" + command.param
+        return "@" + chat
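A quick round-trip through the new parser, based only on the parsing rules above:

```python
cmds = RLVParser.parse_chat("@detach=n,remoutfit:shirt=force")
assert cmds[0] == RLVCommand("detach", "n", [])
assert cmds[1] == RLVCommand("remoutfit", "force", ["shirt"])
# format_chat() is the inverse, modulo the option-field caveat in the TODO.
assert RLVParser.format_chat(cmds) == "@detach=n,remoutfit:shirt=force"
```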
@@ -4,17 +4,21 @@ Base classes for common session-related state shared between clients and proxies
 from __future__ import annotations

 import abc
+import logging
 import weakref
 from typing import *

-from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.message.circuit import ConnectionHolder
+import multidict
+
+from hippolyzer.lib.base.datatypes import UUID, Vector3
+from hippolyzer.lib.base.message.circuit import ConnectionHolder, Circuit
 from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.message.message_handler import MessageHandler
 from hippolyzer.lib.base.network.caps_client import CapsClient
 from hippolyzer.lib.base.network.transport import ADDR_TUPLE
+from hippolyzer.lib.base.objects import handle_to_global_pos

 if TYPE_CHECKING:
     from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager


 class BaseClientRegion(ConnectionHolder, abc.ABC):
@@ -24,6 +28,53 @@ class BaseClientRegion(ConnectionHolder, abc.ABC):
     session: Callable[[], BaseClientSession]
     objects: ClientObjectManager
     caps_client: CapsClient
+    cap_urls: multidict.MultiDict[str]
     circuit_addr: ADDR_TUPLE
+    circuit: Optional[Circuit]
+    _name: Optional[str]
+
+    def __init__(self):
+        self._name = None
+        self.circuit = None
+
+    @abc.abstractmethod
+    def update_caps(self, caps: Mapping[str, str]) -> None:
+        pass
+
+    @property
+    def name(self):
+        if self._name:
+            return self._name
+        return "Pending %r" % (self.circuit_addr,)
+
+    @name.setter
+    def name(self, val):
+        self._name = val
+
+    @property
+    def global_pos(self) -> Vector3:
+        if self.handle is None:
+            raise ValueError("Can't determine global region position without handle")
+        return handle_to_global_pos(self.handle)
+
+    @property
+    def is_alive(self):
+        if not self.circuit:
+            return False
+        return self.circuit.is_alive
+
+    def mark_dead(self):
+        logging.info("Marking %r dead" % self)
+        if self.circuit:
+            self.circuit.is_alive = False
+        self.objects.clear()
+
+    def __repr__(self):
+        return "<%s %s (%r)>" % (self.__class__.__name__, self.name, self.handle)


 class BaseClientSessionManager:
     pass


 class BaseClientSession(abc.ABC):
@@ -31,8 +82,104 @@ class BaseClientSession(abc.ABC):
     id: UUID
     agent_id: UUID
     secure_session_id: UUID
+    active_group: UUID
+    groups: Set[UUID]
     message_handler: MessageHandler[Message, str]
-    regions: Sequence[BaseClientRegion]
+    regions: MutableSequence[BaseClientRegion]
+    region_by_handle: Callable[[int], Optional[BaseClientRegion]]
+    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
     objects: ClientWorldObjectManager
+    login_data: Dict[str, Any]
+    REGION_CLS = Type[BaseClientRegion]
+
+    def __init__(self, id, secure_session_id, agent_id, circuit_code,
+                 session_manager: Optional[BaseClientSessionManager], login_data=None):
+        self.login_data = login_data or {}
+        self.pending = True
+        self.id: UUID = id
+        self.secure_session_id: UUID = secure_session_id
+        self.agent_id: UUID = agent_id
+        self.circuit_code = circuit_code
+        self.global_caps = {}
+        self.session_manager = session_manager
+        self.active_group: UUID = UUID.ZERO
+        self.groups: Set[UUID] = set()
+        self.regions = []
+        self._main_region = None
+        self.message_handler: MessageHandler[Message, str] = MessageHandler()
+        super().__init__()
+
+    @classmethod
+    def from_login_data(cls, login_data, session_manager):
+        sess = cls(
+            id=UUID(login_data["session_id"]),
+            secure_session_id=UUID(login_data["secure_session_id"]),
+            agent_id=UUID(login_data["agent_id"]),
+            circuit_code=int(login_data["circuit_code"]),
+            session_manager=session_manager,
+            login_data=login_data,
+        )
+        appearance_service = login_data.get("agent_appearance_service")
+        map_image_service = login_data.get("map-server-url")
+        if appearance_service:
+            sess.global_caps["AppearanceService"] = appearance_service
+        if map_image_service:
+            sess.global_caps["MapImageService"] = map_image_service
+        # Login data also has details about the initial sim
+        sess.register_region(
+            circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
+            handle=(login_data["region_x"] << 32) | login_data["region_y"],
+            seed_url=login_data["seed_capability"],
+        )
+        return sess
+
+    def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
+                        handle: Optional[int] = None) -> BaseClientRegion:
+        if not any((circuit_addr, seed_url)):
+            raise ValueError("One of circuit_addr and seed_url must be defined!")
+
+        for region in self.regions:
+            if region.circuit_addr == circuit_addr:
+                if seed_url and region.cap_urls.get("Seed") != seed_url:
+                    region.update_caps({"Seed": seed_url})
+                if handle:
+                    region.handle = handle
+                return region
+            if seed_url and region.cap_urls.get("Seed") == seed_url:
+                return region
+
+        if not circuit_addr:
+            raise ValueError("Can't create region without circuit addr!")
+
+        logging.info("Registering region for %r" % (circuit_addr,))
+        region = self.REGION_CLS(circuit_addr, seed_url, self, handle=handle)
+        self.regions.append(region)
+        return region
+
+    @property
+    def main_region(self) -> Optional[BaseClientRegion]:
+        if self._main_region and self._main_region() in self.regions:
+            return self._main_region()
+        return None
+
+    @main_region.setter
+    def main_region(self, val: BaseClientRegion):
+        self._main_region = weakref.ref(val)
+
+    def transaction_to_assetid(self, transaction_id: UUID):
+        return UUID.combine(transaction_id, self.secure_session_id)
+
+    def region_by_circuit_addr(self, circuit_addr) -> Optional[BaseClientRegion]:
+        for region in self.regions:
+            if region.circuit_addr == circuit_addr and region.circuit:
+                return region
+        return None
+
+    def region_by_handle(self, handle: int) -> Optional[BaseClientRegion]:
+        for region in self.regions:
+            if region.handle == handle:
+                return region
+        return None
+
+    def __repr__(self):
+        return "<%s %s>" % (self.__class__.__name__, self.id)
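The `handle` computed in `from_login_data` packs the region's global grid coordinates (in meters) into one 64-bit integer, which is what `handle_to_global_pos` later unpacks. In miniature:

```python
# region_x/region_y come from login data as global coordinates in meters
# (multiples of 256 for normal regions).
region_x, region_y = 256_000, 255_744
handle = (region_x << 32) | region_y
assert (handle >> 32) == region_x
assert (handle & 0xFFFFFFFF) == region_y
```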
@@ -1,11 +1,14 @@
 from __future__ import annotations

-from typing import *
 import abc
+import copy
 import dataclasses
 import multiprocessing
-import pickle
 import secrets
+import warnings
+from typing import *
+
+import outleap

 from hippolyzer.lib.base.datatypes import UUID, Vector3
 from hippolyzer.lib.base.message.message import Block, Message
@@ -14,10 +17,11 @@ from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
 from hippolyzer.lib.base.network.transport import UDPPacket, Direction
-from hippolyzer.lib.proxy.region import ProxiedRegion
-from hippolyzer.lib.proxy.sessions import SessionManager, Session
 from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope
 from hippolyzer.lib.base.templates import ChatSourceType, ChatType
+if TYPE_CHECKING:
+    from hippolyzer.lib.proxy.sessions import SessionManager, Session
+    from hippolyzer.lib.proxy.region import ProxiedRegion


 class AssetAliasTracker:
@@ -99,47 +103,32 @@ def send_chat(message: Union[bytes, str], channel=0, chat_type=ChatType.NORMAL,
     ))


-def ais_item_to_inventory_data(ais_item: dict):
-    return Block(
-        "InventoryData",
-        ItemID=ais_item["item_id"],
-        FolderID=ais_item["parent_id"],
-        CallbackID=0,
-        CreatorID=ais_item["permissions"]["creator_id"],
-        OwnerID=ais_item["permissions"]["owner_id"],
-        GroupID=ais_item["permissions"]["group_id"],
-        BaseMask=ais_item["permissions"]["base_mask"],
-        OwnerMask=ais_item["permissions"]["owner_mask"],
-        GroupMask=ais_item["permissions"]["group_mask"],
-        EveryoneMask=ais_item["permissions"]["everyone_mask"],
-        NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
-        GroupOwned=0,
-        AssetID=ais_item["asset_id"],
-        Type=ais_item["type"],
-        InvType=ais_item["inv_type"],
-        Flags=ais_item["flags"],
-        SaleType=ais_item["sale_info"]["sale_type"],
-        SalePrice=ais_item["sale_info"]["sale_price"],
-        Name=ais_item["name"],
-        Description=ais_item["desc"],
-        CreationDate=ais_item["created_at"],
-        # Meaningless here
-        CRC=secrets.randbits(32),
-    )
+class MetaBaseAddon(abc.ABCMeta):
+    """
+    Metaclass for BaseAddon that prevents class member assignments from clobbering descriptors
+
+    Without this things like:
+
+    class Foo(BaseAddon):
+        bar: int = GlobalProperty(0)
+
+    Foo.bar = 2
+
+    Won't work as you expect!
+    """
+    def __setattr__(self, key: str, value):
+        try:
+            existing = object.__getattribute__(self, key)
+            if existing and isinstance(existing, BaseAddonProperty):
+                existing.__set__(self, value)
+                return
+        except AttributeError:
+            # If the attribute doesn't exist then it's fine to use the base setattr.
+            pass
+        super().__setattr__(key, value)
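The docstring's failure mode, spelled out: without the metaclass, the class-level assignment below would silently replace the descriptor itself in `Foo.__dict__`; with `MetaBaseAddon`, it is routed through `BaseAddonProperty.__set__` instead (which requires an active context to resolve storage). A minimal sketch:

```python
class Foo(BaseAddon):
    bar: int = GlobalProperty(0)

# Goes through GlobalProperty.__set__() rather than clobbering the
# descriptor, so later reads of Foo.bar still resolve against the
# addon context bucket instead of a plain class attribute.
Foo.bar = 2
```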
 def ais_folder_to_inventory_data(ais_folder: dict):
     return Block(
         "FolderData",
         FolderID=ais_folder["cat_id"],
         ParentID=ais_folder["parent_id"],
         CallbackID=0,
         Type=ais_folder["preferred_type"],
         Name=ais_folder["name"],
     )


-class BaseAddon(abc.ABC):
+class BaseAddon(metaclass=MetaBaseAddon):
     def _schedule_task(self, coro: Coroutine, session=None,
                        region_scoped=False, session_scoped=True, addon_scoped=True):
         session = session or addon_ctx.session.get(None) or None
@@ -183,7 +172,7 @@ class BaseAddon(abc.ABC):
         pass

     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         pass

     def handle_object_killed(self, session: Session, region: ProxiedRegion, obj: Object):
@@ -199,16 +188,19 @@ class BaseAddon(abc.ABC):
         pass

     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         pass

     def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,
                               session: Optional[Session], region: Optional[ProxiedRegion]):
         pass

+    async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
+        pass
+

 _T = TypeVar("_T")
-_U = TypeVar("_U", Session, SessionManager)
+_U = TypeVar("_U", "Session", "SessionManager")


 class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
@@ -219,13 +211,17 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
     session_manager.addon_ctx dict, without any namespacing. Can be accessed either
     through `AddonClass.property_name` or `addon_instance.property_name`.
     """
-    __slots__ = ("name", "default")
+    __slots__ = ("name", "default", "_owner")

     def __init__(self, default=dataclasses.MISSING):
         self.default = default
+        self._owner = None

     def __set_name__(self, owner, name: str):
         self.name = name
+        # Keep track of which addon "owns" this property so that we can shove
+        # the data in a bucket specific to that addon name.
+        self._owner = owner

     def _make_default(self) -> _T:
         if self.default is not dataclasses.MISSING:
@@ -243,21 +239,23 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
         if ctx_obj is None:
             raise AttributeError(
                 f"{self.__class__} {self.name} accessed outside proper context")
+        addon_state = ctx_obj.addon_ctx[self._owner.__name__]
         # Set a default if we have one, otherwise let the keyerror happen.
         # Maybe we should do this at addon initialization instead of on get.
-        if self.name not in ctx_obj.addon_ctx:
+        if self.name not in addon_state:
             default = self._make_default()
             if default is not dataclasses.MISSING:
-                ctx_obj.addon_ctx[self.name] = default
+                addon_state[self.name] = default
             else:
                 raise AttributeError(f"{self.name} is not set")
-        return ctx_obj.addon_ctx[self.name]
+        return addon_state[self.name]

     def __set__(self, _obj, value: _T) -> None:
-        self._get_context_obj().addon_ctx[self.name] = value
+        addon_state = self._get_context_obj().addon_ctx[self._owner.__name__]
+        addon_state[self.name] = value


-class SessionProperty(BaseAddonProperty[_T, Session]):
+class SessionProperty(BaseAddonProperty[_T, "Session"]):
     """
     Property tied to the current session context

@@ -267,7 +265,7 @@ class SessionProperty(BaseAddonProperty[_T, Session]):
         return addon_ctx.session.get()


-class GlobalProperty(BaseAddonProperty[_T, SessionManager]):
+class GlobalProperty(BaseAddonProperty[_T, "SessionManager"]):
     """
     Property tied to the global SessionManager context
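Sketch of how the now-namespaced properties are meant to be used from an addon. It assumes hooks run with a session context pushed via `addon_ctx`; the addon class and counter name are illustrative:

```python
class GreeterAddon(BaseAddon):
    # Stored per-session under this addon's own bucket, so another addon's
    # "greet_count" can no longer collide with ours.
    greet_count: int = SessionProperty(0)

    def handle_lludp_message(self, session, region, message):
        if message.name == "ChatFromSimulator":
            self.greet_count += 1  # resolved against the current session
```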
@@ -15,10 +15,13 @@ import time
 from types import ModuleType
 from typing import *

+import outleap
+
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.helpers import get_mtime
 from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.network.transport import UDPPacket
+from hippolyzer.lib.client.rlv import RLVParser
 from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope, TaskScheduler
@@ -172,7 +175,10 @@ class AddonManager:
     def load_addon_from_path(cls, path, reload=False, raise_exceptions=True):
         path = pathlib.Path(path).absolute()
         mod_name = "hippolyzer.user_addon_%s" % path.stem
-        cls.BASE_ADDON_SPECS.append(importlib.util.spec_from_file_location(mod_name, path))
+        spec = importlib.util.spec_from_file_location(mod_name, path)
+        if not spec:
+            raise ValueError(f"Unable to load {path}")
+        cls.BASE_ADDON_SPECS.append(spec)
         addon_dir = os.path.realpath(pathlib.Path(path).parent.absolute())

         if addon_dir not in sys.path:
@@ -199,9 +205,9 @@ class AddonManager:
     @classmethod
     def _check_hotreloads(cls):
         """Mark addons that rely on changed files for reloading"""
-        for filename, importers in cls.HOTRELOAD_IMPORTERS.items():
-            mtime = get_mtime(filename)
-            if not mtime or mtime == cls.FILE_MTIMES.get(filename, None):
+        for file_path, importers in cls.HOTRELOAD_IMPORTERS.items():
+            mtime = get_mtime(file_path)
+            if not mtime or mtime == cls.FILE_MTIMES.get(file_path, None):
                 continue

             # Mark anything that imported this as dirty too, handling circular
@@ -220,10 +226,15 @@ class AddonManager:

             _dirty_importers(importers)

+            if file_path not in cls.BASE_ADDON_SPECS:
+                # Make sure we won't reload importers in a loop if this is actually something
+                # that was dynamically imported, where `hot_reload()` might not be called again!
+                cls.FILE_MTIMES[file_path] = mtime
+
     @classmethod
     def hot_reload(cls, mod: Any, require_addons_loaded=False):
         # Solely to trick the type checker because ModuleType doesn't apply where it should
-        # and Protocols aren't well supported yet.
+        # and Protocols aren't well-supported yet.
         imported_mod: ModuleType = mod
         imported_file = imported_mod.__file__
         # Mark the caller as having imported (and being dependent on) `module`
@@ -338,11 +349,11 @@ class AddonManager:
         cls.SCHEDULER.kill_matching_tasks(lifetime_mask=TaskLifeScope.ADDON, creator=addon)

     @classmethod
-    def _call_all_addon_hooks(cls, hook_name, *args, **kwargs):
+    def _call_all_addon_hooks(cls, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
         for module in cls.FRESH_ADDON_MODULES.values():
             if not module:
                 continue
-            ret = cls._call_module_hooks(module, hook_name, *args, **kwargs)
+            ret = cls._call_module_hooks(module, hook_name, *args, call_async=call_async, **kwargs)
             if ret:
                 return ret

@@ -373,15 +384,15 @@ class AddonManager:
         return commands

     @classmethod
-    def _call_module_hooks(cls, module, hook_name, *args, **kwargs):
+    def _call_module_hooks(cls, module, hook_name, *args, call_async=False, **kwargs):
         for addon in cls._get_module_addons(module):
-            ret = cls._try_call_hook(addon, hook_name, *args, **kwargs)
+            ret = cls._try_call_hook(addon, hook_name, *args, call_async=call_async, **kwargs)
             if ret:
                 return ret
-        return cls._try_call_hook(module, hook_name, *args, **kwargs)
+        return cls._try_call_hook(module, hook_name, *args, call_async=call_async, **kwargs)

     @classmethod
-    def _try_call_hook(cls, addon, hook_name, *args, **kwargs):
+    def _try_call_hook(cls, addon, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
         if cls._SUBPROCESS:
             return

@@ -391,6 +402,20 @@ class AddonManager:
         if not hook_func:
             return
         try:
+            if call_async:
+                old_hook_func = hook_func
+
+                # Wrapper so we can invoke an async hook synchronously.
+                def _wrapper(*w_args, **w_kwargs):
+                    cls.SCHEDULER.schedule_task(
+                        old_hook_func(*w_args, **w_kwargs),
+                        scope=TaskLifeScope.ADDON,
+                        creator=addon,
+                    )
+                    # Fall through to any other handlers as well,
+                    # async handlers don't chain.
+                    return None
+                hook_func = _wrapper
             return hook_func(*args, **kwargs)
         except:
             logging.exception("Exploded in %r's %s hook" % (addon, hook_name))
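With `call_async=True`, an `async def` hook is scheduled on the addon task scheduler rather than awaited inline, and its return value never marks the event as handled. A sketch of what that enables for addon authors (the helper name is hypothetical):

```python
class LEAPWatcher(BaseAddon):
    async def handle_leap_client_added(self, session_manager, leap_client):
        # Runs as a TaskLifeScope.ADDON task: killed on addon unload,
        # with exceptions logged by the scheduler rather than chaining
        # into other handlers.
        await set_up_leap_client(leap_client)  # hypothetical helper
```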
@@ -428,26 +453,36 @@ class AddonManager:
                     raise
             return True
-        if message.name == "ChatFromSimulator" and "ChatData" in message:
-            chat: str = message["ChatData"]["Message"]
-            chat_type: int = message["ChatData"]["ChatType"]
-            # RLV-style OwnerSay?
-            if chat and chat.startswith("@") and chat_type == 8:
-                # RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
-                options, _, param = chat.rpartition("=")
-                cmd, _, options = options.lstrip("@").partition(":")
-                options = options.split(";")
+        if RLVParser.is_rlv_message(message):
+            # RLV allows putting multiple commands into one message, blindly splitting on ",".
+            all_cmds_handled = True
+            chat: str = message["ChatData"]["Message"]
             source = message["ChatData"]["SourceID"]
-            try:
-                with addon_ctx.push(session, region):
-                    handled = cls._call_all_addon_hooks("handle_rlv_command",
-                                                        session, region, source, cmd, options, param)
-                    if handled:
-                        region.circuit.drop_message(message)
-                        return True
-            except:
-                LOG.exception(f"Failed while handling command {chat!r}")
-                if not cls._SWALLOW_ADDON_EXCEPTIONS:
-                    raise
+            for command in RLVParser.parse_chat(chat):
+                try:
+                    with addon_ctx.push(session, region):
+                        handled = cls._call_all_addon_hooks(
+                            "handle_rlv_command",
+                            session,
+                            region,
+                            source,
+                            command.behaviour,
+                            command.options,
+                            command.param,
+                        )
+                        if handled:
+                            region.circuit.drop_message(message)
+                        else:
+                            all_cmds_handled = False
+                except:
+                    LOG.exception(f"Failed while handling command {command!r}")
+                    all_cmds_handled = False
+                    if not cls._SWALLOW_ADDON_EXCEPTIONS:
+                        raise
+            # Drop the chat message if all commands it contained were handled by an addon
+            if all_cmds_handled:
+                return True

         with addon_ctx.push(session, region):
             return cls._call_all_addon_hooks("handle_lludp_message", session, region, message)
@@ -528,9 +563,9 @@ class AddonManager:

     @classmethod
     def handle_object_updated(cls, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         with addon_ctx.push(session, region):
-            return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props)
+            return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props, msg)

     @classmethod
     def handle_object_killed(cls, session: Session, region: ProxiedRegion, obj: Object):
@@ -560,3 +595,7 @@ class AddonManager:
         with addon_ctx.push(session, region):
             return cls._call_all_addon_hooks("handle_proxied_packet", session_manager,
                                              packet, session, region)
+
+    @classmethod
+    def handle_leap_client_added(cls, session_manager: SessionManager, leap_client: outleap.LEAPClient):
+        return cls._call_all_addon_hooks("handle_leap_client_added", session_manager, leap_client, call_async=True)
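An addon consuming the new per-command dispatch might look like this; `behaviour`, `options`, and `param` arrive pre-split by `RLVParser.parse_chat()`, and the containing chat message is only dropped once every command in it was handled:

```python
class RLVLogger(BaseAddon):
    def handle_rlv_command(self, session, region, source, behaviour, options, param):
        print(f"RLV from {source}: @{behaviour}:{';'.join(options)}={param}")
        return False  # not handled; let the command reach the viewer
```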
hippolyzer/lib/proxy/asset_uploader.py (new file, +39)
@@ -0,0 +1,39 @@
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.inventory import InventoryItem
+from hippolyzer.lib.base.message.message import Message, Block
+from hippolyzer.lib.base.network.transport import Direction
+from hippolyzer.lib.client.asset_uploader import AssetUploader
+
+
+class ProxyAssetUploader(AssetUploader):
+    async def _handle_upload_complete(self, resp_payload: dict):
+        # Check if this is a failure response first, raising if it is
+        await super()._handle_upload_complete(resp_payload)
+
+        # Fetch enough data from AIS to tell the viewer about the new inventory item
+        session = self._region.session()
+        item_id = resp_payload["new_inventory_item"]
+        ais_req_data = {
+            "items": [
+                {
+                    "owner_id": session.agent_id,
+                    "item_id": item_id,
+                }
+            ]
+        }
+        async with self._region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
+            ais_item = InventoryItem.from_llsd((await resp.read_llsd())["items"][0], flavor="ais")
+
+        # Got it, ship it off to the viewer
+        message = Message(
+            "UpdateCreateInventoryItem",
+            Block(
+                "AgentData",
+                AgentID=session.agent_id,
+                SimApproved=1,
+                TransactionID=UUID.random(),
+            ),
+            ais_item.to_inventory_data(),
+            direction=Direction.IN
+        )
+        self._region.circuit.send(message)
@@ -16,6 +16,8 @@ import mitmproxy.http
 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
 from hippolyzer.lib.proxy.caps import CapData, CapType
@@ -32,6 +34,9 @@ def apply_security_monkeypatches():
 apply_security_monkeypatches()


+LOG = logging.getLogger(__name__)
+
+
 class MITMProxyEventManager:
     """
     Handles HTTP request and response events from the mitmproxy process
@@ -42,7 +47,7 @@ class MITMProxyEventManager:
         "UpdateNotecardAgentInventory", "UpdateNotecardTaskInventory",
         "UpdateScriptAgent", "UpdateScriptTask",
         "UpdateSettingsAgentInventory", "UpdateSettingsTaskInventory",
-        "UploadBakedTexture",
+        "UploadBakedTexture", "UploadAgentProfileImage",
     }

     def __init__(self, session_manager: SessionManager, flow_context: HTTPFlowContext):
@@ -58,7 +63,7 @@ class MITMProxyEventManager:
             try:
                 await self.pump_proxy_event()
             except:
-                logging.exception("Exploded when handling parsed packets")
+                LOG.exception("Exploded when handling parsed packets")

     async def pump_proxy_event(self):
         try:
@@ -140,7 +145,7 @@ class MITMProxyEventManager:
                     # Both the wrapper request and the actual asset server request went through
                     # the proxy. Don't bother trying the redirect strategy anymore.
                     self._asset_server_proxied = True
-                    logging.warning("noproxy not used, switching to URI rewrite strategy")
+                    LOG.warning("noproxy not used, switching to URI rewrite strategy")
             elif cap_data and cap_data.cap_name == "EventQueueGet":
                 # HACK: The sim's EQ acking mechanism doesn't seem to actually work.
                 # if the client drops the connection due to timeout before we can
@@ -151,7 +156,7 @@ class MITMProxyEventManager:
                 eq_manager = cap_data.region().eq_manager
                 cached_resp = eq_manager.get_cached_poll_response(req_ack_id)
                 if cached_resp:
-                    logging.warning("Had to serve a cached EventQueueGet due to client desync")
+                    LOG.warning("Had to serve a cached EventQueueGet due to client desync")
                     flow.response = mitmproxy.http.Response.make(
                         200,
                         llsd.format_xml(cached_resp),
@@ -215,7 +220,7 @@ class MITMProxyEventManager:
             try:
                 message_logger.log_http_response(flow)
             except:
-                logging.exception("Failed while logging HTTP flow")
+                LOG.exception("Failed while logging HTTP flow")

         # Don't process responses for requests or responses injected by the proxy.
         # We already processed it, it came from us!
@@ -224,6 +229,11 @@ class MITMProxyEventManager:

         status = flow.response.status_code
         cap_data: Optional[CapData] = flow.metadata["cap_data"]
+        if not cap_data:
+            # Make sure there's always cap data attached to the flow, even if it's
+            # empty. Some consumers expect it to always be there, when it might not
+            # be if the proxy barfed while handling the request.
+            cap_data = flow.metadata["cap_data"] = CapData()

         if status == 200 and cap_data and cap_data.cap_name == "FirestormBridge":
             # Fake FirestormBridge cap based on a bridge-like response coming from
@@ -269,13 +279,13 @@ class MITMProxyEventManager:

             if cap_data.cap_name == "Seed":
                 parsed = llsd.parse_xml(flow.response.content)
-                logging.debug("Got seed cap for %r : %r" % (cap_data, parsed))
+                LOG.debug("Got seed cap for %r : %r" % (cap_data, parsed))
                 region.update_caps(parsed)

                 # On LL's grid these URIs aren't unique across sessions or regions,
                 # so we get request attribution by replacing them with a unique
                 # alias URI.
-                logging.debug("Replacing GetMesh caps with wrapped versions")
+                LOG.debug("Replacing GetMesh caps with wrapped versions")
                 wrappable_caps = {"GetMesh2", "GetMesh", "GetTexture", "ViewerAsset"}
                 for cap_name in wrappable_caps:
                     if cap_name in parsed:
@@ -310,7 +320,7 @@ class MITMProxyEventManager:
             if "uploader" in parsed:
                 region.register_cap(cap_data.cap_name + "Uploader", parsed["uploader"], CapType.TEMPORARY)
         except:
-            logging.exception("OOPS, blew up in HTTP proxy!")
+            LOG.exception("OOPS, blew up in HTTP proxy!")

     def _handle_login_flow(self, flow: HippoHTTPFlow):
         resp = xmlrpc.client.loads(flow.response.content)[0][0]  # type: ignore
@@ -319,20 +329,30 @@ class MITMProxyEventManager:
         flow.cap_data = CapData("LoginRequest", session=weakref.ref(sess))

     def _handle_eq_event(self, session: Session, region: ProxiedRegion, event: Dict[str, Any]):
-        logging.debug("Event received on %r: %r" % (self, event))
+        LOG.debug("Event received on %r: %r" % (self, event))
         message_logger = self.session_manager.message_logger
         if message_logger:
             message_logger.log_eq_event(session, region, event)

+        if self.llsd_message_serializer.can_handle(event["message"]):
+            msg = self.llsd_message_serializer.deserialize(event)
+        else:
+            msg = Message.from_eq_event(event)
+        msg.sender = region.circuit_addr
+        msg.direction = Direction.IN
+
+        try:
+            region.message_handler.handle(msg)
+        except:
+            LOG.exception("Failed while handling EQ message")
+
         handle_event = AddonManager.handle_eq_event(session, region, event)
         if handle_event is True:
             # Addon handled the event and didn't want it sent to the viewer
             return True

-        msg = None
         # Handle events that inform us about new regions
         sim_addr, sim_handle, sim_seed = None, None, None
-        if self.llsd_message_serializer.can_handle(event["message"]):
-            msg = self.llsd_message_serializer.deserialize(event)
         # Sim is asking us to talk to a neighbour
         if event["message"] == "EstablishAgentCommunication":
             ip_split = event["body"]["sim-ip-and-port"].split(":")
@@ -8,6 +8,7 @@ import queue
 import typing
 import uuid
 import weakref
+from typing import Iterable

 import mitmproxy.certs
 import mitmproxy.ctx
@@ -15,20 +16,30 @@ import mitmproxy.log
 import mitmproxy.master
 import mitmproxy.options
 import mitmproxy.proxy
+from cryptography import x509
+from cryptography.x509 import GeneralNames
 from mitmproxy.addons import core, clientplayback, proxyserver, next_layer, disable_h2c
+from mitmproxy.certs import CertStoreEntry
 from mitmproxy.http import HTTPFlow
 from mitmproxy.proxy.layers import tls
 import OpenSSL

-from hippolyzer.lib.base.helpers import get_resource_filename
+from hippolyzer.lib.base.helpers import get_resource_filename, create_logged_task
 from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher
 from hippolyzer.lib.proxy.caps import SerializedCapData


 class SLCertStore(mitmproxy.certs.CertStore):
-    def get_cert(self, commonname: typing.Optional[str], sans: typing.List[str], *args, **kwargs):
+    def get_cert(
+        self,
+        commonname: str | None,
+        sans: Iterable[x509.GeneralName],
+        organization: str | None = None,
+        *args,
+        **kwargs
+    ) -> CertStoreEntry:
         entry = super().get_cert(commonname, sans, *args, **kwargs)
-        cert, privkey, chain = entry.cert, entry.privatekey, entry.chain_file
+        cert, privkey, chain, chain_certs = entry.cert, entry.privatekey, entry.chain_file, entry.chain_certs
         x509 = cert.to_pyopenssl()
         # The cert must have a subject key ID or the viewer will reject it.
         for i in range(0, x509.get_extension_count()):
@@ -48,10 +59,10 @@ class SLCertStore(mitmproxy.certs.CertStore):
         ])
         x509.sign(OpenSSL.crypto.PKey.from_cryptography_key(privkey), "sha256")  # type: ignore
         new_entry = mitmproxy.certs.CertStoreEntry(
-            mitmproxy.certs.Cert.from_pyopenssl(x509), privkey, chain
+            mitmproxy.certs.Cert.from_pyopenssl(x509), privkey, chain, chain_certs,
         )
         # Replace the cert that was created in the base `get_cert()` with our modified cert
-        self.certs[(commonname, tuple(sans))] = new_entry
+        self.certs[(commonname, GeneralNames(sans))] = new_entry
         self.expire_queue.pop(-1)
         self.expire(new_entry)
         return new_entry
@@ -105,21 +116,9 @@ class IPCInterceptionAddon:
         self.to_proxy_queue: multiprocessing.Queue = flow_context.to_proxy_queue
         self.shutdown_signal: multiprocessing.Event = flow_context.shutdown_signal

-    def add_log(self, entry: mitmproxy.log.LogEntry):
-        if entry.level == "debug":
-            logging.debug(entry.msg)
-        elif entry.level in ("alert", "info"):
-            # TODO: All mitmproxy infos are basically debugs, should
-            #  probably give these dedicated loggers
-            logging.debug(entry.msg)
-        elif entry.level == "warn":
-            logging.warning(entry.msg)
-        elif entry.level == "error":
-            logging.error(entry.msg)
-
     def running(self):
         # register to pump the events or something here
-        asyncio.create_task(self._pump_callbacks())
+        create_logged_task(self._pump_callbacks(), "Pump HTTP proxy callbacks")
         # Tell the main process mitmproxy is ready to handle requests
         self.mitmproxy_ready.set()

@@ -236,7 +235,7 @@ class SLMITMMaster(mitmproxy.master.Master):
     )


-def create_proxy_master(host, port, flow_context: HTTPFlowContext):  # pragma: no cover
+def create_http_proxy(host, port, flow_context: HTTPFlowContext, ssl_insecure=False):  # pragma: no cover
     opts = mitmproxy.options.Options()
     master = SLMITMMaster(flow_context, opts)

@@ -251,10 +250,6 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: n
         ssl_verify_upstream_trusted_ca=ca_bundle,
         listen_host=host,
         listen_port=port,
+        ssl_insecure=ssl_insecure,
     )
     return master
-
-
-def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext):  # pragma: no cover
-    master = create_proxy_master(bind_host, port, flow_context)
-    return master
hippolyzer/lib/proxy/inventory_manager.py (new file, +107)
@@ -0,0 +1,107 @@
+import asyncio
+import datetime as dt
+import functools
+import logging
+from typing import *
+
+from hippolyzer.lib.base import llsd
+from hippolyzer.lib.base.helpers import get_mtime, create_logged_task
+from hippolyzer.lib.client.inventory_manager import InventoryManager
+from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
+from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs
+
+if TYPE_CHECKING:
+    from hippolyzer.lib.proxy.sessions import Session
+
+
+LOG = logging.getLogger(__name__)
+
+
+class ProxyInventoryManager(InventoryManager):
+    _session: "Session"
+
+    def __init__(self, session: "Session"):
+        # These handlers all need their processing deferred until the cache has been loaded.
+        # Since cache is loaded asynchronously, the viewer may get ahead of us due to parsing
+        # the cache faster and start requesting inventory details we can't do anything with yet.
+        self._handle_update_create_inventory_item = self._wrap_with_cache_defer(
+            self._handle_update_create_inventory_item
+        )
+        self._handle_remove_inventory_item = self._wrap_with_cache_defer(
+            self._handle_remove_inventory_item
+        )
+        self._handle_remove_inventory_folder = self._wrap_with_cache_defer(
+            self._handle_remove_inventory_folder
+        )
+        self._handle_bulk_update_inventory = self._wrap_with_cache_defer(
+            self._handle_bulk_update_inventory
+        )
+        self._handle_move_inventory_item = self._wrap_with_cache_defer(
+            self._handle_move_inventory_item
+        )
+        self.process_aisv3_response = self._wrap_with_cache_defer(
+            self.process_aisv3_response
+        )
+
+        # Base constructor after, because it registers handlers to specific methods, which need to
+        # be wrapped before they're registered. Handlers are registered by method reference,
+        # not by name!
+        super().__init__(session)
+        session.http_message_handler.subscribe("InventoryAPIv3", self._handle_aisv3_flow)
+        newest_cache = None
+        newest_timestamp = dt.datetime(year=1970, month=1, day=1, tzinfo=dt.timezone.utc)
+        # So consumers know when the inventory should be complete
+        self.cache_loaded: asyncio.Event = asyncio.Event()
+        self._cache_deferred_calls: List[Tuple[Callable[..., None], Tuple]] = []
+        # Look for the newest version of the cached inventory and use that.
+        # Not foolproof, but close enough if we're not sure what viewer is being used.
+        for cache_dir in iter_viewer_cache_dirs():
+            inv_cache_path = cache_dir / (str(session.agent_id) + ".inv.llsd.gz")
+            if inv_cache_path.exists():
+                mod = get_mtime(inv_cache_path)
+                if not mod:
+                    continue
+                mod_ts = dt.datetime.fromtimestamp(mod, dt.timezone.utc)
+                if mod_ts <= newest_timestamp:
+                    continue
+                newest_timestamp = mod_ts
+                newest_cache = inv_cache_path
+
+        if newest_cache:
+            cache_load_fut = asyncio.ensure_future(asyncio.to_thread(self.load_cache, newest_cache))
+            # Meh. Don't care if it fails.
+            cache_load_fut.add_done_callback(lambda *args: self.cache_loaded.set())
+            create_logged_task(self._apply_deferred_after_loaded(), "Apply deferred inventory", LOG)
+        else:
+            self.cache_loaded.set()
+
+    async def _apply_deferred_after_loaded(self):
+        await self.cache_loaded.wait()
+        LOG.info("Applying deferred inventory calls")
+        deferred_calls = self._cache_deferred_calls[:]
+        self._cache_deferred_calls.clear()
+        for func, args in deferred_calls:
+            try:
+                func(*args)
+            except:
+                LOG.exception("Failed to apply deferred inventory call")
+
+    def _wrap_with_cache_defer(self, func: Callable[..., None]):
+        @functools.wraps(func)
+        def wrapped(*inner_args):
+            if not self.cache_loaded.is_set():
+                self._cache_deferred_calls.append((func, inner_args))
+            else:
+                func(*inner_args)
+        return wrapped
+
+    def _handle_aisv3_flow(self, flow: HippoHTTPFlow):
+        if flow.response.status_code < 200 or flow.response.status_code > 300:
+            # Probably not a success
+            return
+        content_type = flow.response.headers.get("Content-Type", "")
+        if "llsd" not in content_type:
+            # Okay, probably still some kind of error...
+            return
+
+        # Try and add anything from the response into the model
+        self.process_aisv3_response(llsd.parse(flow.response.content))
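The deferral pattern above in miniature, separated from the inventory specifics: calls that arrive before the cache-load event is set are queued, then replayed in order afterwards.

```python
import asyncio
import functools
from typing import Callable, List, Tuple


def wrap_with_defer(loaded: asyncio.Event, pending: List[Tuple[Callable, tuple]],
                    func: Callable) -> Callable:
    @functools.wraps(func)
    def wrapped(*args):
        if not loaded.is_set():
            pending.append((func, args))  # replayed once loading finishes
        else:
            func(*args)
    return wrapped
```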
@@ -161,6 +161,8 @@ class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
|
||||
region.mark_dead()
|
||||
elif message.name == "RegionHandshake":
|
||||
region.name = str(message["RegionInfo"][0]["SimName"])
|
||||
elif message.name == "AgentDataUpdate" and self.session:
|
||||
self.session.active_group = message["AgentData"]["ActiveGroupID"]
|
||||
|
||||
# Send the message if it wasn't explicitly dropped or sent before
|
||||
if not message.finalized:
|
||||
|
||||
@@ -16,10 +16,14 @@ import weakref
from defusedxml import minidom

from hippolyzer.lib.base import serialization as se, llsd
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
from hippolyzer.lib.base.helpers import bytes_escape
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
    EnumFieldSpecifier, MatchResult
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow

@@ -401,7 +405,7 @@ class AbstractMessageLogEntry(abc.ABC):
        beautified = minidom.parseString(content).toprettyxml(indent="  ")
        # kill blank lines. will break cdata sections. meh.
        beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
-       return re.sub(r'<([\w]+)>\s*</\1>', r'<\1></\1>',
+       return re.sub(r'<(\w+)>\s*</\1>', r'<\1></\1>',
                      beautified, flags=re.MULTILINE)
@@ -522,7 +526,7 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
        buf.write(bytes(headers).decode("utf8", errors="replace"))
        buf.write("\r\n")

-       buf.write(message_body)
+       buf.write(message_body or "")
        return buf.getvalue()

    def request(self, beautify=False, replacements=None):
@@ -549,6 +553,12 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
        return self._summary

    def _guess_content_type(self, message):
+       # SL's login service lies and says that its XML-RPC response is LLSD+XML.
+       # It is not, and it blows up the parser. It's been broken ever since the
+       # login rewrite and a fix is likely not forthcoming. I'm sick of seeing
+       # the traceback, so just hack around it.
+       if self.name == "LoginRequest":
+           return "application/xml"
        content_type = message.headers.get("Content-Type", "")
        if not message.content or content_type.startswith("application/llsd"):
            return content_type
@@ -608,6 +618,19 @@ class EQMessageLogEntry(AbstractMessageLogEntry):
        return "EQ"

    def request(self, beautify=False, replacements=None):
+       # TODO: This is a bit of a hack! Templated messages can be sent over the EQ, so let's
+       # display them as template messages if that's what they are.
+       if self.event['message'] in DEFAULT_TEMPLATE_DICT.message_templates:
+           msg = LLSDMessageSerializer().deserialize(self.event)
+           msg.synthetic = True
+           msg.send_flags = PacketFlags.EQ
+           msg.direction = Direction.IN
+           # Annoyingly, templated messages sent over the EQ can have extra fields not specified
+           # in the template, and this is often the case. ParcelProperties has fields that aren't
+           # in the template. Luckily, we don't really care about extra fields, we just may not
+           # be able to automatically decode U32 and friends without the hint from the template
+           # that that is what they are.
+           return HumanMessageSerializer.to_human_string(msg, replacements, beautify)
        return f'EQ {self.event["message"]}\n\n{self._format_llsd(self.event["body"])}'

    @property
@@ -48,6 +48,7 @@ class ProxyObjectManager(ClientObjectManager):
            "RequestMultipleObjects",
            self._handle_request_multiple_objects,
        )
+       region.http_message_handler.subscribe("RenderMaterials", self._handle_render_materials)

    def load_cache(self):
        if not self.may_use_vo_cache or self.cache_loaded:

@@ -100,6 +101,13 @@ class ProxyObjectManager(ClientObjectManager):
        # Remove any queued cache misses that the viewer just requested for itself
        self.queued_cache_misses -= {b["ID"] for b in msg["ObjectData"]}

+   def _handle_render_materials(self, flow: HippoHTTPFlow):
+       if flow.response.status_code != 200:
+           return
+       if flow.request.method not in ("GET", "POST"):
+           return
+       self._process_materials_response(flow.response.content)
+

class ProxyWorldObjectManager(ClientWorldObjectManager):
    _session: Session
@@ -133,8 +141,9 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
        region_mgr.queued_cache_misses |= missing_locals
        region_mgr.request_missed_cached_objects_soon()

-   def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
-       super()._run_object_update_hooks(obj, updated_props, update_type)
+   def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
+                                msg: Optional[Message]):
+       super()._run_object_update_hooks(obj, updated_props, update_type, msg)
        region = self._session.region_by_handle(obj.RegionHandle)
        if self._settings.ALLOW_AUTO_REQUEST_OBJECTS:
            if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:

@@ -145,7 +154,7 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
            # have no way to get a sitting agent's true region location, even if it's ourselves.
            region.objects.queued_cache_misses.add(obj.ParentID)
            region.objects.request_missed_cached_objects_soon()
-       AddonManager.handle_object_updated(self._session, region, obj, updated_props)
+       AddonManager.handle_object_updated(self._session, region, obj, updated_props, msg)

    def _run_kill_object_hooks(self, obj: Object):
        super()._run_kill_object_hooks(obj)
18 hippolyzer/lib/proxy/parcel_manager.py Normal file
@@ -0,0 +1,18 @@
from typing import *

from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.client.parcel_manager import ParcelManager

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion


class ProxyParcelManager(ParcelManager):
    def __init__(self, region: "ProxiedRegion"):
        super().__init__(proxify(region))
        # Handle ParcelProperties messages that we didn't specifically ask for
        self._region.message_handler.subscribe("ParcelProperties", self._handle_parcel_properties)

    def _handle_parcel_properties(self, msg: Message):
        self._process_parcel_properties(msg)
        return None
@@ -1,6 +1,5 @@
from __future__ import annotations

import logging
import hashlib
import uuid
import weakref

@@ -9,12 +8,11 @@ import urllib.parse

import multidict

-from hippolyzer.lib.base.datatypes import Vector3, UUID
+from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.client.state import BaseClientRegion
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.circuit import ProxiedCircuit

@@ -22,6 +20,8 @@ from hippolyzer.lib.proxy.caps import CapType
from hippolyzer.lib.proxy.object_manager import ProxyObjectManager
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.proxy.asset_uploader import ProxyAssetUploader
from hippolyzer.lib.proxy.parcel_manager import ProxyParcelManager

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.sessions import Session
@@ -43,14 +43,15 @@ class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):


class ProxiedRegion(BaseClientRegion):
    circuit: Optional[ProxiedCircuit]

    def __init__(self, circuit_addr, seed_cap: str, session: Session, handle=None):
        super().__init__()
        # A client may make a Seed request twice, and may get back two (valid!) sets of
        # Cap URIs. We need to be able to look up both, so MultiDict is necessary.
        self.handle: Optional[int] = handle
        self._name: Optional[str] = None
        # TODO: when does this change?
        self.cache_id: Optional[UUID] = None
        self.circuit: Optional[ProxiedCircuit] = None
        self.circuit_addr = circuit_addr
        self.caps = CapsMultiDict()
        # Reverse lookup for URL -> cap data

@@ -66,34 +67,14 @@ class ProxiedRegion(BaseClientRegion):
        self.objects: ProxyObjectManager = ProxyObjectManager(self, may_use_vo_cache=True)
        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
        self.asset_uploader = ProxyAssetUploader(proxify(self))
        self.parcel_manager = ProxyParcelManager(proxify(self))
        self._recalc_caps()

    @property
    def name(self):
        if self._name:
            return self._name
        return "Pending %r" % (self.circuit_addr,)

    @name.setter
    def name(self, val):
        self._name = val

    @property
-   def cap_urls(self) -> multidict.MultiDict[str, str]:
+   def cap_urls(self) -> multidict.MultiDict[str]:
        return multidict.MultiDict((x, y[1]) for x, y in self.caps.items())

    @property
    def global_pos(self) -> Vector3:
        if self.handle is None:
            raise ValueError("Can't determine global region position without handle")
        return handle_to_global_pos(self.handle)

    @property
    def is_alive(self):
        if not self.circuit:
            return False
        return self.circuit.is_alive

    def update_caps(self, caps: Mapping[str, str]):
        for cap_name, cap_url in caps.items():
            if isinstance(cap_url, str) and cap_url.startswith('http'):
@@ -156,15 +137,9 @@ class ProxiedRegion(BaseClientRegion):
        return None

    def mark_dead(self):
        logging.info("Marking %r dead" % self)
        if self.circuit:
            self.circuit.is_alive = False
        self.objects.clear()
        super().mark_dead()
        self.eq_manager.clear()

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.name)


class EventQueueManager:
    def __init__(self, region: ProxiedRegion):

@@ -187,7 +162,7 @@ class EventQueueManager:
        # over the EQ. That will allow us to shove our own event onto the response once it comes in,
        # otherwise we have to wait until the EQ legitimately returns 200 due to a new event.
        # May or may not work in OpenSim.
-       circuit.send_message(Message(
+       circuit.send(Message(
            'PlacesQuery',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id, QueryID=UUID()),
            Block('TransactionData', TransactionID=UUID()),
@@ -1,23 +1,27 @@
from __future__ import annotations

import collections
import dataclasses
import datetime
import functools
import logging
import multiprocessing
import weakref
from typing import *
from weakref import ref

from outleap import LEAPClient

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message_handler import MessageHandler
-from hippolyzer.lib.client.state import BaseClientSession
+from hippolyzer.lib.base.network.transport import ADDR_TUPLE
+from hippolyzer.lib.client.state import BaseClientSession, BaseClientSessionManager
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext
from hippolyzer.lib.proxy.caps import is_asset_server_cap_name, CapData, CapType
from hippolyzer.lib.proxy.inventory_manager import ProxyInventoryManager
from hippolyzer.lib.proxy.namecache import ProxyNameCache
from hippolyzer.lib.proxy.object_manager import ProxyWorldObjectManager
from hippolyzer.lib.proxy.region import ProxiedRegion

@@ -29,27 +33,34 @@ if TYPE_CHECKING:


class Session(BaseClientSession):
-   def __init__(self, session_id, secure_session_id, agent_id, circuit_code,
+   regions: MutableSequence[ProxiedRegion]
+   region_by_handle: Callable[[int], Optional[ProxiedRegion]]
+   region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[ProxiedRegion]]
+   main_region: Optional[ProxiedRegion]
+   REGION_CLS = ProxiedRegion
+
+   def __init__(self, id, secure_session_id, agent_id, circuit_code,
                 session_manager: Optional[SessionManager], login_data=None):
-       self.login_data = login_data or {}
-       self.pending = True
-       self.id: UUID = session_id
-       self.secure_session_id: UUID = secure_session_id
-       self.agent_id: UUID = agent_id
-       self.circuit_code = circuit_code
-       self.global_caps = {}
+       super().__init__(
+           id=id,
+           secure_session_id=secure_session_id,
+           agent_id=agent_id,
+           circuit_code=circuit_code,
+           session_manager=session_manager,
+           login_data=login_data,
+       )
-       # Bag of arbitrary data addons can use to persist data across addon reloads
-       self.addon_ctx = {}
-       self.session_manager: SessionManager = session_manager or None
+       # Each addon name gets its own separate dict within this dict
+       self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
+       self.session_manager: SessionManager = session_manager
        self.selected: SelectionModel = SelectionModel()
        self.regions: List[ProxiedRegion] = []
        self.started_at = datetime.datetime.now()
        self.message_handler: MessageHandler[Message, str] = MessageHandler()
        self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
        self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
        self.inventory = ProxyInventoryManager(proxify(self))
        self.leap_client: Optional[LEAPClient] = None
        # Base path of a newview type cache directory for this session
        self.cache_dir: Optional[str] = None
        self._main_region = None

    @property
    def global_addon_ctx(self):
@@ -57,77 +68,13 @@ class Session(BaseClientSession):
            return {}
        return self.session_manager.addon_ctx

-   @classmethod
-   def from_login_data(cls, login_data, session_manager):
-       sess = Session(
-           session_id=UUID(login_data["session_id"]),
-           secure_session_id=UUID(login_data["secure_session_id"]),
-           agent_id=UUID(login_data["agent_id"]),
-           circuit_code=int(login_data["circuit_code"]),
-           session_manager=session_manager,
-           login_data=login_data,
-       )
-       appearance_service = login_data.get("agent_appearance_service")
-       map_image_service = login_data.get("map-server-url")
-       if appearance_service:
-           sess.global_caps["AppearanceService"] = appearance_service
-       if map_image_service:
-           sess.global_caps["MapImageService"] = map_image_service
-       # Login data also has details about the initial sim
-       sess.register_region(
-           circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
-           handle=(login_data["region_x"] << 32) | login_data["region_y"],
-           seed_url=login_data["seed_capability"],
-       )
-       return sess
-
-   @property
-   def main_region(self) -> Optional[ProxiedRegion]:
-       if self._main_region and self._main_region() in self.regions:
-           return self._main_region()
-       return None
-
-   @main_region.setter
-   def main_region(self, val: ProxiedRegion):
-       self._main_region = weakref.ref(val)
-
-   def register_region(self, circuit_addr: Optional[Tuple[str, int]] = None,
+   def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None,
                        seed_url: Optional[str] = None,
                        handle: Optional[int] = None) -> ProxiedRegion:
-       if not any((circuit_addr, seed_url)):
-           raise ValueError("One of circuit_addr and seed_url must be defined!")
-
-       for region in self.regions:
-           if region.circuit_addr == circuit_addr:
-               if seed_url and region.cap_urls.get("Seed") != seed_url:
-                   region.update_caps({"Seed": seed_url})
-               if handle:
-                   region.handle = handle
-               return region
-           if seed_url and region.cap_urls.get("Seed") == seed_url:
-               return region
-
-       if not circuit_addr:
-           raise ValueError("Can't create region without circuit addr!")
-
-       logging.info("Registering region for %r" % (circuit_addr,))
-       region = ProxiedRegion(circuit_addr, seed_url, self, handle=handle)
-       self.regions.append(region)
+       region: ProxiedRegion = super().register_region(circuit_addr, seed_url, handle)  # type: ignore
        AddonManager.handle_region_registered(self, region)
        return region

-   def region_by_circuit_addr(self, circuit_addr) -> Optional[ProxiedRegion]:
-       for region in self.regions:
-           if region.circuit_addr == circuit_addr and region.circuit:
-               return region
-       return None
-
-   def region_by_handle(self, handle: int) -> Optional[ProxiedRegion]:
-       for region in self.regions:
-           if region.handle == handle:
-               return region
-       return None
-
    def open_circuit(self, near_addr, circuit_addr, transport):
        for region in self.regions:
            if region.circuit_addr == circuit_addr:

@@ -167,23 +114,19 @@ class Session(BaseClientSession):
                return CapData(cap_name, ref(region), ref(self), base_url, cap_type)
        return None

    def transaction_to_assetid(self, transaction_id: UUID):
        return UUID.combine(transaction_id, self.secure_session_id)
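
Note on transaction_to_assetid: for transaction-based uploads the grid derives the new asset's ID deterministically from the client-chosen transaction ID and the secret secure_session_id, which is what lets the proxy predict asset IDs. UUID.combine itself isn't shown in this diff; assuming it follows the usual SL scheme of hashing the two UUIDs together, an illustrative stand-in would be:

import hashlib
import uuid

def combine_uuids(transaction_id: uuid.UUID, secure_session_id: uuid.UUID) -> uuid.UUID:
    # MD5 yields 16 bytes, exactly the size of a UUID (assumed scheme, for illustration)
    digest = hashlib.md5(transaction_id.bytes + secure_session_id.bytes).digest()
    return uuid.UUID(bytes=digest)
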
    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.id)


-class SessionManager:
+class SessionManager(BaseClientSessionManager):
    def __init__(self, settings: ProxySettings):
+       BaseClientSessionManager.__init__(self)
        self.settings: ProxySettings = settings
        self.sessions: List[Session] = []
        self.shutdown_signal = multiprocessing.Event()
        self.flow_context = HTTPFlowContext()
        self.asset_repo = HTTPAssetRepo()
        self.message_logger: Optional[BaseMessageLogger] = None
-       self.addon_ctx: Dict[str, Any] = {}
+       self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
        self.name_cache = ProxyNameCache()
+       self.pending_leap_clients: List[LEAPClient] = []

    def create_session(self, login_data) -> Session:
        session = Session.from_login_data(login_data, self)

@@ -192,6 +135,15 @@ class SessionManager:
            session.http_message_handler,
        )
        self.sessions.append(session)
+       # TODO: less crap way of tying a LEAP client to a session
+       while self.pending_leap_clients:
+           leap_client = self.pending_leap_clients.pop(-1)
+           # Client may have gone bad since it connected
+           if not leap_client.connected:
+               continue
+           logging.info("Assigned LEAP client to session")
+           session.leap_client = leap_client
+           break
        logging.info("Created %r" % session)
        return session

@@ -206,6 +158,8 @@ class SessionManager:
    def close_session(self, session: Session):
        logging.info("Closed %r" % session)
        session.objects.clear()
+       if session.leap_client:
+           session.leap_client.disconnect()
        self.sessions.remove(session)

    def resolve_cap(self, url: str) -> Optional["CapData"]:

@@ -215,6 +169,10 @@ class SessionManager:
            return cap_data
        return CapData()

+   async def leap_client_connected(self, leap_client: LEAPClient):
+       self.pending_leap_clients.append(leap_client)
+       AddonManager.handle_leap_client_added(self, leap_client)
+

@dataclasses.dataclass
class SelectionModel:
@@ -25,6 +25,7 @@ class EnvSettingDescriptor(SettingDescriptor):
class ProxySettings(Settings):
    SOCKS_PROXY_PORT: int = EnvSettingDescriptor(9061, "HIPPO_UDP_PORT", int)
    HTTP_PROXY_PORT: int = EnvSettingDescriptor(9062, "HIPPO_HTTP_PORT", int)
+   LEAP_PORT: int = EnvSettingDescriptor(9063, "HIPPO_LEAP_PORT", int)
    PROXY_BIND_ADDR: str = EnvSettingDescriptor("127.0.0.1", "HIPPO_BIND_HOST", str)
    REMOTELY_ACCESSIBLE: bool = SettingDescriptor(False)
    USE_VIEWER_OBJECT_CACHE: bool = SettingDescriptor(False)

@@ -34,3 +35,4 @@ class ProxySettings(Settings):
    AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
    ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
    FILTERS: Dict[str, str] = SettingDescriptor(dict)
+   SSL_INSECURE: bool = SettingDescriptor(False)
@@ -83,7 +83,7 @@ class SOCKS5Server:
        try:
            # UDP Associate
            if cmd == 3:
-               loop = asyncio.get_running_loop()
+               loop = asyncio.get_event_loop_policy().get_event_loop()
                transport, protocol = await loop.create_datagram_endpoint(
                    self._udp_protocol_creator(writer.get_extra_info("peername")),
                    local_addr=('0.0.0.0', 0))
@@ -65,7 +65,7 @@ class TaskScheduler:
            task.cancel()

        try:
-           event_loop = asyncio.get_running_loop()
+           event_loop = asyncio.get_event_loop_policy().get_event_loop()
            await_all = asyncio.gather(*(task for task_data, task in self.tasks))
            event_loop.run_until_complete(await_all)
        except RuntimeError:
@@ -108,4 +108,7 @@ CAP_TEMPLATES: List[CAPTemplate] = [
    CAPTemplate(cap_name='ViewerBenefits', method='GET', body=b'', query=set(), path=''),
    CAPTemplate(cap_name='SetDisplayName', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>display_name</key>\n <array>\n <string>OLD_DISPLAY_NAME</string>\n <string>NEW_DISPLAY_NAME</string>\n </array>\n </map>\n</llsd>\n', query=set(), path=''),
    CAPTemplate(cap_name='ObjectMediaNavigate', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>current_url</key>\n <string></string>\n <key>object_id</key>\n <uuid><!HIPPOREPL[[SELECTED_FULL]]></uuid>\n <key>texture_index</key>\n <integer></integer>\n </map>\n</llsd>\n', query=set(), path=''),
    CAPTemplate(cap_name='AgentProfile', method='GET', body=b'', query=set(), path='/<SOME_ID>'),
    CAPTemplate(cap_name='InterestList', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>mode</key>\n <string>360</string>\n </map>\n</llsd>', query=set(), path='/'),
    CAPTemplate(cap_name='RegionObjects', method='GET', body=b'', query=set(), path=''),
]
@@ -1,11 +1,11 @@
import asyncio
import unittest
from typing import Any, Optional, List, Tuple

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
-from hippolyzer.lib.base.network.transport import UDPPacket, AbstractUDPTransport, ADDR_TUPLE
+from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.base.test_utils import MockTransport
from hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager

@@ -63,21 +63,3 @@ class BaseProxyTest(unittest.IsolatedAsyncioTestCase):
    def _msg_to_datagram(self, msg: Message, src, dst, socks_header=True):
        packet = self._msg_to_packet(msg, src, dst)
        return SOCKS5UDPTransport.serialize(packet, force_socks_header=socks_header)
-
-
-class MockTransport(AbstractUDPTransport):
-   def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
-       pass
-
-   def abort(self) -> None:
-       pass
-
-   def close(self) -> None:
-       pass
-
-   def __init__(self):
-       super().__init__()
-       self.packets: List[Tuple[bytes, Tuple[str, int]]] = []
-
-   def send_packet(self, packet: UDPPacket) -> None:
-       self.packets.append((packet.data, packet.dst_addr))
@@ -139,7 +139,7 @@ class ViewerObjectCache:
        return RegionViewerObjectCache.from_file(objects_file)


-class ViewerObjectCacheEntry(recordclass.datatuple):  # type: ignore
+class ViewerObjectCacheEntry(recordclass.dataobject):  # type: ignore
    local_id: int
    crc: int
    data: bytes
0 hippolyzer/lib/voice/__init__.py Normal file
485 hippolyzer/lib/voice/client.py Normal file
@@ -0,0 +1,485 @@
from __future__ import annotations

import asyncio
import base64
import json
import logging
import random
import subprocess
import tempfile
import urllib.parse
import uuid
from typing import Optional, Union, Any, Dict

from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.events import Event
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_gridxy
from .connection import VivoxConnection, VivoxMessage
from ..base.helpers import create_logged_task

LOG = logging.getLogger(__name__)
RESP_LOG = logging.getLogger(__name__ + ".responses")


def launch_slvoice(voice_path, args, env=None):
    return subprocess.Popen([voice_path] + args, env=env)


def uuid_to_vivox(val):
    return (b"x" + base64.b64encode(uuid.UUID(val).bytes, b"-_")).decode("utf8")


def uuid_to_vivox_uri(val):
    return "sip:%s@bhr.vivox.com" % uuid_to_vivox(val)


def vivox_to_uuid(val):
    # Pull the base64-encoded UUID out of the URI
    val = val.split(":")[-1].split("@")[0][1:]
    return str(uuid.UUID(bytes=base64.b64decode(val, b"-_")))
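
The three helpers above are inverses of one another: uuid_to_vivox base64-encodes the UUID's 16 raw bytes with the URL-safe "-_" alphabet and prefixes an "x", and vivox_to_uuid strips the sip:...@host wrapper, drops the "x", and decodes. An illustrative round trip (the UUID is a made-up example, not a real agent):

agent_id = "41f94400-2a3e-408a-9b80-1774724f62af"
assert vivox_to_uuid(uuid_to_vivox_uri(agent_id)) == agent_id
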
class VoiceClient:
    SERVER_URL = "https://www.bhr.vivox.com/api2/"

    def __init__(self, host: str, port: int):
        self._host = host
        self._port = port

        self.logged_in = asyncio.Event()
        self.ready = asyncio.Event()
        self.session_ready = asyncio.Event()
        self.session_added = Event()
        self.channel_info_updated = Event()
        self.participant_added = Event()
        self.participant_updated = Event()
        self.participant_removed = Event()
        self.capture_devices_received = Event()
        self.render_devices_received = Event()
        self.render_devices = {}
        self.capture_devices = {}

        self._pending_req_futures: dict[str, asyncio.Future] = {}

        self._connector_handle: Optional[str] = None
        self._session_handle: Optional[str] = None
        self._session_group_handle: Optional[str] = None
        self._account_handle: Optional[str] = None
        self._account_uri: Optional[str] = None
        self._username: Optional[str] = None
        self._password: Optional[str] = None
        self._display_name: Optional[str] = None
        self._uri: Optional[str] = None
        self._participants: Dict[str, dict] = {}

        self._mic_muted = False
        self._region_global_x = 0
        self._region_global_y = 0

        self._pos = Vector3(0, 0, 0)

        self.vivox_conn: Optional[VivoxConnection] = None
        self._poll_task = create_logged_task(self._poll_messages(), "Poll Vivox messages")
        self.event_handler: MessageHandler[VivoxMessage, str] = MessageHandler(take_by_default=False)

        self.event_handler.subscribe(
            "VoiceServiceConnectionStateChangedEvent",
            self._handle_voice_service_connection_state_changed
        )
        self.event_handler.subscribe("AccountLoginStateChangeEvent", self._handle_account_login_state_change)
        self.event_handler.subscribe("SessionAddedEvent", self._handle_session_added)
        self.event_handler.subscribe("SessionRemovedEvent", self._handle_session_removed)
        self.event_handler.subscribe("ParticipantAddedEvent", self._handle_participant_added)
        self.event_handler.subscribe("ParticipantUpdatedEvent", self._handle_participant_updated)
        self.event_handler.subscribe("ParticipantRemovedEvent", self._handle_participant_removed)

    @property
    def username(self):
        return self._username

    @property
    def password(self):
        return self._password

    @property
    def display_name(self):
        return self._display_name

    @property
    def global_pos(self):
        return self._pos

    @property
    def region_pos(self):
        return self._global_to_region(self.global_pos)

    @property
    def uri(self):
        return self._uri

    @property
    def participants(self):
        # TODO: wrap in something to make immutable
        return self._participants

    def close(self):
        if self.vivox_conn is not None:
            self.vivox_conn.close()
        self._poll_task.cancel()
        self._poll_task = None

    async def aclose(self):
        if self._account_handle:
            await self.logout()
        self.close()

    @classmethod
    async def simple_init(
            cls,
            voice_path: str,
            host: Optional[str] = None,
            port: Optional[int] = None,
            env: Optional[dict] = None
    ):
        """Simple initializer for standing up a client"""
        if not host:
            host = "127.0.0.1"
        if not port:
            port = random.randrange(40000, 60000)

        str_addr = "%s:%s" % (host, port)
        launch_slvoice(voice_path, ["-i", str_addr, "-m", "component"], env=env)
        # HACK: wait for the process to start listening
        await asyncio.sleep(0.2)

        client = cls(host, port)
        await client.create_vivox_connection()
        await client.ready.wait()
        return client

    async def create_vivox_connection(self):
        reader, writer = await asyncio.open_connection(host=self._host, port=self._port)
        self.vivox_conn = VivoxConnection(reader, writer)

    async def create_connector(self):
        # TODO: Move all this extra crap out of here
        devices = (await self.send_message("Aux.GetCaptureDevices.1", {}))["Results"]
        self.capture_devices_received.notify(devices)
        self.capture_devices.clear()
        self.capture_devices.update(devices)

        devices = (await self.send_message("Aux.GetRenderDevices.1", {}))["Results"]
        self.render_devices_received.notify(devices)
        self.render_devices.clear()
        self.render_devices.update(devices)

        await self.set_speakers_muted(False)
        await self.set_speaker_volume(62)
        await self.set_mic_muted(True)
        await self.set_mic_volume(50)

        connector_resp = await self.send_message("Connector.Create.1", {
            "ClientName": "V2 SDK",
            "AccountManagementServer": self.SERVER_URL,
            "Mode": "Normal",
            "MinimumPort": 30000,
            "MaximumPort": 50000,
            "Logging": {
                "Folder": tempfile.gettempdir(),
                "FileNamePrefix": "VivConnector",
                "FileNameSuffix": ".log",
                "LogLevel": 1
            },
            "Application": "",
            "MaxCalls": 12,
        })

        self._connector_handle = connector_resp['Results']['ConnectorHandle']
        self.ready.set()

    async def login(self, username: Union[uuid.UUID, str], password: str):
        # UUID, convert to Vivox format
        if isinstance(username, uuid.UUID) or len(username) == 36:
            username = uuid_to_vivox(username)

        self._username = username
        self._password = password
        if not self._connector_handle:
            raise Exception("Need a connector handle to log in")
        if self._account_handle:
            await self.logout()

        resp = await self.send_message("Account.Login.1", {
            "ConnectorHandle": self._connector_handle,
            "AccountName": username,
            "AccountPassword": password,
            "AudioSessionAnswerMode": "VerifyAnswer",
            "EnableBuddiesAndPresence": "false",
            "BuddyManagementMode": "Application",
            "ParticipantPropertyFrequency": 5,
        })

        if resp["ReturnCode"] != 0:
            raise Exception(resp)

        self._display_name = urllib.parse.unquote(resp["Results"]["DisplayName"])
        self._account_uri = resp["Results"]["Uri"]
        await self.logged_in.wait()

        return resp

    async def logout(self):
        if self._session_handle:
            await self.leave_session()

        if self._account_handle:
            await self.send_message("Account.Logout.1", {
                "AccountHandle": self._account_handle,
            })
        self._account_handle = None
        self._account_uri = None
        self.logged_in.clear()

    async def join_session(self, uri: str, region_handle: Optional[int] = None):
        if self._session_handle:
            await self.leave_session()

        self.set_ref_region(region_handle)

        self._uri = uri

        await self.send_message("Session.Create.1", {
            "AccountHandle": self._account_handle,
            "URI": uri,
            "ConnectAudio": "true",
            "ConnectText": "false",
            "VoiceFontID": 0,
            "Name": ""
        })
        # wait until we're actually added
        await self.session_ready.wait()

    async def leave_session(self):
        await self.send_message("SessionGroup.Terminate.1", {
            "SessionGroupHandle": self._session_group_handle,
        })
        self.session_ready.clear()

        # TODO: refactor into a collection
        for participant in self._participants.values():
            self.participant_removed.notify(participant)
        self._participants.clear()
        self._session_handle = None
        self._session_group_handle = None
        self._region_global_x = 0
        self._region_global_y = 0
        self._uri = None

    def set_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
        """Set global 3D position, in Vivox coordinates"""
        self._pos = pos
        future = self.send_message("Session.Set3DPosition.1", {
            "SessionHandle": self._session_handle,
            "SpeakerPosition": self._build_position_dict(pos),
            "ListenerPosition": self._build_position_dict(pos, vel=vel),
        })
        self._channel_info_updated()
        return future

    def set_region_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
        """Set 3D position, in region-local coordinates"""
        vel = Vector3(vel[0], vel[2], -vel[1])
        return self.set_3d_pos(self._region_to_global(pos), vel=vel)

    def set_speakers_muted(self, val: bool):
        return self.send_message("Connector.MuteLocalSpeaker.1", {
            "Value": json.dumps(val),
            "ConnectorHandle": self._connector_handle
        })

    def set_mic_muted(self, val: bool):
        self._mic_muted = val

        return self.send_message("Connector.MuteLocalMic.1", {
            "Value": json.dumps(val),
            "ConnectorHandle": self._connector_handle
        })

    def set_mic_volume(self, vol: int):
        return self.send_message("Connector.SetLocalMicVolume.1", {
            "Value": vol,
            "ConnectorHandle": self._connector_handle
        })

    def set_speaker_volume(self, vol: int):
        return self.send_message("Connector.SetLocalSpeakerVolume.1", {
            "Value": vol,
            "ConnectorHandle": self._connector_handle
        })

    def set_capture_device(self, device: str):
        return self.send_message("Aux.SetCaptureDevice.1", {
            "CaptureDeviceSpecifier": device,
        })

    def set_participant_volume(self, participant: str, vol: int):
        return self.send_message("Session.SetParticipantVolumeForMe.1", {
            "SessionHandle": self._session_handle,
            "ParticipantURI": participant,
            "Volume": vol,
        })

    async def get_channel_info(self, uri: str) -> dict:
        return await self.send_message("Account.ChannelGetInfo.1", {
            "AccountHandle": self._account_handle,
            "URI": uri
        })

    def send_web_call(self, rel_path: str, params: dict) -> asyncio.Future[dict]:
        """Make a call to a Vivox Web API"""
        return self.send_message("Account.WebCall.1", {
            "AccountHandle": self._account_handle,
            "RelativePath": rel_path,
            "Parameters": params,
        })

    def send_message(self, msg_type: str, data: Any) -> asyncio.Future[dict]:
        request_id = self._make_request_id()
        # This is apparently what the viewer does, not clear if
        # request_id has any semantic significance
        if msg_type == "Session.Create.1":
            request_id = data["URI"]

        RESP_LOG.debug("%s %s %s %r" % ("Request", request_id, msg_type, data))

        create_logged_task(self.vivox_conn.send_request(request_id, msg_type, data), "Send Vivox message")
        future = asyncio.Future()
        self._pending_req_futures[request_id] = future
        return future

    def send_raw(self, data: bytes):
        return self.vivox_conn.send_raw(data)

    def set_ref_region(self, region_handle: Optional[int]):
        """Set reference position for region-local coordinates"""
        if region_handle is not None:
            self._region_global_x, self._region_global_y = handle_to_gridxy(region_handle)
        else:
            self._region_global_x, self._region_global_y = (0, 0)
        self._channel_info_updated()

    async def _poll_messages(self):
        while not self.vivox_conn:
            await asyncio.sleep(0.001)

        async for msg in self.vivox_conn.read_messages():
            try:
                RESP_LOG.debug(repr(msg))
                if msg.type == "Event":
                    self.event_handler.handle(msg)
                elif msg.type == "Response":
                    # Might not have this request ID if it was sent directly via the socket
                    if msg.request_id in self._pending_req_futures:
                        self._pending_req_futures[msg.request_id].set_result(msg.data)
                        del self._pending_req_futures[msg.request_id]
            except Exception:
                LOG.exception("Error in response handler?")
    async def _handle_voice_service_connection_state_changed(self, _msg: VivoxMessage):
        await self.create_connector()

    def _handle_account_login_state_change(self, msg: VivoxMessage):
        if msg.data.get('StatusString') == "OK" and msg.data['State'] == '1':
            self._account_handle = msg.data['AccountHandle']
            self.logged_in.set()
        else:
            self.logged_in.clear()
            self._account_uri = None
            self._account_handle = None

    def _handle_session_added(self, msg: VivoxMessage):
        self._session_handle = msg.data["SessionHandle"]
        self._session_group_handle = msg.data["SessionGroupHandle"]
        self.session_added.notify(self._session_handle)
        # We still have to wait for ourselves to be added as a participant, wait on
        # that to set the session_ready event.

    def _handle_session_removed(self, _msg: VivoxMessage):
        self._session_handle = None
        # We often don't get all the `ParticipantRemoved`s before the session dies,
        # clear out the participant list.
        for participant in tuple(self._participants.keys()):
            self._remove_participant(participant)
        self.session_ready.clear()

    def _handle_participant_added(self, msg: VivoxMessage):
        self._participants[msg.data["ParticipantUri"]] = msg.data
        self.participant_added.notify(msg.data)
        if msg.data["ParticipantUri"] == self._account_uri and not self.session_ready.is_set():
            self.session_ready.set()

    def _handle_participant_updated(self, msg: VivoxMessage):
        participant_uri = msg.data["ParticipantUri"]
        if participant_uri in self._participants:
            participant = self._participants[participant_uri]
            participant.update(msg.data)
            self.participant_updated.notify(participant)

    def _handle_participant_removed(self, msg: VivoxMessage):
        self._remove_participant(msg.data["ParticipantUri"])

    def _remove_participant(self, participant_uri: str):
        if participant_uri in self._participants:
            participant = self._participants[participant_uri]
            del self._participants[participant_uri]
            self.participant_removed.notify(participant)

    def _global_to_region(self, pos: Vector3):
        x = pos.X - self._region_global_x * 256
        z = pos.Z + self._region_global_y * 256
        # Vivox uses a different coordinate system than SL, Y is up!
        return Vector3(x, -z, pos.Y)

    def _region_to_global(self, pos: Vector3):
        x = pos.X + self._region_global_x * 256
        y = pos.Y + self._region_global_y * 256
        return Vector3(x, pos.Z, -y)
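
Coordinate note for the two conversions above: SL is Z-up while Vivox is Y-up, so region-local (x, y, z) maps to Vivox-global (x + gx*256, z, -(y + gy*256)), where (gx, gy) is the region's grid coordinate and regions are 256m on a side. A worked example, assuming a region at grid (1000, 1000):

    region-local (128, 64, 20)
        -> _region_to_global: (128 + 256000, 20, -(64 + 256000)) = (256128, 20, -256064)
        -> _global_to_region: back to (128, 64, 20)
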
    def _build_position_dict(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> dict:
        return {
            "Position": {
                "X": pos.X,
                "Y": pos.Y,
                "Z": pos.Z,
            },
            "Velocity": {
                "X": vel.X,
                "Y": vel.Y,
                "Z": vel.Z,
            },
            "AtOrientation": {
                "X": "1.29938e-05",
                "Y": 0,
                "Z": -1,
            },
            "UpOrientation": {
                "X": 0,
                "Y": 1,
                "Z": 0,
            },
            "LeftOrientation": {
                "X": -1,
                "Y": 0,
                "Z": "-1.29938e-05",
            }
        }

    def _channel_info_updated(self):
        pos = self.global_pos
        if self._region_global_x is not None:
            pos = self.region_pos
        self.channel_info_updated.notify(pos)

    def _make_request_id(self):
        return str(uuid.uuid4())
156 hippolyzer/lib/voice/connection.py Normal file
@@ -0,0 +1,156 @@
# TODO: some fancy parser that parses everything into
#  dicts or objects using schemas.
from __future__ import annotations

import asyncio
import weakref
from typing import Any, Optional, Coroutine, NamedTuple

import defusedxml.lxml
import lxml.etree


class VivoxMessage(NamedTuple):
    type: str
    name: str
    request_id: Optional[str]
    data: dict


def xml_to_dict(element):
    return element.tag, dict(map(xml_to_dict, element)) or element.text
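
xml_to_dict folds an element into a (tag, value) pair, where value is a dict of child tags or, for a leaf element, its text. An illustrative example:

    <Response><Results><ReturnCode>0</ReturnCode></Results></Response>
        -> ("Response", {"Results": {"ReturnCode": "0"}})

Note that repeated sibling tags collapse into a single dict key, which is why _clean_message below re-extracts device lists from the parsed tree by hand.
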
def buildxml(r, d, list_elem_name='i'):
    if isinstance(d, dict):
        for k, v in d.items():
            s = lxml.etree.SubElement(r, k)
            buildxml(s, v, list_elem_name)
    elif isinstance(d, (list, tuple, set)):
        for v in d:
            if isinstance(v, lxml.etree._Element):  # noqa
                s = r
            else:
                s = lxml.etree.SubElement(r, list_elem_name)
            buildxml(s, v, list_elem_name)
    elif isinstance(d, str):
        r.text = d
    elif isinstance(d, lxml.etree._Element):  # noqa
        r.append(d)
    elif d is None:
        r.text = ""
    else:
        r.text = str(d)
    return r


_VIVOX_NS = b' xmlns="http://www.vivox.com"'  # noqa


def _remove_vivox_ns(data):
    return data.replace(_VIVOX_NS, b"").strip()


def _clean_message(msg_action: str, parsed, dict_msg: dict):
    # TODO: refactor this into some XML -> dict schema, some XML is ambiguous
    if msg_action == "Aux.GetCaptureDevices.1":
        devices = []
        for device in parsed.find('Results/CaptureDevices'):
            devices.append(xml_to_dict(device)[1])
        dict_msg["Results"]["CaptureDevices"] = devices
    if msg_action == "Account.WebCall.1":
        results = dict_msg["Results"]
        content_type = results.get("ContentType") or ""
        if content_type.startswith("text/xml"):
            xml_content = _remove_vivox_ns(results["Content"].encode("utf8"))
            parsed_content = defusedxml.lxml.fromstring(xml_content)
            body = parsed_content.xpath("//body")[0]
            results["Content"] = body
    if "ReturnCode" in dict_msg:
        dict_msg["ReturnCode"] = int(dict_msg["ReturnCode"])
    return dict_msg


def _build_webcall_params(params: dict) -> list:
    params_list = []
    elem = lxml.etree.Element('base')
    for name, val in params.items():
        params_list.append({"Name": name, "Value": val})
    buildxml(elem, params_list, 'Parameter')
    return list(elem)


class VivoxConnection:
    def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, owned=True):
        self._reader: Optional[asyncio.StreamReader] = reader
        self._writer: Optional[asyncio.StreamWriter] = writer
        self._owned = owned

    def close(self):
        if self._owned and self._writer:
            self._writer.close()
        self._writer = None
        self._reader = None

    def __del__(self):
        self.close()

    async def read_messages(self):
        # TODO: handle interrupted read
        while self._reader and not self._reader.at_eof() and not self._writer.is_closing():
            yield await self.read_message()

    async def read_message(self):
        msg = await self._reader.readuntil(b"\n\n\n")
        return self.parse(msg[:-3])

    def parse(self, raw_msg) -> VivoxMessage:
        parsed_msg = defusedxml.lxml.fromstring(raw_msg.decode("utf8"))
        msg_type = parsed_msg.tag
        request_id = parsed_msg.attrib.get("requestId", None)

        # There may be no params, just use an empty dict if that's the case
        dict_msg = xml_to_dict(parsed_msg)[1] or {}

        if msg_type == "Event":
            msg_action = parsed_msg.attrib.get("type")
        elif msg_type == "Response":
            msg_action = parsed_msg.attrib.get("action")
            # This is pretty useless, get rid of it because it gunks up repr()s.
            if 'InputXml' in dict_msg:
                del dict_msg['InputXml']
            dict_msg = _clean_message(msg_action, parsed_msg, dict_msg)
        elif msg_type == "Request":
            msg_action = parsed_msg.attrib.get("action")
        else:
            raise Exception("Unknown Vivox message type %r?" % msg_type)
        return VivoxMessage(msg_type, msg_action, request_id, dict_msg)

    def send_raw(self, buf: bytes) -> Coroutine[Any, Any, None]:
        self._writer.write(buf + b"\n\n\n")
        drain_coro = self._writer.drain()
        # Don't whine if this isn't awaited, we may not always want to flush immediately.
        weakref.finalize(drain_coro, drain_coro.close)
        return drain_coro

    def send_request(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
        if action == "Account.WebCall.1":
            data = dict(data)
            data["Parameters"] = _build_webcall_params(data["Parameters"])
        return self._send_request_response("Request", request_id, action, data)

    def send_response(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
        return self._send_request_response("Response", request_id, action, data)

    def _send_request_response(self, msg_type: str, request_id: str, action: str, data: Any):
        elem = lxml.etree.Element(msg_type)
        elem.attrib["requestId"] = request_id
        elem.attrib["action"] = action
        serialized = lxml.etree.tostring(buildxml(elem, data))
        return self.send_raw(serialized)

    def send_event(self, event_type: str, data: Any) -> Coroutine[Any, Any, None]:
        elem = lxml.etree.Element("Event")
        elem.attrib["type"] = event_type
        serialized = lxml.etree.tostring(buildxml(elem, data))
        return self.send_raw(serialized)
115 requirements.txt
@@ -1,68 +1,77 @@
aiohttp==3.8.1
aiosignal==1.2.0
aiohttp==3.9.2
aioquic==0.9.25
aiosignal==1.3.1
appdirs==1.4.4
Arpeggio==1.10.2
asgiref==3.4.1
async-timeout==4.0.1
attrs==21.2.0
blinker==1.4
Brotli==1.0.9
certifi==2021.10.8
cffi==1.15.0
charset-normalizer==2.0.9
click==8.0.3
cryptography==36.0.2
Arpeggio==2.0.2
asgiref==3.7.2
attrs==23.2.0
blinker==1.7.0
Brotli==1.1.0
certifi==2023.11.17
cffi==1.16.0
click==8.1.7
cryptography==41.0.7
dataclasses-json==0.6.3
defusedxml==0.7.1
Flask==2.0.2
frozenlist==1.2.0
Flask==2.3.3
frozenlist==1.4.1
gltflib==1.0.13
Glymur==0.9.6
h11==0.12.0
h11==0.14.0
h2==4.1.0
hpack==4.0.0
hyperframe==6.0.1
idna==2.10
itsdangerous==2.0.1
jedi==0.18.1
Jinja2==3.0.3
kaitaistruct==0.9
lazy-object-proxy==1.6.0
itsdangerous==2.1.2
jedi==0.19.1
Jinja2==3.1.3
kaitaistruct==0.10
lazy-object-proxy==1.10.0
ldap3==2.9.1
llbase==1.2.11
lxml==4.6.4
MarkupSafe==2.0.1
mitmproxy==8.0.0
msgpack==1.0.3
multidict==5.2.0
numpy==1.21.4
llsd==1.0.0
lxml==5.1.0
MarkupSafe==2.1.3
marshmallow==3.20.1
mitmproxy==10.2.1
mitmproxy_rs==0.5.1
msgpack==1.0.7
multidict==6.0.4
mypy-extensions==1.0.0
numpy==1.26.3
outleap==0.6.1
packaging==23.2
parso==0.8.3
passlib==1.7.4
prompt-toolkit==3.0.23
protobuf==3.18.1
ptpython==3.0.20
prompt-toolkit==3.0.43
protobuf==4.25.1
ptpython==3.0.25
publicsuffix2==2.20191221
pyasn1==0.4.8
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycollada==0.8
pycparser==2.21
pycollada==0.7.2
Pygments==2.10.0
pyOpenSSL==22.0.0
pyparsing==2.4.7
Pygments==2.17.2
pylsqpack==0.3.18
pyOpenSSL==23.3.0
pyparsing==3.1.1
pyperclip==1.8.2
PySide6==6.2.2
qasync==0.22.0
recordclass==0.14.3
requests==2.26.0
ruamel.yaml==0.17.16
ruamel.yaml.clib==0.2.6
shiboken6==6.2.2
PySide6-Essentials==6.6.1
python-dateutil==2.8.2
qasync==0.27.1
recordclass==0.18.2
ruamel.yaml==0.18.5
ruamel.yaml.clib==0.2.8
service-identity==23.1.0
shiboken6==6.6.1
six==1.16.0
sortedcontainers==2.4.0
tornado==6.1
transformations==2021.6.6
typing-extensions==4.0.1
urllib3==1.26.7
urwid==2.1.2
wcwidth==0.2.5
Werkzeug==2.0.2
wsproto==1.0.0
yarl==1.7.2
zstandard==0.15.2
tornado==6.4
transformations==2024.6.1
typing-inspect==0.9.0
typing_extensions==4.9.0
urwid-mitmproxy==2.1.2.1
wcwidth==0.2.13
Werkzeug==2.3.8
wsproto==1.2.0
yarl==1.9.4
zstandard==0.22.0
42 setup.py
@@ -25,7 +25,7 @@ from setuptools import setup, find_packages

here = path.abspath(path.dirname(__file__))

-version = '0.11.2'
+version = '0.15.2'

with open(path.join(here, 'README.md')) as readme_fh:
    readme = readme_fh.read()

@@ -42,9 +42,9 @@ setup(
        "Operating System :: POSIX",
        "Operating System :: Microsoft :: Windows",
        "Programming Language :: Python :: 3 :: Only",
-       "Programming Language :: Python :: 3.8",
-       "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
+       "Programming Language :: Python :: 3.11",
+       "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: Implementation :: CPython",
        "Topic :: System :: Networking :: Monitoring",
        "Topic :: Software Development :: Libraries :: Python Modules",

@@ -75,33 +75,39 @@ setup(
    entry_points={
        'console_scripts': {
            'hippolyzer-gui = hippolyzer.apps.proxy_gui:gui_main',
-           'hippolyzer-cli = hippolyzer.apps.proxy:main'
+           'hippolyzer-cli = hippolyzer.apps.proxy:main',
        }
    },
    zip_safe=False,
-   python_requires='>=3.8',
+   python_requires='>=3.10',
    install_requires=[
-       'llbase>=1.2.5',
+       'llsd<1.1.0',
        'defusedxml',
        'aiohttp<4.0.0',
-       'recordclass<0.15',
+       # Newer recordclasses break!
+       'recordclass>0.15,<0.18.3',
        'lazy-object-proxy',
        'arpeggio',
        # requests breaks with newer idna
        'idna<3,>=2.5',
        # 7.x will be a major change.
        'mitmproxy>=8.0.0,<8.1',
        # For REPLs
        'ptpython<4.0',
        # JP2 codec
        'Glymur<0.9.7',
        'numpy<2.0',
        # These could be in extras_require if you don't want a GUI.
        'pyside6',
        'qasync',
        # Needed for mesh format conversion tooling
        'pycollada',
        'transformations',
        'gltflib',
        # JP2 codec
        'Glymur<0.9.7',
        'numpy<2.0',

        # Proxy-specific stuff
        'outleap<1.0',
        'arpeggio',
        # 12.x will be a major change.
        'mitmproxy>=11.0.0,<12',
        'Werkzeug<4.0',
        # For REPLs
        'ptpython<4.0',
        # These could be in extras_require if you don't want a GUI.
        'pyside6-essentials',
        'qasync',
    ],
    tests_require=[
        "pytest",
@@ -1,8 +1,11 @@
import glob

import setuptools  # noqa

import os
import shutil
from distutils.core import Command
from importlib.metadata import version
from pathlib import Path

from cx_Freeze import setup, Executable

@@ -31,20 +34,20 @@ TO_DELETE = [
    "lib/aiohttp/_http_writer.c",
    "lib/aiohttp/_websocket.c",
    # Improve this to work with different versions.
-   "lib/aiohttp/python39.dll",
-   "lib/lazy_object_proxy/python39.dll",
-   "lib/lxml/python39.dll",
-   "lib/markupsafe/python39.dll",
-   "lib/multidict/python39.dll",
-   "lib/numpy/core/python39.dll",
-   "lib/numpy/fft/python39.dll",
-   "lib/numpy/linalg/python39.dll",
-   "lib/numpy/random/python39.dll",
-   "lib/python39.dll",
-   "lib/recordclass/python39.dll",
-   "lib/regex/python39.dll",
+   "lib/aiohttp/python3*.dll",
+   "lib/lazy_object_proxy/python3*.dll",
+   "lib/lxml/python3*.dll",
+   "lib/markupsafe/python3*.dll",
+   "lib/multidict/python3*.dll",
+   "lib/numpy/core/python3*.dll",
+   "lib/numpy/fft/python3*.dll",
+   "lib/numpy/linalg/python3*.dll",
+   "lib/numpy/random/python3*.dll",
+   "lib/python3*.dll",
+   "lib/recordclass/python3*.dll",
+   "lib/regex/python3*.dll",
    "lib/test",
-   "lib/yarl/python39.dll",
+   "lib/yarl/python3*.dll",
]

COPY_TO_ZIP = [

@@ -76,11 +79,12 @@ class FinalizeCXFreezeCommand(Command):
        if path.name.startswith("exe.") and path.is_dir():
            for cleanse_suffix in TO_DELETE:
                cleanse_path = path / cleanse_suffix
-               shutil.rmtree(cleanse_path, ignore_errors=True)
-               try:
-                   os.unlink(cleanse_path)
-               except:
-                   pass
+               for globbed in glob.glob(str(cleanse_path)):
+                   shutil.rmtree(globbed, ignore_errors=True)
+                   try:
+                       os.unlink(globbed)
+                   except:
+                       pass
        for to_copy in COPY_TO_ZIP:
            shutil.copy(BASE_DIR / to_copy, path / to_copy)
        shutil.copytree(BASE_DIR / "addon_examples", path / "addon_examples")

@@ -94,6 +98,7 @@ options = {
        "passlib",
        "_cffi_backend",
        "hippolyzer",
+       "mitmproxy_windows",
    ],
    # exclude packages that are not really needed
    "excludes": [

@@ -113,7 +118,7 @@ executables = [

setup(
    name="hippolyzer_gui",
-   version="0.9.0",
+   version=version("hippolyzer"),
    description="Hippolyzer GUI",
    options=options,
    executables=executables,
@@ -79,6 +79,20 @@ class TestDatatypes(unittest.TestCase):
         quat = Quaternion(X=128.0, Y=128.0, Z=22.0)
         self.assertEqual(quat, (128.0, 128.0, 22.0, 0.0))

+    def test_quaternion_euler_roundtrip(self):
+        orig_vec = Vector3(0.0, -1.0, 2.0)
+        quat = Quaternion.from_euler(*orig_vec)
+        for orig_comp, new_comp in zip(orig_vec, quat.to_euler()):
+            self.assertAlmostEqual(orig_comp, new_comp)
+
+    def test_quaternion_transformations(self):
+        quat = Quaternion(0.4034226801113349, -0.2590347239999257, 0.7384602626041288, 0.4741598817790379)
+        expected_trans = (0.4741598817790379, 0.4034226801113349, -0.2590347239999257, 0.7384602626041288)
+        trans_quat = quat.to_transformations()
+        self.assertSequenceEqual(expected_trans, trans_quat)
+        new_quat = Quaternion.from_transformations(trans_quat)
+        self.assertEqual(quat, new_quat)
+
     def test_uuid_from_bytes(self):
         tmp_uuid = uuid.UUID('2b7f7a6e-32c5-dbfd-e2c7-926d1a9f0aca')
         tmp_uuid2 = uuid.UUID('1dd5efe2-faaf-1864-5ac9-bc61c5d8d7ea')
@@ -135,6 +149,9 @@ class TestDatatypes(unittest.TestCase):
         self.assertIsInstance(val, UUID)
         self.assertEqual(orig, val)

+    def test_str_llsd_serialization(self):
+        self.assertEqual(b"'foo\\nbar'", llsd.format_notation("foo\nbar"))
+
     def test_jank_stringy_bytes(self):
         val = JankStringyBytes(b"foo\x00")
         self.assertTrue("o" in val)
@@ -146,3 +163,6 @@ class TestDatatypes(unittest.TestCase):
         self.assertNotEqual(b"foo", val)
         self.assertEqual(b"foo", JankStringyBytes(b"foo"))
         self.assertEqual("foo", JankStringyBytes(b"foo"))
+        self.assertFalse(JankStringyBytes(b""))
+        self.assertFalse(JankStringyBytes(b"\x00"))
+        self.assertTrue(JankStringyBytes(b"\x01"))
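Taken together, the new assertions pin down JankStringyBytes' semantics: it compares equal to both bytes and decoded str values, and its truthiness ignores NUL padding. A sketch consistent with these tests (an illustration only, not hippolyzer's actual implementation):

class JankStringyBytes(bytes):
    # bytes that also compare/contain like a decoded string.
    def __contains__(self, item):
        if isinstance(item, str):
            return item in self.decode("utf8", errors="replace")
        return super().__contains__(item)

    def __eq__(self, other):
        if isinstance(other, str):
            return self.rstrip(b"\x00").decode("utf8", errors="replace") == other
        return super().__eq__(other)

    def __hash__(self):
        return super().__hash__()

    def __bool__(self):
        # b"", b"\x00", b"\x00\x00", ... are all falsy; anything else is truthy.
        return bool(self.rstrip(b"\x00"))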
tests/base/test_events.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+import asyncio
+import unittest
+from unittest.mock import MagicMock
+
+from hippolyzer.lib.base.events import Event
+
+
+class TestEvents(unittest.IsolatedAsyncioTestCase):
+    async def asyncSetUp(self):
+        self.event = Event()
+
+    async def test_trigger_sync(self):
+        mock = MagicMock(return_value=False)
+        self.event.subscribe(mock)
+        self.event.notify("foo")
+        mock.assert_called_with("foo")
+        self.assertIn(mock, [x[0] for x in self.event.subscribers])
+
+    async def test_trigger_sync_unsub(self):
+        mock = MagicMock(return_value=True)
+        self.event.subscribe(mock)
+        self.event.notify("foo")
+        mock.assert_called_with("foo")
+        self.assertNotIn(mock, [x[0] for x in self.event.subscribers])
+
+    async def test_trigger_async(self):
+        called = asyncio.Event()
+        mock = MagicMock()
+
+        async def _mock_wrapper(*args, **kwargs):
+            called.set()
+            mock(*args, **kwargs)
+        self.event.subscribe(_mock_wrapper)
+        self.event.notify("foo")
+        await called.wait()
+        mock.assert_called_with("foo")
+        self.assertIn(_mock_wrapper, [x[0] for x in self.event.subscribers])
+
+    async def test_trigger_async_unsub(self):
+        called = asyncio.Event()
+        mock = MagicMock()
+
+        async def _mock_wrapper(*args, **kwargs):
+            called.set()
+            mock(*args, **kwargs)
+            return True
+        self.event.subscribe(_mock_wrapper)
+        self.event.notify("foo")
+        await called.wait()
+        mock.assert_called_with("foo")
+        self.assertNotIn(_mock_wrapper, [x[0] for x in self.event.subscribers])
+
+    async def test_multiple_subscribers(self):
+        called = asyncio.Event()
+        called2 = asyncio.Event()
+
+        self.event.subscribe(lambda *args: called.set())
+        self.event.subscribe(lambda *args: called2.set())
+
+        self.event.notify(None)
+
+        self.assertTrue(called.is_set())
+        self.assertTrue(called2.is_set())
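For orientation, the behavior these tests pin down implies an Event along these lines: subscribers are stored as tuples with the callback in slot 0 (hence the x[0] indexing above), a subscriber that returns True is unsubscribed after delivery, and coroutine subscribers are scheduled on the running loop rather than awaited inline. A rough sketch (not hippolyzer's actual implementation):

import asyncio
import inspect

class Event:
    def __init__(self):
        self.subscribers = []

    def subscribe(self, callback):
        self.subscribers.append((callback,))

    def _unsubscribe(self, entry):
        if entry in self.subscribers:
            self.subscribers.remove(entry)

    def notify(self, *args, **kwargs):
        for entry in tuple(self.subscribers):
            callback = entry[0]
            if inspect.iscoroutinefunction(callback):
                # Fire-and-forget; unsubscribe later if the coroutine returns True.
                task = asyncio.ensure_future(callback(*args, **kwargs))
                task.add_done_callback(
                    lambda t, e=entry: t.result() and self._unsubscribe(e))
            elif callback(*args, **kwargs):
                self._unsubscribe(entry)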
@@ -2,7 +2,7 @@ import copy
 import unittest

 from hippolyzer.lib.base.datatypes import *
-from hippolyzer.lib.base.inventory import InventoryModel
+from hippolyzer.lib.base.inventory import InventoryModel, SaleType
 from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS

 SIMPLE_INV = """\tinv_object\t0
@@ -11,6 +11,8 @@ SIMPLE_INV = """\tinv_object\t0
 \t\tparent_id\t00000000-0000-0000-0000-000000000000
 \t\ttype\tcategory
 \t\tname\tContents|
+\t\tmetadata\t<llsd><undef /></llsd>
+|
 \t}
 \tinv_item\t0
 \t{
@@ -39,10 +41,23 @@ SIMPLE_INV = """\tinv_object\t0
 \t}
 \t\tname\tNew Script|
 \t\tdesc\t2020-04-20 04:20:39 lsl2 script|
+\t\tmetadata\t<llsd><map><key>experience</key><uuid>a2e76fcd-9360-4f6d-a924-000000000003</uuid></map></llsd>
+|
 \t\tcreation_date\t1587367239
 \t}
 """

+INV_CATEGORY = """\tinv_category\t0
+\t{
+\t\tcat_id\tf4d91477-def1-487a-b4f3-6fa201c17376
+\t\tparent_id\t00000000-0000-0000-0000-000000000000
+\t\ttype\tlsltext
+\t\tpref_type\tlsltext
+\t\tname\tScripts|
+\t\towner_id\ta2e76fcd-9360-4f6d-a924-000000000003
+\t}
+"""
+

 class TestLegacyInv(unittest.TestCase):
     def setUp(self) -> None:
@@ -52,15 +67,27 @@ class TestLegacyInv(unittest.TestCase):
         self.assertTrue(UUID('f4d91477-def1-487a-b4f3-6fa201c17376') in self.model.nodes)
         self.assertIsNotNone(self.model.root)

+    def test_parse_category(self):
+        model = InventoryModel.from_str(INV_CATEGORY)
+        self.assertEqual(UUID('f4d91477-def1-487a-b4f3-6fa201c17376'), model.root.node_id)
+
     def test_serialize(self):
         self.model = InventoryModel.from_str(SIMPLE_INV)
         new_model = InventoryModel.from_str(self.model.to_str())
         self.assertEqual(self.model, new_model)

+    def test_serialize_category(self):
+        model = InventoryModel.from_str(INV_CATEGORY)
+        new_model = InventoryModel.from_str(model.to_str())
+        self.assertEqual(model, new_model)
+
+    def test_category_legacy_serialization(self):
+        self.assertEqual(INV_CATEGORY, InventoryModel.from_str(INV_CATEGORY).to_str())
+
     def test_item_access(self):
         item = self.model.nodes[UUID('dd163122-946b-44df-99f6-a6030e2b9597')]
         self.assertEqual(item.name, "New Script")
-        self.assertEqual(item.sale_info.sale_type, "not")
+        self.assertEqual(item.sale_info.sale_type, SaleType.NOT)
+        self.assertDictEqual(item.metadata, {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")})
         self.assertEqual(item.model, self.model)

     def test_access_children(self):
@@ -112,6 +139,7 @@ class TestLegacyInv(unittest.TestCase):
             'inv_type': 'script',
             'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
             'name': 'New Script',
+            'metadata': {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")},
             'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
             'permissions': {
                 'base_mask': 2147483647,
@@ -122,7 +150,7 @@ class TestLegacyInv(unittest.TestCase):
                 'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
                 'next_owner_mask': 581632,
                 'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
-                'owner_mask': 2147483647
+                'owner_mask': 2147483647,
             },
             'sale_info': {
                 'sale_price': 10,
@@ -133,12 +161,31 @@ class TestLegacyInv(unittest.TestCase):
             ]
         )

+    def test_llsd_serialization_ais(self):
+        model = InventoryModel.from_str(INV_CATEGORY)
+        self.assertEqual(
+            [
+                {
+                    'agent_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
+                    'category_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
+                    'name': 'Scripts',
+                    'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
+                    'type_default': 10,
+                    'version': -1
+                }
+            ],
+            model.to_llsd("ais")
+        )
+
+    def test_llsd_legacy_equality(self):
+        new_model = InventoryModel.from_llsd(self.model.to_llsd())
+        self.assertEqual(self.model, new_model)
+        new_model.root.name = "foo"
+        self.assertNotEqual(self.model, new_model)
+
     def test_legacy_serialization(self):
         self.assertEqual(SIMPLE_INV, self.model.to_str())

     def test_difference_added(self):
         new_model = InventoryModel.from_llsd(self.model.to_llsd())
         diff = self.model.get_differences(new_model)
@@ -40,6 +40,8 @@ class TestMesh(unittest.TestCase):
         writer.write(serializer, reader.read(serializer))
         second_buf = writer.copy_buffer()
         self.assertEqual(first_buf, second_buf)
+        # Dates may not round-trip correctly, but length should always be the same
+        self.assertEqual(len(first_buf), len(self.slm_bytes))

     def test_serialize_raw_segments(self):
         serializer = LLMeshSerializer(include_raw_segments=True)
@@ -62,3 +64,8 @@ class TestMesh(unittest.TestCase):
         mat_list = list(mesh.iter_lod_materials())
         self.assertEqual(4, len(mat_list))
         self.assertIsInstance(mat_list[0], dict)
+
+    def test_make_default_triangle(self):
+        tri = MeshAsset.make_triangle()
+        self.assertEqual(0.5, tri.segments['high_lod'][0]['Position'][2].X)
+        self.assertEqual(1, tri.header['version'])
@@ -181,6 +181,8 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
         self.message_handler.handle(msg)

     async def test_subscription(self):
+        called = asyncio.Event()
+        called2 = asyncio.Event()
         with self.message_handler.subscribe_async(
                 message_names=("Foo",),
                 predicate=lambda m: m["Bar"]["Baz"] == 1,
@@ -192,6 +194,10 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
             msg3 = Message("Foo", Block("Bar", Baz=1, Biz=3))
             self._fake_received_message(msg1)
             self._fake_received_message(msg2)
+
+            self.message_handler.subscribe("Foo", lambda *args: called.set())
+            self.message_handler.subscribe("Foo", lambda *args: called2.set())
+
             self._fake_received_message(msg3)
             received = []
             while True:
@@ -199,14 +205,15 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
                     received.append(await asyncio.wait_for(get_msg(), 0.001))
                 except asyncio.exceptions.TimeoutError:
                     break
-            self.assertEqual(len(foo_handlers), 1)
+            self.assertEqual(len(foo_handlers), 3)
             self.assertListEqual(received, [msg1, msg3])
             # The message should have been take()n, making a copy
             self.assertIsNot(msg1, received[0])
             # take() was called, so this should have been marked queued
             self.assertTrue(msg1.queued)
         # Leaving the block should have unsubscribed automatically
-        self.assertEqual(len(foo_handlers), 0)
+        self.assertEqual(len(foo_handlers), 2)
+        self.assertTrue(called.is_set())

     async def test_subscription_no_take(self):
         with self.message_handler.subscribe_async(("Foo",), take=False) as get_msg:
@@ -89,7 +89,7 @@ class _MutableMultiDictTests:
         d = create_instance()
         s = pickle.dumps(d, protocol)
         ud = pickle.loads(s)
-        assert type(ud) == type(d)
+        assert type(ud) is type(d)
        assert ud == d
         alternative = pickle.dumps(create_instance("werkzeug"), protocol)
         assert pickle.loads(alternative) == d
@@ -50,6 +50,8 @@ OBJECT_UPDATE = binascii.unhexlify(''.join(OBJECT_UPDATE.split()))

 COARSE_LOCATION_UPDATE = b'\x00\x00\x00\x00E\x00\xff\x06\x00\xff\xff\xff\xff\x00'

+UNKNOWN_PACKET = b'\x00\x00\x00\x00E\x00\xff\xf0\x00\xff\xff\xff\xff\x00'
+

 class TestPacketDecode(unittest.TestCase):

@@ -110,3 +112,12 @@ class TestPacketDecode(unittest.TestCase):
         parsed = deserializer.deserialize(message)
         logging.debug("Parsed blocks: %r " % (list(parsed.blocks.keys()),))
         self.assertEqual(message, serializer.serialize(parsed))
+
+    def test_unknown_packet_roundtrips(self):
+        message = UNKNOWN_PACKET
+        deserializer = UDPMessageDeserializer(settings=self.settings)
+        serializer = UDPMessageSerializer()
+        parsed = deserializer.deserialize(message)
+        logging.debug("Parsed blocks: %r " % (list(parsed.blocks.keys()),))
+        self.assertEqual("UnknownMessage:240", parsed.name)
+        self.assertEqual(message, serializer.serialize(parsed))
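A note on the expected name: after the 6-byte LLUDP header, an 0xff prefix marks a medium-frequency message and the next byte carries the message number; 0xf0 is 240, which has no template entry, so the deserializer synthesizes "UnknownMessage:240". Compare COARSE_LOCATION_UPDATE above, whose 0xff 0x06 decodes to medium-frequency message 6. Checking the bytes directly:

UNKNOWN_PACKET = b'\x00\x00\x00\x00E\x00\xff\xf0\x00\xff\xff\xff\xff\x00'
assert UNKNOWN_PACKET[6:8] == b'\xff\xf0'
assert UNKNOWN_PACKET[7] == 240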
@@ -6,6 +6,8 @@ import uuid
 from io import BytesIO
 from typing import Optional

+import numpy as np
+
 from hippolyzer.lib.base.datatypes import *
 import hippolyzer.lib.base.serialization as se
 from hippolyzer.lib.base.llanim import Animation, Joint, RotKeyframe
@@ -693,6 +695,46 @@ class NameValueSerializationTests(BaseSerializationTest):
         deser.to_dict()


+class NumPySerializationTests(BaseSerializationTest):
+    def setUp(self) -> None:
+        super().setUp()
+        self.writer.endianness = "<"
+
+    def test_simple(self):
+        quant_spec = se.Vector3U16(0.0, 1.0)
+        self.writer.write(quant_spec, Vector3(0, 0.1, 0))
+        self.writer.write(quant_spec, Vector3(1, 1, 1))
+
+        reader = self._get_reader()
+        np_spec = se.NumPyArray(se.BytesGreedy(), np.dtype(np.uint16), 3)
+        np_val = reader.read(np_spec)
+        expected_arr = np.array([[0, 6554, 0], [0xFFFF, 0xFFFF, 0xFFFF]], dtype=np.uint16)
+        np.testing.assert_array_equal(expected_arr, np_val)
+
+        # Make sure writing the array back works correctly
+        orig_buf = self.writer.copy_buffer()
+        self.writer.clear()
+        self.writer.write(np_spec, expected_arr)
+        self.assertEqual(orig_buf, self.writer.copy_buffer())
+
+    def test_quantization(self):
+        quant_spec = se.Vector3U16(0.0, 1.0)
+        self.writer.write(quant_spec, Vector3(0, 0.1, 0))
+        self.writer.write(quant_spec, Vector3(1, 1, 1))
+
+        reader = self._get_reader()
+        np_spec = se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), np.dtype(np.uint16), 3), 0.0, 1.0)
+        np_val = reader.read(np_spec)
+        expected_arr = np.array([[0, 0.1, 0], [1, 1, 1]], dtype=np.float64)
+        np.testing.assert_array_almost_equal(expected_arr, np_val, decimal=5)
+
+        # Make sure writing the array back works correctly
+        orig_buf = self.writer.copy_buffer()
+        self.writer.clear()
+        self.writer.write(np_spec, expected_arr)
+        self.assertEqual(orig_buf, self.writer.copy_buffer())
+
+
 class AnimSerializationTests(BaseSerializationTest):
     SIMPLE_ANIM = b'\x01\x00\x00\x00\x01\x00\x00\x00H\x11\xd1?\x00\x00\x00\x00\x00H\x11\xd1?\x00\x00\x00\x00' \
                   b'\xcd\xccL>\x9a\x99\x99>\x01\x00\x00\x00\x02\x00\x00\x00mNeck\x00\x01\x00\x00\x00\x03\x00' \
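The magic numbers in NumPySerializationTests above fall out of the usual U16 quantization scheme: map [lower, upper] linearly onto [0, 65535]. Written out (the helper names are illustrative):

lower, upper = 0.0, 1.0

def quantize_u16(v):
    return round((v - lower) / (upper - lower) * 0xFFFF)

def dequantize_u16(q):
    return lower + (q / 0xFFFF) * (upper - lower)

assert quantize_u16(0.1) == 6554        # the middle component of expected_arr
assert quantize_u16(1.0) == 0xFFFF      # saturates at the upper bound
assert abs(dequantize_u16(6554) - 0.1) < 1e-4

The round trip is lossy by about one part in 65535, which is why test_quantization compares with assert_array_almost_equal(decimal=5) rather than exact equality.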
@@ -23,13 +23,7 @@ import unittest
 from hippolyzer.lib.base.settings import Settings


-class TestEvents(unittest.TestCase):
-    def setUp(self):
-        pass
-
-    def tearDown(self):
-        pass
-
+class TestSettings(unittest.TestCase):
     def test_base_settings(self):
         settings = Settings()
         self.assertEqual(settings.ENABLE_DEFERRED_PACKET_PARSING, True)
tests/base/test_skeleton.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+import unittest
+
+import numpy as np
+
+from hippolyzer.lib.base.mesh_skeleton import load_avatar_skeleton
+
+
+class TestSkeleton(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.skeleton = load_avatar_skeleton()
+
+    def test_get_joint(self):
+        node = self.skeleton["mNeck"]
+        self.assertEqual("mNeck", node.name)
+        self.assertEqual(self.skeleton, node.skeleton())
+
+    def test_get_joint_index(self):
+        self.assertEqual(7, self.skeleton["mNeck"].index)
+        self.assertEqual(113, self.skeleton["mKneeLeft"].index)
+
+    def test_get_joint_parent(self):
+        self.assertEqual("mChest", self.skeleton["mNeck"].parent().name)
+
+    def test_get_joint_matrix(self):
+        expected_mat = np.array([
+            [1., 0., 0., -0.01],
+            [0., 1., 0., 0.],
+            [0., 0., 1., 0.251],
+            [0., 0., 0., 1.]
+        ])
+        np.testing.assert_equal(expected_mat, self.skeleton["mNeck"].matrix)
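The expected mNeck matrix in test_get_joint_matrix is a pure translation: identity rotation with the joint's local offset in the last column. Applying it to the origin in homogeneous coordinates recovers that offset:

import numpy as np

mat = np.array([
    [1., 0., 0., -0.01],
    [0., 1., 0., 0.],
    [0., 0., 1., 0.251],
    [0., 0., 0., 1.],
])
origin = np.array([0., 0., 0., 1.])
np.testing.assert_equal(mat @ origin, np.array([-0.01, 0., 0.251, 1.]))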
@@ -27,7 +27,7 @@ from hippolyzer.lib.base.message.data import msg_tmpl
 from hippolyzer.lib.base.message.template import MessageTemplate, MessageTemplateBlock, MessageTemplateVariable
 from hippolyzer.lib.base.message.template_dict import TemplateDictionary
 from hippolyzer.lib.base.message.template_parser import MessageTemplateParser
-from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgTrust, MsgEncoding, \
+from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgEncoding, \
     MsgDeprecation, MsgBlockType, MsgType


@@ -45,8 +45,8 @@ class TestDictionary(unittest.TestCase):
         msg_dict = TemplateDictionary(self.template_list)
         packet = msg_dict.get_template_by_name('ConfirmEnableSimulator')
         assert packet is not None, "get_packet failed"
-        assert packet.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE, "Incorrect frequency"
-        assert packet.msg_num == 8, "Incorrect message number for ConfirmEnableSimulator"
+        assert packet.frequency == MsgFrequency.MEDIUM, "Incorrect frequency"
+        assert packet.num == 8, "Incorrect message number for ConfirmEnableSimulator"

     def test_get_packet_pair(self):
         msg_dict = TemplateDictionary(self.template_list)
@@ -76,29 +76,29 @@ class TestTemplates(unittest.TestCase):
         template = self.msg_dict['CompletePingCheck']
         name = template.name
         freq = template.frequency
-        num = template.msg_num
-        trust = template.msg_trust
-        enc = template.msg_encoding
+        num = template.num
+        trust = template.trusted
+        enc = template.encoding
         assert name == 'CompletePingCheck', "Expected: CompletePingCheck Returned: " + name
-        assert freq == MsgFrequency.HIGH_FREQUENCY_MESSAGE, "Expected: High Returned: " + freq
+        assert freq == MsgFrequency.HIGH, "Expected: High Returned: " + freq
         assert num == 2, "Expected: 2 Returned: " + str(num)
-        assert trust == MsgTrust.LL_NOTRUST, "Expected: NotTrusted Returned: " + trust
-        assert enc == MsgEncoding.LL_UNENCODED, "Expected: Unencoded Returned: " + enc
+        assert not trust, "Expected: NotTrusted Returned: " + trust
+        assert enc == MsgEncoding.UNENCODED, "Expected: Unencoded Returned: " + enc

     def test_deprecated(self):
         template = self.msg_dict['ObjectPosition']
-        dep = template.msg_deprecation
-        assert dep == MsgDeprecation.LL_DEPRECATED, "Expected: Deprecated Returned: " + str(dep)
+        dep = template.deprecation
+        assert dep == MsgDeprecation.DEPRECATED, "Expected: Deprecated Returned: " + str(dep)

     def test_template_fixed(self):
         template = self.msg_dict['PacketAck']
-        num = template.msg_num
+        num = template.num
         assert num == 251, "Expected: 251 Returned: " + str(num)

     def test_blacklisted(self):
         template = self.msg_dict['TeleportFinish']
-        self.assertEqual(template.msg_deprecation,
-                         MsgDeprecation.LL_UDPBLACKLISTED)
+        self.assertEqual(template.deprecation,
+                         MsgDeprecation.UDPBLACKLISTED)

     def test_block(self):
         block = self.msg_dict['OpenCircuit'].get_block('CircuitInfo')
@@ -167,7 +167,7 @@ class TestTemplates(unittest.TestCase):

         frequency_counter = {"low": 0, 'medium': 0, "high": 0, 'fixed': 0}
         for template in list(self.msg_dict.message_templates.values()):
-            frequency_counter[template.get_frequency_as_string()] += 1
+            frequency_counter[template.frequency.name.lower()] += 1
         self.assertEqual(low_count, frequency_counter["low"])
         self.assertEqual(medium_count, frequency_counter["medium"])
         self.assertEqual(high_count, frequency_counter["high"])
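The frequency-counter rewrite relies on MsgFrequency being an Enum whose member names mirror the counter keys, which is what lets `.name.lower()` replace the old get_frequency_as_string() helper. Roughly (member values here are placeholders, not the real ones):

import enum

class MsgFrequency(enum.Enum):
    LOW = enum.auto()
    MEDIUM = enum.auto()
    HIGH = enum.auto()
    FIXED = enum.auto()

counter = {"low": 0, "medium": 0, "high": 0, "fixed": 0}
counter[MsgFrequency.MEDIUM.name.lower()] += 1
assert counter["medium"] == 1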
@@ -4,9 +4,9 @@ import unittest
 from typing import *

 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.helpers import create_logged_task
 from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.message.message_handler import MessageHandler
-from hippolyzer.lib.base.message.circuit import ConnectionHolder
 from hippolyzer.lib.base.templates import (
     AssetType,
     EstateAssetType,
@@ -16,26 +16,10 @@ from hippolyzer.lib.base.templates import (
     TransferTargetType,
     TransferStatus,
 )
-from hippolyzer.lib.proxy.circuit import ProxiedCircuit
 from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.base.transfer_manager import TransferManager, Transfer
 from hippolyzer.lib.base.xfer_manager import XferManager
-
-
-class MockHandlingCircuit(ProxiedCircuit):
-    def __init__(self, handler: MessageHandler[Message, str]):
-        super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
-        self.handler = handler
-
-    def _send_prepared_message(self, message: Message, transport=None):
-        loop = asyncio.get_event_loop_policy().get_event_loop()
-        loop.call_soon(self.handler.handle, message)
-
-
-class MockConnectionHolder(ConnectionHolder):
-    def __init__(self, circuit, message_handler):
-        self.circuit = circuit
-        self.message_handler = message_handler
+from hippolyzer.lib.base.test_utils import MockHandlingCircuit, MockConnectionHolder


 class BaseTransferTests(unittest.IsolatedAsyncioTestCase):
@@ -78,7 +62,7 @@ class XferManagerTests(BaseTransferTests):
         ))

     async def test_small_xfer_upload(self):
-        asyncio.create_task(self._handle_vfile_upload())
+        _ = create_logged_task(self._handle_vfile_upload())
         await asyncio.wait_for(self.xfer_manager.upload_asset(
             AssetType.BODYPART, self.SMALL_PAYLOAD
         ), timeout=0.1)
@@ -86,7 +70,7 @@ class XferManagerTests(BaseTransferTests):

     async def test_large_xfer_upload(self):
         # Larger payloads take a different path
-        asyncio.create_task(self._handle_vfile_upload())
+        _ = create_logged_task(self._handle_vfile_upload())
         await asyncio.wait_for(self.xfer_manager.upload_asset(
             AssetType.BODYPART, self.LARGE_PAYLOAD
         ), timeout=0.1)
@@ -142,7 +126,7 @@ class TestTransferManager(BaseTransferTests):
             packet_num += 1

     async def test_simple_transfer(self):
-        asyncio.create_task(self._handle_covenant_download())
+        _ = create_logged_task(self._handle_covenant_download())
         transfer: Transfer = await asyncio.wait_for(self.transfer_manager.request(
             source_type=TransferSourceType.SIM_ESTATE,
             params=TransferRequestParamsSimEstate(
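The asyncio.create_task() to create_logged_task() swap addresses a classic pitfall: a fire-and-forget task whose reference is dropped can be garbage-collected mid-flight, and any exception it raises is reported late or not at all. A plausible shape for the helper (an assumption; hippolyzer's actual implementation may differ):

import asyncio
import logging

def create_logged_task(coro, name=None):
    task = asyncio.get_running_loop().create_task(coro, name=name)

    def _log_failure(t):
        # Surface failures from fire-and-forget tasks instead of losing them.
        if not t.cancelled() and t.exception() is not None:
            logging.error("Task %r failed", t, exc_info=t.exception())

    task.add_done_callback(_log_failure)
    return task

The `_ =` binding in the tests also keeps a reference alive for the duration of the call, sidestepping premature garbage collection.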
tests/client/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+from typing import Mapping, Optional
+
+import multidict
+
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.message.message_handler import MessageHandler
+from hippolyzer.lib.base.network.caps_client import CapsClient
+from hippolyzer.lib.base.test_utils import MockHandlingCircuit
+from hippolyzer.lib.client.hippo_client import ClientSettings
+from hippolyzer.lib.client.object_manager import ClientWorldObjectManager
+from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession, BaseClientSessionManager
+
+
+class MockClientRegion(BaseClientRegion):
+    def __init__(self, caps_urls: Optional[dict] = None):
+        super().__init__()
+        self.handle = None
+        self.circuit_addr = ("127.0.0.1", 1)
+        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
+        self.circuit = MockHandlingCircuit(self.message_handler)
+        self._name = "Test"
+        self.cap_urls = multidict.MultiDict()
+        if caps_urls:
+            self.cap_urls.update(caps_urls)
+        self.caps_client = CapsClient(self.cap_urls)
+
+    def session(self):
+        return MockClientSession(UUID.ZERO, UUID.ZERO, UUID.ZERO, 0, None)
+
+    def update_caps(self, caps: Mapping[str, str]) -> None:
+        pass
+
+
+class MockClientSession(BaseClientSession):
+    def __init__(self, id, secure_session_id, agent_id, circuit_code,
+                 session_manager: Optional[BaseClientSessionManager]):
+        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager)
+        self.objects = ClientWorldObjectManager(self, ClientSettings(), None)
Some files were not shown because too many files have changed in this diff.