Compare commits
125 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 6f87ec8725 | |
| | fb885d8eec | |
| | 78281ed12b | |
| | 4087eaa3c6 | |
| | 32428941d7 | |
| | 0cc3397402 | |
| | 0c2dfd3213 | |
| | e119181e3f | |
| | 64c7265578 | |
| | eb652152f5 | |
| | cd03dd4fdd | |
| | 056e142347 | |
| | 927a353dec | |
| | bc68eeb7d2 | |
| | de79f42aa6 | |
| | e138ae88a1 | |
| | e20a4a01ad | |
| | a2b49fdc44 | |
| | 988a82179e | |
| | 4eb97b5958 | |
| | 4962d8e7bf | |
| | a652779cc5 | |
| | d7092e7733 | |
| | 8b5a7ebecf | |
| | 8effd431a6 | |
| | 22fb44ef28 | |
| | c8dc67ea37 | |
| | 0dbba40fe1 | |
| | 97e567be77 | |
| | 76216ee390 | |
| | c60c2819ac | |
| | 7cbef457cf | |
| | 4916bdc543 | |
| | bb0e88e460 | |
| | 46e598cded | |
| | ce130c4831 | |
| | b6ac988601 | |
| | c8dbbef8fc | |
| | a974f167d1 | |
| | 2d3b3daf10 | |
| | 1d54c70164 | |
| | 6dafe32f6a | |
| | 3149d3610f | |
| | f8f3bcfc36 | |
| | 8548cce4e5 | |
| | ad2aca1803 | |
| | 8cf500ce44 | |
| | ceda7f370e | |
| | 0692a10253 | |
| | c1c2a96295 | |
| | b4be9fa757 | |
| | a8967f0b7d | |
| | 10af5cc250 | |
| | 0ea1b0324e | |
| | 4ece6efe60 | |
| | 15bc8e0ed2 | |
| | 33fad6339f | |
| | 93916104db | |
| | 3bb4fb0640 | |
| | c9495763e5 | |
| | a7825a881c | |
| | a6bbd97b98 | |
| | 3500212da0 | |
| | 01ea9d7879 | |
| | f19e1b8bfb | |
| | f2202556d7 | |
| | 5a5b471fe4 | |
| | ff0f20d1dd | |
| | 4898c852c1 | |
| | adf5295e2b | |
| | 7514baaa5f | |
| | 0ba1a779ef | |
| | 3ea8a27914 | |
| | 2451ad3674 | |
| | 25804df238 | |
| | 474173ba54 | |
| | 049a3b703f | |
| | ac77fde892 | |
| | 6ee9b22923 | |
| | f355138cd2 | |
| | 478d135d1f | |
| | 80c9acdabe | |
| | d4eaa7c543 | |
| | 2571550da4 | |
| | b3ee3a3506 | |
| | 11feccd93b | |
| | bb6ce5c013 | |
| | a35aa9046e | |
| | 6c32da878d | |
| | 49c54bc896 | |
| | 4c9fa38ffb | |
| | 2856e78f16 | |
| | 33884925f4 | |
| | a11ef96d9a | |
| | 7b6239d66a | |
| | 2c3bd140ff | |
| | 9d2087a0fb | |
| | 67db8110a1 | |
| | ab1c56ff3e | |
| | 142f2e42ca | |
| | e7764c1665 | |
| | 582cfea47c | |
| | 6f38d84a1c | |
| | 1fc46e66bc | |
| | 167673aa08 | |
| | 5ad8ee986f | |
| | e9d7ee7e8e | |
| | d21c3ec004 | |
| | 01c6931d53 | |
| | 493563bb6f | |
| | ca5c71402b | |
| | ad765a1ede | |
| | 9adee14e0f | |
| | 57c4bd0e7c | |
| | 1085dbc8ab | |
| | fb9740003e | |
| | 087f16fbc5 | |
| | fa96e80590 | |
| | 539d38fb4a | |
| | caaf0b0e13 | |
| | 16958e516d | |
| | 74e4e0c4ec | |
| | 3efeb46500 | |
| | 0f2e933be1 | |
| | a7f40b0d15 | |
.github/workflows/bundle_windows.yml (26 changes)
```diff
@@ -1,5 +1,3 @@
-# Have to manually unzip this (it gets double zipped) and add it
-# onto the release after it gets created. Don't want actions with repo write.
 name: Bundle Windows EXE
@@ -9,23 +7,30 @@ on:
   types:
     - created
+  workflow_dispatch:
+    inputs:
+      ref_name:
+        description: Name to use for the release

 env:
-  target_tag: ${{ github.ref_name }}
+  target_tag: ${{ github.ref_name || github.event.inputs.ref_name }}
+  sha: ${{ github.sha || github.event.inputs.ref_name }}

 jobs:
   build:

-    runs-on: windows-2019
+    runs-on: windows-2022
+    permissions:
+      contents: write
    strategy:
       matrix:
-        python-version: [3.9]
+        python-version: ["3.12"]

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Get history and tags for SCM versioning to work
         run: |
           git fetch --prune --unshallow
           git fetch --depth=1 origin +refs/tags/*:refs/tags/*
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v2
         with:
@@ -35,7 +40,7 @@ jobs:
         run: |
           python -m pip install --upgrade pip
           pip install -r requirements.txt
-          pip install -e .
+          pip install -e .[gui]
           pip install cx_freeze

       - name: Bundle with cx_Freeze
@@ -49,12 +54,13 @@ jobs:
           mv ./dist/*.zip hippolyzer-windows-${{ env.target_tag }}.zip

       - name: Upload the artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
-          name: hippolyzer-windows-${{ github.sha }}
+          name: hippolyzer-windows-${{ env.sha }}
           path: ./hippolyzer-windows-${{ env.target_tag }}.zip

       - uses: ncipollo/release-action@v1.10.0
+        if: github.event_name != 'workflow_dispatch'
         with:
           artifacts: hippolyzer-windows-${{ env.target_tag }}.zip
           tag: ${{ env.target_tag }}
```
.github/workflows/pypi_publish.yml (14 changes)
```diff
@@ -16,18 +16,22 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
+      - name: Get history and tags for SCM versioning to work
+        run: |
+          git fetch --prune --unshallow
+          git fetch --depth=1 origin +refs/tags/*:refs/tags/*
       - uses: actions/setup-python@v2
         with:
-          python-version: 3.9
+          python-version: "3.12"
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip setuptools wheel
+          python -m pip install --upgrade pip setuptools wheel build
           if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
       - name: Build
         run: >-
-          python setup.py sdist bdist_wheel
+          python -m build
       # We do this, since failures on test.pypi aren't that bad
       - name: Publish to Test PyPI
         if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
@@ -36,6 +40,7 @@ jobs:
           user: __token__
           password: ${{ secrets.TEST_PYPI_API_TOKEN }}
           repository_url: https://test.pypi.org/legacy/
+          attestations: false

       - name: Publish to PyPI
         if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
@@ -43,3 +48,4 @@ jobs:
         with:
           user: __token__
           password: ${{ secrets.PYPI_API_TOKEN }}
+          attestations: false
```
.github/workflows/pytest.yml (11 changes)
```diff
@@ -14,11 +14,14 @@ jobs:
     runs-on: ubuntu-latest
    strategy:
       matrix:
-        python-version: ["3.8", "3.11"]
+        python-version: ["3.12", "3.13"]

     steps:
-      - uses: actions/checkout@v2
-
+      - uses: actions/checkout@v4
+      - name: Get history and tags for SCM versioning to work
+        run: |
+          git fetch --prune --unshallow
+          git fetch --depth=1 origin +refs/tags/*:refs/tags/*
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v2
         with:
@@ -30,7 +33,7 @@ jobs:
           pip install -r requirements.txt
           pip install -r requirements-test.txt
           sudo apt-get install libopenjp2-7
-          pip install -e .
+          pip install -e .[gui]
       - name: Run Flake8
         run: |
           flake8 .
```
````diff
@@ -27,7 +27,7 @@ with low-level SL details. See the [Local Animation addon example](https://githu

 ### From Source

-* Python 3.8 or above is **required**. If you're unable to upgrade your system Python package due to
+* Python 3.12 or above is **required**. If you're unable to upgrade your system Python package due to
   being on a stable distro, you can use [pyenv](https://github.com/pyenv/pyenv) to create
   a self-contained Python install with the appropriate version.
 * [Create a clean Python 3 virtualenv](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment)
@@ -35,7 +35,9 @@ with low-level SL details. See the [Local Animation addon example](https://githu
 * Activate the virtualenv by running the appropriate activation script
 * * Under Linux this would be something like `source <virtualenv_dir>/bin/activate`
 * * Under Windows it's `<virtualenv_dir>\Scripts\activate.bat`
-* Run `pip install hippolyzer`, or run `pip install -e .` in a cloned repo to install an editable version
+* Run `pip install hippolyzer[gui]` for a full install, or run `pip install -e .[gui]` in a cloned repo to install an editable version
+* * If you only want the core library without proxy or GUI support, use `pip install hippolyzer` or `pip install -e .`
+* * If you only want proxy/CLI support without the GUI, use `pip install hippolyzer[proxy]` or `pip install -e .[proxy]`

 ### Binary Windows Builds

@@ -325,7 +327,7 @@ The REPL is fully async aware and allows awaiting events without blocking:

 ```python
 >>> from hippolyzer.lib.client.object_manager import ObjectUpdateType
->>> evt = await session.objects.events.wait_for((ObjectUpdateType.OBJECT_UPDATE,), timeout=2.0)
+>>> evt = await session.objects.events.wait_for((ObjectUpdateType.UPDATE,), timeout=2.0)
 >>> evt.updated
 {'Position'}
 ```
````
addon_examples/anim_tracker.py (new file, 125 lines)
```python
"""
Debugger for detecting when animations within an object get started or stopped

Useful for tracking down animation sequence-related bugs within your LSL scripts,
or debugging automatic animation stopping behavior in the viewer.

If an animation unexpectedly stops and nobody requested it be stopped, it's a potential viewer bug (or priority issue).
If an animation unexpectedly stops and the viewer requested it be stopped, it's also a potential viewer bug.
If an animation unexpectedly stops and only the server requested it be stopped, it's a potential script / server bug.
"""

from typing import *

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.addon_utils import show_message


class AnimTrackerAddon(BaseAddon):
    should_track_anims: bool = SessionProperty(False)
    anims_lookup: Dict[UUID, str] = SessionProperty(dict)
    last_tracker_anims: Set[UUID] = SessionProperty(set)

    def _format_anim_diffs(self, started_anims: Set[UUID], stopped_anims: Set[UUID]):
        added_strs = [f"+{self.anims_lookup[x]!r}" for x in started_anims]
        removed_strs = [f"-{self.anims_lookup[x]!r}" for x in stopped_anims]

        return ", ".join(removed_strs + added_strs)

    @handle_command()
    async def track_anims(self, session: Session, region: ProxiedRegion):
        """Track when animations within this object get started or stopped"""
        if self.should_track_anims:
            self.last_tracker_anims.clear()
            self.anims_lookup.clear()

        selected = region.objects.lookup_localid(session.selected.object_local)
        if not selected:
            return

        self.should_track_anims = True

        object_items = await region.objects.request_object_inv(selected)

        anims: Dict[UUID, str] = {}
        for item in object_items:
            if item.type != AssetType.ANIMATION:
                continue
            anims[item.true_asset_id] = item.name

        self.anims_lookup = anims

    @handle_command()
    async def stop_tracking_anims(self, _session: Session, _region: ProxiedRegion):
        """Stop reporting differences"""
        if self.should_track_anims:
            self.should_track_anims = False
            self.last_tracker_anims.clear()
            self.anims_lookup.clear()

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if not self.should_track_anims:
            return

        if message.name != "AgentAnimation" or message.direction != Direction.OUT:
            # AgentAnimation is the message the viewer uses to request manually starting or stopping animations.
            # We don't care about other messages, we're just interested in distinguishing cases where the viewer
            # specifically requested something vs something being done by the server on its own.
            return
        av = session.objects.lookup_avatar(session.agent_id)
        if not av or not av.Object:
            print("Somehow didn't know about our own av object?")
            return

        current_anims = set([x for x in av.Object.Animations if x in self.anims_lookup])
        started_anims: Set[UUID] = set()
        stopped_anims: Set[UUID] = set()

        for block in message["AnimationList"]:
            anim_id = block["AnimID"]
            if anim_id not in self.anims_lookup:
                continue

            start_anim = block["StartAnim"]
            already_started = anim_id in current_anims
            if start_anim == already_started:
                # No change
                continue

            if start_anim:
                started_anims.add(anim_id)
            else:
                stopped_anims.add(anim_id)

        if started_anims or stopped_anims:
            show_message("Viewer Requested Anims: " + self._format_anim_diffs(started_anims, stopped_anims))

    def handle_object_updated(self, session: Session, region: ProxiedRegion,
                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
        if not self.should_track_anims:
            return
        if obj.FullID != session.agent_id:
            return
        if "Animations" not in updated_props:
            return

        current_anims = set([x for x in obj.Animations if x in self.anims_lookup])
        started_anims = current_anims - self.last_tracker_anims
        stopped_anims = self.last_tracker_anims - current_anims

        self.last_tracker_anims.clear()
        self.last_tracker_anims.update(current_anims)

        if started_anims or stopped_anims:
            show_message("Anim Diffs: " + self._format_anim_diffs(started_anims, stopped_anims))


addons = [AnimTrackerAddon()]
```
addon_examples/appearance_delay_tracker.py (new file, 94 lines)
```python
"""
Try and diagnose very slow avatar appearance loads when the avatars first come on the scene

I guess use LEAP or something to detect when things _actually_ declouded.
"""
from typing import *

import dataclasses
import datetime as dt

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.base.templates import PCode
from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager


@dataclasses.dataclass
class AvatarBakeRequest:
    requested: dt.datetime
    received: Optional[dt.datetime] = None


@dataclasses.dataclass
class AvatarAppearanceRecord:
    object_received: dt.datetime
    """When we learned about the agent as an object"""
    appearance_received: Optional[dt.datetime] = None
    """When AvatarAppearance was first received"""
    bake_requests: Dict[str, AvatarBakeRequest] = dataclasses.field(default_factory=dict)
    """Layer name -> request / response details"""


class AppearanceDelayTrackerAddon(BaseAddon):
    # Should be able to access this in the REPL.
    # Normally we'd use a session property, but we may not have a proper session context for some requests.
    av_appearance_data: Dict[UUID, AvatarAppearanceRecord] = GlobalProperty(dict)

    def handle_object_updated(self, session: Session, region: ProxiedRegion,
                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
        if obj.PCode == PCode.AVATAR and obj.FullID not in self.av_appearance_data:
            self.av_appearance_data[obj.FullID] = AvatarAppearanceRecord(object_received=dt.datetime.now())

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.name != "AvatarAppearance":
            return
        agent_id = message["Sender"]["ID"]
        appearance_data = self.av_appearance_data.get(agent_id)
        if not appearance_data:
            print(f"Got appearance for {agent_id} without knowing about object?")
            return

        if appearance_data.appearance_received:
            return
        appearance_data.appearance_received = dt.datetime.now()

    def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
        if not flow.cap_data:
            return
        if flow.cap_data.cap_name != "AppearanceService":
            return

        agent_id = UUID(flow.request.url.split('/')[-3])
        slot_name = flow.request.url.split('/')[-2]
        appearance_data = self.av_appearance_data.get(agent_id)
        if not appearance_data:
            print(f"Got AppearanceService req for {agent_id} without knowing about object?")
            return
        if slot_name in appearance_data.bake_requests:
            # We already requested this slot before
            return
        appearance_data.bake_requests[slot_name] = AvatarBakeRequest(requested=dt.datetime.now())

    def handle_http_response(self, session_manager: SessionManager, flow: HippoHTTPFlow):
        if not flow.cap_data:
            return
        if flow.cap_data.cap_name != "AppearanceService":
            return

        agent_id = UUID(flow.request.url.split('/')[-3])
        slot_name = flow.request.url.split('/')[-2]
        appearance_data = self.av_appearance_data.get(agent_id)
        if not appearance_data:
            return
        slot_details = appearance_data.bake_requests.get(slot_name)
        if not slot_details:
            return
        slot_details.received = dt.datetime.now()


addons = [AppearanceDelayTrackerAddon()]
```
addon_examples/create_shape.py (new file, 44 lines)
```python
"""
Demonstrates item creation as well as bodypart / clothing upload
"""

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.templates import WearableType, Permissions
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class ShapeCreatorAddon(BaseAddon):
    @handle_command()
    async def create_shape(self, session: Session, region: ProxiedRegion):
        """Make a shape with pre-set parameters and place it in the body parts folder"""

        wearable = Wearable.make_default(WearableType.SHAPE)
        # Max out the jaw jut param
        jaw_param = VISUAL_PARAMS.by_name("Jaw Jut")
        wearable.parameters[jaw_param.id] = jaw_param.value_max
        wearable.name = "Cool Shape"

        # A unique transaction ID is needed to tie the item creation to the following asset upload.
        transaction_id = UUID.random()
        item = await session.inventory.create_item(
            UUID.ZERO,  # This will place it in the default folder for the type
            name=wearable.name,
            type=wearable.wearable_type.asset_type,
            inv_type=wearable.wearable_type.asset_type.inventory_type,
            wearable_type=wearable.wearable_type,
            next_mask=Permissions.MOVE | Permissions.MODIFY | Permissions.COPY | Permissions.TRANSFER,
            transaction_id=transaction_id,
        )
        print(f"Created {item!r}")
        await region.xfer_manager.upload_asset(
            wearable.wearable_type.asset_type,
            wearable.to_str(),
            transaction_id=transaction_id,
        )


addons = [ShapeCreatorAddon()]
```
```diff
@@ -17,7 +17,7 @@ from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.inventory import InventoryModel, InventoryObject
 from hippolyzer.lib.base.message.message import Message, Block
-from hippolyzer.lib.base.templates import XferFilePath
+from hippolyzer.lib.base.templates import XferFilePath, AssetType
 from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.webapp_cap_addon import WebAppCapAddon

@@ -64,7 +64,7 @@ async def get_task_inventory():
             InventoryObject(
                 name="Contents",
                 parent_id=UUID.ZERO,
-                type="category",
+                type=AssetType.CATEGORY,
                 obj_id=obj_id
             ).to_llsd()
         ],
```
```diff
@@ -8,7 +8,7 @@ applied to the mesh before upload.
 I personally use manglers to strip bounding box materials you need
 to add to give a mesh an arbitrary center of rotation / scaling.
 """
-
+from hippolyzer.lib.base.helpers import reorient_coord
 from hippolyzer.lib.base.mesh import MeshAsset
 from hippolyzer.lib.proxy.addons import AddonManager

@@ -16,23 +16,8 @@ import local_mesh
 AddonManager.hot_reload(local_mesh, require_addons_loaded=True)


-def _reorient_coord(coord, orientation, normals=False):
-    coords = []
-    for axis in orientation:
-        axis_idx = abs(axis) - 1
-        if normals:
-            # Normals have a static domain from -1.0 to 1.0, just negate.
-            new_coord = coord[axis_idx] if axis >= 0 else -coord[axis_idx]
-        else:
-            new_coord = coord[axis_idx] if axis >= 0 else 1.0 - coord[axis_idx]
-        coords.append(new_coord)
-    if coord.__class__ in (list, tuple):
-        return coord.__class__(coords)
-    return coord.__class__(*coords)
-
-
-def _reorient_coord_list(coord_list, orientation, normals=False):
-    return [_reorient_coord(x, orientation, normals) for x in coord_list]
+def _reorient_coord_list(coord_list, orientation, min_val: int | float = 0):
+    return [reorient_coord(x, orientation, min_val) for x in coord_list]


 def reorient_mesh(orientation):
@@ -47,7 +32,7 @@ def reorient_mesh(orientation):
         # flipping the axes around.
         material["Position"] = _reorient_coord_list(material["Position"], orientation)
         # Are you even supposed to do this to the normals?
-        material["Normal"] = _reorient_coord_list(material["Normal"], orientation, normals=True)
+        material["Normal"] = _reorient_coord_list(material["Normal"], orientation, min_val=-1)
         return mesh
     return _reorienter
```
addon_examples/packet_stats.py (new file, 21 lines)
```python
import collections

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


class PacketStatsAddon(BaseAddon):
    packet_stats: collections.Counter = GlobalProperty(collections.Counter)

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        self.packet_stats[message.name] += 1

    @handle_command()
    async def print_packet_stats(self, _session: Session, _region: ProxiedRegion):
        print(self.packet_stats.most_common(10))


addons = [PacketStatsAddon()]
```
```diff
@@ -72,14 +72,13 @@ class PixelArtistAddon(BaseAddon):
         # Watch for any newly created prims, this is basically what the viewer does to find
         # prims that it just created with the build tool.
         with session.objects.events.subscribe_async(
-                (ObjectUpdateType.OBJECT_UPDATE,),
+                (ObjectUpdateType.UPDATE,),
                 predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated
         ) as get_events:
             # Create a pool of prims to use for building the pixel art
             for _ in range(needed_prims):
-                # TODO: We don't track the land group or user's active group, so
-                #  "anyone can build" must be on for rezzing to work.
-                group_id = UUID()
+                # TODO: Can't get land group atm, just tries to rez with the user's active group
+                group_id = session.active_group
                 region.circuit.send(Message(
                     'ObjectAdd',
                     Block('AgentData', AgentID=session.agent_id, SessionID=session.id, GroupID=group_id),
```
addon_examples/rlv_at_home.py (new file, 53 lines)
```python
"""
You don't need RLV, we have RLV at home.

RLV at home:
"""

from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ChatType
from hippolyzer.lib.proxy.addon_utils import BaseAddon, send_chat
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


def send_rlv_chat(channel: int, message: str):
    # We always shout.
    send_chat(channel=channel, message=message, chat_type=ChatType.NORMAL)


class RLVAtHomeAddon(BaseAddon):
    """
    Addon for pretending to be an RLV-enabled viewer

    Useful if you want only a specific subset of RLV and don't want everything RLV normally allows,
    or want to override some RLV builtins.
    """
    def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
                           behaviour: str, options: List[str], param: str) -> bool | None:
        # print(behaviour, options, param)
        if behaviour == "clear":
            return True
        elif behaviour in ("versionnum", "versionnew", "version"):
            # People tend to just check that this returned anything at all. Just say we're 2.0.0 for all of these.
            send_rlv_chat(int(param), "2.0.0")
            return True
        elif behaviour == "getinv":
            # Pretend we don't have anything
            send_rlv_chat(int(param), "")
            return True
        elif behaviour == "sit":
            # Sure, we can sit on stuff, whatever.
            region.circuit.send(Message(
                'AgentRequestSit',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                Block('TargetObject', TargetID=UUID(options[0]), Offset=(0, 0, 0)),
            ))
            return True
        return None


addons = [RLVAtHomeAddon()]
```
```diff
@@ -3,6 +3,7 @@ A simple client that just says hello to people
 """

 import asyncio
+import pprint
 from contextlib import aclosing
 import os

@@ -19,7 +20,7 @@ async def amain():
             return
         if message["ChatData"]["SourceType"] != ChatSourceType.AGENT:
             return
-        if "hello" not in str(message["ChatData"]["Message"]).lower():
+        if "hello" not in message["ChatData"]["Message"].lower():
             return
         await client.send_chat(f'Hello {message["ChatData"]["FromName"]}!', chat_type=ChatType.SHOUT)

@@ -30,9 +31,13 @@ async def amain():
         start_location=os.environ.get("HIPPO_START_LOCATION", "last"),
     )
     print("I'm here")
+
+    # Wait until we have details about parcels and print them
+    await client.main_region.parcel_manager.parcels_downloaded.wait()
+    pprint.pprint(client.main_region.parcel_manager.parcels)
     await client.send_chat("Hello World!", chat_type=ChatType.SHOUT)
     client.session.message_handler.subscribe("ChatFromSimulator", _respond_to_chat)

     # Example of how to work with caps
     async with client.main_caps_client.get("SimulatorFeatures") as features_resp:
         print("Features:", await features_resp.read_llsd())
```
```diff
@@ -77,6 +77,15 @@ class SelectionManagerAddon(BaseAddon):
             selected.task_item = parsed["item-id"]


+class AgentUpdaterAddon(BaseAddon):
+    def handle_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
+        if event['message'] != 'AgentGroupDataUpdate':
+            return
+        session.groups.clear()
+        for group in event['body']['GroupData']:
+            session.groups.add(group['GroupID'])
+
+
 class REPLAddon(BaseAddon):
     @handle_command()
     async def spawn_repl(self, session: Session, region: ProxiedRegion):
@@ -103,6 +112,7 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     extra_addon_paths = extra_addon_paths or []
     extra_addons.append(SelectionManagerAddon())
     extra_addons.append(REPLAddon())
+    extra_addons.append(AgentUpdaterAddon())

     root_log = logging.getLogger()
     root_log.addHandler(logging.StreamHandler())
```
```diff
@@ -24,7 +24,7 @@ from hippolyzer.apps.model import MessageLogModel, MessageLogHeader, RegionListM
 from hippolyzer.apps.proxy import start_proxy
 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename
+from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename, create_logged_task
 from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
 from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.message.message_formatting import (
@@ -234,7 +234,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         "ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
         "ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
         "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
-        "AgentFOV".split(" ")
+        "AgentFOV GenericStreamingMessage".split(" ")
     DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"

     textRequest: QtWidgets.QTextEdit
@@ -576,7 +576,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         message_names = sorted(x.name for x in self.templateDict)

         for message_name in message_names:
-            if self.templateDict[message_name].msg_trust:
+            if self.templateDict[message_name].trusted:
                 self.comboTrusted.addItem(message_name)
             else:
                 self.comboUntrusted.addItem(message_name)
@@ -719,7 +719,9 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         transport = None
         off_circuit = self.checkOffCircuit.isChecked()
         if off_circuit:
-            transport = SocketUDPTransport(socket.socket(socket.AF_INET, socket.SOCK_DGRAM))
+            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+            sock.bind(("0.0.0.0", 0))
+            transport = SocketUDPTransport(sock)
         region.circuit.send(msg, transport=transport)
         if off_circuit:
             transport.close()
@@ -826,7 +828,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
             # enough for the full response to pass through the proxy
             await resp.read()

-        asyncio.create_task(_send_request())
+        create_logged_task(_send_request(), "Send HTTP Request")


 class AddonDialog(QtWidgets.QDialog):
```
```diff
@@ -3,10 +3,12 @@ Assorted utilities to make creating animations from scratch easier
 """

 import copy
-from typing import List, Union
+from typing import List, Union, Mapping

 from hippolyzer.lib.base.datatypes import Vector3, Quaternion
-from hippolyzer.lib.base.llanim import PosKeyframe, RotKeyframe
+from hippolyzer.lib.base.llanim import PosKeyframe, RotKeyframe, JOINTS_DICT, Joint
+from hippolyzer.lib.base.mesh_skeleton import AVATAR_SKELETON
+from hippolyzer.lib.base.multidict import OrderedMultiDict


 def smooth_step(t: float):
@@ -89,3 +91,35 @@ def smooth_rot(start: Quaternion, end: Quaternion, inter_frames: int, time: floa
         smooth_t = smooth_step(t)
         frames.append(RotKeyframe(time=time + (t * duration), rot=rot_interp(start, end, smooth_t)))
     return frames + [RotKeyframe(time=time + duration, rot=end)]
+
+
+def mirror_joints(joints_dict: Mapping[str, Joint]) -> JOINTS_DICT:
+    """Mirror a joints dict so left / right are swapped, including transformations"""
+    new_joints: JOINTS_DICT = OrderedMultiDict()
+
+    for joint_name, joint in joints_dict.items():
+        inverse_joint_node = AVATAR_SKELETON[joint_name].inverse
+        if not inverse_joint_node:
+            new_joints[joint_name] = joint
+            continue
+
+        # Okay, this is one we have to actually mirror
+        new_joint = Joint(joint.priority, [], [])
+
+        for rot_keyframe in joint.rot_keyframes:
+            new_joint.rot_keyframes.append(RotKeyframe(
+                time=rot_keyframe.time,
+                # Just need to mirror on yaw and roll
+                rot=Quaternion.from_euler(*(rot_keyframe.rot.to_euler() * Vector3(-1, 1, -1)))
+            ))
+
+        for pos_keyframe in joint.pos_keyframes:
+            new_joint.pos_keyframes.append(PosKeyframe(
+                time=pos_keyframe.time,
+                # Y is left / right so just negate it.
+                pos=pos_keyframe.pos * Vector3(1, -1, 1)
+            ))
+
+        new_joints[inverse_joint_node.name] = new_joint
+
+    return new_joints
```
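Because `mirror_joints()` returns a fresh dict keyed by the opposite-side joint names, a symmetric animation only needs one side authored by hand. A hedged sketch of combining both halves (`left_arm_joints` is a hypothetical joints dict keyed by left-side joint names; item assignment on `OrderedMultiDict` is used the same way `mirror_joints()` itself uses it):

```python
# Mirror the hand-authored left side onto the right, then merge the halves.
right_arm_joints = mirror_joints(left_arm_joints)

all_joints: JOINTS_DICT = OrderedMultiDict()
for name, joint in list(left_arm_joints.items()) + list(right_arm_joints.items()):
    all_joints[name] = joint
```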
```diff
@@ -304,6 +304,9 @@ class JankStringyBytes(bytes):
     def __str__(self):
         return self.rstrip(b"\x00").decode("utf8", errors="replace")

+    def __bool__(self):
+        return not (super().__eq__(b"") or super().__eq__(b"\x00"))
+
     def __eq__(self, other):
         if isinstance(other, str):
             return str(self) == other
@@ -317,6 +320,36 @@ class JankStringyBytes(bytes):
             return item in str(self)
         return item in bytes(self)

+    def __add__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(bytes(self) + other)
+        return str(self) + other
+
+    def __radd__(self, other):
+        if isinstance(other, bytes):
+            return JankStringyBytes(other + bytes(self))
+        return other + str(self)
+
+    def lower(self):
+        return str(self).lower()
+
+    def upper(self):
+        return str(self).upper()
+
+    def startswith(self, __prefix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__prefix, str):
+            return str(self).startswith(__prefix)
+        # Must go through bytes() to avoid infinitely recursing into this override
+        return bytes(self).startswith(__prefix)
+
+    def endswith(self, __suffix, __start=None, __end=None):
+        if __start or __end:
+            raise RuntimeError("Can't handle __start or __end")
+        if isinstance(__suffix, str):
+            return str(self).endswith(__suffix)
+        return bytes(self).endswith(__suffix)
+

 class RawBytes(bytes):
     __slots__ = ()
```
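The `__bool__` override covers a real trap with NUL-padded message fields: as plain `bytes`, `b"\x00"` is non-empty and therefore truthy, but the string it stands in for is empty. A quick check of the intended semantics, using only the methods shown above:

```python
assert bool(b"\x00")                            # plain bytes: truthy
assert not JankStringyBytes(b"\x00")            # janky string: reads as "", falsy
assert not JankStringyBytes(b"")
assert str(JankStringyBytes(b"hi\x00")) == "hi"
assert JankStringyBytes(b"hi\x00") == "hi"      # __eq__ compares as str against str
```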
```diff
@@ -19,16 +19,19 @@ along with this program; if not, write to the Free Software Foundation,
 Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
 import asyncio
-from logging import getLogger
+import logging

-logger = getLogger('utilities.events')
+from hippolyzer.lib.base.helpers import create_logged_task
+
+LOG = logging.getLogger(__name__)


 class Event:
     """ an object containing data which will be passed out to all subscribers """

-    def __init__(self):
+    def __init__(self, name=None):
         self.subscribers = []
+        self.name = name

     def subscribe(self, handler, *args, one_shot=False, predicate=None, **kwargs):
         """ establish the subscribers (handlers) to this event """
@@ -38,7 +41,8 @@ class Event:

         return self

-    def _handler_key(self, handler):
+    @staticmethod
+    def _handler_key(handler):
         return handler[:3]

     def unsubscribe(self, handler, *args, **kwargs):
@@ -52,24 +56,30 @@ class Event:
             raise ValueError(f"Handler {handler!r} is not subscribed to this event.")
         return self

+    def _create_async_wrapper(self, handler, args, inner_args, kwargs):
+        # Note that unsubscription may be delayed due to asyncio scheduling :)
+        async def _run_handler_wrapper():
+            unsubscribe = await handler(args, *inner_args, **kwargs)
+            if unsubscribe:
+                _ = self.unsubscribe(handler, *inner_args, **kwargs)
+        return _run_handler_wrapper
+
     def notify(self, args):
-        for handler in self.subscribers[:]:
-            handler, inner_args, kwargs, one_shot, predicate = handler
+        for subscriber in self.subscribers[:]:
+            handler, inner_args, kwargs, one_shot, predicate = subscriber
             if predicate and not predicate(args):
                 continue
             if one_shot:
                 self.unsubscribe(handler, *inner_args, **kwargs)
             if asyncio.iscoroutinefunction(handler):
-                # Note that unsubscription may be delayed due to asyncio scheduling :)
-
-                async def _run_handler_wrapper():
-                    unsubscribe = await handler(args, *inner_args, **kwargs)
-                    if unsubscribe:
-                        _ = self.unsubscribe(handler, *inner_args, **kwargs)
-                asyncio.create_task(_run_handler_wrapper())
+                create_logged_task(self._create_async_wrapper(handler, args, inner_args, kwargs)(), self.name, LOG)
             else:
-                if handler(args, *inner_args, **kwargs) and not one_shot:
-                    self.unsubscribe(handler, *inner_args, **kwargs)
+                try:
+                    if handler(args, *inner_args, **kwargs) and not one_shot:
+                        self.unsubscribe(handler, *inner_args, **kwargs)
+                except:
+                    # One handler failing shouldn't prevent notification of other handlers.
+                    LOG.exception(f"Failed in handler for {self.name}")

     def __len__(self):
         return len(self.subscribers)
```
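Hoisting the wrapper into `_create_async_wrapper()` is more than tidying. The old inline `async def` closed over the loop variable `handler`, and because the scheduled task only runs after `notify()` returns, every pending task could observe whichever subscriber the loop bound last. A minimal standalone sketch of the pitfall (hypothetical names, not Hippolyzer code):

```python
import asyncio


async def main():
    results = []

    # BUG: each run_buggy() closes over the loop *variable* name, so all
    # three tasks read it after the loop has finished and append "c".
    tasks = []
    for name in ("a", "b", "c"):
        async def run_buggy():
            results.append(name)
        tasks.append(asyncio.create_task(run_buggy()))
    await asyncio.gather(*tasks)
    assert results == ["c", "c", "c"]

    # FIX: bind the current value through a function parameter, which is
    # exactly what _create_async_wrapper() does for handler/args/kwargs.
    def make_runner(bound_name):
        async def run_fixed():
            results.append(bound_name)
        return run_fixed

    results.clear()
    await asyncio.gather(*(make_runner(n)() for n in ("a", "b", "c")))
    assert results == ["a", "b", "c"]


asyncio.run(main())
```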
```diff
@@ -429,8 +429,8 @@ class GLTFBuilder:

         # Add each joint to the child list of their respective parent
         for joint_name, joint_ctx in built_joints.items():
-            if parent := AVATAR_SKELETON[joint_name].parent:
-                built_joints[parent().name].node.children.append(self.model.nodes.index(joint_ctx.node))
+            if parent_name := AVATAR_SKELETON[joint_name].parent_name:
+                built_joints[parent_name].node.children.append(self.model.nodes.index(joint_ctx.node))
         return built_joints

     def _fix_blender_joint(self, joint_matrix: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
```
```diff
@@ -1,7 +1,9 @@
 from __future__ import annotations

+import asyncio
 import codecs
+import functools
 import logging
 import os

 import lazy_object_proxy
@@ -132,6 +134,13 @@ def proxify(obj: Union[Callable[[], _T], weakref.ReferenceType, _T]) -> _T:
     return obj


+class BiDiDict(Generic[_T]):
+    """Dictionary for bidirectional lookups"""
+    def __init__(self, values: Dict[_T, _T]):
+        self.forward = {**values}
+        self.backward = {value: key for (key, value) in values.items()}
+
+
 def bytes_unescape(val: bytes) -> bytes:
     # Only in CPython. bytes -> bytes with escape decoding.
     # https://stackoverflow.com/a/23151714
```
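`BiDiDict` simply precomputes the inverse mapping, trading a second dict's worth of memory for O(1) lookups in either direction. A quick sketch (the joint-name pair is hypothetical, purely to show the API):

```python
from hippolyzer.lib.base.helpers import BiDiDict

# Hypothetical left <-> right joint pairing
MIRROR_JOINTS = BiDiDict({"mShoulderLeft": "mShoulderRight"})

assert MIRROR_JOINTS.forward["mShoulderLeft"] == "mShoulderRight"
assert MIRROR_JOINTS.backward["mShoulderRight"] == "mShoulderLeft"
```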
```diff
@@ -158,3 +167,49 @@ def get_mtime(path):
         return os.stat(path).st_mtime
     except:
         return None
+
+
+def fut_logger(name: str, logger: logging.Logger, fut: asyncio.Future, *args) -> None:
+    """Callback suitable for exception logging in `Future.add_done_callback()`"""
+    if not fut.cancelled() and fut.exception():
+        if isinstance(fut.exception(), asyncio.CancelledError):
+            # Don't really care if the task was just cancelled
+            return
+        logger.exception(f"Failed in task for {name}", exc_info=fut.exception())
+
+
+def add_future_logger(
+        fut: asyncio.Future,
+        name: Optional[str] = None,
+        logger: Optional[logging.Logger] = None,
+):
+    """Add a logger to Futures that will never be directly `await`ed, logging exceptions"""
+    fut.add_done_callback(functools.partial(fut_logger, name, logger or logging.getLogger()))
+
+
+def create_logged_task(
+        coro: Coroutine,
+        name: Optional[str] = None,
+        logger: Optional[logging.Logger] = None,
+) -> asyncio.Task:
+    task = asyncio.create_task(coro, name=name)
+    add_future_logger(task, name, logger)
+    return task
+
+
+def reorient_coord(coord, new_orientation, min_val: int | float = 0):
+    """
+    Reorient a coordinate instance such that its components are negated and transposed appropriately.
+
+    For ex:
+    reorient_coord((1,2,3), (3,-2,-1)) == (3,-2,-1)
+    """
+    min_val = abs(min_val)
+    coords = []
+    for axis in new_orientation:
+        axis_idx = abs(axis) - 1
+        new_coord = coord[axis_idx] if axis >= 0 else min_val - coord[axis_idx]
+        coords.append(new_coord)
+    if coord.__class__ in (list, tuple):
+        return coord.__class__(coords)
+    return coord.__class__(*coords)
```
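`create_logged_task()` exists for fire-and-forget tasks: a bare `asyncio.create_task()` whose Task object is never awaited may only surface its exception when the Task is garbage collected, long after the interesting context is gone. A sketch of the intended use, assuming only the helpers shown above:

```python
import asyncio
import logging

from hippolyzer.lib.base.helpers import create_logged_task

logging.basicConfig(level=logging.INFO)


async def flaky_background_job():
    raise RuntimeError("boom")


async def main():
    # Nobody awaits this task; fut_logger() still reports the RuntimeError
    # through the done-callback that add_future_logger() attached.
    create_logged_task(flaky_background_job(), "flaky background job")
    await asyncio.sleep(0)  # give the task one loop tick to run and log


asyncio.run(main())
```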
```diff
@@ -3,13 +3,21 @@ Parse the horrible legacy inventory-related format.

 It's typically only used for object contents now.
 """

+# TODO: Maybe handle CRC calculation? Does anything care about that?
+#  I don't think anything in the viewer actually looks at the result
+#  of the CRC check for UDP stuff.
+
 from __future__ import annotations

+import abc
+import asyncio
 import dataclasses
 import datetime as dt
+import inspect
 import logging
 import secrets
 import struct
 import typing
 import weakref
 from io import StringIO
 from typing import *
@@ -29,6 +37,8 @@ from hippolyzer.lib.base.legacy_schema import (
     SchemaUUID,
     schema_field,
 )
+from hippolyzer.lib.base.message.message import Block
+from hippolyzer.lib.base.templates import SaleType, InventoryType, LookupIntEnum, AssetType, FolderType

 MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")
 LOG = logging.getLogger(__name__)
```
```diff
@@ -38,12 +48,42 @@ _T = TypeVar("_T")

 class SchemaFlagField(SchemaHexInt):
     """Like a hex int, but must be serialized as bytes in LLSD due to being a U32"""
     @classmethod
-    def from_llsd(cls, val: Any) -> int:
-        return struct.unpack("!I", val)[0]
+    def from_llsd(cls, val: Any, flavor: str) -> int:
+        # Sometimes values in S32 range will just come through normally
+        if isinstance(val, int):
+            return val
+
+        if flavor == "legacy":
+            return struct.unpack("!I", val)[0]
+        return val

     @classmethod
-    def to_llsd(cls, val: int) -> Any:
-        return struct.pack("!I", val)
+    def to_llsd(cls, val: int, flavor: str) -> Any:
+        if flavor == "legacy":
+            return struct.pack("!I", val)
+        return val
+
+
+class SchemaEnumField(SchemaStr, Generic[_T]):
+    def __init__(self, enum_cls: Type[LookupIntEnum]):
+        super().__init__()
+        self._enum_cls = enum_cls
+
+    def deserialize(self, val: str) -> _T:
+        return self._enum_cls.from_lookup_name(val)
+
+    def serialize(self, val: _T) -> str:
+        return self._enum_cls(val).to_lookup_name()
+
+    def from_llsd(self, val: Union[str, int], flavor: str) -> _T:
+        if flavor == "legacy":
+            return self.deserialize(val)
+        return self._enum_cls(val)
+
+    def to_llsd(self, val: _T, flavor: str) -> Union[int, str]:
+        if flavor == "legacy":
+            return self.serialize(val)
+        return int(val)


 def _yield_schema_tokens(reader: StringIO):
```
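The `flavor` switch is the crux of the dual-format support: legacy task-inventory serialization wants the textual lookup names, while AIS-style LLSD wants raw enum ints. A hedged sketch of the round trip (it assumes `SaleType.NOT`'s lookup name is `"not"`, the legacy convention; the exact string comes from the enum's own lookup table):

```python
from hippolyzer.lib.base.templates import SaleType

sale_field = SchemaEnumField(SaleType)

# Legacy object-contents flavor round-trips through lookup names...
assert sale_field.to_llsd(SaleType.NOT, "legacy") == "not"
assert sale_field.from_llsd("not", "legacy") == SaleType.NOT

# ...while any other flavor (e.g. "ais") uses the raw integer value.
assert sale_field.to_llsd(SaleType.NOT, "ais") == int(SaleType.NOT)
assert sale_field.from_llsd(int(SaleType.NOT), "ais") == SaleType.NOT
```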
```diff
@@ -99,10 +139,14 @@ class InventoryBase(SchemaBase):
             if not spec:
                 LOG.warning(f"Internal key {key!r}")
                 continue

+            spec_cls = spec
+            if not inspect.isclass(spec_cls):
+                spec_cls = spec_cls.__class__
             # some kind of nested structure like sale_info
-            if issubclass(spec, SchemaBase):
+            if issubclass(spec_cls, SchemaBase):
                 obj_dict[key] = spec.from_reader(reader)
-            elif issubclass(spec, SchemaFieldSerializer):
+            elif issubclass(spec_cls, SchemaFieldSerializer):
                 obj_dict[key] = spec.deserialize(val)
             else:
                 raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
```
```diff
@@ -111,9 +155,21 @@ class InventoryBase(SchemaBase):
         return cls._obj_from_dict(obj_dict)

     def to_writer(self, writer: StringIO):
-        writer.write(f"\t{self.SCHEMA_NAME}\t0\n")
+        writer.write(f"\t{self.SCHEMA_NAME}")
+        if self.SCHEMA_NAME == "permissions":
+            writer.write(" 0\n")
+        else:
+            writer.write("\t0\n")
         writer.write("\t{\n")
-        for field_name, field in self._get_fields_dict().items():
+
+        # Make sure the ID field always comes first, if there is one.
+        fields_dict: Dict[str, dataclasses.Field] = {}
+        if hasattr(self, "ID_ATTR"):
+            fields_dict = {getattr(self, "ID_ATTR"): dataclasses.field()}
+        # update()ing will put all fields that aren't yet in the dict after the ID attr.
+        fields_dict.update(self._get_fields_dict())
+
+        for field_name, field in fields_dict.items():
             spec = field.metadata.get("spec")
             # Not meant to be serialized
             if not spec:
@@ -122,20 +178,23 @@ class InventoryBase(SchemaBase):
                 continue

             val = getattr(self, field_name)
-            if val is None:
+            if val is None and not field.metadata.get("include_none"):
                 continue

+            spec_cls = spec
+            if not inspect.isclass(spec_cls):
+                spec_cls = spec_cls.__class__
             # Some kind of nested structure like sale_info
             if isinstance(val, SchemaBase):
                 val.to_writer(writer)
-            elif issubclass(spec, SchemaFieldSerializer):
+            elif issubclass(spec_cls, SchemaFieldSerializer):
                 writer.write(f"\t\t{field_name}\t{spec.serialize(val)}\n")
             else:
                 raise ValueError(f"Bad inventory spec {spec!r}")
         writer.write("\t}\n")


-class InventoryDifferences(typing.NamedTuple):
+class InventoryDifferences(NamedTuple):
     changed: List[InventoryNodeBase]
     removed: List[InventoryNodeBase]
```
```diff
@@ -144,6 +203,7 @@ class InventoryModel(InventoryBase):
     def __init__(self):
         self.nodes: Dict[UUID, InventoryNodeBase] = {}
         self.root: Optional[InventoryContainerBase] = None
+        self.any_dirty = asyncio.Event()

     @classmethod
     def from_reader(cls, reader: StringIO, read_header=False) -> InventoryModel:
@@ -166,14 +226,16 @@ class InventoryModel(InventoryBase):
         return model

     @classmethod
-    def from_llsd(cls, llsd_val: List[Dict]) -> InventoryModel:
+    def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> Self:
         model = cls()
         for obj_dict in llsd_val:
             obj = None
             for inv_type in INVENTORY_TYPES:
                 if inv_type.ID_ATTR in obj_dict:
-                    if (obj := inv_type.from_llsd(obj_dict)) is not None:
+                    if (obj := inv_type.from_llsd(obj_dict, flavor)) is not None:
                         model.add(obj)
                         break
             if obj is None:
                 LOG.warning(f"Unknown object type {obj_dict!r}")
         return model
```
```diff
@@ -188,11 +250,17 @@ class InventoryModel(InventoryBase):
             if isinstance(node, InventoryContainerBase):
                 yield node

+    @property
+    def dirty_categories(self) -> Iterable[InventoryCategory]:
+        # Iterate the node values; iterating the dict itself would only yield UUID keys.
+        for node in self.nodes.values():
+            if isinstance(node, InventoryCategory) and node.version == InventoryCategory.VERSION_NONE:
+                yield node
+
     @property
     def all_items(self) -> Iterable[InventoryItem]:
         for node in self.nodes.values():
             if not isinstance(node, InventoryContainerBase):
-                yield node
+                yield node  # type: ignore

     def __eq__(self, other):
         if not isinstance(other, InventoryModel):
```
```diff
@@ -203,8 +271,8 @@ class InventoryModel(InventoryBase):
         for node in self.ordered_nodes:
             node.to_writer(writer)

-    def to_llsd(self):
-        return list(node.to_llsd() for node in self.ordered_nodes)
+    def to_llsd(self, flavor: str = "legacy"):
+        return list(node.to_llsd(flavor) for node in self.ordered_nodes)

     def add(self, node: InventoryNodeBase):
         if node.node_id in self.nodes:
@@ -215,6 +283,29 @@ class InventoryModel(InventoryBase):
         if node.parent_id == UUID.ZERO:
             self.root = node
         node.model = weakref.proxy(self)
         return node

+    def update(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
+        """Update an existing node, optionally only updating specific fields"""
+        if node.node_id not in self.nodes:
+            raise KeyError(f"{node.node_id} not in the inventory model")
+
+        orig_node = self.nodes[node.node_id]
+        if node.__class__ != orig_node.__class__:
+            raise ValueError(f"Tried to update {orig_node!r} from non-matching {node!r}")
+
+        if not update_fields:
+            # Update everything but the model parameter
+            update_fields = node.get_field_names()
+        for field_name in update_fields:
+            setattr(orig_node, field_name, getattr(node, field_name))
+        return orig_node
+
+    def upsert(self, node: InventoryNodeBase, update_fields: Optional[Iterable[str]] = None) -> InventoryNodeBase:
+        """Add or update a node"""
+        if node.node_id in self.nodes:
+            return self.update(node, update_fields)
+        return self.add(node)
+
     def unlink(self, node: InventoryNodeBase, single_only: bool = False) -> Sequence[InventoryNodeBase]:
         """Unlink a node and its descendants from the tree, returning the removed nodes"""
```
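`update()` and `upsert()` give wire-level code one entry point for incremental inventory changes. A hedged usage sketch (`incoming_item` stands in for a freshly parsed `InventoryItem`):

```python
model = InventoryModel()

# First sighting: upsert() falls through to add().
node = model.upsert(incoming_item)

# A later partial update: patch just the named fields on the existing node,
# leaving asset_id, permissions, etc. untouched.
node = model.upsert(incoming_item, update_fields=("name", "desc"))
```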
```diff
@@ -255,14 +346,30 @@ class InventoryModel(InventoryBase):
             removed=removed_in_other,
         )

+    def flag_if_dirty(self):
+        if any(self.dirty_categories):
+            self.any_dirty.set()
+
     def __getitem__(self, item: UUID) -> InventoryNodeBase:
         return self.nodes[item]

     def __contains__(self, item: UUID):
         return item in self.nodes

-    def get(self, item: UUID) -> Optional[InventoryNodeBase]:
-        return self.nodes.get(item)
+    def get(self, key: UUID) -> Optional[InventoryNodeBase]:
+        return self.nodes.get(key)
+
+    def get_category(self, key: UUID) -> InventoryCategory:
+        node = self.get(key)
+        if not isinstance(node, InventoryCategory):
+            raise ValueError(f"{node!r} is not a category")
+        return node
+
+    def get_item(self, key: UUID) -> InventoryItem:
+        node = self.get(key)
+        if not isinstance(node, InventoryItem):
+            raise ValueError(f"{node!r} is not an item")
+        return node


 @dataclasses.dataclass
```
```diff
@@ -280,28 +387,59 @@ class InventoryPermissions(InventoryBase):
     group_id: UUID = schema_field(SchemaUUID)
     # Nothing actually cares about this, but it could be there.
     # It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
-    is_owner_group: int = schema_field(SchemaInt, default=0, llsd_only=True)
+    is_owner_group: Optional[int] = schema_field(SchemaInt, default=None, llsd_only=True)
+
+    @classmethod
+    def make_default(cls) -> Self:
+        return cls(
+            base_mask=0xFFffFFff,
+            owner_mask=0xFFffFFff,
+            group_mask=0,
+            everyone_mask=0,
+            next_owner_mask=0x82000,
+            creator_id=UUID.ZERO,
+            owner_id=UUID.ZERO,
+            last_owner_id=UUID.ZERO,
+            group_id=UUID.ZERO,
+            is_owner_group=None
+        )


 @dataclasses.dataclass
 class InventorySaleInfo(InventoryBase):
     SCHEMA_NAME: ClassVar[str] = "sale_info"

-    sale_type: str = schema_field(SchemaStr)
+    sale_type: SaleType = schema_field(SchemaEnumField(SaleType))
     sale_price: int = schema_field(SchemaInt)

+    @classmethod
+    def make_default(cls) -> Self:
+        return cls(sale_type=SaleType.NOT, sale_price=10)
+
+
+class _HasBaseNodeAttrs(abc.ABC):
+    """
+    Only exists so that we can assert that all subclasses should have this without forcing
+    a particular serialization order, as would happen if this was present on InventoryNodeBase.
+    """
+    name: str
+    type: AssetType
+

 @dataclasses.dataclass
-class InventoryNodeBase(InventoryBase):
+class InventoryNodeBase(InventoryBase, _HasBaseNodeAttrs):
     ID_ATTR: ClassVar[str]

-    name: str
-
     parent_id: Optional[UUID] = schema_field(SchemaUUID)

     model: Optional[InventoryModel] = dataclasses.field(
         default=None, init=False, hash=False, compare=False, repr=False
     )

+    @classmethod
+    def get_field_names(cls) -> Set[str]:
+        return set(cls._get_fields_dict().keys()) - {"model"}
+
     @property
     def node_id(self) -> UUID:
         return getattr(self, self.ID_ATTR)
```
```diff
@@ -338,8 +476,7 @@ class InventoryNodeBase(InventoryBase):

 @dataclasses.dataclass
 class InventoryContainerBase(InventoryNodeBase):
-    type: str = schema_field(SchemaStr)
-    name: str = schema_field(SchemaMultilineStr)
+    type: AssetType = schema_field(SchemaEnumField(AssetType))

     @property
     def children(self) -> Sequence[InventoryNodeBase]:
@@ -348,6 +485,25 @@ class InventoryContainerBase(InventoryNodeBase):
             if x.parent_id == self.node_id
         )

+    @property
+    def descendents(self) -> List[InventoryNodeBase]:
+        new_children: List[InventoryNodeBase] = [self]
+        descendents = []
+        while new_children:
+            to_check = new_children[:]
+            new_children.clear()
+            for obj in to_check:
+                if isinstance(obj, InventoryContainerBase):
+                    for child in obj.children:
+                        if child in descendents:
+                            continue
+                        new_children.append(child)
+                        descendents.append(child)
+                else:
+                    if obj not in descendents:
+                        descendents.append(obj)
+        return descendents
+
     def __getitem__(self, item: Union[int, str]) -> InventoryNodeBase:
         if isinstance(item, int):
             return self.children[item]
@@ -368,8 +524,8 @@ class InventoryContainerBase(InventoryNodeBase):
             name=name,
             cat_id=UUID.random(),
             parent_id=self.node_id,
-            type="category",
-            pref_type="-1",
+            type=AssetType.CATEGORY,
+            pref_type=FolderType.NONE,
             owner_id=getattr(self, 'owner_id', UUID.ZERO),
             version=1,
         )
```
@@ -386,7 +542,8 @@ class InventoryObject(InventoryContainerBase):
|
||||
ID_ATTR: ClassVar[str] = "obj_id"
|
||||
|
||||
obj_id: UUID = schema_field(SchemaUUID)
|
||||
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
|
||||
name: str = schema_field(SchemaMultilineStr)
|
||||
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
|
||||
|
||||
__hash__ = InventoryNodeBase.__hash__
|
||||
|
||||
@@ -394,14 +551,61 @@ class InventoryObject(InventoryContainerBase):
|
||||
@dataclasses.dataclass
|
||||
class InventoryCategory(InventoryContainerBase):
|
||||
ID_ATTR: ClassVar[str] = "cat_id"
|
||||
# AIS calls this something else...
|
||||
ID_ATTR_AIS: ClassVar[str] = "category_id"
|
||||
SCHEMA_NAME: ClassVar[str] = "inv_category"
|
||||
VERSION_NONE: ClassVar[int] = -1
|
||||
|
||||
cat_id: UUID = schema_field(SchemaUUID)
|
||||
pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
|
||||
owner_id: UUID = schema_field(SchemaUUID)
|
||||
version: int = schema_field(SchemaInt)
|
||||
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
|
||||
pref_type: FolderType = schema_field(SchemaEnumField(FolderType), llsd_name="preferred_type")
|
||||
name: str = schema_field(SchemaMultilineStr)
|
||||
owner_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
|
||||
version: int = schema_field(SchemaInt, default=VERSION_NONE, llsd_only=True)
|
||||
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)
|
||||
|
||||
def to_folder_data(self) -> Block:
|
||||
return Block(
|
||||
"FolderData",
|
||||
FolderID=self.cat_id,
|
||||
ParentID=self.parent_id,
|
||||
CallbackID=0,
|
||||
Type=self.pref_type,
|
||||
Name=self.name,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_folder_data(cls, block: Block):
|
||||
return cls(
|
||||
cat_id=block["FolderID"],
|
||||
parent_id=block["ParentID"],
|
||||
pref_type=block["Type"],
|
||||
name=block["Name"],
|
||||
type=AssetType.CATEGORY,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
|
||||
if flavor == "ais" and "type" not in inv_dict:
|
||||
inv_dict = inv_dict.copy()
|
||||
inv_dict["type"] = AssetType.CATEGORY
|
||||
return super().from_llsd(inv_dict, flavor)
|
||||
|
||||
def to_llsd(self, flavor: str = "legacy"):
|
||||
payload = super().to_llsd(flavor)
|
||||
if flavor == "ais":
|
||||
# AIS already knows the inventory type is category
|
||||
payload.pop("type", None)
|
||||
return payload
|
||||
|
||||
@classmethod
|
||||
def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
|
||||
fields = super()._get_fields_dict(llsd_flavor)
|
||||
if llsd_flavor == "ais":
|
||||
# These have different names though
|
||||
fields["type_default"] = fields.pop("preferred_type")
|
||||
fields["agent_id"] = fields.pop("owner_id")
|
||||
fields["category_id"] = fields.pop("cat_id")
|
||||
return fields
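
A quick sketch of what that remapping means in practice (attribute access stays under the Python names either way):

    # AIS payloads use "category_id"/"agent_id"/"type_default" on the wire.
    ais_fields = InventoryCategory._get_fields_dict(llsd_flavor="ais")
    assert "category_id" in ais_fields and "cat_id" not in ais_fields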

    __hash__ = InventoryNodeBase.__hash__

@@ -412,17 +616,20 @@ class InventoryItem(InventoryNodeBase):
    ID_ATTR: ClassVar[str] = "item_id"

    item_id: UUID = schema_field(SchemaUUID)
    type: str = schema_field(SchemaStr)
    inv_type: str = schema_field(SchemaStr)
    flags: int = schema_field(SchemaFlagField)
    name: str = schema_field(SchemaMultilineStr)
    desc: str = schema_field(SchemaMultilineStr)
    creation_date: dt.datetime = schema_field(SchemaDate, llsd_name="created_at")
    permissions: InventoryPermissions = schema_field(InventoryPermissions)
    sale_info: InventorySaleInfo = schema_field(InventorySaleInfo)
    asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
    shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
    type: Optional[AssetType] = schema_field(SchemaEnumField(AssetType), default=None)
    inv_type: Optional[InventoryType] = schema_field(SchemaEnumField(InventoryType), default=None)
    flags: Optional[int] = schema_field(SchemaFlagField, default=None)
    sale_info: Optional[InventorySaleInfo] = schema_field(InventorySaleInfo, default=None)
    name: Optional[str] = schema_field(SchemaMultilineStr, default=None)
    desc: Optional[str] = schema_field(SchemaMultilineStr, default=None)
    metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
    """Specifically for script metadata, generally just experience info"""
    thumbnail: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)
    """Generally just a dict with the thumbnail UUID in it"""
    creation_date: Optional[dt.datetime] = schema_field(SchemaDate, llsd_name="created_at", default=None)

    __hash__ = InventoryNodeBase.__hash__

@@ -432,5 +639,111 @@ class InventoryItem(InventoryNodeBase):
            return self.asset_id
        return self.shadow_id ^ MAGIC_ID
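
The shadow ID is just the asset ID XORed with a fixed magic value, and XOR with a constant is its own inverse, so the same operation both hides and recovers the ID. A one-line sketch (for any UUID value `asset_id`):

    # XOR round-trip: applying MAGIC_ID twice gives back the original UUID.
    assert (asset_id ^ MAGIC_ID) ^ MAGIC_ID == asset_id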

    def to_inventory_data(self, block_name: str = "InventoryData") -> Block:
        return Block(
            block_name,
            ItemID=self.item_id,
            FolderID=self.parent_id,
            CallbackID=0,
            CreatorID=self.permissions.creator_id,
            OwnerID=self.permissions.owner_id,
            GroupID=self.permissions.group_id,
            BaseMask=self.permissions.base_mask,
            OwnerMask=self.permissions.owner_mask,
            GroupMask=self.permissions.group_mask,
            EveryoneMask=self.permissions.everyone_mask,
            NextOwnerMask=self.permissions.next_owner_mask,
            GroupOwned=self.permissions.owner_id == UUID.ZERO and self.permissions.group_id != UUID.ZERO,
            AssetID=self.true_asset_id,
            Type=self.type,
            InvType=self.inv_type,
            Flags=self.flags,
            SaleType=self.sale_info.sale_type,
            SalePrice=self.sale_info.sale_price,
            Name=self.name,
            Description=self.desc,
            CreationDate=SchemaDate.to_llsd(self.creation_date, "legacy"),
            # Meaningless here
            CRC=secrets.randbits(32),
        )

    @classmethod
    def from_inventory_data(cls, block: Block):
        return cls(
            item_id=block["ItemID"],
            # Might be under one of two names
            parent_id=block.get("ParentID", block["FolderID"]),
            permissions=InventoryPermissions(
                creator_id=block["CreatorID"],
                owner_id=block["OwnerID"],
                # Unknown, not sent in this schema
                last_owner_id=block.get("LastOwnerID", UUID.ZERO),
                group_id=block["GroupID"],
                base_mask=block["BaseMask"],
                owner_mask=block["OwnerMask"],
                group_mask=block["GroupMask"],
                everyone_mask=block["EveryoneMask"],
                next_owner_mask=block["NextOwnerMask"],
            ),
            # May be missing in UpdateInventoryItem
            asset_id=block.get("AssetID"),
            type=AssetType(block["Type"]),
            inv_type=InventoryType(block["InvType"]),
            flags=block["Flags"],
            sale_info=InventorySaleInfo(
                sale_type=SaleType(block["SaleType"]),
                sale_price=block["SalePrice"],
            ),
            name=block["Name"],
            desc=block["Description"],
            creation_date=SchemaDate.from_llsd(block["CreationDate"], "legacy"),
        )

    def to_llsd(self, flavor: str = "legacy"):
        val = super().to_llsd(flavor=flavor)
        if flavor == "ais":
            # There's little chance this differs from owner ID, just place it.
            val["agent_id"] = val["permissions"]["owner_id"]
            if val["type"] == AssetType.LINK:
                # For link items, there is no asset, only a linked ID.
                val["linked_id"] = val.pop("asset_id")
                # These don't exist either
                val.pop("permissions", None)
                val.pop("sale_info", None)
        return val

    @classmethod
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
        if flavor == "ais" and "linked_id" in inv_dict:
            # Links get represented differently than other items for whatever reason.
            # This is incredibly annoying, under *NIX there's nothing really special about symlinks.
            inv_dict = inv_dict.copy()
            # Fill this in since it needs to be there
            if "permissions" not in inv_dict:
                inv_dict["permissions"] = InventoryPermissions(
                    base_mask=0xFFffFFff,
                    owner_mask=0xFFffFFff,
                    group_mask=0xFFffFFff,
                    everyone_mask=0,
                    next_owner_mask=0xFFffFFff,
                    creator_id=UUID.ZERO,
                    owner_id=UUID.ZERO,
                    last_owner_id=UUID.ZERO,
                    group_id=UUID.ZERO,
                ).to_llsd("ais")
            if "sale_info" not in inv_dict:
                inv_dict["sale_info"] = InventorySaleInfo(
                    sale_type=SaleType.NOT,
                    sale_price=0,
                ).to_llsd("ais")
            if "type" not in inv_dict:
                inv_dict["type"] = AssetType.LINK

            # In the context of symlinks, asset id means linked item ID.
            # This is also how indra stores symlinks. Why the asymmetry in AIS if none of the
            # consumers actually want it? Who knows.
            inv_dict["asset_id"] = inv_dict.pop("linked_id")
        return super().from_llsd(inv_dict, flavor)
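
A rough sketch of what that normalization buys (payload trimmed and entirely hypothetical):

    payload = {
        "item_id": "11111111-1111-1111-1111-111111111111",
        "parent_id": "22222222-2222-2222-2222-222222222222",
        "linked_id": "33333333-3333-3333-3333-333333333333",
    }
    link = InventoryItem.from_llsd(payload, flavor="ais")
    # The link parses like any other item; the linked ID lands in asset_id.
    assert link.type == AssetType.LINK
    assert str(link.asset_id) == payload["linked_id"]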


INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)

@@ -9,6 +9,7 @@ import abc
import calendar
import dataclasses
import datetime as dt
import inspect
import logging
import re
from io import StringIO
@@ -34,29 +35,29 @@ class SchemaFieldSerializer(abc.ABC, Generic[_T]):
        pass

    @classmethod
    def from_llsd(cls, val: Any) -> _T:
    def from_llsd(cls, val: Any, flavor: str) -> _T:
        return val

    @classmethod
    def to_llsd(cls, val: _T) -> Any:
    def to_llsd(cls, val: _T, flavor: str) -> Any:
        return val


class SchemaDate(SchemaFieldSerializer[dt.datetime]):
    @classmethod
    def deserialize(cls, val: str) -> dt.datetime:
        return dt.datetime.utcfromtimestamp(int(val))
        return dt.datetime.fromtimestamp(int(val), dt.timezone.utc)

    @classmethod
    def serialize(cls, val: dt.datetime) -> str:
        return str(calendar.timegm(val.utctimetuple()))

    @classmethod
    def from_llsd(cls, val: Any) -> dt.datetime:
        return dt.datetime.utcfromtimestamp(val)
    def from_llsd(cls, val: Any, flavor: str) -> dt.datetime:
        return dt.datetime.fromtimestamp(val, dt.timezone.utc)

    @classmethod
    def to_llsd(cls, val: dt.datetime):
    def to_llsd(cls, val: dt.datetime, flavor: str):
        return calendar.timegm(val.utctimetuple())


@@ -103,6 +104,13 @@ class SchemaStr(SchemaFieldSerializer[str]):


class SchemaUUID(SchemaFieldSerializer[UUID]):
    @classmethod
    def from_llsd(cls, val: Any, flavor: str) -> UUID:
        # FetchInventory2 will return a string, but we want a UUID. It's not an issue
        # for us to return a UUID later there because it'll just cast to string if
        # that's what it wants
        return UUID(val)

    @classmethod
    def deserialize(cls, val: str) -> UUID:
        return UUID(val)
@@ -116,19 +124,24 @@ class SchemaLLSD(SchemaFieldSerializer[_T]):
    """Arbitrary LLSD embedded in a field"""
    @classmethod
    def deserialize(cls, val: str) -> _T:
        return llsd.parse_notation(val.encode("utf8"))
        return llsd.parse_xml(val.partition("|")[0].encode("utf8"))

    @classmethod
    def serialize(cls, val: _T) -> str:
        return llsd.format_notation(val).decode("utf8")
        # Don't include the XML header
        return llsd.format_xml(val).split(b">", 1)[1].decode("utf8") + "\n|"
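
In other words, embedded LLSD now travels as header-less XML terminated by a "|" sentinel, and the reader only parses up to that sentinel. A rough round-trip sketch (value hypothetical):

    text = SchemaLLSD.serialize({"experience": "test"})
    assert SchemaLLSD.deserialize(text) == {"experience": "test"}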


def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False) -> dataclasses.Field:  # noqa
_SCHEMA_SPEC = Union[Type[Union["SchemaBase", SchemaFieldSerializer]], SchemaFieldSerializer]


def schema_field(spec: _SCHEMA_SPEC, *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False,
                 include_none=False) -> dataclasses.Field:  # noqa
    """Describe a field in the inventory schema and the shape of its value"""
    return dataclasses.field(  # noqa
        metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only}, default=default,
        init=init, repr=repr, hash=hash, compare=compare,
        metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only, "include_none": include_none},
        default=default, init=init, repr=repr, hash=hash, compare=compare,
    )
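
All of the schema knowledge rides on dataclasses.field() metadata, so declaring a serializable field is just a matter of naming its spec. A minimal sketch (class and fields hypothetical):

    @dataclasses.dataclass
    class ExampleInfo(SchemaBase):
        owner_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
        # include_none=True would keep "note" in LLSD output even when unset.
        note: Optional[str] = schema_field(SchemaStr, default=None)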


@@ -151,17 +164,17 @@ def parse_schema_line(line: str):
@dataclasses.dataclass
class SchemaBase(abc.ABC):
    @classmethod
    def _get_fields_dict(cls, llsd=False):
    def _get_fields_dict(cls, llsd_flavor: Optional[str] = None) -> Dict[str, dataclasses.Field]:
        fields_dict = {}
        for field in dataclasses.fields(cls):
            field_name = field.name
            if llsd:
            if llsd_flavor:
                field_name = field.metadata.get("llsd_name") or field_name
            fields_dict[field_name] = field
        return fields_dict

    @classmethod
    def from_str(cls, text: str):
    def from_str(cls, text: str) -> Self:
        return cls.from_reader(StringIO(text))

    @classmethod
@@ -170,31 +183,43 @@ class SchemaBase(abc.ABC):
        pass

    @classmethod
    def from_bytes(cls, data: bytes):
    def from_bytes(cls, data: bytes) -> Self:
        return cls.from_str(data.decode("utf8"))

    @classmethod
    def from_llsd(cls, inv_dict: Dict):
        fields = cls._get_fields_dict(llsd=True)
    def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
        fields = cls._get_fields_dict(llsd_flavor=flavor)
        obj_dict = {}
        for key, val in inv_dict.items():
            if key in fields:
                field: dataclasses.Field = fields[key]
                key = field.name
                spec = field.metadata.get("spec")
                # Not a real key, an internal var on our dataclass
                if not spec:
                    LOG.warning(f"Internal key {key!r}")
                    continue
                # some kind of nested structure like sale_info
                if issubclass(spec, SchemaBase):
                    obj_dict[key] = spec.from_llsd(val)
                elif issubclass(spec, SchemaFieldSerializer):
                    obj_dict[key] = spec.from_llsd(val)
        try:
            for key, val in inv_dict.items():
                if key in fields:
                    field = fields[key]
                    key = field.name
                    spec = field.metadata.get("spec")
                    # Not a real key, an internal var on our dataclass
                    if not spec:
                        LOG.warning(f"Internal key {key!r}")
                        continue

                    spec_cls = spec
                    if not inspect.isclass(spec_cls):
                        spec_cls = spec_cls.__class__

                    # some kind of nested structure like sale_info
                    if issubclass(spec_cls, SchemaBase):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    elif issubclass(spec_cls, SchemaFieldSerializer):
                        obj_dict[key] = spec.from_llsd(val, flavor)
                    else:
                        raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
                else:
                    raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
            else:
                LOG.warning(f"Unknown key {key!r}")
                else:
                    if flavor != "ais":
                        # AIS has a number of different fields that are irrelevant depending on
                        # what exactly sent the payload
                        LOG.warning(f"Unknown key {key!r}")
        except:
            LOG.error(f"Failed to parse inventory schema: {inv_dict!r}")
            raise
        return cls._obj_from_dict(obj_dict)

    def to_bytes(self) -> bytes:
@@ -206,9 +231,9 @@ class SchemaBase(abc.ABC):
        writer.seek(0)
        return writer.read()

    def to_llsd(self):
    def to_llsd(self, flavor: str = "legacy"):
        obj_dict = {}
        for field_name, field in self._get_fields_dict(llsd=True).items():
        for field_name, field in self._get_fields_dict(llsd_flavor=flavor).items():
            spec = field.metadata.get("spec")
            # Not meant to be serialized
            if not spec:
@@ -218,11 +243,15 @@ class SchemaBase(abc.ABC):
            if val is None:
                continue

            spec_cls = spec
            if not inspect.isclass(spec_cls):
                spec_cls = spec_cls.__class__

            # Some kind of nested structure like sale_info
            if isinstance(val, SchemaBase):
                val = val.to_llsd()
            elif issubclass(spec, SchemaFieldSerializer):
                val = spec.to_llsd(val)
                val = val.to_llsd(flavor)
            elif issubclass(spec_cls, SchemaFieldSerializer):
                val = spec.to_llsd(val, flavor)
            else:
                raise ValueError(f"Bad inventory spec {spec!r}")
            obj_dict[field_name] = val
@@ -233,5 +262,5 @@ class SchemaBase(abc.ABC):
        pass

    @classmethod
    def _obj_from_dict(cls, obj_dict: Dict):
    def _obj_from_dict(cls, obj_dict: Dict) -> Self:
        return cls(**obj_dict)  # type: ignore

@@ -15,6 +15,8 @@ CONSTRAINT_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(Constraint))
POSKEYFRAME_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(PosKeyframe))
ROTKEYFRAME_DATACLASS = se.ForwardSerializable(lambda: se.Dataclass(RotKeyframe))

JOINTS_DICT = OrderedMultiDict[str, "Joint"]


@dataclasses.dataclass
class Animation:
@@ -29,7 +31,7 @@ class Animation:
    ease_in_duration: float = se.dataclass_field(se.F32)
    ease_out_duration: float = se.dataclass_field(se.F32)
    hand_pose: HandPose = se.dataclass_field(lambda: se.IntEnum(HandPose, se.U32), default=0)
    joints: OrderedMultiDict[str, Joint] = se.dataclass_field(se.MultiDictAdapter(
    joints: JOINTS_DICT = se.dataclass_field(se.MultiDictAdapter(
        se.Collection(se.U32, se.Tuple(se.CStr(), JOINT_DATACLASS)),
    ))
    constraints: List[Constraint] = se.dataclass_field(

@@ -16,10 +16,12 @@ from hippolyzer.lib.base.datatypes import *
class HippoLLSDBaseFormatter(base_llsd.base.LLSDBaseFormatter):
    UUID: callable
    ARRAY: callable
    BINARY: callable

    def __init__(self):
        super().__init__()
        self.type_map[UUID] = self.UUID
        self.type_map[JankStringyBytes] = self.BINARY
        self.type_map[Vector2] = self.TUPLECOORD
        self.type_map[Vector3] = self.TUPLECOORD
        self.type_map[Vector4] = self.TUPLECOORD
@@ -33,6 +35,12 @@ class HippoLLSDXMLFormatter(base_llsd.serde_xml.LLSDXMLFormatter, HippoLLSDBaseF
    def __init__(self):
        super().__init__()

    def _generate(self, something):
        if isinstance(something, int) and type(something) is not int:
            # The lookup in the underlying library will fail if we don't convert IntEnums to actual ints.
            something = int(something)
        return super()._generate(something)


class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
    def __init__(self):
@@ -101,7 +109,7 @@ def _format_binary_recurse(something) -> bytes:
            raise LLSDSerializationError(str(exc), something)
    elif isinstance(something, uuid.UUID):
        return b'u' + something.bytes
    elif isinstance(something, binary):
    elif isinstance(something, (binary, JankStringyBytes)):
        return b'b' + struct.pack('!i', len(something)) + something
    elif is_string(something):
        if is_unicode(something):
@@ -160,8 +168,12 @@ class HippoLLSDBinaryParser(base_llsd.serde_binary.LLSDBinaryParser):
        return bytes_val


# Python uses one, C++ uses the other, and everyone's unhappy.
_BINARY_HEADERS = (b'<? LLSD/Binary ?>', b'<?llsd/binary?>')


def parse_binary(data: bytes):
    if data.startswith(b'<?llsd/binary?>'):
    if any(data.startswith(x) for x in _BINARY_HEADERS):
        data = data.split(b'\n', 1)[1]
    return HippoLLSDBinaryParser().parse(data)
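
A quick sketch of the tolerance this buys (payload hypothetical; b'!' is binary-LLSD "undef"):

    cpp_style = b'<? LLSD/Binary ?>\n' + b'!'
    py_style = b'<?llsd/binary?>\n' + b'!'
    assert parse_binary(cpp_style) == parse_binary(py_style)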

@@ -187,7 +199,7 @@ def parse(data: bytes):
    # content-type is usually nonsense.
    try:
        data = data.lstrip()
        if data.startswith(b'<?llsd/binary?>'):
        if any(data.startswith(x) for x in _BINARY_HEADERS):
            return parse_binary(data)
        elif data.startswith(b'<'):
            return parse_xml(data)

@@ -1,6 +1,8 @@
from __future__ import annotations

import copy
import dataclasses
import re
import weakref
from typing import *

@@ -9,22 +11,23 @@ from lxml import etree

from hippolyzer.lib.base.datatypes import Vector3, RAD_TO_DEG
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.mesh import MeshAsset, SkinSegmentDict, llsd_to_mat4


MAYBE_JOINT_REF = Optional[Callable[[], "JointNode"]]
MAYBE_JOINT_REF = Optional[str]
SKELETON_REF = Optional[Callable[[], "Skeleton"]]


@dataclasses.dataclass
class JointNode:
    name: str
    parent: MAYBE_JOINT_REF
    parent_name: MAYBE_JOINT_REF
    skeleton: SKELETON_REF
    translation: Vector3
    pivot: Vector3  # pivot point for the joint, generally the same as translation
    rotation: Vector3  # Euler rotation in degrees
    scale: Vector3
    type: str  # bone or collision_volume
    support: str

    def __hash__(self):
        return hash((self.name, self.type))
@@ -37,6 +40,12 @@ class JointNode:
            translate=tuple(self.translation),
        )

    @property
    def parent(self) -> Optional[JointNode]:
        if self.parent_name:
            return self.skeleton()[self.parent_name]
        return None

    @property
    def index(self) -> int:
        bone_idx = 0
@@ -51,57 +60,109 @@ class JointNode:
    @property
    def ancestors(self) -> Sequence[JointNode]:
        joint_node = self
        ancestors = []
        while joint_node.parent:
            joint_node = joint_node.parent()
        skeleton = self.skeleton()
        ancestors: List[JointNode] = []
        while joint_node.parent_name:
            joint_node = skeleton.joint_dict.get(joint_node.parent_name)
            ancestors.append(joint_node)
        return ancestors

    @property
    def children(self) -> Sequence[JointNode]:
        children = []
        children: List[JointNode] = []
        for node in self.skeleton().joint_dict.values():
            if node.parent and node.parent() == self:
            if node.parent_name and node.parent_name == self.name:
                children.append(node)
        return children

    @property
    def inverse(self) -> Optional[JointNode]:
        l_re = re.compile(r"(.*?(?:_|\b))L((?:_|\b).*)")
        r_re = re.compile(r"(.*?(?:_|\b))R((?:_|\b).*)")

        inverse_name = None
        if "Left" in self.name:
            inverse_name = self.name.replace("Left", "Right")
        elif "LEFT" in self.name:
            inverse_name = self.name.replace("LEFT", "RIGHT")
        elif l_re.match(self.name):
            inverse_name = re.sub(l_re, r"\1R\2", self.name)
        elif "Right" in self.name:
            inverse_name = self.name.replace("Right", "Left")
        elif "RIGHT" in self.name:
            inverse_name = self.name.replace("RIGHT", "LEFT")
        elif r_re.match(self.name):
            inverse_name = re.sub(r_re, r"\1L\2", self.name)

        if inverse_name:
            return self.skeleton().joint_dict.get(inverse_name)
        return None
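
A sketch of the mirroring rules, with hypothetical joint names:

    # "mShoulderLeft" -> "mShoulderRight"   ("Left" substring)
    # "EYE_LEFT"      -> "EYE_RIGHT"        ("LEFT" substring)
    # "L_UPPER_ARM"   -> "R_UPPER_ARM"      (standalone "L" segment via the regex)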

    @property
    def descendents(self) -> Set[JointNode]:
        descendents = set()
        ancestors = {self}
        last_ancestors = set()
        descendents: Set[JointNode] = set()
        ancestors: Set[str] = {self.name}
        last_ancestors: Set[str] = set()
        while last_ancestors != ancestors:
            last_ancestors = ancestors
            last_ancestors = ancestors.copy()
            for node in self.skeleton().joint_dict.values():
                if node.parent and node.parent() in ancestors:
                    ancestors.add(node)
                if node.parent_name and node.parent_name in ancestors:
                    ancestors.add(node.name)
                    descendents.add(node)
        return descendents


class Skeleton:
    def __init__(self, root_node: etree.ElementBase):
    def __init__(self, root_node: Optional[etree.ElementBase] = None):
        self.joint_dict: Dict[str, JointNode] = {}
        self._parse_node_children(root_node, None)
        if root_node is not None:
            self._parse_node_children(root_node, None)

    def __getitem__(self, item: str) -> JointNode:
        return self.joint_dict[item]

    def _parse_node_children(self, node: etree.ElementBase, parent: MAYBE_JOINT_REF):
    def clone(self) -> Self:
        val = copy.deepcopy(self)
        skel_ref = weakref.ref(val)
        for joint in val.joint_dict.values():
            joint.skeleton = skel_ref
        return val
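
The weakref fixup is the point of clone(): deepcopy duplicates the JointNodes, but their skeleton callables must resolve to the new Skeleton or joint lookups would dangle once the original is collected. A minimal sketch, given some existing Skeleton `skel`:

    clone = skel.clone()
    assert clone["mPelvis"].skeleton() is clone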

    def _parse_node_children(self, node: etree.ElementBase, parent_name: MAYBE_JOINT_REF):
        name = node.get('name')
        joint = JointNode(
            name=name,
            parent=parent,
            parent_name=parent_name,
            skeleton=weakref.ref(self),
            translation=_get_vec_attr(node, "pos", Vector3()),
            pivot=_get_vec_attr(node, "pivot", Vector3()),
            rotation=_get_vec_attr(node, "rot", Vector3()),
            scale=_get_vec_attr(node, "scale", Vector3(1, 1, 1)),
            support=node.get('support', 'base'),
            type=node.tag,
        )
        self.joint_dict[name] = joint
        for child in node.iterchildren():
            self._parse_node_children(child, weakref.ref(joint))
            self._parse_node_children(child, joint.name)

    def merge_mesh_skeleton(self, mesh: MeshAsset) -> None:
        """Update this skeleton with a skeleton definition from a mesh asset"""
        skin_seg: Optional[SkinSegmentDict] = mesh.segments.get('skin')
        if not skin_seg:
            return

        for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
            # We're only meant to use the translation component from the alt inverse bind matrix.
            joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
            joint_node = self.joint_dict.get(joint_name)
            if not joint_node:
                continue
            joint_node.translation = Vector3(*joint_decomp[3])

        if pelvis_offset := skin_seg.get('pelvis_offset'):
            # TODO: Should we even do this?
            pelvis_node = self["mPelvis"]
            pelvis_node.translation += Vector3(0, 0, pelvis_offset)


def _get_vec_attr(node, attr_name: str, default: Vector3) -> Vector3:

@@ -84,7 +84,7 @@ class Circuit:
    def send(self, message: Message, transport=None) -> UDPPacket:
        if self.prepare_message(message):
            # If the message originates from us then we're responsible for resends.
            if message.reliable and message.synthetic:
            if message.reliable and message.synthetic and not transport:
                self.unacked_reliable[(message.direction, message.packet_id)] = ReliableResendInfo(
                    last_resent=dt.datetime.now(),
                    message=message,
File diff suppressed because it is too large
@@ -29,7 +31,10 @@ from hippolyzer.lib.base.message.msgtypes import MsgType

PACKER = Callable[[Any], bytes]
UNPACKER = Callable[[bytes], Any]
LLSD_PACKER = Callable[[Any], Any]
LLSD_UNPACKER = Callable[[Any], Any]
SPEC = Tuple[UNPACKER, PACKER]
LLSD_SPEC = Tuple[LLSD_UNPACKER, LLSD_PACKER]


def _pack_string(pack_string):
@@ -64,6 +67,21 @@ def _make_tuplecoord_spec(typ: Type[TupleCoord], struct_fmt: str,
    return lambda x: typ(*struct_obj.unpack(x)), _packer


def _make_llsd_tuplecoord_spec(typ: Type[TupleCoord], needed_elems: Optional[int] = None):
    if needed_elems is None:
        # Number of elems needed matches the number in the coord type
        def _packer(x):
            return list(x)
    else:
        # Special case, we only want to pack some of the components.
        # Mostly for Quaternion since we don't actually need to send W.
        def _packer(x):
            if isinstance(x, TupleCoord):
                x = x.data()
            return list(x.data(needed_elems))
    return lambda x: typ(*x), _packer


def _unpack_specs(cls):
    cls.UNPACKERS = {k: v[0] for (k, v) in cls.SPECS.items()}
    cls.PACKERS = {k: v[1] for (k, v) in cls.SPECS.items()}
@@ -110,10 +128,15 @@ class TemplateDataPacker:
class LLSDDataPacker(TemplateDataPacker):
    # Some template var types aren't directly representable in LLSD, so they
    # get encoded to binary fields.
    SPECS = {
    SPECS: Dict[MsgType, LLSD_SPEC] = {
        MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
        # LLSD ints are technically bound to S32 range.
        MsgType.MVT_U32: _make_struct_spec('!I'),
        MsgType.MVT_U64: _make_struct_spec('!Q'),
        MsgType.MVT_S64: _make_struct_spec('!q'),
        # These are arrays in LLSD, we need to turn them into coords.
        MsgType.MVT_LLVector3: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector3d: _make_llsd_tuplecoord_spec(Vector3),
        MsgType.MVT_LLVector4: _make_llsd_tuplecoord_spec(Vector4),
        MsgType.MVT_LLQuaternion: _make_llsd_tuplecoord_spec(Quaternion, needed_elems=3)
    }
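
The needed_elems=3 case is the usual wire trick for rotations: a unit quaternion is sent as X, Y, Z only and the receiver rederives W from unit length. A sketch (values hypothetical, assuming math is imported):

    x, y, z = 0.0, 0.0, 0.7071
    w = math.sqrt(max(0.0, 1.0 - (x * x + y * y + z * z)))  # ~0.7071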

@@ -75,8 +75,8 @@ class Block:
        for var_name, val in kwargs.items():
            self[var_name] = val

    def get_variable(self, var_name):
        return self.vars.get(var_name)
    def get(self, var_name, default: Optional[VAR_TYPE] = None) -> Optional[VAR_TYPE]:
        return self.vars.get(var_name, default)

    def __contains__(self, item):
        return item in self.vars
@@ -188,7 +188,7 @@ class MsgBlockList(List["Block"]):
class Message:
    __slots__ = ("name", "send_flags", "packet_id", "acks", "body_boundaries", "queued",
                 "offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized",
                 "direction", "meta", "synthetic", "dropped", "sender")
                 "direction", "meta", "synthetic", "dropped", "sender", "unknown_message")

    def __init__(self, name, *args, packet_id=None, flags=0, acks=None, direction=None):
        # TODO: Do this on a timer or something.
@@ -200,6 +200,7 @@ class Message:

        self.acks = acks if acks is not None else tuple()
        self.body_boundaries = (-1, -1)
        self.unknown_message = False
        self.offset = 0
        self.raw_extra = b""
        self.direction: Direction = direction if direction is not None else Direction.OUT
@@ -266,7 +267,7 @@ class Message:
            block.message_name = self.name
            block.finalize()

    def get_block(self, block_name: str, default=None, /) -> Optional[Block]:
    def get_blocks(self, block_name: str, default=None, /) -> Optional[MsgBlockList]:
        return self.blocks.get(block_name, default)

    @property
@@ -288,7 +289,7 @@ class Message:

    def ensure_parsed(self):
        # This is a little magic, think about whether we want this.
        if self.raw_body and self.deserializer():
        if self.raw_body and self.deserializer and self.deserializer():
            self.deserializer().parse_message_body(self)

    def to_dict(self, extended=False):
@@ -341,6 +342,21 @@ class Message:
        msg.acks = dict_val['acks']
        return msg

    @classmethod
    def from_eq_event(cls, event) -> Message:
        # If this isn't a templated message (like some EQ-only events are),
        # then we wrap it in a synthetic `Message` so that the API for handling
        # both EQ-only and templated message events can be the same. Ick.
        msg = cls(event["message"])
        if isinstance(event["body"], dict):
            msg.add_block(Block("EventData", **event["body"]))
        else:
            # Shouldn't be any events that have anything other than a dict
            # as a body, but just to be sure...
            msg.add_block(Block("EventData", Data=event["body"]))
        msg.synthetic = True
        return msg
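
A usage sketch (event shape hypothetical; real EQ events carry "message" and "body" keys):

    msg = Message.from_eq_event({"message": "SomeEQOnlyEvent", "body": {"Level": 3}})
    assert msg.synthetic
    assert msg.get_blocks("EventData")[0]["Level"] == 3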

    def invalidate_caches(self):
        # Don't have any caches if we haven't even parsed
        if self.raw_body:

@@ -31,7 +31,7 @@ _T = TypeVar("_T")
_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]` apparently?
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]]` apparently?
MESSAGE_NAMES = Iterable[Union[_K, str]]


@@ -42,7 +42,7 @@ class MessageHandler(Generic[_T, _K]):

    def register(self, message_name: _K) -> Event:
        LOG.debug('Creating a monitor for %s' % message_name)
        return self.handlers.setdefault(message_name, Event())
        return self.handlers.setdefault(message_name, Event(message_name))

    def subscribe(self, message_name: Union[_K, Literal["*"]], handler: MESSAGE_HANDLER):
        notifier = self.register(message_name)
@@ -57,7 +57,7 @@ class MessageHandler(Generic[_T, _K]):

    @contextlib.contextmanager
    def subscribe_async(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
                        take: Optional[bool] = None) -> ContextManager[Callable[[], Awaitable[_T]]]:
                        take: Optional[bool] = None) -> Generator[Callable[[], Awaitable[_T]], None, None]:
        """
        Subscribe to a set of messages matching predicate while within a block

@@ -92,6 +92,7 @@ class MessageHandler(Generic[_T, _K]):
        finally:
            for n in notifiers:
                n.unsubscribe(_handler_wrapper)
        return None

    def wait_for(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
                 timeout: Optional[float] = None, take: Optional[bool] = None) -> Awaitable[_T]:

@@ -47,7 +47,6 @@ class MsgBlockType:
    MBT_SINGLE = 0
    MBT_MULTIPLE = 1
    MBT_VARIABLE = 2
    MBT_String_List = ['Single', 'Multiple', 'Variable']


class PacketFlags(enum.IntFlag):
@@ -55,6 +54,8 @@ class PacketFlags(enum.IntFlag):
    RELIABLE = 0x40
    RESENT = 0x20
    ACK = 0x10
    # Not a real flag, just used for display.
    EQ = 1 << 10


# frequency for messages
@@ -62,28 +63,23 @@ class PacketFlags(enum.IntFlag):
# = '\xFF\xFF'
# = '\xFF'
# = ''
class MsgFrequency:
    FIXED_FREQUENCY_MESSAGE = -1  # marking it
    LOW_FREQUENCY_MESSAGE = 4
    MEDIUM_FREQUENCY_MESSAGE = 2
    HIGH_FREQUENCY_MESSAGE = 1
class MsgFrequency(enum.IntEnum):
    FIXED = -1  # marking it
    LOW = 4
    MEDIUM = 2
    HIGH = 1


class MsgTrust:
    LL_NOTRUST = 0
    LL_TRUSTED = 1
class MsgEncoding(enum.IntEnum):
    UNENCODED = 0
    ZEROCODED = 1


class MsgEncoding:
    LL_UNENCODED = 0
    LL_ZEROCODED = 1


class MsgDeprecation:
    LL_DEPRECATED = 0
    LL_UDPDEPRECATED = 1
    LL_UDPBLACKLISTED = 2
    LL_NOTDEPRECATED = 3
class MsgDeprecation(enum.IntEnum):
    DEPRECATED = 0
    UDPDEPRECATED = 1
    UDPBLACKLISTED = 2
    NOTDEPRECATED = 3


# message variable types

@@ -21,7 +21,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import typing

from .msgtypes import MsgType, MsgBlockType
from .msgtypes import MsgType, MsgBlockType, MsgFrequency
from ..datatypes import UUID


@@ -37,7 +37,7 @@ class MessageTemplateVariable:
        return f"{self.__class__.__name__}(name={self.name!r}, tp={self.type!r}, size={self.size!r})"

    @property
    def probably_binary(self):
    def probably_binary(self) -> bool:
        if self._probably_binary is not None:
            return self._probably_binary

@@ -49,7 +49,7 @@ class MessageTemplateVariable:
        return self._probably_binary

    @property
    def probably_text(self):
    def probably_text(self) -> bool:
        if self._probably_text is not None:
            return self._probably_text

@@ -97,49 +97,36 @@ class MessageTemplateBlock:
        self.block_type: MsgBlockType = MsgBlockType.MBT_SINGLE
        self.number = 0

    def add_variable(self, var):
    def add_variable(self, var: MessageTemplateVariable):
        self.variable_map[var.name] = var
        self.variables.append(var)

    def get_variable(self, name):
    def get_variable(self, name) -> MessageTemplateVariable:
        return self.variable_map[name]


class MessageTemplate(object):
    frequency_strings = {-1: 'fixed', 1: 'high', 2: 'medium', 4: 'low'}  # strings for printout
    deprecation_strings = ["Deprecated", "UDPDeprecated", "UDPBlackListed", "NotDeprecated"]  # using _as_string methods
    encoding_strings = ["Unencoded", "Zerocoded"]  # etc
    trusted_strings = ["Trusted", "NotTrusted"]  # etc LDE 24oct2008

class MessageTemplate:
    def __init__(self, name):
        self.blocks: typing.List[MessageTemplateBlock] = []
        self.block_map: typing.Dict[str, MessageTemplateBlock] = {}

        # this is the function or object that will handle this type of message
        self.received_count = 0

        self.name = name
        self.frequency = None
        self.msg_num = 0
        self.msg_freq_num_bytes = None
        self.msg_trust = None
        self.msg_deprecation = None
        self.msg_encoding = None
        self.frequency: typing.Optional[MsgFrequency] = None
        self.num = 0
        # Frequency + msg num as bytes
        self.freq_num_bytes = None
        self.trusted = False
        self.deprecation = None
        self.encoding = None

    def add_block(self, block):
    def add_block(self, block: MessageTemplateBlock):
        self.block_map[block.name] = block
        self.blocks.append(block)

    def get_block(self, name):
    def get_block(self, name) -> MessageTemplateBlock:
        return self.block_map[name]

    def get_msg_freq_num_len(self):
        if self.frequency == -1:
        if self.frequency == MsgFrequency.FIXED:
            return 4
        return self.frequency

    def get_frequency_as_string(self):
        return MessageTemplate.frequency_strings[self.frequency]

    def get_deprecation_as_string(self):
        return MessageTemplate.deprecation_strings[self.msg_deprecation]

@@ -43,7 +43,7 @@ class TemplateDictionary:

        self.template_list: typing.List[MessageTemplate] = []
        # maps name to template
        self.message_templates = {}
        self.message_templates: typing.Dict[str, MessageTemplate] = {}

        # maps (freq,num) to template
        self.message_dict = {}
@@ -68,32 +68,32 @@ class TemplateDictionary:

        # do a mapping of type to a string for easier reference
        frequency_str = ''
        if template.frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
        if template.frequency == MsgFrequency.FIXED:
            frequency_str = "Fixed"
        elif template.frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.LOW:
            frequency_str = "Low"
        elif template.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.MEDIUM:
            frequency_str = "Medium"
        elif template.frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
        elif template.frequency == MsgFrequency.HIGH:
            frequency_str = "High"

        self.message_dict[(frequency_str,
                           template.msg_num)] = template
                           template.num)] = template

    def build_message_ids(self):
        for template in list(self.message_templates.values()):
            frequency = template.frequency
            num_bytes = None
            if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
            if frequency == MsgFrequency.FIXED:
                # have to do this because Fixed messages are stored as a long in the template
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.msg_num)
            elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
                num_bytes = b'\xff' + struct.pack("B", template.msg_num)
            elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
                num_bytes = struct.pack("B", template.msg_num)
            template.msg_freq_num_bytes = num_bytes
                num_bytes = b'\xff\xff\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.LOW:
                num_bytes = b'\xff\xff' + struct.pack("!H", template.num)
            elif frequency == MsgFrequency.MEDIUM:
                num_bytes = b'\xff' + struct.pack("B", template.num)
            elif frequency == MsgFrequency.HIGH:
                num_bytes = struct.pack("B", template.num)
            template.freq_num_bytes = num_bytes
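
A worked example of the resulting prefixes (message numbers hypothetical):

    # High   num=1    -> b'\x01'                 (1 byte)
    # Medium num=1    -> b'\xff\x01'             (2 bytes)
    # Low    num=1    -> b'\xff\xff\x00\x01'     (4 bytes, big-endian U16)
    # Fixed  num=0xfb -> b'\xff\xff\xff\xfb'     (low byte of the stored long)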

    def get_template_by_name(self, template_name) -> typing.Optional[MessageTemplate]:
        return self.message_templates.get(template_name)

@@ -22,7 +22,7 @@ import struct
import re

from . import template
from .msgtypes import MsgFrequency, MsgTrust, MsgEncoding
from .msgtypes import MsgFrequency, MsgEncoding
from .msgtypes import MsgDeprecation, MsgBlockType, MsgType
from ..exc import MessageTemplateParsingError, MessageTemplateNotFound

@@ -112,67 +112,69 @@ class MessageTemplateParser:
        frequency = None
        freq_str = match.group(2)
        if freq_str == 'Low':
            frequency = MsgFrequency.LOW_FREQUENCY_MESSAGE
            frequency = MsgFrequency.LOW
        elif freq_str == 'Medium':
            frequency = MsgFrequency.MEDIUM_FREQUENCY_MESSAGE
            frequency = MsgFrequency.MEDIUM
        elif freq_str == 'High':
            frequency = MsgFrequency.HIGH_FREQUENCY_MESSAGE
            frequency = MsgFrequency.HIGH
        elif freq_str == 'Fixed':
            frequency = MsgFrequency.FIXED_FREQUENCY_MESSAGE
            frequency = MsgFrequency.FIXED

        new_template.frequency = frequency

        msg_num = int(match.group(3), 0)
        if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
        if frequency == MsgFrequency.FIXED:
            # have to do this because Fixed messages are stored as a long in the template
            msg_num &= 0xff
            msg_num_bytes = struct.pack('!BBBB', 0xff, 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.LOW:
            msg_num_bytes = struct.pack('!BBH', 0xff, 0xff, msg_num)
        elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.MEDIUM:
            msg_num_bytes = struct.pack('!BB', 0xff, msg_num)
        elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
        elif frequency == MsgFrequency.HIGH:
            msg_num_bytes = struct.pack('!B', msg_num)
        else:
            raise Exception("don't know about frequency %s" % frequency)

        new_template.msg_num = msg_num
        new_template.msg_freq_num_bytes = msg_num_bytes
        new_template.num = msg_num
        new_template.freq_num_bytes = msg_num_bytes

        msg_trust = None
        msg_trust_str = match.group(4)
        if msg_trust_str == 'Trusted':
            msg_trust = MsgTrust.LL_TRUSTED
            msg_trust = True
        elif msg_trust_str == 'NotTrusted':
            msg_trust = MsgTrust.LL_NOTRUST
            msg_trust = False
        else:
            raise ValueError(f"Invalid trust {msg_trust_str}")

        new_template.msg_trust = msg_trust
        new_template.trusted = msg_trust

        msg_encoding = None
        msg_encoding_str = match.group(5)
        if msg_encoding_str == 'Unencoded':
            msg_encoding = MsgEncoding.LL_UNENCODED
            msg_encoding = MsgEncoding.UNENCODED
        elif msg_encoding_str == 'Zerocoded':
            msg_encoding = MsgEncoding.LL_ZEROCODED
            msg_encoding = MsgEncoding.ZEROCODED
        else:
            raise ValueError(f"Invalid encoding {msg_encoding_str}")

        new_template.msg_encoding = msg_encoding
        new_template.encoding = msg_encoding

        msg_dep = None
        msg_dep_str = match.group(7)
        if msg_dep_str:
            if msg_dep_str == 'Deprecated':
                msg_dep = MsgDeprecation.LL_DEPRECATED
                msg_dep = MsgDeprecation.DEPRECATED
            elif msg_dep_str == 'UDPDeprecated':
                msg_dep = MsgDeprecation.LL_UDPDEPRECATED
                msg_dep = MsgDeprecation.UDPDEPRECATED
            elif msg_dep_str == 'UDPBlackListed':
                msg_dep = MsgDeprecation.LL_UDPBLACKLISTED
                msg_dep = MsgDeprecation.UDPBLACKLISTED
            elif msg_dep_str == 'NotDeprecated':
                msg_dep = MsgDeprecation.LL_NOTDEPRECATED
                msg_dep = MsgDeprecation.NOTDEPRECATED
            else:
                msg_dep = MsgDeprecation.LL_NOTDEPRECATED
                msg_dep = MsgDeprecation.NOTDEPRECATED
        if msg_dep is None:
            raise MessageTemplateParsingError("Unknown msg_dep field %s" % match.group(0))
        new_template.msg_deprecation = msg_dep
        new_template.deprecation = msg_dep

        return new_template


@@ -126,8 +126,14 @@ class UDPMessageDeserializer:
        frequency, num = _parse_msg_num(reader)
        current_template = self.template_dict.get_template_by_pair(frequency, num)
        if current_template is None:
            raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
        msg.name = current_template.name
            if self.settings.ALLOW_UNKNOWN_MESSAGES:
                LOG.warning(f"Unknown message type {frequency}:{num}")
                msg.unknown_message = True
                msg.name = "UnknownMessage:%d" % num
            else:
                raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
        else:
            msg.name = current_template.name

        # extra field, see note regarding msg.offset
        msg.raw_extra = reader.read_bytes(msg.offset)
@@ -143,6 +149,12 @@ class UDPMessageDeserializer:
        # Already parsed if we don't have a raw body
        if not raw_body:
            return

        if msg.unknown_message:
            # We can't parse this, we don't know anything about it
            msg.deserializer = None
            return

        msg.raw_body = None
        msg.deserializer = None

@@ -220,11 +232,17 @@ class UDPMessageDeserializer:
        if tmpl_variable.probably_binary:
            return unpacked_data
        # Truncated strings need to be treated carefully
        if tmpl_variable.probably_text and unpacked_data.endswith(b"\x00"):
            try:
                return unpacked_data.decode("utf8").rstrip("\x00")
            except UnicodeDecodeError:
                return JankStringyBytes(unpacked_data)
        if tmpl_variable.probably_text:
            # If it has a null terminator, let's try to decode it first.
            # We don't want to do this if there isn't one, because that may change
            # the meaning of the data.
            if unpacked_data.endswith(b"\x00"):
                try:
                    return unpacked_data.decode("utf8").rstrip("\x00")
                except UnicodeDecodeError:
                    pass
            # Failed, return jank stringy bytes
            return JankStringyBytes(unpacked_data)
        elif tmpl_variable.type in {MsgType.MVT_FIXED, MsgType.MVT_VARIABLE}:
            # No idea if this should be bytes or a string... make an object that's sort of both.
            return JankStringyBytes(unpacked_data)
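
The decision table this implements, sketched with hypothetical payloads:

    # b'Hello\x00'  -> "Hello"                          (terminated, valid UTF-8)
    # b'Hi\xff\x00' -> JankStringyBytes(b'Hi\xff\x00')  (terminated, not UTF-8)
    # b'Hello'      -> JankStringyBytes(b'Hello')       (no terminator, left alone)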

@@ -45,7 +45,7 @@ class UDPMessageSerializer:

    def serialize(self, msg: Message):
        current_template = self.template_dict.get_template_by_name(msg.name)
        if current_template is None:
        if current_template is None and msg.raw_body is None:
            raise exc.MessageSerializationError("message name", "invalid message name")

        # Header and trailers are all big-endian
@@ -69,13 +69,13 @@ class UDPMessageSerializer:
        # frequency and message number. The template stores it because it doesn't
        # change per template.
        body_writer = se.BufferWriter("<")
        body_writer.write_bytes(current_template.msg_freq_num_bytes)
        body_writer.write_bytes(current_template.freq_num_bytes)
        body_writer.write_bytes(msg.extra)

        # We're going to pop off keys as we go, so shallow copy the dict.
        blocks = copy.copy(msg.blocks)

        missing_block = None
        missing_blocks: List[MessageTemplateBlock] = []
        # Iterate based on the order of the blocks in the message template
        for tmpl_block in current_template.blocks:
            block_list = blocks.pop(tmpl_block.name, None)
@@ -83,13 +83,21 @@ class UDPMessageSerializer:
            # omitted by SL. Not an error unless another block containing data follows it.
            # Keep track.
            if block_list is None:
                missing_block = tmpl_block.name
                missing_blocks.append(tmpl_block)
                logger.debug("No block %s, bailing out" % tmpl_block.name)
                continue
            # Had a missing block before, but we found one later in the template?
            elif missing_block:
                raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_block}")
            self._serialize_block(body_writer, tmpl_block, block_list)
            # Had a missing block before, but we specified one defined later in the template?
            elif missing_blocks:
                if not all(x.block_type == MsgBlockType.MBT_VARIABLE for x in missing_blocks):
                    raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_blocks!r}")
                # This is okay, we just need to put empty blocks for all the variable blocks that came before.
                # Normally we wouldn't even put these to match SL behavior, but in this case we need the
                # empty blocks so the decoder will decode these as the correct block type.
                for missing_block in missing_blocks:
                    self._serialize_block_list(body_writer, missing_block, MsgBlockList())
                missing_blocks.clear()

            self._serialize_block_list(body_writer, tmpl_block, block_list)
        if blocks:
            raise KeyError(f"Unexpected {tuple(blocks.keys())!r} blocks in {msg.name}")

@@ -105,8 +113,8 @@ class UDPMessageSerializer:
        writer.write(se.U8, len(msg.acks))
        return writer.copy_buffer()

    def _serialize_block(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
                         block_list: MsgBlockList):
    def _serialize_block_list(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
                              block_list: MsgBlockList):
        block_count = len(block_list)
        # Multiple block type means there is a static number of blocks
        if tmpl_block.block_type == MsgBlockType.MBT_MULTIPLE:

@@ -42,7 +42,7 @@ class Object(recordclass.RecordClass, use_weakref=True):  # type: ignore
    CRC: Optional[int] = None
    PCode: Optional[tmpls.PCode] = None
    Material: Optional[tmpls.MCode] = None
    ClickAction: Optional[int] = None
    ClickAction: Optional[tmpls.ClickAction] = None
    Scale: Optional[Vector3] = None
    ParentID: Optional[int] = None
    # Actually contains a weakref proxy
@@ -125,12 +125,14 @@ class Object(recordclass.RecordClass, use_weakref=True):  # type: ignore
    SitName: Optional[str] = None
    TextureID: Optional[List[UUID]] = None
    RegionHandle: Optional[int] = None
    Animations: Optional[List[UUID]] = None

    def __init__(self, **_kwargs):
        """ set up the object attributes """
        self.ExtraParams = self.ExtraParams or {}  # Variable 1
        self.ObjectCosts = self.ObjectCosts or {}
        self.ChildIDs = []
        self.Animations = self.Animations or []
        # Same as parent, contains weakref proxies.
        self.Children: List[Object] = []

@@ -241,6 +243,7 @@ def normalize_object_update(block: Block, handle: int):
        "NameValue": block.deserialize_var("NameValue", make_copy=False),
        "TextureAnim": block.deserialize_var("TextureAnim", make_copy=False),
        "ExtraParams": block.deserialize_var("ExtraParams", make_copy=False) or {},
        "ClickAction": block.deserialize_var("ClickAction", make_copy=False),
        "PSBlock": block.deserialize_var("PSBlock", make_copy=False).value,
        "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
        "State": block.deserialize_var("State", make_copy=False),
@@ -253,7 +256,7 @@ def normalize_object_update(block: Block, handle: int):
    # OwnerID is only set in this packet if a sound is playing. Don't allow
    # ObjectUpdates to clobber _real_ OwnerIDs we had from ObjectProperties
    # with a null UUID.
    if object_data["OwnerID"] == UUID():
    if object_data["OwnerID"] == UUID.ZERO:
        del object_data["OwnerID"]
        del object_data["Flags"]
        del object_data["Gain"]
@@ -309,7 +312,7 @@ def normalize_object_update_compressed_data(data: bytes):
        compressed["SoundFlags"] = 0
        compressed["SoundGain"] = 0.0
        compressed["SoundRadius"] = 0.0
        compressed["Sound"] = UUID()
        compressed["Sound"] = UUID.ZERO
    if compressed["TextureEntry"] is None:
        compressed["TextureEntry"] = tmpls.TextureEntryCollection()

@@ -323,7 +326,7 @@ def normalize_object_update_compressed_data(data: bytes):
    # Don't clobber OwnerID in case the object has a proper one from
    # a previous ObjectProperties. OwnerID isn't expected to be populated
    # on ObjectUpdates unless an attached sound is playing.
    if object_data["OwnerID"] == UUID():
    if object_data["OwnerID"] == UUID.ZERO:
        del object_data["OwnerID"]
    return object_data

@@ -433,8 +436,8 @@ class FastObjectUpdateCompressedDataDeserializer:
            "PCode": pcode,
            "State": state,
            "CRC": crc,
            "Material": material,
            "ClickAction": click_action,
            "Material": tmpls.MCode(material),
            "ClickAction": tmpls.ClickAction(click_action),
            "Scale": scale,
            "Position": pos,
            "Rotation": rot,

@@ -1580,8 +1580,16 @@ def bitfield_field(bits: int, *, adapter: Optional[Adapter] = None, default=0, i

class BitfieldDataclass(DataclassAdapter):
    def __init__(self, data_cls: Type,
                 prim_spec: Optional[SerializablePrimitive] = None, shift: bool = True):
    PRIM_SPEC: ClassVar[Optional[SerializablePrimitive]] = None

    def __init__(self, data_cls: Optional[Type] = None,
                 prim_spec: Optional[SerializablePrimitive] = None, shift: Optional[bool] = None):
        if not dataclasses.is_dataclass(data_cls):
            raise ValueError(f"{data_cls!r} is not a dataclass")
        if prim_spec is None:
            prim_spec = getattr(data_cls, 'PRIM_SPEC', None)
        if shift is None:
            shift = getattr(data_cls, 'SHIFT', True)
        super().__init__(data_cls, prim_spec)
        self._shift = shift
        self._bitfield_spec = self._build_bitfield(data_cls)
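
A sketch of what the class-level defaults allow (dataclass and field layout hypothetical):

    @dataclasses.dataclass
    class ShinyBumpFlags:
        SHIFT = True  # picked up via getattr(), not a dataclass field
        bump: int = bitfield_field(bits=5)
        fullbright: int = bitfield_field(bits=1)
        shiny: int = bitfield_field(bits=2)

    SHINY_SPEC = BitfieldDataclass(ShinyBumpFlags, prim_spec=se.U8)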

@@ -1720,7 +1728,6 @@ class QuantizedNumPyArray(Adapter):

def subfield_serializer(msg_name, block_name, var_name):
    def f(orig_cls):
        global SUBFIELD_SERIALIZERS
        SUBFIELD_SERIALIZERS[(msg_name, block_name, var_name)] = orig_cls
        return orig_cls
    return f
@@ -1932,7 +1939,6 @@ class IntFlagSubfieldSerializer(AdapterInstanceSubfieldSerializer):

def http_serializer(msg_name):
    def f(orig_cls):
        global HTTP_SERIALIZERS
        HTTP_SERIALIZERS[msg_name] = orig_cls
        return orig_cls
    return f

@@ -55,6 +55,7 @@ class SettingDescriptor(Generic[_T]):

class Settings:
    ENABLE_DEFERRED_PACKET_PARSING: bool = SettingDescriptor(True)
    ALLOW_UNKNOWN_MESSAGES: bool = SettingDescriptor(True)

    def __init__(self):
        self._settings: Dict[str, Any] = {}

@@ -12,10 +12,44 @@ import math
import zlib
from typing import *

import numpy as np

import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.helpers import BiDiDict
from hippolyzer.lib.base.namevalue import NameValuesSerializer
from hippolyzer.lib.base.serialization import ParseContext


class LookupIntEnum(IntEnum):
    """
    Used for enums that have legacy string names, may be used in the legacy schema

    Generally this is the string returned by `LLWhateverType::lookup()` in indra
    """
    @abc.abstractmethod
    def to_lookup_name(self) -> str:
        raise NotImplementedError()

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        raise NotImplementedError()


_ASSET_TYPE_BIDI: BiDiDict[str] = BiDiDict({
    "animation": "animatn",
    "callingcard": "callcard",
    "lsl_text": "lsltext",
    "lsl_bytecode": "lslbyte",
    "texture_tga": "txtr_tga",
    "image_tga": "img_tga",
    "image_jpeg": "jpg",
    "sound_wav": "snd_wav",
    "folder_link": "link_f",
    "unknown": "invalid",
    "none": "-1",
})


@se.enum_field_serializer("RequestXfer", "XferID", "VFileType")
@@ -23,10 +57,12 @@ from hippolyzer.lib.base.namevalue import NameValuesSerializer
@se.enum_field_serializer("AssetUploadComplete", "AssetBlock", "Type")
@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "Type")
@se.enum_field_serializer("CreateInventoryItem", "InventoryBlock", "Type")
@se.enum_field_serializer("LinkInventoryItem", "InventoryBlock", "Type")
@se.enum_field_serializer("RezObject", "InventoryData", "Type")
@se.enum_field_serializer("RezScript", "InventoryBlock", "Type")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "Type")
class AssetType(IntEnum):
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "Type")
class AssetType(LookupIntEnum):
    TEXTURE = 0
    SOUND = 1
    CALLINGCARD = 2
@@ -47,7 +83,7 @@ class AssetType(IntEnum):
    GESTURE = 21
    SIMSTATE = 22
    LINK = 24
    LINK_FOLDER = 25
    FOLDER_LINK = 25
    MARKETPLACE_FOLDER = 26
    WIDGET = 40
    PERSON = 45
@@ -59,19 +95,18 @@ class AssetType(IntEnum):
    RESERVED_5 = 54
    RESERVED_6 = 55
    SETTINGS = 56
    MATERIAL = 57
    UNKNOWN = 255
    NONE = -1

    @property
    def human_name(self):
    def to_lookup_name(self) -> str:
        lower = self.name.lower()
        return {
            "animation": "animatn",
            "callingcard": "callcard",
            "texture_tga": "txtr_tga",
            "image_tga": "img_tga",
            "sound_wav": "snd_wav",
        }.get(lower, lower)
        return _ASSET_TYPE_BIDI.forward.get(lower, lower)

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        reg_name = _ASSET_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
        return cls[reg_name]
|
||||
|
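The `BiDiDict` swap above replaces the per-enum literal dicts with one shared forward/backward table, so the legacy-name mapping can round-trip in both directions. A self-contained sketch of the idea (this `BiDiDict` stand-in is illustrative, not hippolyzer's exact helper):

    class BiDiDict:
        def __init__(self, forward: dict):
            self.forward = dict(forward)
            # Reverse mapping; later duplicates win, mirroring dict construction.
            self.backward = {v: k for k, v in forward.items()}

    _ASSET_TYPE_BIDI = BiDiDict({"animation": "animatn", "callingcard": "callcard"})

    def to_lookup_name(member_name: str) -> str:
        lower = member_name.lower()
        return _ASSET_TYPE_BIDI.forward.get(lower, lower)

    def from_lookup_name(legacy_name: str) -> str:
        return _ASSET_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()

    assert to_lookup_name("ANIMATION") == "animatn"
    assert from_lookup_name("animatn") == "ANIMATION"
    assert from_lookup_name("texture") == "TEXTURE"  # unmapped names pass through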
    @property
    def inventory_type(self):
@@ -96,15 +131,26 @@ class AssetType(IntEnum):
            AssetType.PERSON: InventoryType.PERSON,
            AssetType.MESH: InventoryType.MESH,
            AssetType.SETTINGS: InventoryType.SETTINGS,
            AssetType.MATERIAL: InventoryType.MATERIAL,
        }.get(self, AssetType.NONE)


_INV_TYPE_BIDI: BiDiDict[str] = BiDiDict({
    "callingcard": "callcard",
    "attachment": "attach",
    "none": "-1",
})


@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "InvType")
@se.enum_field_serializer("CreateInventoryItem", "InventoryBlock", "InvType")
@se.enum_field_serializer("LinkInventoryItem", "InventoryBlock", "InvType")
@se.enum_field_serializer("RezObject", "InventoryData", "InvType")
@se.enum_field_serializer("RezScript", "InventoryBlock", "InvType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "InvType")
class InventoryType(IntEnum):
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "InvType")
@se.enum_field_serializer("BulkUpdateInventory", "FolderData", "Type")
class InventoryType(LookupIntEnum):
    TEXTURE = 0
    SOUND = 1
    CALLINGCARD = 2
@@ -130,19 +176,41 @@ class InventoryType(IntEnum):
    WIDGET = 23
    PERSON = 24
    SETTINGS = 25
    MATERIAL = 26
    UNKNOWN = 255
    NONE = -1

    @property
    def human_name(self):
    def to_lookup_name(self) -> str:
        lower = self.name.lower()
        return {
            "callingcard": "callcard",
            "none": "-1",
        }.get(lower, lower)
        return _INV_TYPE_BIDI.forward.get(lower, lower)

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        reg_name = _INV_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
        return cls[reg_name]

class FolderType(IntEnum):
_FOLDER_TYPE_BIDI: BiDiDict[str] = BiDiDict({
    "callingcard": "callcard",
    "lsl_text": "lsltext",
    "animation": "animatn",
    "snapshot_category": "snapshot",
    "lost_and_found": "lstndfnd",
    "ensemble_start": "ensemble",
    "ensemble_end": "ensemble",
    "current_outfit": "current",
    "my_outfits": "my_otfts",
    "basic_root": "basic_rt",
    "marketplace_listings": "merchant",
    "marketplace_stock": "stock",
    "marketplace_version": "version",
    "my_suitcase": "suitcase",
    "root_inventory": "root_inv",
    "none": "-1",
})


class FolderType(LookupIntEnum):
    TEXTURE = 0
    SOUND = 1
    CALLINGCARD = 2
@@ -161,6 +229,7 @@ class FolderType(IntEnum):
    ANIMATION = 20
    GESTURE = 21
    FAVORITE = 23
    # The "ensemble" values aren't used, no idea what they were for.
    ENSEMBLE_START = 26
    ENSEMBLE_END = 45
    # This range is reserved for special clothing folder types.
@@ -177,14 +246,24 @@ class FolderType(IntEnum):
    # Note: We actually *never* create folders with that type. This is used for icon override only.
    MARKETPLACE_VERSION = 55
    SETTINGS = 56
    # Firestorm folders, may not actually exist
    FIRESTORM = 57
    PHOENIX = 58
    RLV = 59
    MATERIAL = 57
    # Firestorm folders, may not actually exist in legacy schema
    FIRESTORM = 58
    PHOENIX = 59
    RLV = 60
    # Opensim folders
    MY_SUITCASE = 100
    NONE = -1

    def to_lookup_name(self) -> str:
        lower = self.name.lower()
        return _FOLDER_TYPE_BIDI.forward.get(lower, lower)

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        reg_name = _FOLDER_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
        return cls[reg_name]

@se.enum_field_serializer("AgentIsNowWearing", "WearableData", "WearableType")
@se.enum_field_serializer("AgentWearablesUpdate", "WearableData", "WearableType")
@@ -208,6 +287,12 @@ class WearableType(IntEnum):
    PHYSICS = 15
    UNIVERSAL = 16

    @property
    def asset_type(self) -> AssetType:
        if self in (WearableType.HAIR, WearableType.SKIN, WearableType.EYES, WearableType.SHAPE):
            return AssetType.BODYPART
        return AssetType.CLOTHING


def _register_permissions_flags(message_name, block_name):
    def _wrapper(flag_cls):
@@ -227,6 +312,7 @@ def _register_permissions_flags(message_name, block_name):
@_register_permissions_flags("RezObject", "InventoryData")
@_register_permissions_flags("RezScript", "InventoryBlock")
@_register_permissions_flags("RezMultipleAttachmentsFromInv", "ObjectData")
@_register_permissions_flags("BulkUpdateInventory", "ItemData")
class Permissions(IntFlag):
    TRANSFER = (1 << 13)
    MODIFY = (1 << 14)
@@ -244,7 +330,11 @@ class Permissions(IntFlag):
    RESERVED = 1 << 31


_SALE_TYPE_LEGACY_NAMES = ("not", "orig", "copy", "cntn")


@se.enum_field_serializer("ObjectSaleInfo", "ObjectData", "SaleType")
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "SaleType")
@se.enum_field_serializer("ObjectProperties", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectPropertiesFamily", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectBuy", "ObjectData", "SaleType")
@@ -252,12 +342,45 @@ class Permissions(IntFlag):
@se.enum_field_serializer("RezObject", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "SaleType")
class SaleInfo(IntEnum):
class SaleType(LookupIntEnum):
    NOT = 0
    ORIGINAL = 1
    COPY = 2
    CONTENTS = 3

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        return cls(_SALE_TYPE_LEGACY_NAMES.index(legacy_name))

    def to_lookup_name(self) -> str:
        return _SALE_TYPE_LEGACY_NAMES[int(self.value)]

class AggregatePermissionType(IntEnum):
    EMPTY = 0
    NONE = 1
    SOME = 2
    ALL = 3


def _make_agg_perms_field():
    return se.bitfield_field(bits=2, adapter=se.IntEnum(AggregatePermissionType))


@dataclasses.dataclass
class AggregatePerms(se.BitfieldDataclass):
    Copy: AggregatePermissionType = _make_agg_perms_field()
    Modify: AggregatePermissionType = _make_agg_perms_field()
    Transfer: AggregatePermissionType = _make_agg_perms_field()


@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePerms")
@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePermTextures")
@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePermTexturesOwner")
class AggregatePermsSerializer(se.AdapterSubfieldSerializer):
    ORIG_INLINE = True
    ADAPTER = se.BitfieldDataclass(AggregatePerms)

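Each aggregate perm is a 2-bit field, so Copy/Modify/Transfer fit together in a single byte. A standalone sketch of the bit math (the field order and shift direction here are assumptions for illustration, not necessarily hippolyzer's exact layout):

    def pack_agg_perms(copy: int, modify: int, transfer: int) -> int:
        # Three 2-bit fields, lowest bits first: Copy, then Modify, then Transfer.
        assert all(0 <= v <= 3 for v in (copy, modify, transfer))
        return copy | (modify << 2) | (transfer << 4)

    def unpack_agg_perms(val: int) -> tuple:
        return (val & 0x3, (val >> 2) & 0x3, (val >> 4) & 0x3)

    packed = pack_agg_perms(2, 3, 1)  # SOME, ALL, NONE
    assert unpack_agg_perms(packed) == (2, 3, 1)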
@se.flag_field_serializer("ParcelInfoReply", "Data", "Flags")
class ParcelInfoFlags(IntFlag):
@@ -276,10 +399,12 @@ class ParcelInfoFlags(IntFlag):
class MapImageFlags(IntFlag):
    # No clue, honestly. I guess there's potentially different image types you could request.
    LAYER = 1 << 1
    RETURN_NONEXISTENT = 0x10000


@se.enum_field_serializer("MapBlockReply", "Data", "Access")
@se.enum_field_serializer("RegionInfo", "RegionInfo", "SimAccess")
@se.enum_field_serializer("RegionHandshake", "RegionInfo", "SimAccess")
class SimAccess(IntEnum):
    # Treated as 'unknown', usually ends up being SIM_ACCESS_PG
    MIN = 0
@@ -857,6 +982,7 @@ class MCode(IntEnum):
    # What's in the high nybble, anything?
    STONE = 0
    METAL = 1
    GLASS = 2
    WOOD = 3
    FLESH = 4
    PLASTIC = 5
@@ -1565,6 +1691,24 @@ class SoundFlags(IntFlag):
    STOP = 1 << 5

@se.enum_field_serializer("ObjectClickAction", "ObjectData", "ClickAction")
@se.enum_field_serializer("ObjectUpdate", "ObjectData", "ClickAction")
class ClickAction(IntEnum):
    # "NONE" is also used as an alias for "TOUCH"
    TOUCH = 0
    SIT = 1
    BUY = 2
    PAY = 3
    OPEN = 4
    PLAY = 5
    OPEN_MEDIA = 6
    ZOOM = 7
    DISABLED = 8
    IGNORE = 9
    # I've seen this in practice, not clear what it is.
    UNKNOWN = 255


class CompressedFlags(IntFlag):
    SCRATCHPAD = 1
    TREE = 1 << 1
@@ -1599,7 +1743,7 @@ class ObjectUpdateCompressedDataSerializer(se.SimpleSubfieldSerializer):
        "State": ObjectStateAdapter(se.U8),
        "CRC": se.U32,
        "Material": se.IntEnum(MCode, se.U8),
        "ClickAction": se.U8,
        "ClickAction": se.IntEnum(ClickAction, se.U8),
        "Scale": se.Vector3,
        "Position": se.Vector3,
        "Rotation": se.PackedQuat(se.Vector3),
@@ -1737,9 +1881,20 @@ class ChatSourceType(IntEnum):
    UNKNOWN = 3


@dataclasses.dataclass
class ThrottleData:
    resend: float = se.dataclass_field(se.F32)
    land: float = se.dataclass_field(se.F32)
    wind: float = se.dataclass_field(se.F32)
    cloud: float = se.dataclass_field(se.F32)
    task: float = se.dataclass_field(se.F32)
    texture: float = se.dataclass_field(se.F32)
    asset: float = se.dataclass_field(se.F32)


@se.subfield_serializer("AgentThrottle", "Throttle", "Throttles")
class AgentThrottlesSerializer(se.SimpleSubfieldSerializer):
    TEMPLATE = se.Collection(None, se.F32)
    TEMPLATE = se.Dataclass(ThrottleData)

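Wire-wise nothing changes with the `ThrottleData` swap: the throttle block is still seven little-endian F32s (28 bytes); the dataclass just gives each slot a name. A quick standalone check of that layout, assuming little-endian packing as in the LL message system:

    import struct

    throttles = dict(resend=207360.0, land=165376.0, wind=33075.19921875,
                     cloud=33075.19921875, task=682700.75, texture=682700.75,
                     asset=269312.0)
    packed = struct.pack("<7f", *throttles.values())
    assert len(packed) == 28  # 7 fields * 4 bytes each
    assert struct.unpack("<7f", packed)[0] == 207360.0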
@se.subfield_serializer("ObjectUpdate", "ObjectData", "NameValue")
@@ -1829,6 +1984,7 @@ class RegionFlags(IntFlag):
    ALLOW_VOICE = 1 << 28
    BLOCK_PARCEL_SEARCH = 1 << 29
    DENY_AGEUNVERIFIED = 1 << 30
    DENY_BOTS = 1 << 31


@se.flag_field_serializer("RegionHandshakeReply", "RegionInfo", "Flags")
@@ -1875,8 +2031,8 @@ class AvatarPropertiesFlags(IntFlag):


@se.flag_field_serializer("AvatarGroupsReply", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarDataUpdate", "AgentDataData", "GroupPowers")
@se.flag_field_serializer("AgentGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AgentDataUpdate", "AgentData", "GroupPowers")
@se.flag_field_serializer("GroupProfileReply", "GroupData", "PowersMask")
@se.flag_field_serializer("GroupRoleDataReply", "RoleData", "Powers")
class GroupPowerFlags(IntFlag):
@@ -2057,6 +2213,43 @@ class ScriptPermissions(IntFlag):
    CHANGE_ENVIRONMENT = 1 << 18


@se.flag_field_serializer("ParcelProperties", "ParcelData", "ParcelFlags")
class ParcelFlags(IntFlag):
    ALLOW_FLY = 1 << 0  # Can start flying
    ALLOW_OTHER_SCRIPTS = 1 << 1  # Scripts by others can run.
    FOR_SALE = 1 << 2  # Can buy this land
    FOR_SALE_OBJECTS = 1 << 7  # Can buy all objects on this land
    ALLOW_LANDMARK = 1 << 3  # Always true/deprecated
    ALLOW_TERRAFORM = 1 << 4
    ALLOW_DAMAGE = 1 << 5
    CREATE_OBJECTS = 1 << 6
    # 7 is moved above
    USE_ACCESS_GROUP = 1 << 8
    USE_ACCESS_LIST = 1 << 9
    USE_BAN_LIST = 1 << 10
    USE_PASS_LIST = 1 << 11
    SHOW_DIRECTORY = 1 << 12
    ALLOW_DEED_TO_GROUP = 1 << 13
    CONTRIBUTE_WITH_DEED = 1 << 14
    SOUND_LOCAL = 1 << 15  # Hear sounds in this parcel only
    SELL_PARCEL_OBJECTS = 1 << 16  # Objects on land are included as part of the land when the land is sold
    ALLOW_PUBLISH = 1 << 17  # Allow publishing of parcel information on the web
    MATURE_PUBLISH = 1 << 18  # The information on this parcel is mature
    URL_WEB_PAGE = 1 << 19  # The "media URL" is an HTML page
    URL_RAW_HTML = 1 << 20  # The "media URL" is a raw HTML string like <H1>Foo</H1>
    RESTRICT_PUSHOBJECT = 1 << 21  # Restrict push object to either on agent or on scripts owned by parcel owner
    DENY_ANONYMOUS = 1 << 22  # Deny all non identified/transacted accounts
    # DENY_IDENTIFIED = 1 << 23  # Deny identified accounts
    # DENY_TRANSACTED = 1 << 24  # Deny transacted accounts
    ALLOW_GROUP_SCRIPTS = 1 << 25  # Allow scripts owned by group
    CREATE_GROUP_OBJECTS = 1 << 26  # Allow object creation by group members or objects
    ALLOW_ALL_OBJECT_ENTRY = 1 << 27  # Allow all objects to enter a parcel
    ALLOW_GROUP_OBJECT_ENTRY = 1 << 28  # Only allow group (and owner) objects to enter the parcel
    ALLOW_VOICE_CHAT = 1 << 29  # Allow residents to use voice chat on this parcel
    USE_ESTATE_VOICE_CHAN = 1 << 30
    DENY_AGEUNVERIFIED = 1 << 31  # Prevent residents who aren't age-verified


@se.enum_field_serializer("UpdateMuteListEntry", "MuteData", "MuteType")
class MuteType(IntEnum):
    BY_NAME = 0
@@ -2087,20 +2280,133 @@ class MuteFlags(IntFlag):
        return 0xF

class CreationDateAdapter(se.Adapter):
class DateAdapter(se.Adapter):
    def __init__(self, multiplier: int = 1):
        super(DateAdapter, self).__init__(None)
        self._multiplier = multiplier

    def decode(self, val: Any, ctx: Optional[se.ParseContext], pod: bool = False) -> Any:
        return datetime.datetime.fromtimestamp(val / 1_000_000).isoformat()
        return datetime.datetime.fromtimestamp(val / self._multiplier).isoformat()

    def encode(self, val: Any, ctx: Optional[se.ParseContext]) -> Any:
        return int(datetime.datetime.fromisoformat(val).timestamp() * 1_000_000)
        return int(datetime.datetime.fromisoformat(val).timestamp() * self._multiplier)


@se.enum_field_serializer("MeanCollisionAlert", "MeanCollision", "Type")
class MeanCollisionType(IntEnum):
    INVALID = 0
    BUMP = enum.auto()
    LLPUSHOBJECT = enum.auto()
    SELECTED_OBJECT_COLLIDE = enum.auto()
    SCRIPTED_OBJECT_COLLIDE = enum.auto()
    PHYSICAL_OBJECT_COLLIDE = enum.auto()


@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
class CreationDateSerializer(se.AdapterSubfieldSerializer):
    ADAPTER = CreationDateAdapter(None)
class ObjectCreationDateSerializer(se.AdapterSubfieldSerializer):
    ADAPTER = DateAdapter(1_000_000)
    ORIG_INLINE = True


@se.subfield_serializer("MeanCollisionAlert", "MeanCollision", "Time")
@se.subfield_serializer("ParcelProperties", "ParcelData", "ClaimDate")
@se.subfield_serializer("BulkUpdateInventory", "ItemData", "CreationDate")
class DateSerializer(se.AdapterSubfieldSerializer):
    ADAPTER = DateAdapter()
    ORIG_INLINE = True

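The multiplier generalizes the old microseconds-only adapter: ObjectProperties sends microseconds since the epoch (multiplier 1,000,000), while the other date fields send plain seconds (multiplier 1). A standalone round-trip of the same conversion:

    import datetime

    def decode_ts(val: int, multiplier: int = 1) -> str:
        return datetime.datetime.fromtimestamp(val / multiplier).isoformat()

    def encode_ts(iso: str, multiplier: int = 1) -> int:
        return int(datetime.datetime.fromisoformat(iso).timestamp() * multiplier)

    micros = 1_700_000_000_000_000  # microseconds, as in ObjectProperties.CreationDate
    assert encode_ts(decode_ts(micros, 1_000_000), 1_000_000) == micros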
class ParcelGridType(IntEnum):
    PUBLIC = 0x00
    OWNED = 0x01  # Presumably non-linden owned land
    GROUP = 0x02
    SELF = 0x03
    FOR_SALE = 0x04
    AUCTION = 0x05


class ParcelGridFlags(IntFlag):
    UNUSED = 0x8
    HIDDEN_AVS = 0x10
    SOUND_LOCAL = 0x20
    WEST_LINE = 0x40
    SOUTH_LINE = 0x80


@dataclasses.dataclass
class ParcelGridInfo(se.BitfieldDataclass):
    PRIM_SPEC: ClassVar[se.SerializablePrimitive] = se.U8
    SHIFT: ClassVar[bool] = False

    Type: Union[ParcelGridType, int] = se.bitfield_field(bits=3, adapter=se.IntEnum(ParcelGridType))
    Flags: ParcelGridFlags = se.bitfield_field(bits=5, adapter=se.IntFlag(ParcelGridFlags))

@se.subfield_serializer("ParcelOverlay", "ParcelData", "Data")
class ParcelOverlaySerializer(se.SimpleSubfieldSerializer):
    TEMPLATE = se.Collection(None, se.BitfieldDataclass(ParcelGridInfo))


class BitmapAdapter(se.Adapter):
    def __init__(self, shape: Tuple[int, int]):
        super().__init__(None)
        self._shape = shape

    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
        if val and isinstance(val[0], bytes):
            return b''.join(val)
        return np.packbits(np.array(val, dtype=np.uint8).flatten(), bitorder="little").tobytes()

    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
        if pod:
            return [val[i:i + (self._shape[1] // 8)] for i in range(0, len(val), (self._shape[1] // 8))]
        parcel_bitmap = np.frombuffer(val, dtype=np.uint8)
        # This is a boolean array where each bit says whether the parcel occupies that grid.
        return np.unpackbits(parcel_bitmap, bitorder="little").reshape(self._shape)


@se.subfield_serializer("ParcelProperties", "ParcelData", "Bitmap")
class ParcelPropertiesBitmapSerializer(se.AdapterSubfieldSerializer):
    """Bitmap that describes which grids a parcel occupies"""
    REGION_METERS = 256
    METERS_PER_CELL = 4
    ADAPTER = BitmapAdapter((REGION_METERS // METERS_PER_CELL, REGION_METERS // METERS_PER_CELL))

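A 256m region at 4m per cell gives a 64x64 grid, so the parcel bitmap is 64*64/8 = 512 bytes on the wire. A quick numpy round-trip of the same pack/unpack scheme (the corner placement is just for illustration):

    import numpy as np

    grid = np.zeros((64, 64), dtype=np.uint8)
    grid[:16, :16] = 1  # pretend a parcel covers a 64m x 64m corner of the region
    packed = np.packbits(grid.flatten(), bitorder="little").tobytes()
    assert len(packed) == 64 * 64 // 8  # 512 bytes
    unpacked = np.unpackbits(np.frombuffer(packed, dtype=np.uint8), bitorder="little").reshape((64, 64))
    assert (unpacked == grid).all()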
@se.enum_field_serializer("ParcelProperties", "ParcelData", "LandingType")
class LandingType(IntEnum):
    NONE = 0
    LANDING_POINT = 1
    DIRECT = 2


@se.enum_field_serializer("ParcelProperties", "ParcelData", "Status")
class LandOwnershipStatus(IntEnum):
    LEASED = 0
    LEASE_PENDING = 1
    ABANDONED = 2
    NONE = -1


@se.enum_field_serializer("ParcelProperties", "ParcelData", "Category")
class LandCategory(IntEnum):
    NONE = 0
    LINDEN = enum.auto()
    ADULT = enum.auto()
    ARTS = enum.auto()
    BUSINESS = enum.auto()
    EDUCATIONAL = enum.auto()
    GAMING = enum.auto()
    HANGOUT = enum.auto()
    NEWCOMER = enum.auto()
    PARK = enum.auto()
    RESIDENTIAL = enum.auto()
    SHOPPING = enum.auto()
    STAGE = enum.auto()
    OTHER = enum.auto()
    ANY = -1

@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
    @classmethod
@@ -2131,69 +2437,3 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
        # 15 bit window size, gzip wrapped
        deser["navmesh_data"] = zlib.decompress(deser["navmesh_data"], wbits=15 | 32)
        return deser


# Beta puppetry stuff, subject to change!


class PuppetryEventMask(enum.IntFlag):
    POSITION = 1 << 0
    POSITION_IN_PARENT_FRAME = 1 << 1
    ROTATION = 1 << 2
    ROTATION_IN_PARENT_FRAME = 1 << 3
    SCALE = 1 << 4
    DISABLE_CONSTRAINT = 1 << 7


class PuppetryOption(se.OptionalFlagged):
    def __init__(self, flag_val, spec):
        super().__init__("mask", se.IntFlag(PuppetryEventMask, se.U8), flag_val, spec)


# Range to use for puppetry's quantized floats when converting to<->from U16
LL_PELVIS_OFFSET_RANGE = (-5.0, 5.0)


@dataclasses.dataclass
class PuppetryJointData:
    # Where does this number come from? `avatar_skeleton.xml`?
    joint_id: int = se.dataclass_field(se.S16)
    # Determines which fields will follow
    mask: PuppetryEventMask = se.dataclass_field(se.IntFlag(PuppetryEventMask, se.U8))
    rotation: Optional[Quaternion] = se.dataclass_field(
        # These are very odd scales for a quantized quaternion, but that's what they are.
        PuppetryOption(PuppetryEventMask.ROTATION, se.PackedQuat(se.Vector3U16(*LL_PELVIS_OFFSET_RANGE))),
    )
    position: Optional[Vector3] = se.dataclass_field(
        PuppetryOption(PuppetryEventMask.POSITION, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
    )
    scale: Optional[Vector3] = se.dataclass_field(
        PuppetryOption(PuppetryEventMask.SCALE, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
    )


@dataclasses.dataclass
class PuppetryEventData:
    time: int = se.dataclass_field(se.S32)
    # Must be set manually due to the issue below
    num_joints: int = se.dataclass_field(se.U16)
    # This field is packed in the least helpful way possible. The length field
    # is in between the collection count and the collection data, but the length
    # field essentially only tells you how many bytes until the end of the buffer
    # proper, which you already know from msgsystem. Why is this here?
    joints: List[PuppetryJointData] = se.dataclass_field(se.TypedByteArray(
        se.U32,
        # Just treat contents as a greedy collection that keeps reading until EOF
        se.Collection(None, se.Dataclass(PuppetryJointData)),
    ))


@se.subfield_serializer("AgentAnimation", "PhysicalAvatarEventList", "TypeData")
@se.subfield_serializer("AvatarAnimation", "PhysicalAvatarEventList", "TypeData")
class PuppetryEventDataSerializer(se.SimpleSubfieldSerializer):
    # You can have multiple joint events packed in one right after the other, implicitly.
    # They may _or may not_ be split into separate PhysicalAvatarEventList blocks?
    # This doesn't seem to be handled specifically in the decoder, is this a
    # serialization bug in the viewer?
    TEMPLATE = se.Collection(None, se.Dataclass(PuppetryEventData))
    EMPTY_IS_NONE = True

@@ -39,3 +39,7 @@ class MockConnectionHolder(ConnectionHolder):
    def __init__(self, circuit, message_handler):
        self.circuit = circuit
        self.message_handler = message_handler


async def soon(awaitable) -> Message:
    return await asyncio.wait_for(awaitable, timeout=1.0)

@@ -8,6 +8,7 @@ import dataclasses
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.message.msgtypes import PacketFlags
@@ -108,7 +109,7 @@ class TransferManager:
            flags=PacketFlags.RELIABLE,
        ))
        transfer = Transfer(transfer_id)
        asyncio.create_task(self._pump_transfer_replies(transfer))
        create_logged_task(self._pump_transfer_replies(transfer), "Transfer Pump")
        return transfer

    async def _pump_transfer_replies(self, transfer: Transfer):

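The recurring `create_logged_task` swap in these hunks replaces bare `asyncio.create_task()` calls so background task failures get logged instead of silently disappearing until the task is garbage-collected. A minimal sketch of such a helper (hippolyzer's actual implementation may differ):

    import asyncio
    import logging

    LOG = logging.getLogger(__name__)

    def create_logged_task(coro, name: str) -> asyncio.Task:
        task = asyncio.create_task(coro, name=name)

        def _log_result(t: asyncio.Task):
            # Done-callback fires whether the task succeeds, fails, or is cancelled.
            if not t.cancelled() and t.exception() is not None:
                LOG.error("Task %r failed", name, exc_info=t.exception())

        task.add_done_callback(_log_result)
        return task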
@@ -5,6 +5,7 @@ Body parts and linden clothing layers
from __future__ import annotations

import dataclasses
import enum
import logging
from io import StringIO
from typing import *
@@ -15,12 +16,75 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.inventory import InventorySaleInfo, InventoryPermissions
from hippolyzer.lib.base.legacy_schema import SchemaBase, parse_schema_line, SchemaParsingError
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import WearableType

LOG = logging.getLogger(__name__)
_T = TypeVar("_T")

WEARABLE_VERSION = "LLWearable version 22"
DEFAULT_WEARABLE_TEX = UUID("c228d1cf-4b5d-4ba8-84f4-899a0796aa97")

class AvatarTEIndex(enum.IntEnum):
    """From llavatarappearancedefines.h"""
    HEAD_BODYPAINT = 0
    UPPER_SHIRT = enum.auto()
    LOWER_PANTS = enum.auto()
    EYES_IRIS = enum.auto()
    HAIR = enum.auto()
    UPPER_BODYPAINT = enum.auto()
    LOWER_BODYPAINT = enum.auto()
    LOWER_SHOES = enum.auto()
    HEAD_BAKED = enum.auto()
    UPPER_BAKED = enum.auto()
    LOWER_BAKED = enum.auto()
    EYES_BAKED = enum.auto()
    LOWER_SOCKS = enum.auto()
    UPPER_JACKET = enum.auto()
    LOWER_JACKET = enum.auto()
    UPPER_GLOVES = enum.auto()
    UPPER_UNDERSHIRT = enum.auto()
    LOWER_UNDERPANTS = enum.auto()
    SKIRT = enum.auto()
    SKIRT_BAKED = enum.auto()
    HAIR_BAKED = enum.auto()
    LOWER_ALPHA = enum.auto()
    UPPER_ALPHA = enum.auto()
    HEAD_ALPHA = enum.auto()
    EYES_ALPHA = enum.auto()
    HAIR_ALPHA = enum.auto()
    HEAD_TATTOO = enum.auto()
    UPPER_TATTOO = enum.auto()
    LOWER_TATTOO = enum.auto()
    HEAD_UNIVERSAL_TATTOO = enum.auto()
    UPPER_UNIVERSAL_TATTOO = enum.auto()
    LOWER_UNIVERSAL_TATTOO = enum.auto()
    SKIRT_TATTOO = enum.auto()
    HAIR_TATTOO = enum.auto()
    EYES_TATTOO = enum.auto()
    LEFT_ARM_TATTOO = enum.auto()
    LEFT_LEG_TATTOO = enum.auto()
    AUX1_TATTOO = enum.auto()
    AUX2_TATTOO = enum.auto()
    AUX3_TATTOO = enum.auto()
    LEFTARM_BAKED = enum.auto()
    LEFTLEG_BAKED = enum.auto()
    AUX1_BAKED = enum.auto()
    AUX2_BAKED = enum.auto()
    AUX3_BAKED = enum.auto()

    @property
    def is_baked(self) -> bool:
        return self.name.endswith("_BAKED")

class VisualParamGroup(enum.IntEnum):
    TWEAKABLE = 0
    ANIMATABLE = 1
    TWEAKABLE_NO_TRANSMIT = 2
    TRANSMIT_NOT_TWEAKABLE = 3


@dataclasses.dataclass
@@ -29,25 +93,48 @@ class VisualParam:
    name: str
    value_min: float
    value_max: float
    value_default: float
    group: VisualParamGroup
    # These might be `None` if the param isn't meant to be directly edited
    edit_group: Optional[str]
    wearable: Optional[str]

    def dequantize_val(self, val: int) -> float:
        """Dequantize U8 values from AvatarAppearance messages"""
        spec = se.QuantizedFloat(se.U8, self.value_min, self.value_max, False)
        return spec.decode(val, None)


class VisualParams(List[VisualParam]):
    def __init__(self, lad_path):
        super().__init__()
        with open(lad_path, "rb") as f:
            doc = parse_etree(f)

        temp_params = []
        for param in doc.findall(".//param"):
            self.append(VisualParam(
            temp_params.append(VisualParam(
                id=int(param.attrib["id"]),
                name=param.attrib["name"],
                group=VisualParamGroup(int(param.get("group", "0"))),
                edit_group=param.get("edit_group"),
                wearable=param.get("wearable"),
                value_min=float(param.attrib["value_min"]),
                value_max=float(param.attrib["value_max"]),
                value_default=float(param.attrib.get("value_default", 0.0))
            ))
        # Some functionality relies on the list being sorted by ID, though there may be holes.
        temp_params.sort(key=lambda x: x.id)
        # Remove dupes, only using the last value present (matching indra behavior)
        # This is necessary to remove the duplicate eye pop entry...
        self.extend({x.id: x for x in temp_params}.values())

    @property
    def appearance_params(self) -> Iterator[VisualParam]:
        for param in self:
            if param.group not in (VisualParamGroup.TWEAKABLE, VisualParamGroup.TRANSMIT_NOT_TWEAKABLE):
                continue
            yield param

    def by_name(self, name: str) -> VisualParam:
        return [x for x in self if x.name == name][0]
@@ -61,10 +148,44 @@ class VisualParams(List[VisualParam]):
    def by_id(self, vparam_id: int) -> VisualParam:
        return [x for x in self if x.id == vparam_id][0]

    def parse_appearance_message(self, message: Message) -> Dict[int, float]:
        params = {}
        for param, value_block in zip(self.appearance_params, message["VisualParam"]):
            params[param.id] = param.dequantize_val(value_block["ParamValue"])
        return params

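AvatarAppearance carries each transmittable param as a single U8, so dequantizing is a linear map from 0..255 back onto [value_min, value_max]. The arithmetic, standalone (assuming the unsigned-U8 mode of `QuantizedFloat` divides by the type's maximum, as is conventional):

    def dequantize_u8(val: int, value_min: float, value_max: float) -> float:
        # 0 maps to value_min, 255 maps to value_max, linearly in between.
        return value_min + (val / 255.0) * (value_max - value_min)

    assert dequantize_u8(0, -1.0, 1.0) == -1.0
    assert dequantize_u8(255, -1.0, 1.0) == 1.0
    assert abs(dequantize_u8(127, -1.0, 1.0)) < 0.005  # roughly zero at the midpoint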
VISUAL_PARAMS = VisualParams(get_resource_filename("lib/base/data/avatar_lad.xml"))


# See `llpaneleditwearable.cpp`; which TE slots should be set for each wearable type is hardcoded
# in the viewer.
WEARABLE_TEXTURE_SLOTS: Dict[WearableType, Sequence[AvatarTEIndex]] = {
    WearableType.SHAPE: (),
    WearableType.SKIN: (AvatarTEIndex.HEAD_BODYPAINT, AvatarTEIndex.UPPER_BODYPAINT, AvatarTEIndex.LOWER_BODYPAINT),
    WearableType.HAIR: (AvatarTEIndex.HAIR,),
    WearableType.EYES: (AvatarTEIndex.EYES_IRIS,),
    WearableType.SHIRT: (AvatarTEIndex.UPPER_SHIRT,),
    WearableType.PANTS: (AvatarTEIndex.LOWER_PANTS,),
    WearableType.SHOES: (AvatarTEIndex.LOWER_SHOES,),
    WearableType.SOCKS: (AvatarTEIndex.LOWER_SOCKS,),
    WearableType.JACKET: (AvatarTEIndex.UPPER_JACKET, AvatarTEIndex.LOWER_JACKET),
    WearableType.GLOVES: (AvatarTEIndex.UPPER_GLOVES,),
    WearableType.UNDERSHIRT: (AvatarTEIndex.UPPER_UNDERSHIRT,),
    WearableType.UNDERPANTS: (AvatarTEIndex.LOWER_UNDERPANTS,),
    WearableType.SKIRT: (AvatarTEIndex.SKIRT,),
    WearableType.ALPHA: (AvatarTEIndex.LOWER_ALPHA, AvatarTEIndex.UPPER_ALPHA,
                         AvatarTEIndex.HEAD_ALPHA, AvatarTEIndex.EYES_ALPHA, AvatarTEIndex.HAIR_ALPHA),
    WearableType.TATTOO: (AvatarTEIndex.LOWER_TATTOO, AvatarTEIndex.UPPER_TATTOO, AvatarTEIndex.HEAD_TATTOO),
    WearableType.UNIVERSAL: (AvatarTEIndex.HEAD_UNIVERSAL_TATTOO, AvatarTEIndex.UPPER_UNIVERSAL_TATTOO,
                             AvatarTEIndex.LOWER_UNIVERSAL_TATTOO, AvatarTEIndex.SKIRT_TATTOO,
                             AvatarTEIndex.HAIR_TATTOO, AvatarTEIndex.EYES_TATTOO, AvatarTEIndex.LEFT_ARM_TATTOO,
                             AvatarTEIndex.LEFT_LEG_TATTOO, AvatarTEIndex.AUX1_TATTOO, AvatarTEIndex.AUX2_TATTOO,
                             AvatarTEIndex.AUX3_TATTOO),
    WearableType.PHYSICS: (),
}

@dataclasses.dataclass
class Wearable(SchemaBase):
    name: str
@@ -73,7 +194,7 @@ class Wearable(SchemaBase):
    sale_info: InventorySaleInfo
    # VisualParam ID -> val
    parameters: Dict[int, float]
    # TextureEntry ID -> texture ID
    # TextureEntry ID -> texture UUID
    textures: Dict[int, UUID]

    @classmethod
@@ -148,3 +269,22 @@ class Wearable(SchemaBase):
        writer.write(f"textures {len(self.textures)}\n")
        for te_id, texture_id in self.textures.items():
            writer.write(f"{te_id} {texture_id}\n")

    @classmethod
    def make_default(cls, w_type: WearableType) -> Self:
        instance = cls(
            name="New " + w_type.name.replace("_", " ").title(),
            permissions=InventoryPermissions.make_default(),
            sale_info=InventorySaleInfo.make_default(),
            parameters={},
            textures={},
            wearable_type=w_type,
        )

        for te_idx in WEARABLE_TEXTURE_SLOTS[w_type]:
            instance.textures[te_idx] = DEFAULT_WEARABLE_TEX

        for param in VISUAL_PARAMS.by_wearable(w_type.name.lower()):
            instance.parameters[param.id] = param.value_default

        return instance

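With the slot table and default texture in place, creating a blank wearable becomes one call: each TE slot gets the stock wearable texture and each matching visual param gets its default. A usage sketch (the module path is an assumption; only the API shown in the diff is relied on):

    from hippolyzer.lib.base.wearables import Wearable  # module path assumed
    from hippolyzer.lib.base.templates import WearableType

    shirt = Wearable.make_default(WearableType.SHIRT)
    print(shirt.name)            # "New Shirt"
    print(list(shirt.textures))  # [AvatarTEIndex.UPPER_SHIRT]
    print(len(shirt.parameters)) # every "shirt" visual param at its default value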
@@ -9,6 +9,7 @@ import random
from typing import *

from hippolyzer.lib.base.datatypes import UUID, RawBytes
from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.base.message.data_packer import TemplateDataPacker
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.msgtypes import MsgType, PacketFlags
@@ -125,7 +126,7 @@ class XferManager:
            direction=direction,
        ))
        xfer = Xfer(xfer_id, direction=direction, turbo=turbo)
        asyncio.create_task(self._pump_xfer_replies(xfer))
        create_logged_task(self._pump_xfer_replies(xfer), "Xfer Pump")
        return xfer

    async def _pump_xfer_replies(self, xfer: Xfer):
@@ -269,12 +270,13 @@ class XferManager:
        xfer.xfer_id = request_msg["XferID"]["ID"]

        packet_id = 0
        # TODO: No resend yet. If it's lost, it's lost.
        while xfer.chunks:
            chunk = xfer.chunks.pop(packet_id)
            # EOF if there are no chunks left
            packet_val = XferPacket(PacketID=packet_id, IsEOF=not bool(xfer.chunks))
            self._connection_holder.circuit.send(Message(
            # We just send reliably since I don't care to implement the Xfer-specific
            # resend-on-unacked nastiness
            _ = self._connection_holder.circuit.send_reliable(Message(
                "SendXferPacket",
                Block("XferID", ID=xfer.xfer_id, Packet_=packet_val),
                Block("DataPacket", Data=chunk),

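Each SendXferPacket carries a monotonically increasing packet ID, and the final packet is flagged EOF rather than being followed by a separate terminator. A standalone sketch of chunking a payload that way (the 1000-byte chunk size is an assumption for illustration, not the protocol's mandated size):

    def chunk_xfer(data: bytes, chunk_size: int = 1000):
        chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)] or [b""]
        for packet_id, chunk in enumerate(chunks):
            is_eof = packet_id == len(chunks) - 1  # the last packet carries the EOF flag
            yield packet_id, is_eof, chunk

    packets = list(chunk_xfer(b"x" * 2500))
    assert [(p, eof) for p, eof, _ in packets] == [(0, False), (1, False), (2, True)]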
@@ -30,12 +30,12 @@ class AssetUploader:
    async def initiate_asset_upload(self, name: str, asset_type: AssetType,
                                    body: bytes, flags: Optional[int] = None) -> UploadToken:
        payload = {
            "asset_type": asset_type.human_name,
            "asset_type": asset_type.to_lookup_name(),
            "description": "(No Description)",
            "everyone_mask": 0,
            "group_mask": 0,
            "folder_id": UUID.ZERO,  # Puts it in the default folder, I guess. Undocumented.
            "inventory_type": asset_type.inventory_type.human_name,
            "inventory_type": asset_type.inventory_type.to_lookup_name(),
            "name": name,
            "next_owner_mask": 581632,
        }

@@ -13,7 +13,7 @@ import aiohttp
import multidict

from hippolyzer.lib.base.datatypes import Vector3, StringEnum
from hippolyzer.lib.base.helpers import proxify, get_resource_filename
from hippolyzer.lib.base.helpers import proxify, get_resource_filename, create_logged_task
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
@@ -23,12 +23,13 @@ from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.caps_client import CapsClient, CAPS_DICT
from hippolyzer.lib.base.network.transport import ADDR_TUPLE, Direction, SocketUDPTransport, AbstractUDPTransport
from hippolyzer.lib.base.settings import Settings, SettingDescriptor
from hippolyzer.lib.base.templates import RegionHandshakeReplyFlags, ChatType
from hippolyzer.lib.base.templates import RegionHandshakeReplyFlags, ChatType, ThrottleData
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.parcel_manager import ParcelManager
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion, BaseClientSessionManager


@@ -41,10 +42,16 @@ class StartLocation(StringEnum):

class ClientSettings(Settings):
    # Off by default for now, the cert validation is a big mess due to LL using an internal CA.
    SSL_VERIFY: bool = SettingDescriptor(False)
    """Off by default for now; the cert validation is a big mess due to LL using an internal CA."""
    SSL_CERT_PATH: str = SettingDescriptor(get_resource_filename("lib/base/network/data/ca-bundle.crt"))
    USER_AGENT: str = SettingDescriptor(f"Hippolyzer/v{version('hippolyzer')}")
    SEND_AGENT_UPDATES: bool = SettingDescriptor(True)
    """Generally you want to send these; lots of things will break if you don't send at least one."""
    AUTO_REQUEST_PARCELS: bool = SettingDescriptor(True)
    """Automatically request all parcel details when connecting to a region"""
    AUTO_REQUEST_MATERIALS: bool = SettingDescriptor(True)
    """Automatically request all materials when connecting to a region"""

class HippoCapsClient(CapsClient):
@@ -101,12 +108,13 @@ class HippoClientProtocol(asyncio.DatagramProtocol):
                if should_handle:
                    self.session.message_handler.handle(message)
            except:
                LOG.exception("Failed in region message handler")
            region.message_handler.handle(message)
                LOG.exception("Failed in session message handler")
            if should_handle:
                region.message_handler.handle(message)


class HippoClientRegion(BaseClientRegion):
    def __init__(self, circuit_addr, seed_cap: str, session: HippoClientSession, handle=None):
    def __init__(self, circuit_addr, seed_cap: Optional[str], session: HippoClientSession, handle=None):
        super().__init__()
        self.caps = multidict.MultiDict()
        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
@@ -119,6 +127,7 @@ class HippoClientRegion(BaseClientRegion):
        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
        self.asset_uploader = AssetUploader(proxify(self))
        self.parcel_manager = ParcelManager(proxify(self))
        self.objects = ClientObjectManager(self)
        self._llsd_serializer = LLSDMessageSerializer()
        self._eq_task: Optional[asyncio.Task] = None
@@ -181,7 +190,7 @@ class HippoClientRegion(BaseClientRegion):
                "RegionInfo",
                Flags=(
                    RegionHandshakeReplyFlags.SUPPORTS_SELF_APPEARANCE
                    | RegionHandshakeReplyFlags.VOCACHE_IS_EMPTY
                    | RegionHandshakeReplyFlags.VOCACHE_CULLING_ENABLED
                )
            )
        )
@@ -199,15 +208,46 @@ class HippoClientRegion(BaseClientRegion):
                "Throttle",
                GenCounter=0,
                # Reasonable defaults, I guess
                Throttles_=[207360.0, 165376.0, 33075.19921875, 33075.19921875, 682700.75, 682700.75, 269312.0],
                Throttles_=ThrottleData(
                    resend=207360.0,
                    land=165376.0,
                    wind=33075.19921875,
                    cloud=33075.19921875,
                    task=682700.75,
                    texture=682700.75,
                    asset=269312.0
                ),
            )
        )
    )
    if self.session().session_manager.settings.SEND_AGENT_UPDATES:
        # Usually we want to send at least one, since lots of messages will never be sent by the sim
        # until we send at least one AgentUpdate. For example, ParcelOverlay and LayerData.
        await self.circuit.send_reliable(
            Message(
                "AgentUpdate",
                Block(
                    'AgentData',
                    AgentID=self.session().agent_id,
                    SessionID=self.session().id,
                    # Don't really care about the other fields.
                    fill_missing=True,
                )
            )
        )

    async with seed_resp_fut as seed_resp:
        seed_resp.raise_for_status()
        self.update_caps(await seed_resp.read_llsd())

    self._eq_task = asyncio.create_task(self._poll_event_queue())
    self._eq_task = create_logged_task(self._poll_event_queue(), "EQ Poll")

    settings = self.session().session_manager.settings
    if settings.AUTO_REQUEST_PARCELS:
        _ = create_logged_task(self.parcel_manager.request_dirty_parcels(), "Parcel Request")
    if settings.AUTO_REQUEST_MATERIALS:
        _ = create_logged_task(self.objects.request_all_materials(), "Request All Materials")

        except Exception as e:
            # Let consumers who were `await`ing the connected signal know there was an error
            if not self.connected.done():
@@ -245,31 +285,25 @@ class HippoClientRegion(BaseClientRegion):
        ack: Optional[int] = None
        while True:
            payload = {"ack": ack, "done": False}
            async with self.caps_client.post("EventQueueGet", llsd=payload) as resp:
                if resp.status != 200:
                    await asyncio.sleep(0.1)
                    continue
                polled = await resp.read_llsd()
                for event in polled["events"]:
                    if self._llsd_serializer.can_handle(event["message"]):
                        msg = self._llsd_serializer.deserialize(event)
                    else:
                        # If this isn't a templated message (like some EQ-only events are),
                        # then we wrap it in a synthetic `Message` so that the API for handling
                        # both EQ-only and templated message events can be the same. Ick.
                        msg = Message(event["message"])
                        if isinstance(event["body"], dict):
                            msg.add_block(Block("EventData", **event["body"]))
            try:
                async with self.caps_client.post("EventQueueGet", llsd=payload) as resp:
                    if resp.status != 200:
                        await asyncio.sleep(0.1)
                        continue
                    polled = await resp.read_llsd()
                    for event in polled["events"]:
                        if self._llsd_serializer.can_handle(event["message"]):
                            msg = self._llsd_serializer.deserialize(event)
                        else:
                            # Shouldn't be any events that have anything other than a dict
                            # as a body, but just to be sure...
                            msg.add_block(Block("EventData", Data=event["body"]))
                        msg.synthetic = True
                        msg.sender = self.circuit_addr
                        msg.direction = Direction.IN
                        self.session().message_handler.handle(msg)
                        self.message_handler.handle(msg)
                    ack = polled["id"]
                        msg = Message.from_eq_event(event)
                        msg.sender = self.circuit_addr
                        msg.direction = Direction.IN
                        self.session().message_handler.handle(msg)
                        self.message_handler.handle(msg)
                    ack = polled["id"]
                    await asyncio.sleep(0.001)
            except aiohttp.client_exceptions.ServerDisconnectedError:
                # This is expected to happen during long-polling, just pick up again where we left off.
                await asyncio.sleep(0.001)

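EventQueueGet is a long-poll: the sim holds the request open until events arrive or it recycles the connection, so a `ServerDisconnectedError` mid-poll is routine rather than fatal, and the loop should simply re-poll with the last ack. A minimal aiohttp sketch of that loop (JSON stands in for LLSD here so the example is self-contained; error handling beyond the disconnect case is omitted):

    import asyncio
    import aiohttp

    async def poll_event_queue(url: str):
        ack = None
        async with aiohttp.ClientSession() as session:
            while True:
                try:
                    async with session.post(url, json={"ack": ack, "done": False}) as resp:
                        if resp.status != 200:
                            await asyncio.sleep(0.1)
                            continue
                        polled = await resp.json()
                        for event in polled["events"]:
                            print(event["message"])  # dispatch to handlers here
                        ack = polled["id"]  # acknowledge so the server can drop sent events
                except aiohttp.ServerDisconnectedError:
                    # Expected when the long-poll times out server-side; re-poll with the same ack.
                    continue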
    async def _handle_ping_check(self, message: Message):
@@ -289,13 +323,14 @@ class HippoClientSession(BaseClientSession):
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[HippoClientRegion]]
    regions: List[HippoClientRegion]
    session_manager: HippoClient
    main_region: Optional[HippoClientRegion]

    def __init__(self, id, secure_session_id, agent_id, circuit_code, session_manager: Optional[HippoClient] = None,
                 login_data=None):
        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager, login_data=login_data)
        self.http_session = session_manager.http_session
        self.objects = ClientWorldObjectManager(proxify(self), session_manager.settings, None)
        self.inventory_manager = InventoryManager(proxify(self))
        self.inventory = InventoryManager(proxify(self))
        self.transport: Optional[SocketUDPTransport] = None
        self.protocol: Optional[HippoClientProtocol] = None
        self.message_handler.take_by_default = False
@@ -343,7 +378,7 @@ class HippoClientSession(BaseClientSession):
            sim_seed = msg["EventData"]["seed-capability"]
        # We teleported or crossed regions, opening comms to the new sim
        elif msg.name in ("TeleportFinish", "CrossedRegion"):
            sim_block = msg.get_block("RegionData", msg.get_block("Info"))[0]
            sim_block = msg.get_blocks("RegionData", msg.get_blocks("Info"))[0]
            sim_addr = (sim_block["SimIP"], sim_block["SimPort"])
            sim_handle = sim_block["RegionHandle"]
            sim_seed = sim_block["SeedCapability"]
@@ -369,10 +404,10 @@ class HippoClientSession(BaseClientSession):
        need_connect = (region.circuit and region.circuit.is_alive) or moving_to_region
        self.open_circuit(sim_addr)
        if need_connect:
            asyncio.create_task(region.connect(main_region=moving_to_region))
            create_logged_task(region.connect(main_region=moving_to_region), "Region Connect")
        elif moving_to_region:
            # No need to connect, but we do need to complete agent movement.
            asyncio.create_task(region.complete_agent_movement())
            create_logged_task(region.complete_agent_movement(), "CompleteAgentMovement")

class HippoClient(BaseClientSessionManager):
@@ -581,7 +616,8 @@ class HippoClient(BaseClientSessionManager):
            password: str,
            login_uri: Optional[str] = None,
            agree_to_tos: bool = False,
            start_location: Union[StartLocation, str, None] = StartLocation.LAST
            start_location: Union[StartLocation, str, None] = StartLocation.LAST,
            connect: bool = True,
    ):
        if self.session:
            raise RuntimeError("Already logged in!")
@@ -637,11 +673,14 @@ class HippoClient(BaseClientSessionManager):
        self.session = HippoClientSession.from_login_data(login_data, self)

        self.session.transport, self.session.protocol = await self._create_transport()
        self._resend_task = asyncio.create_task(self._attempt_resends())
        self._resend_task = create_logged_task(self._attempt_resends(), "Circuit Resend")
        self.session.message_handler.subscribe("AgentDataUpdate", self._handle_agent_data_update)
        self.session.message_handler.subscribe("AgentGroupDataUpdate", self._handle_agent_group_data_update)

        assert self.session.open_circuit(self.session.regions[-1].circuit_addr)
        region = self.session.regions[-1]
        await region.connect(main_region=True)
        if connect:
            region = self.session.regions[-1]
            await region.connect(main_region=True)

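The new `connect` flag lets callers log in without immediately opening the main region's circuit, which is handy when you want to register handlers or inspect login data first. A hedged usage sketch (the module path and credential arguments are placeholders; only `login(..., connect=...)` and `region.connect(main_region=True)` come from the diff):

    import asyncio
    from hippolyzer.lib.client.hippo_client import HippoClient  # module path assumed

    async def main():
        client = HippoClient()
        # Log in but defer the region connection so we can subscribe first.
        await client.login("First Last", "hunter2", connect=False)
        client.session.message_handler.subscribe("ChatFromSimulator", print)
        region = client.session.regions[-1]
        await region.connect(main_region=True)

    asyncio.run(main())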
    def logout(self):
        if not self.session:
@@ -716,7 +755,7 @@ class HippoClient(BaseClientSessionManager):
                return
            teleport_fut.set_result(None)

        asyncio.create_task(_handle_teleport())
        create_logged_task(_handle_teleport(), "Teleport")

        return teleport_fut

@@ -729,3 +768,11 @@ class HippoClient(BaseClientSessionManager):
                continue
            region.circuit.resend_unacked()
            await asyncio.sleep(0.5)

    def _handle_agent_data_update(self, msg: Message):
        self.session.active_group = msg["AgentData"]["ActiveGroupID"]

    def _handle_agent_group_data_update(self, msg: Message):
        self.session.groups.clear()
        for block in msg["GroupData"]:
            self.session.groups.add(block["GroupID"])

@@ -1,26 +1,49 @@
from __future__ import annotations

import asyncio
import dataclasses
import gzip
import itertools
import logging
import secrets
from pathlib import Path
from typing import Union, List, Tuple, Set
from typing import Union, List, Tuple, Set, Sequence, Dict, TYPE_CHECKING

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem, InventoryNodeBase
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import AssetType, FolderType, InventoryType, Permissions
from hippolyzer.lib.base.templates import WearableType

if TYPE_CHECKING:
    from hippolyzer.lib.client.state import BaseClientSession

LOG = logging.getLogger(__name__)


class CannotMoveError(Exception):
    def __init__(self):
        pass


def _get_node_id(node_or_id: InventoryNodeBase | UUID) -> UUID:
    if isinstance(node_or_id, UUID):
        return node_or_id
    return node_or_id.node_id

class InventoryManager:
    def __init__(self, session: BaseClientSession):
        self._session = session
        self.model: InventoryModel = InventoryModel()
        self._load_skeleton()
        self._session.message_handler.subscribe("BulkUpdateInventory", self._handle_bulk_update_inventory)
        self._session.message_handler.subscribe("UpdateCreateInventoryItem", self._handle_update_create_inventory_item)
        self._session.message_handler.subscribe("RemoveInventoryItem", self._handle_remove_inventory_item)
        self._session.message_handler.subscribe("RemoveInventoryObjects", self._handle_remove_inventory_objects)
        self._session.message_handler.subscribe("RemoveInventoryFolder", self._handle_remove_inventory_folder)
        self._session.message_handler.subscribe("MoveInventoryItem", self._handle_move_inventory_item)
        self._session.message_handler.subscribe("MoveInventoryFolder", self._handle_move_inventory_folder)

    def _load_skeleton(self):
        assert not self.model.nodes
@@ -33,8 +56,8 @@ class InventoryManager:
                # Don't use the version from the skeleton, this flags the inventory as needing
                # completion from the inventory cache. This matches indra's behavior.
                version=InventoryCategory.VERSION_NONE,
                type="category",
                pref_type=skel_cat.get("type_default", "-1"),
                type=AssetType.CATEGORY,
                pref_type=FolderType(skel_cat.get("type_default", FolderType.NONE)),
                owner_id=self._session.agent_id,
            ))

@@ -69,10 +92,8 @@ class InventoryManager:
            # Cached cat isn't the same as what the inv server says it should be, can't use it.
            if cached_cat.version != skel_versions.get(cached_cat.cat_id):
                continue
            if existing_cat:
                # Remove the category so that we can replace it, but leave any children in place
                self.model.unlink(existing_cat, single_only=True)
            self.model.add(cached_cat)
            # Update any existing category in-place, or add if not present
            self.model.upsert(cached_cat)
            # Any items in this category in our cache file are usable and should be added
            loaded_cat_ids.add(cached_cat.cat_id)

@@ -82,11 +103,15 @@ class InventoryManager:
            if cached_item.item_id in self.model:
                continue
            # The parent category didn't have a cache hit against the inventory skeleton, can't add!
            # We don't even know if this item would be in the current version of its parent cat!
            if cached_item.parent_id not in loaded_cat_ids:
                continue
            self.model.add(cached_item)

        self.model.flag_if_dirty()

    def _parse_cache(self, path: Union[str, Path]) -> Tuple[List[InventoryCategory], List[InventoryItem]]:
        """Warning, may be incredibly slow due to llsd.parse_notation() behavior"""
        categories: List[InventoryCategory] = []
        items: List[InventoryItem] = []
        # Parse our cached items and categories out of the compressed inventory cache
@@ -95,6 +120,7 @@ class InventoryManager:
            # Line-delimited LLSD notation!
            for line in f.readlines():
                # TODO: Parsing of invcache is dominated by `parse_notation()`. It's stupidly inefficient.
                # TODO: sniff out binary LLSD invcaches
                node_llsd = llsd.parse_notation(line)
                if first_line:
                    # First line is the file header
@@ -113,80 +139,277 @@ class InventoryManager:
                LOG.warning(f"Unknown node type in inv cache: {node_llsd!r}")
        return categories, items

    def _handle_bulk_update_inventory(self, msg: Message):
        any_cats = False
        for folder_block in msg.get_blocks("FolderData", ()):
            if folder_block["FolderID"] == UUID.ZERO:
                continue
            any_cats = True
            self.model.upsert(
                InventoryCategory.from_folder_data(folder_block),
                # Don't clobber version, we only want to fetch the folder if it's new
                # and hasn't just moved.
                update_fields={"parent_id", "name", "pref_type"},
            )
        for item_block in msg.get_blocks("ItemData", ()):
            if item_block["ItemID"] == UUID.ZERO:
                continue
            self.model.upsert(InventoryItem.from_inventory_data(item_block))

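`upsert` with `update_fields` merges only the listed attributes into an existing node and leaves the rest (such as `version`) untouched; a brand-new node is inserted whole. A dict-based sketch of those semantics (a hypothetical stand-in, not `InventoryModel`'s real code):

    def upsert(nodes: dict, node: dict, update_fields=None):
        existing = nodes.get(node["node_id"])
        if existing is None or update_fields is None:
            nodes[node["node_id"]] = node  # new node, or a full replace
        else:
            for field in update_fields:    # partial update: only the named fields
                existing[field] = node[field]

    nodes = {"f1": {"node_id": "f1", "name": "Old", "version": 7}}
    upsert(nodes, {"node_id": "f1", "name": "New", "version": -1}, update_fields={"name"})
    assert nodes["f1"] == {"node_id": "f1", "name": "New", "version": 7}  # version preserved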
    # Thankfully we have 9 billion different ways to represent inventory data.
def ais_item_to_inventory_data(ais_item: dict) -> Block:
    return Block(
        "InventoryData",
        ItemID=ais_item["item_id"],
        FolderID=ais_item["parent_id"],
        CallbackID=0,
        CreatorID=ais_item["permissions"]["creator_id"],
        OwnerID=ais_item["permissions"]["owner_id"],
        GroupID=ais_item["permissions"]["group_id"],
        BaseMask=ais_item["permissions"]["base_mask"],
        OwnerMask=ais_item["permissions"]["owner_mask"],
        GroupMask=ais_item["permissions"]["group_mask"],
        EveryoneMask=ais_item["permissions"]["everyone_mask"],
        NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
        GroupOwned=0,
        AssetID=ais_item["asset_id"],
        Type=ais_item["type"],
        InvType=ais_item["inv_type"],
        Flags=ais_item["flags"],
        SaleType=ais_item["sale_info"]["sale_type"],
        SalePrice=ais_item["sale_info"]["sale_price"],
        Name=ais_item["name"],
        Description=ais_item["desc"],
        CreationDate=ais_item["created_at"],
        # Meaningless here
        CRC=secrets.randbits(32),
    )
        if any_cats:
            self.model.flag_if_dirty()

    def _validate_recipient(self, recipient: UUID):
        if self._session.agent_id != recipient:
            raise ValueError(f"AgentID Mismatch {self._session.agent_id} != {recipient}")

def inventory_data_to_ais_item(inventory_data: Block) -> dict:
    return dict(
        item_id=inventory_data["ItemID"],
        parent_id=inventory_data["ParentID"],
        permissions=dict(
            creator_id=inventory_data["CreatorID"],
            owner_id=inventory_data["OwnerID"],
            group_id=inventory_data["GroupID"],
            base_mask=inventory_data["BaseMask"],
            owner_mask=inventory_data["OwnerMask"],
            group_mask=inventory_data["GroupMask"],
            everyone_mask=inventory_data["EveryoneMask"],
            next_owner_mask=inventory_data["NextOwnerMask"],
        ),
        asset_id=inventory_data["AssetID"],
        type=inventory_data["Type"],
        inv_type=inventory_data["InvType"],
        flags=inventory_data["Flags"],
        sale_info=dict(
            sale_type=inventory_data["SaleType"],
            sale_price=inventory_data["SalePrice"],
        ),
        name=inventory_data["Name"],
        description=inventory_data["Description"],
        creation_at=inventory_data["CreationDate"],
    )
    def _handle_update_create_inventory_item(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for inventory_block in msg["InventoryData"]:
            self.model.upsert(InventoryItem.from_inventory_data(inventory_block))

    def _handle_remove_inventory_item(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for inventory_block in msg["InventoryData"]:
            node = self.model.get(inventory_block["ItemID"])
            if node:
                self.model.unlink(node)

def ais_folder_to_inventory_data(ais_folder: dict) -> Block:
    return Block(
        "FolderData",
        FolderID=ais_folder["cat_id"],
        ParentID=ais_folder["parent_id"],
        CallbackID=0,
        Type=ais_folder["preferred_type"],
        Name=ais_folder["name"],
    )
    def _handle_remove_inventory_folder(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for folder_block in msg["FolderData"]:
            node = self.model.get(folder_block["FolderID"])
            if node:
                self.model.unlink(node)

    def _handle_remove_inventory_objects(self, msg: Message):
        self._validate_recipient(msg["AgentData"]["AgentID"])
        for item_block in msg.get_blocks("ItemData", []):
            node = self.model.get(item_block["ItemID"])
            if node:
                self.model.unlink(node)
        for folder_block in msg.get_blocks("FolderData", []):
            node = self.model.get(folder_block["FolderID"])
            if node:
                self.model.unlink(node)

def inventory_data_to_ais_folder(inventory_data: Block) -> dict:
    return dict(
        cat_id=inventory_data["FolderID"],
        parent_id=inventory_data["ParentID"],
        preferred_type=inventory_data["Type"],
        name=inventory_data["Name"],
    )
    def _handle_move_inventory_item(self, msg: Message):
        for inventory_block in msg["InventoryData"]:
            node = self.model.get(inventory_block["ItemID"])
            if not node:
                LOG.warning(f"Missing inventory item {inventory_block['ItemID']}")
                continue
            if inventory_block["NewName"]:
                node.name = str(inventory_block["NewName"])
            node.parent_id = inventory_block['FolderID']

    def _handle_move_inventory_folder(self, msg: Message):
        for inventory_block in msg["InventoryData"]:
            node = self.model.get(inventory_block["FolderID"])
            if not node:
                LOG.warning(f"Missing inventory folder {inventory_block['FolderID']}")
                continue
            node.parent_id = inventory_block['ParentID']

    def process_aisv3_response(self, payload: dict):
        if "name" in payload:
            # Just a rough guess. Assume this response is updating something if there's
            # a "name" key.
            if InventoryCategory.ID_ATTR_AIS in payload:
                if (cat_node := InventoryCategory.from_llsd(payload, flavor="ais")) is not None:
                    self.model.upsert(cat_node)
            elif InventoryItem.ID_ATTR in payload:
                if (item_node := InventoryItem.from_llsd(payload, flavor="ais")) is not None:
                    self.model.upsert(item_node)
            else:
                LOG.warning(f"Unknown node type in AIS payload: {payload!r}")

        # Parse the embedded stuff
        embedded_dict = payload.get("_embedded", {})
        for category_llsd in embedded_dict.get("categories", {}).values():
            self.model.upsert(InventoryCategory.from_llsd(category_llsd, flavor="ais"))
        for item_llsd in embedded_dict.get("items", {}).values():
|
||||
self.model.upsert(InventoryItem.from_llsd(item_llsd, flavor="ais"))
|
||||
for link_llsd in embedded_dict.get("links", {}).values():
|
||||
self.model.upsert(InventoryItem.from_llsd(link_llsd, flavor="ais"))
|
||||
|
||||
for cat_id, version in payload.get("_updated_category_versions", {}).items():
|
||||
# The key will be a string, so convert to UUID first
|
||||
cat_node = self.model.get_category(UUID(cat_id))
|
||||
cat_node.version = version
|
||||
|
||||
# Get rid of anything we were asked to
|
||||
for node_id in itertools.chain(
|
||||
payload.get("_broken_links_removed", ()),
|
||||
payload.get("_removed_items", ()),
|
||||
payload.get("_category_items_removed", ()),
|
||||
payload.get("_categories_removed", ()),
|
||||
):
|
||||
node = self.model.get(node_id)
|
||||
if node:
|
||||
# Presumably this list is exhaustive, so don't unlink children.
|
||||
self.model.unlink(node, single_only=True)
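
    # A minimal sketch of the payload shape process_aisv3_response() consumes,
    # assembled only from the keys referenced above. The UUID and version number
    # are invented for illustration; real AISv3 responses carry many more fields.
    _EXAMPLE_AIS_PAYLOAD = {
        "_embedded": {
            "categories": {},  # cat_id -> category LLSD, upserted with flavor="ais"
            "items": {},       # item_id -> item LLSD
            "links": {},       # link_id -> link LLSD, also parsed as items
        },
        "_updated_category_versions": {"112358ab-0000-4000-8000-000000000000": 7},
        "_removed_items": [],  # node IDs to unlink without touching children
        "_categories_removed": [],
    }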

    async def make_ais_request(
        self,
        method: str,
        path: str,
        params: dict,
        payload: dict | Sequence | dataclasses.MISSING = dataclasses.MISSING,
    ) -> dict:
        caps_client = self._session.main_region.caps_client
        async with caps_client.request(method, "InventoryAPIv3", path=path, params=params, llsd=payload) as resp:
            if resp.ok or resp.status == 400:
                data = await resp.read_llsd()
                if err_desc := data.get("error_description", ""):
                    err_desc: str
                    if err_desc.startswith("Cannot change parent_id."):
                        raise CannotMoveError()
                resp.raise_for_status()
                self.process_aisv3_response(data)
            else:
                resp.raise_for_status()

        return data

    async def create_folder(
        self,
        parent: InventoryCategory | UUID,
        name: str,
        pref_type: int = AssetType.NONE,
        cat_id: UUID | None = None
    ) -> InventoryCategory:
        parent_id = _get_node_id(parent)

        payload = {
            "categories": [
                {
                    "category_id": cat_id,
                    "name": name,
                    "type_default": pref_type,
                    "parent_id": parent_id
                }
            ]
        }
        data = await self.make_ais_request("POST", f"/category/{parent_id}", {"tid": UUID.random()}, payload)
        return self.model.get_category(data["_created_categories"][0])

    async def create_item(
        self,
        parent: UUID | InventoryCategory,
        name: str,
        type: AssetType,
        inv_type: InventoryType,
        wearable_type: WearableType,
        transaction_id: UUID,
        next_mask: int | Permissions = 0x0008e000,
        description: str = '',
    ) -> InventoryItem:
        parent_id = _get_node_id(parent)

        with self._session.main_region.message_handler.subscribe_async(
            ("UpdateCreateInventoryItem",),
            predicate=lambda x: x["AgentData"]["TransactionID"] == transaction_id,
            take=False,
        ) as get_msg:
            await self._session.main_region.circuit.send_reliable(
                Message(
                    'CreateInventoryItem',
                    Block('AgentData', AgentID=self._session.agent_id, SessionID=self._session.id),
                    Block(
                        'InventoryBlock',
                        CallbackID=0,
                        FolderID=parent_id,
                        TransactionID=transaction_id,
                        NextOwnerMask=next_mask,
                        Type=type,
                        InvType=inv_type,
                        WearableType=wearable_type,
                        Name=name,
                        Description=description,
                    )
                )
            )
            msg = await asyncio.wait_for(get_msg(), 5.0)
            # We assume that _handle_update_create_inventory_item() has already been called internally
            # by the time that the `await` returns, given asyncio scheduling
            return self.model.get_item(msg["InventoryData"]["ItemID"])

    async def move(self, node: InventoryNodeBase, new_parent: UUID | InventoryCategory) -> None:
        # AIS error messages suggest using the MOVE HTTP method instead of setting a new parent
        # via PATCH, but MOVE is not implemented in AIS. Instead, we do what the viewer does and
        # use legacy UDP messages for reparenting things.
        new_parent = _get_node_id(new_parent)

        is_item = isinstance(node, InventoryItem)
        msg = Message(
            # Items and folders are moved with different messages
            "MoveInventoryItem" if is_item else "MoveInventoryFolder",
            Block("AgentData", AgentID=self._session.agent_id, SessionID=self._session.id, Stamp=0),
        )

        if is_item:
            msg.add_block(Block("InventoryData", ItemID=node.node_id, FolderID=new_parent, NewName=b''))
        else:
            msg.add_block(Block("InventoryData", FolderID=node.node_id, ParentID=new_parent))

        # No message to say if this even succeeded. Great.
        # TODO: probably need to update category versions for both source and target
        await self._session.main_region.circuit.send_reliable(msg)
        node.parent_id = new_parent

    async def copy(self, node: InventoryNodeBase, destination: UUID | InventoryCategory, contents: bool = True)\
            -> InventoryItem | InventoryCategory:
        destination = _get_node_id(destination)
        if isinstance(node, InventoryItem):
            with self._session.main_region.message_handler.subscribe_async(
                ("BulkUpdateInventory",),
                # Not ideal, but there doesn't seem to be an easy way to determine the transaction ID,
                # and using the callback ID seems a bit crap.
                predicate=lambda x: x["ItemData"]["Name"] == node.name,
                take=False,
            ) as get_msg:
                await self._session.main_region.circuit.send_reliable(Message(
                    'CopyInventoryItem',
                    Block('AgentData', AgentID=self._session.agent_id, SessionID=self._session.id),
                    Block(
                        'InventoryData',
                        CallbackID=0,
                        OldAgentID=self._session.agent_id,
                        OldItemID=node.item_id,
                        NewFolderID=destination,
                        NewName=b''
                    )
                ))
                msg = await asyncio.wait_for(get_msg(), 5.0)
                # The BulkUpdateInventory message may not have been handled internally yet, do it manually.
                self._handle_bulk_update_inventory(msg)

                # Now pull the item out of the inventory
                new_item = self.model.get(msg["ItemData"]["ItemID"])
                assert new_item is not None
                return new_item  # type: ignore
        elif isinstance(node, InventoryCategory):
            # Keep a list of the original descendants in case we're copying a folder into itself
            to_copy = list(node.descendents)
            # There's not really any way to "copy" a category, we just create a new one with the same properties.
            new_cat = await self.create_folder(destination, node.name, node.pref_type)
            if contents:
                cat_lookup: Dict[UUID, UUID] = {node.node_id: new_cat.node_id}
                # Recreate the category hierarchy first, keeping note of the new category IDs.
                for node in to_copy:
                    if isinstance(node, InventoryCategory):
                        new_parent = cat_lookup[node.parent_id]
                        cat_lookup[node.node_id] = (await self.copy(node, new_parent, contents=False)).node_id
                # Items have to be explicitly copied individually
                for node in to_copy:
                    if isinstance(node, InventoryItem):
                        new_parent = cat_lookup[node.parent_id]
                        await self.copy(node, new_parent, contents=False)
            return new_cat
        else:
            raise ValueError(f"Unknown node type: {node!r}")

    async def update(self, node: InventoryNodeBase, data: dict) -> None:
        path = f"/category/{node.node_id}"
        if isinstance(node, InventoryItem):
            path = f"/item/{node.node_id}"
        await self.make_ais_request("PATCH", path, {}, data)
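
    # A minimal usage sketch of how the calls above compose; `root_cat` is assumed to
    # be an InventoryCategory already in the model, the folder name is arbitrary, and
    # finding children by filtering the model's items on parent_id is an assumption.
    async def _example_reorganize(self, root_cat: InventoryCategory) -> None:
        # create_folder() POSTs to AIS and returns the freshly-upserted node
        new_cat = await self.create_folder(root_cat, "Sorted")
        for child in [n for n in self.model.all_items if n.parent_id == root_cat.node_id]:
            # move() falls back to legacy UDP messages, since AIS doesn't implement MOVE
            await self.move(child, new_cat)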

@@ -15,6 +15,7 @@ from typing import *

from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.inventory import InventoryItem, InventoryModel, InventoryObject
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.msgtypes import PacketFlags
@@ -26,8 +27,10 @@ from hippolyzer.lib.base.objects import (
    Object, handle_to_global_pos,
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.base.wearables import VISUAL_PARAMS
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer, XferFilePath
from hippolyzer.lib.base import llsd

if TYPE_CHECKING:
    from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession
@@ -35,14 +38,17 @@ if TYPE_CHECKING:

LOG = logging.getLogger(__name__)
OBJECT_OR_LOCAL = Union[Object, int]
MATERIAL_MAP_TYPE = Dict[UUID, dict]


class ObjectUpdateType(enum.IntEnum):
    OBJECT_UPDATE = enum.auto()
    UPDATE = enum.auto()
    PROPERTIES = enum.auto()
    FAMILY = enum.auto()
    COSTS = enum.auto()
    KILL = enum.auto()
    ANIMATIONS = enum.auto()
    APPEARANCE = enum.auto()


class ClientObjectManager:
@@ -50,12 +56,13 @@ class ClientObjectManager:
    Object manager for a specific region
    """

    __slots__ = ("_region", "_world_objects", "state", "__weakref__")
    __slots__ = ("_region", "_world_objects", "state", "__weakref__", "_requesting_all_mats_lock")

    def __init__(self, region: BaseClientRegion):
        self._region: BaseClientRegion = proxify(region)
        self._world_objects: ClientWorldObjectManager = proxify(region.session().objects)
        self.state: RegionObjectsState = RegionObjectsState()
        self._requesting_all_mats_lock = asyncio.Lock()

    def __len__(self):
        return len(self.state.localid_lookup)
@@ -129,7 +136,7 @@ class ClientObjectManager:
                # Need to wait until we get our reply
                fut = self.state.register_future(local_id, ObjectUpdateType.PROPERTIES)
            else:
                # This was selected so we should already have up to date info
                # This was selected so we should already have up-to-date info
                fut = asyncio.Future()
                fut.set_result(self.lookup_localid(local_id))
            futures.append(fut)
@@ -163,9 +170,99 @@ class ClientObjectManager:

        futures = []
        for local_id in local_ids:
            futures.append(self.state.register_future(local_id, ObjectUpdateType.OBJECT_UPDATE))
            futures.append(self.state.register_future(local_id, ObjectUpdateType.UPDATE))
        return futures
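
    # A minimal sketch of consuming the futures registered above: each one resolves
    # when a matching update arrives for that local ID. The method name and local
    # IDs are assumptions for illustration.
    async def _example_await_updates(self, local_ids: Sequence[int]) -> List[Object]:
        futs = [self.state.register_future(lid, ObjectUpdateType.UPDATE) for lid in local_ids]
        return list(await asyncio.gather(*futs))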

    async def request_all_materials(self) -> MATERIAL_MAP_TYPE:
        """
        Request all materials within the sim

        Sigh, yes, this is best practice per indra :(
        """
        if self._requesting_all_mats_lock.locked():
            # We're already requesting all materials, wait until the lock is free
            # and just return what was returned.
            async with self._requesting_all_mats_lock:
                return self.state.materials

        async with self._requesting_all_mats_lock:
            async with self._region.caps_client.get("RenderMaterials") as resp:
                resp.raise_for_status()
                # Clear out all previous materials, this is a complete response.
                self.state.materials.clear()
                self._process_materials_response(await resp.read())
                return self.state.materials

    async def request_materials(self, material_ids: Sequence[UUID]) -> MATERIAL_MAP_TYPE:
        if self._requesting_all_mats_lock.locked():
            # Just wait for the in-flight request for all materials to complete
            # if we have one in flight.
            async with self._requesting_all_mats_lock:
                # Wait for the lock to be released
                pass

        not_found = set(x for x in material_ids if (x not in self.state.materials))
        if not_found:
            # Request only the materials we don't already have
            data = {"Zipped": llsd.zip_llsd([x.bytes for x in not_found])}
            async with self._region.caps_client.post("RenderMaterials", data=data) as resp:
                resp.raise_for_status()
                self._process_materials_response(await resp.read())

        # Build up a dict of just the requested mats
        mats = {}
        for mat_id in material_ids:
            mats[mat_id] = self.state.materials[mat_id]
        return mats

    def _process_materials_response(self, response: bytes):
        entries = llsd.unzip_llsd(llsd.parse_xml(response)["Zipped"])
        for entry in entries:
            self.state.materials[UUID(bytes=entry["ID"])] = entry["Material"]
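
    # A self-contained sketch of the "Zipped" encoding used by the RenderMaterials
    # cap above: a list of binary material IDs, serialized as LLSD and compressed by
    # llsd.zip_llsd(), with llsd.unzip_llsd() assumed to be its exact inverse.
    @staticmethod
    def _example_zipped_round_trip() -> None:
        wanted = [UUID.random().bytes, UUID.random().bytes]
        zipped = llsd.zip_llsd(wanted)
        # unzip_llsd() should hand back exactly what was zipped
        assert llsd.unzip_llsd(zipped) == wanted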

    async def request_object_inv(self, obj: Object) -> List[InventoryItem]:
        if "RequestTaskInventory" in self._region.cap_urls:
            return await self.request_object_inv_via_cap(obj)
        else:
            return await self.request_object_inv_via_xfer(obj)

    async def request_object_inv_via_cap(self, obj: Object) -> List[InventoryItem]:
        async with self._region.caps_client.get("RequestTaskInventory", params={"task_id": obj.FullID}) as resp:
            resp.raise_for_status()
            all_items = [InventoryItem.from_llsd(x) for x in (await resp.read_llsd())["contents"]]
        # Synthesize the Contents directory so the items can have a parent
        parent = InventoryObject(
            obj_id=obj.FullID,
            name="Contents",
        )
        model = InventoryModel()
        model.add(parent)
        for item in all_items:
            model.add(item)

        return all_items

    async def request_object_inv_via_xfer(self, obj: Object) -> List[InventoryItem]:
        session = self._region.session()
        with self._region.message_handler.subscribe_async(
            ('ReplyTaskInventory',), predicate=lambda x: x["InventoryData"]["TaskID"] == obj.FullID
        ) as get_msg:
            await self._region.circuit.send_reliable(Message(
                'RequestTaskInventory',
                # If no session is passed in we'll use the active session when the coro was created
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                Block('InventoryData', LocalID=obj.LocalID),
            ))

            inv_message = await asyncio.wait_for(get_msg(), timeout=5.0)

        # The Xfer doesn't need to be immediately awaited, multiple signals can be waited on.
        xfer = await self._region.xfer_manager.request(
            file_name=inv_message["InventoryData"]["Filename"], file_path=XferFilePath.CACHE)

        inv_model = InventoryModel.from_bytes(xfer.reassemble_chunks())
        return list(inv_model.all_items)
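
    # A minimal usage sketch: fetch a prim's inventory and list the item names.
    # The cap-vs-Xfer choice is made internally by request_object_inv() above;
    # the method name here is invented for illustration.
    async def _example_list_contents(self, obj: Object) -> List[str]:
        items = await self.request_object_inv(obj)
        return [item.name for item in items]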


class ObjectEvent:
    __slots__ = ("object", "updated", "update_type")
@@ -211,6 +308,12 @@ class ClientWorldObjectManager:
                                  self._handle_object_properties_generic)
        message_handler.subscribe("ObjectPropertiesFamily",
                                  self._handle_object_properties_generic)
        message_handler.subscribe("AvatarAnimation",
                                  self._handle_animation_message)
        message_handler.subscribe("ObjectAnimation",
                                  self._handle_animation_message)
        message_handler.subscribe("AvatarAppearance",
                                  self._handle_avatar_appearance_message)

    def lookup_fullid(self, full_id: UUID) -> Optional[Object]:
        return self._fullid_lookup.get(full_id, None)
@@ -224,7 +327,7 @@ class ClientWorldObjectManager:

    @property
    def all_avatars(self) -> Iterable[Avatar]:
        return tuple(self._avatars.values())
        return list(self._avatars.values())

    def __len__(self):
        return len(self._fullid_lookup)
@@ -243,7 +346,7 @@ class ClientWorldObjectManager:
    def untrack_region_objects(self, handle: int):
        """Handle signal that a region object manager was just cleared"""
        # Make sure they're gone from our lookup table
        for obj in tuple(self._fullid_lookup.values()):
        for obj in list(self._fullid_lookup.values()):
            if obj.RegionHandle == handle:
                del self._fullid_lookup[obj.FullID]
        if handle in self._region_managers:
@@ -278,6 +381,10 @@ class ClientWorldObjectManager:
            futs.extend(region_mgr.request_object_properties(region_objs))
        return futs

    async def request_object_inv(self, obj: Object) -> List[InventoryItem]:
        region_mgr = self._get_region_manager(obj.RegionHandle)
        return await region_mgr.request_object_inv(obj)

    async def load_ancestors(self, obj: Object, wait_time: float = 1.0):
        """
        Ensure that the entire chain of parents above this object is loaded
@@ -361,7 +468,7 @@ class ClientWorldObjectManager:
        if obj.PCode == PCode.AVATAR:
            self._avatar_objects[obj.FullID] = obj
            self._rebuild_avatar_objects()
        self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE, msg)
        self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.UPDATE, msg)

    def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
        obj = region_state.lookup_localid(local_id)
@@ -413,7 +520,7 @@ class ClientWorldObjectManager:
            # our view of the world then we want to move it to this region.
            obj = self.lookup_fullid(object_data["FullID"])
            if obj:
                self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
                self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state is None:
                    continue
@@ -437,7 +544,7 @@ class ClientWorldObjectManager:
                # Need the Object as context because decoding state requires PCode.
                state_deserializer = ObjectStateSerializer.deserialize
                object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
                self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
                self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state:
                    region_state.missing_locals.add(object_data["LocalID"])
@@ -465,7 +572,7 @@ class ClientWorldObjectManager:
                self._update_existing_object(obj, {
                    "UpdateFlags": update_flags,
                    "RegionHandle": handle,
                }, ObjectUpdateType.OBJECT_UPDATE, msg)
                }, ObjectUpdateType.UPDATE, msg)
                continue

            cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
@@ -504,7 +611,7 @@ class ClientWorldObjectManager:
                LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
            obj = self.lookup_fullid(object_data["FullID"])
            if obj:
                self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
                self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
            else:
                if region_state is None:
                    continue
@@ -559,6 +666,59 @@ class ClientWorldObjectManager:
        region_state.coarse_locations.update(coarse_locations)
        self._rebuild_avatar_objects()

    def _handle_animation_message(self, message: Message):
        sender_id = message["Sender"]["ID"]
        if message.name == "AvatarAnimation":
            avatar = self._avatars.get(sender_id)
            if not avatar:
                LOG.warning(f"Received AvatarAnimation for unknown avatar {sender_id}")
                return

            if not avatar.Object:
                LOG.warning(f"Received AvatarAnimation for avatar with no object {sender_id}")
                return

            obj = avatar.Object
        elif message.name == "ObjectAnimation":
            obj = self.lookup_fullid(sender_id)
            if not obj:
                # This is only a debug message in the viewer, but let's be louder.
                LOG.warning(f"Received ObjectAnimation for animesh with no object {sender_id}")
                return
        else:
            LOG.error(f"Unknown animation message type: {message.name}")
            return

        obj.Animations.clear()
        for block in message.blocks.get("AnimationList", []):
            obj.Animations.append(block["AnimID"])
        self._run_object_update_hooks(obj, {"Animations"}, ObjectUpdateType.ANIMATIONS, message)

    def _handle_avatar_appearance_message(self, message: Message):
        sender_id: UUID = message["Sender"]["ID"]
        if message["Sender"]["IsTrial"]:
            return
        av = self.lookup_avatar(sender_id)
        if not av:
            LOG.warning(f"Received AvatarAppearance with no avatar {sender_id}")
            return

        version = message["AppearanceData"]["CofVersion"]
        if version < av.COFVersion:
            LOG.warning(f"Ignoring stale appearance for {sender_id}, {version} < {av.COFVersion}")
            return

        if not message.get_blocks("VisualParam"):
            LOG.warning(f"No visual params in AvatarAppearance for {sender_id}")
            return

        av.COFVersion = version
        av.Appearance = VISUAL_PARAMS.parse_appearance_message(message)

        av_obj = av.Object
        if av_obj:
            self._run_object_update_hooks(av_obj, set(), ObjectUpdateType.APPEARANCE, message)

    def _process_get_object_cost_response(self, parsed: dict):
        if "error" in parsed:
            return
@@ -654,13 +814,14 @@ class RegionObjectsState:

    __slots__ = (
        "handle", "missing_locals", "_orphans", "localid_lookup", "coarse_locations",
        "_object_futures"
        "_object_futures", "materials"
    )

    def __init__(self):
        self.missing_locals = set()
        self.localid_lookup: Dict[int, Object] = {}
        self.coarse_locations: Dict[UUID, Vector3] = {}
        self.materials: MATERIAL_MAP_TYPE = {}
        self._object_futures: Dict[Tuple[int, int], List[asyncio.Future]] = {}
        self._orphans: Dict[int, List[int]] = collections.defaultdict(list)

@@ -673,6 +834,7 @@ class RegionObjectsState:
        self.coarse_locations.clear()
        self.missing_locals.clear()
        self.localid_lookup.clear()
        self.materials.clear()

    def lookup_localid(self, localid: int) -> Optional[Object]:
        return self.localid_lookup.get(localid)
@@ -835,9 +997,9 @@ class Avatar:
        self.FullID: UUID = full_id
        self.Object: Optional["Object"] = obj
        self.RegionHandle: int = region_handle
        # TODO: Allow hooking into getZOffsets FS bridge response
        # to fill in the Z axis if it's infinite
        self.CoarseLocation = coarse_location
        self.Appearance: Dict[int, float] = {}
        self.COFVersion: int = -1
        self.Valid = True
        self.GuessedZ: Optional[float] = None
        self._resolved_name = resolved_name

251 hippolyzer/lib/client/parcel_manager.py Normal file
@@ -0,0 +1,251 @@
import asyncio
import dataclasses
import logging
from typing import *

import numpy as np

from hippolyzer.lib.base.datatypes import UUID, Vector3, Vector2
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ParcelGridFlags, ParcelFlags
from hippolyzer.lib.client.state import BaseClientRegion


LOG = logging.getLogger(__name__)


@dataclasses.dataclass
class Parcel:
    local_id: int
    name: str
    flags: ParcelFlags
    group_id: UUID
    # TODO: More properties


class ParcelManager:
    # We expect to receive this number of ParcelOverlay messages
    NUM_CHUNKS = 4
    # No, we don't support varregion or whatever.
    REGION_SIZE = 256
    # Basically, the minimum parcel size is 4 on either axis, so each "point" in the
    # ParcelOverlay represents an area this size
    GRID_STEP = 4
    GRIDS_PER_EDGE = REGION_SIZE // GRID_STEP

    def __init__(self, region: BaseClientRegion):
        # Dimensions are south to north, west to east
        self.overlay = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint8)
        # 1-indexed parcel list index
        self.parcel_indices = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint16)
        self.parcels: List[Optional[Parcel]] = []
        self.overlay_chunks: List[Optional[bytes]] = [None] * self.NUM_CHUNKS
        self.overlay_complete = asyncio.Event()
        self.parcels_downloaded = asyncio.Event()
        self._parcels_dirty: bool = True
        self._region = region
        self._next_seq = 1
        self._region.message_handler.subscribe("ParcelOverlay", self._handle_parcel_overlay)

    def _handle_parcel_overlay(self, message: Message):
        self.add_overlay_chunk(message["ParcelData"]["Data"], message["ParcelData"]["SequenceID"])

    def add_overlay_chunk(self, chunk: bytes, chunk_num: int) -> bool:
        self.overlay_chunks[chunk_num] = chunk
        # Still have some pending chunks, don't try to parse this yet
        if not all(self.overlay_chunks):
            return False

        new_overlay_data = b"".join(self.overlay_chunks)
        self.overlay_chunks = [None] * self.NUM_CHUNKS
        self._parcels_dirty = False
        if new_overlay_data != self.overlay.data[:]:
            # If the raw data doesn't match, then we have to parse again
            new_data = np.frombuffer(new_overlay_data, dtype=np.uint8).reshape(self.overlay.shape)
            np.copyto(self.overlay, new_data)
            self._parse_overlay()
            # We could optimize this by just marking specific squares dirty
            # if the parcel indices have changed between parses, but I don't care
            # to do that.
            self._parcels_dirty = True
            self.parcels_downloaded.clear()
        if not self.overlay_complete.is_set():
            self.overlay_complete.set()
        return True

    @classmethod
    def _pos_to_grid_coords(cls, pos: Vector3) -> Tuple[int, int]:
        return round(pos.Y // cls.GRID_STEP), round(pos.X // cls.GRID_STEP)
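
    # Worked example: with GRID_STEP = 4, a position of X=37.5, Y=120.0 maps to
    # (120.0 // 4, 37.5 // 4) -> (30, 9). Note the (row, column), i.e. (Y, X),
    # ordering, matching how the overlay arrays above are indexed.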

    def _parse_overlay(self):
        # Zero out all parcel indices
        self.parcel_indices[:, :] = 0
        next_parcel_idx = 1
        for y in range(0, self.GRIDS_PER_EDGE):
            for x in range(0, self.GRIDS_PER_EDGE):
                # We already have a parcel index for this grid, continue
                if self.parcel_indices[y, x]:
                    continue

                # Fill all adjacent grids with this parcel index
                self._flood_fill_parcel_index(y, x, next_parcel_idx)
                # SL doesn't allow disjoint grids to be part of the same parcel, so
                # whatever grid we find next without a parcel index must be a new parcel
                next_parcel_idx += 1

        # Should have found at least one parcel
        assert next_parcel_idx >= 2

        # If the number of parcels has changed, we can't reuse the existing parcel
        # objects, since more than just the parcel boundaries have likely changed.
        if len(self.parcels) != next_parcel_idx - 1:
            # We don't know about any of these parcels yet, fill with None
            self.parcels = [None] * (next_parcel_idx - 1)

    def _flood_fill_parcel_index(self, start_y, start_x, parcel_idx):
        """Flood fill all neighboring grids with the parcel index, being mindful of parcel boundaries"""
        # We know the start grid is assigned to this parcel index
        self.parcel_indices[start_y, start_x] = parcel_idx
        # Queue of grids to test the neighbors of, start with the start grid.
        neighbor_test_queue: List[Tuple[int, int]] = [(start_y, start_x)]

        while neighbor_test_queue:
            to_test = neighbor_test_queue.pop(0)
            test_grid = self.overlay[to_test]

            for direction in ((-1, 0), (1, 0), (0, -1), (0, 1)):
                new_pos = to_test[0] + direction[0], to_test[1] + direction[1]

                if any(x < 0 or x >= self.GRIDS_PER_EDGE for x in new_pos):
                    # Outside bounds
                    continue
                if self.parcel_indices[new_pos]:
                    # Already set, skip
                    continue

                if direction[0] == -1 and test_grid & ParcelGridFlags.SOUTH_LINE:
                    # Test grid is already on a south line, can't go south.
                    continue
                if direction[1] == -1 and test_grid & ParcelGridFlags.WEST_LINE:
                    # Test grid is already on a west line, can't go west.
                    continue

                grid = self.overlay[new_pos]

                if direction[0] == 1 and grid & ParcelGridFlags.SOUTH_LINE:
                    # Hit a south line going north, this is outside the current parcel
                    continue
                if direction[1] == 1 and grid & ParcelGridFlags.WEST_LINE:
                    # Hit a west line going east, this is outside the current parcel
                    continue
                # This grid is within the current parcel, set the parcel index
                self.parcel_indices[new_pos] = parcel_idx
                # Append the grid to the neighbour testing queue
                neighbor_test_queue.append(new_pos)
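
    # A toy illustration of the boundary rules above, assuming SOUTH_LINE and
    # WEST_LINE are bit flags in ParcelGridFlags: given two side-by-side grids where
    # the eastern one has WEST_LINE set, a fill starting in the western grid can't
    # step east (it would hit that west line going east), and a fill starting in the
    # eastern grid can't step west (it's already on a west line), so the two grids
    # end up with different parcel indices.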

    async def request_dirty_parcels(self) -> Tuple[Parcel, ...]:
        if self._parcels_dirty:
            return await self.request_all_parcels()
        return tuple(self.parcels)

    async def request_all_parcels(self) -> Tuple[Parcel, ...]:
        await self.overlay_complete.wait()
        # Because of how we build up the parcel index map, it's safe for us to
        # do this instead of keeping track of seen IDs in a set or similar
        last_seen_parcel_index = 0
        futs = []
        for y in range(0, self.GRIDS_PER_EDGE):
            for x in range(0, self.GRIDS_PER_EDGE):
                parcel_index = self.parcel_indices[y, x]
                assert parcel_index != 0
                if parcel_index <= last_seen_parcel_index:
                    continue
                assert parcel_index == last_seen_parcel_index + 1
                last_seen_parcel_index = parcel_index
                # Request a position within the parcel
                futs.append(self.request_parcel_properties(
                    Vector2(x * self.GRID_STEP + 1.0, y * self.GRID_STEP + 1.0)
                ))

        # Wait for all parcel properties to come in
        await asyncio.gather(*futs)
        self.parcels_downloaded.set()
        self._parcels_dirty = False
        return tuple(self.parcels)

    async def request_parcel_properties(self, pos: Vector2) -> Parcel:
        await self.overlay_complete.wait()
        seq_id = self._next_seq
        # Register a wait on a ParcelProperties matching this seq
        parcel_props_fut = self._region.message_handler.wait_for(
            ("ParcelProperties",),
            predicate=lambda msg: msg["ParcelData"]["SequenceID"] == seq_id,
            timeout=10.0,
        )
        # We don't care about when we receive an ack, we only care about when we receive the parcel props
        _ = self._region.circuit.send_reliable(Message(
            "ParcelPropertiesRequest",
            Block("AgentData", AgentID=self._region.session().agent_id, SessionID=self._region.session().id),
            Block(
                "ParcelData",
                SequenceID=seq_id,
                West=pos.X,
                East=pos.X,
                North=pos.Y,
                South=pos.Y,
                # What does this even mean?
                SnapSelection=0,
            ),
        ))
        self._next_seq += 1

        return self._process_parcel_properties(await parcel_props_fut, pos)

    def _process_parcel_properties(self, parcel_props: Message, pos: Optional[Vector2] = None) -> Parcel:
        data_block = parcel_props["ParcelData"][0]
        grid_coord = None
        if pos is not None:
            # We have a pos, figure out where in the grid we should look for the parcel index
            grid_coord = self._pos_to_grid_coords(pos)
        else:
            # Need to look at the parcel bitmap to figure out a valid grid coord.
            # This is a boolean array where each bit says whether the parcel occupies that grid.
            parcel_bitmap = data_block.deserialize_var("Bitmap")

            for y in range(self.GRIDS_PER_EDGE):
                for x in range(self.GRIDS_PER_EDGE):
                    if parcel_bitmap[y, x]:
                        # This is the first grid the parcel occupies per the bitmap
                        grid_coord = y, x
                        break
                if grid_coord:
                    break

        parcel = Parcel(
            local_id=data_block["LocalID"],
            name=data_block["Name"],
            flags=ParcelFlags(data_block["ParcelFlags"]),
            group_id=data_block["GroupID"],
            # Parcel UUID isn't in this response :/
        )

        # I guess the bitmap _could_ be empty, but probably not.
        if grid_coord is not None:
            # Parcel indices are one-indexed, convert to zero-indexed.
            parcel_idx = self.parcel_indices[grid_coord] - 1
            if len(self.parcels) > parcel_idx >= 0:
                # Okay, parcels list is sane, place the parcel in there.
                self.parcels[parcel_idx] = parcel
            else:
                LOG.warning(f"Received ParcelProperties with incomplete overlay for {grid_coord!r}")

        return parcel

    async def get_parcel_at(self, pos: Vector2, request_if_missing: bool = True) -> Optional[Parcel]:
        grid_coord = self._pos_to_grid_coords(pos)
        parcel = None
        if parcel_idx := self.parcel_indices[grid_coord]:
            parcel = self.parcels[parcel_idx - 1]
        if request_if_missing and parcel is None:
            return await self.request_parcel_properties(pos)
        return parcel
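
    # A minimal end-to-end sketch: wait for the overlay, then fetch whichever parcel
    # covers an arbitrary position. The method name and coordinates are invented
    # for illustration; `region` is assumed to be a connected BaseClientRegion.
    @staticmethod
    async def _example_lookup(region: BaseClientRegion) -> Optional[Parcel]:
        manager = ParcelManager(region)
        await manager.overlay_complete.wait()
        return await manager.get_parcel_at(Vector2(128.0, 128.0))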
@@ -17,6 +17,8 @@ from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.inventory_manager import InventoryManager

from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager

@@ -27,6 +29,7 @@ class BaseClientRegion(ConnectionHolder, abc.ABC):
    # Actually a weakref
    session: Callable[[], BaseClientSession]
    objects: ClientObjectManager
    xfer_manager: XferManager
    caps_client: CapsClient
    cap_urls: multidict.MultiDict[str]
    circuit_addr: ADDR_TUPLE
@@ -82,11 +85,14 @@ class BaseClientSession(abc.ABC):
    id: UUID
    agent_id: UUID
    secure_session_id: UUID
    active_group: UUID
    groups: Set[UUID]
    message_handler: MessageHandler[Message, str]
    regions: MutableSequence[BaseClientRegion]
    region_by_handle: Callable[[int], Optional[BaseClientRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
    objects: ClientWorldObjectManager
    inventory: InventoryManager
    login_data: Dict[str, Any]
    REGION_CLS = Type[BaseClientRegion]

@@ -100,6 +106,8 @@ class BaseClientSession(abc.ABC):
        self.circuit_code = circuit_code
        self.global_caps = {}
        self.session_manager = session_manager
        self.active_group: UUID = UUID.ZERO
        self.groups: Set[UUID] = set()
        self.regions = []
        self._main_region = None
        self.message_handler: MessageHandler[Message, str] = MessageHandler()

@@ -1,8 +1,8 @@
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryItem
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import ais_item_to_inventory_data


class ProxyAssetUploader(AssetUploader):
@@ -22,7 +22,7 @@ class ProxyAssetUploader(AssetUploader):
            ]
        }
        async with self._region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
            ais_item = (await resp.read_llsd())["items"][0]
            ais_item = InventoryItem.from_llsd((await resp.read_llsd())["items"][0], flavor="ais")

        # Got it, ship it off to the viewer
        message = Message(
@@ -33,7 +33,7 @@ class ProxyAssetUploader(AssetUploader):
                SimApproved=1,
                TransactionID=UUID.random(),
            ),
            ais_item_to_inventory_data(ais_item),
            ais_item.to_inventory_data(),
            direction=Direction.IN
        )
        self._region.circuit.send(message)

@@ -16,6 +16,8 @@ import mitmproxy.http
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.caps import CapData, CapType
@@ -32,6 +34,9 @@ def apply_security_monkeypatches():
apply_security_monkeypatches()


LOG = logging.getLogger(__name__)


class MITMProxyEventManager:
    """
    Handles HTTP request and response events from the mitmproxy process
@@ -58,7 +63,7 @@ class MITMProxyEventManager:
            try:
                await self.pump_proxy_event()
            except:
                logging.exception("Exploded when handling parsed packets")
                LOG.exception("Exploded when handling parsed packets")

    async def pump_proxy_event(self):
        try:
@@ -140,7 +145,7 @@ class MITMProxyEventManager:
                # Both the wrapper request and the actual asset server request went through
                # the proxy. Don't bother trying the redirect strategy anymore.
                self._asset_server_proxied = True
                logging.warning("noproxy not used, switching to URI rewrite strategy")
                LOG.warning("noproxy not used, switching to URI rewrite strategy")
        elif cap_data and cap_data.cap_name == "EventQueueGet":
            # HACK: The sim's EQ acking mechanism doesn't seem to actually work.
            # if the client drops the connection due to timeout before we can
@@ -151,7 +156,7 @@ class MITMProxyEventManager:
            eq_manager = cap_data.region().eq_manager
            cached_resp = eq_manager.get_cached_poll_response(req_ack_id)
            if cached_resp:
                logging.warning("Had to serve a cached EventQueueGet due to client desync")
                LOG.warning("Had to serve a cached EventQueueGet due to client desync")
                flow.response = mitmproxy.http.Response.make(
                    200,
                    llsd.format_xml(cached_resp),
@@ -215,7 +220,7 @@ class MITMProxyEventManager:
            try:
                message_logger.log_http_response(flow)
            except:
                logging.exception("Failed while logging HTTP flow")
                LOG.exception("Failed while logging HTTP flow")

        # Don't process responses for requests or responses injected by the proxy.
        # We already processed it, it came from us!
@@ -274,13 +279,13 @@ class MITMProxyEventManager:

            if cap_data.cap_name == "Seed":
                parsed = llsd.parse_xml(flow.response.content)
                logging.debug("Got seed cap for %r : %r" % (cap_data, parsed))
                LOG.debug("Got seed cap for %r : %r" % (cap_data, parsed))
                region.update_caps(parsed)

                # On LL's grid these URIs aren't unique across sessions or regions,
                # so we get request attribution by replacing them with a unique
                # alias URI.
                logging.debug("Replacing GetMesh caps with wrapped versions")
                LOG.debug("Replacing GetMesh caps with wrapped versions")
                wrappable_caps = {"GetMesh2", "GetMesh", "GetTexture", "ViewerAsset"}
                for cap_name in wrappable_caps:
                    if cap_name in parsed:
@@ -315,7 +320,7 @@ class MITMProxyEventManager:
                if "uploader" in parsed:
                    region.register_cap(cap_data.cap_name + "Uploader", parsed["uploader"], CapType.TEMPORARY)
        except:
            logging.exception("OOPS, blew up in HTTP proxy!")
            LOG.exception("OOPS, blew up in HTTP proxy!")

    def _handle_login_flow(self, flow: HippoHTTPFlow):
        resp = xmlrpc.client.loads(flow.response.content)[0][0]  # type: ignore
@@ -324,20 +329,35 @@ class MITMProxyEventManager:
        flow.cap_data = CapData("LoginRequest", session=weakref.ref(sess))

    def _handle_eq_event(self, session: Session, region: ProxiedRegion, event: Dict[str, Any]):
        logging.debug("Event received on %r: %r" % (self, event))
        LOG.debug("Event received on %r: %r" % (self, event))
        message_logger = self.session_manager.message_logger
        if message_logger:
            message_logger.log_eq_event(session, region, event)

        if self.llsd_message_serializer.can_handle(event["message"]):
            msg = self.llsd_message_serializer.deserialize(event)
        else:
            msg = Message.from_eq_event(event)
        msg.sender = region.circuit_addr
        msg.direction = Direction.IN

        try:
            session.message_handler.handle(msg)
        except:
            LOG.exception("Failed while handling EQ message for session")

        try:
            region.message_handler.handle(msg)
        except:
            LOG.exception("Failed while handling EQ message for region")

        handle_event = AddonManager.handle_eq_event(session, region, event)
        if handle_event is True:
            # Addon handled the event and didn't want it sent to the viewer
            return True

        msg = None
        # Handle events that inform us about new regions
        sim_addr, sim_handle, sim_seed = None, None, None
        if self.llsd_message_serializer.can_handle(event["message"]):
            msg = self.llsd_message_serializer.deserialize(event)
        # Sim is asking us to talk to a neighbour
        if event["message"] == "EstablishAgentCommunication":
            ip_split = event["body"]["sim-ip-and-port"].split(":")
@@ -345,7 +365,7 @@ class MITMProxyEventManager:
            sim_seed = event["body"]["seed-capability"]
        # We teleported or crossed regions, opening comms to the new sim
        elif msg and msg.name in ("TeleportFinish", "CrossedRegion"):
            sim_block = msg.get_block("RegionData", msg.get_block("Info"))[0]
            sim_block = msg.get_blocks("RegionData", msg.get_blocks("Info"))[0]
            sim_addr = (sim_block["SimIP"], sim_block["SimPort"])
            sim_handle = sim_block["RegionHandle"]
            sim_seed = sim_block["SeedCapability"]

@@ -8,6 +8,7 @@ import queue
import typing
import uuid
import weakref
from typing import Iterable

import mitmproxy.certs
import mitmproxy.ctx
@@ -15,20 +16,30 @@ import mitmproxy.log
import mitmproxy.master
import mitmproxy.options
import mitmproxy.proxy
from cryptography import x509
from cryptography.x509 import GeneralNames
from mitmproxy.addons import core, clientplayback, proxyserver, next_layer, disable_h2c
from mitmproxy.certs import CertStoreEntry
from mitmproxy.http import HTTPFlow
from mitmproxy.proxy.layers import tls
import OpenSSL

from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.helpers import get_resource_filename, create_logged_task
from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher
from hippolyzer.lib.proxy.caps import SerializedCapData


class SLCertStore(mitmproxy.certs.CertStore):
    def get_cert(self, commonname: typing.Optional[str], sans: typing.List[str], *args, **kwargs):
    def get_cert(
        self,
        commonname: str | None,
        sans: Iterable[x509.GeneralName],
        organization: str | None = None,
        *args,
        **kwargs
    ) -> CertStoreEntry:
        entry = super().get_cert(commonname, sans, *args, **kwargs)
        cert, privkey, chain = entry.cert, entry.privatekey, entry.chain_file
        cert, privkey, chain, chain_certs = entry.cert, entry.privatekey, entry.chain_file, entry.chain_certs
        x509 = cert.to_pyopenssl()
        # The cert must have a subject key ID or the viewer will reject it.
        for i in range(0, x509.get_extension_count()):
@@ -48,10 +59,10 @@ class SLCertStore(mitmproxy.certs.CertStore):
        ])
        x509.sign(OpenSSL.crypto.PKey.from_cryptography_key(privkey), "sha256")  # type: ignore
        new_entry = mitmproxy.certs.CertStoreEntry(
            mitmproxy.certs.Cert.from_pyopenssl(x509), privkey, chain
            mitmproxy.certs.Cert.from_pyopenssl(x509), privkey, chain, chain_certs,
        )
        # Replace the cert that was created in the base `get_cert()` with our modified cert
        self.certs[(commonname, tuple(sans))] = new_entry
        self.certs[(commonname, GeneralNames(sans))] = new_entry
        self.expire_queue.pop(-1)
        self.expire(new_entry)
        return new_entry
@@ -105,21 +116,9 @@ class IPCInterceptionAddon:
        self.to_proxy_queue: multiprocessing.Queue = flow_context.to_proxy_queue
        self.shutdown_signal: multiprocessing.Event = flow_context.shutdown_signal

    def add_log(self, entry: mitmproxy.log.LogEntry):
        if entry.level == "debug":
            logging.debug(entry.msg)
        elif entry.level in ("alert", "info"):
            # TODO: All mitmproxy infos are basically debugs, should
            # probably give these dedicated loggers
            logging.debug(entry.msg)
        elif entry.level == "warn":
            logging.warning(entry.msg)
        elif entry.level == "error":
            logging.error(entry.msg)

    def running(self):
        # register to pump the events or something here
        asyncio.create_task(self._pump_callbacks())
        create_logged_task(self._pump_callbacks(), "Pump HTTP proxy callbacks")
        # Tell the main process mitmproxy is ready to handle requests
        self.mitmproxy_ready.set()


@@ -1,17 +1,67 @@
import asyncio
import datetime as dt
import functools
import logging
from typing import *

from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.helpers import get_mtime, create_logged_task
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryCategory, InventoryNodeBase
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.inventory import InventoryItem
from hippolyzer.lib.base.templates import AssetType, InventoryType, WearableType, Permissions
from hippolyzer.lib.base.network.transport import Direction

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.sessions import Session


LOG = logging.getLogger(__name__)


class ProxyInventoryManager(InventoryManager):
    def __init__(self, session: BaseClientSession):
    _session: "Session"

    def __init__(self, session: "Session"):
        # These handlers all need their processing deferred until the cache has been loaded.
        # Since the cache is loaded asynchronously, the viewer may get ahead of us by parsing
        # the cache faster and start requesting inventory details we can't do anything with yet.
        self._handle_update_create_inventory_item = self._wrap_with_cache_defer(
            self._handle_update_create_inventory_item
        )
        self._handle_remove_inventory_item = self._wrap_with_cache_defer(
            self._handle_remove_inventory_item
        )
        self._handle_remove_inventory_folder = self._wrap_with_cache_defer(
            self._handle_remove_inventory_folder
        )
        self._handle_bulk_update_inventory = self._wrap_with_cache_defer(
            self._handle_bulk_update_inventory
        )
        self._handle_move_inventory_item = self._wrap_with_cache_defer(
            self._handle_move_inventory_item
        )
        self._handle_move_inventory_folder = self._wrap_with_cache_defer(
            self._handle_move_inventory_folder
        )
        self.process_aisv3_response = self._wrap_with_cache_defer(
            self.process_aisv3_response
        )

        # Call the base constructor after wrapping, because it registers handlers against
        # specific methods, which need to be wrapped before they're registered. Handlers
        # are registered by method reference, not by name!
        super().__init__(session)
        session.http_message_handler.subscribe("InventoryAPIv3", self._handle_aisv3_flow)
        newest_cache = None
        newest_timestamp = dt.datetime(year=1970, month=1, day=1, tzinfo=dt.timezone.utc)
        # So consumers know when the inventory should be complete
        self.cache_loaded: asyncio.Event = asyncio.Event()
        self._cache_deferred_calls: List[Tuple[Callable[..., None], Tuple]] = []
        # Look for the newest version of the cached inventory and use that.
        # Not foolproof, but close enough if we're not sure what viewer is being used.
        for cache_dir in iter_viewer_cache_dirs():
@@ -26,7 +76,119 @@ class ProxyInventoryManager(InventoryManager):
            newest_cache = inv_cache_path

        if newest_cache:
            cache_load_fut = asyncio.ensure_future(asyncio.to_thread(self.load_cache, newest_cache))
            # Meh. Don't care if it fails.
            cache_load_fut.add_done_callback(lambda *args: self.cache_loaded.set())
            create_logged_task(self._apply_deferred_after_loaded(), "Apply deferred inventory", LOG)
        else:
            self.cache_loaded.set()

    async def _apply_deferred_after_loaded(self):
        await self.cache_loaded.wait()
        LOG.info("Applying deferred inventory calls")
        deferred_calls = self._cache_deferred_calls[:]
        self._cache_deferred_calls.clear()
        for func, args in deferred_calls:
            try:
                self.load_cache(newest_cache)
                func(*args)
            except:
                logging.exception("Failed to load invcache")
                LOG.exception("Failed to apply deferred inventory call")

    def _wrap_with_cache_defer(self, func: Callable[..., None]):
        @functools.wraps(func)
        def wrapped(*inner_args):
            if not self.cache_loaded.is_set():
                self._cache_deferred_calls.append((func, inner_args))
            else:
                func(*inner_args)
        return wrapped
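
    # The wrapper above is just a buffering decorator. A self-contained sketch of
    # the same pattern with the inventory specifics stripped out; the names here
    # are invented for illustration.
    @staticmethod
    def _example_defer_until(event: asyncio.Event, queue: List[Tuple[Callable, Tuple]]) -> Callable:
        def deco(func: Callable[..., None]) -> Callable[..., None]:
            @functools.wraps(func)
            def wrapped(*args):
                if event.is_set():
                    func(*args)  # Ready: run immediately
                else:
                    queue.append((func, args))  # Buffer until the event fires
            return wrapped
        return deco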

    def _handle_aisv3_flow(self, flow: HippoHTTPFlow):
        if flow.response.status_code < 200 or flow.response.status_code >= 300:
            # Probably not a success
            return
        content_type = flow.response.headers.get("Content-Type", "")
        if "llsd" not in content_type:
            # Okay, probably still some kind of error...
            return

        # Try and add anything from the response into the model
        self.process_aisv3_response(llsd.parse(flow.response.content))

    async def create_folder(
        self,
        parent: InventoryCategory | UUID,
        name: str,
        pref_type: int = AssetType.NONE,
        cat_id: UUID | None = None
    ) -> InventoryCategory:
        cat = await super().create_folder(parent, name, pref_type, cat_id)
        await self._session.main_region.circuit.send_reliable(self._craft_update_message(cat))
        return cat

    async def create_item(
        self,
        parent: UUID | InventoryCategory,
        name: str,
        type: AssetType,
        inv_type: InventoryType,
        wearable_type: WearableType,
        transaction_id: UUID,
        next_mask: int | Permissions = 0x0008e000,
        description: str = '',
    ) -> InventoryItem:
        item = await super().create_item(
            parent=parent,
            name=name,
            type=type,
            inv_type=inv_type,
            wearable_type=wearable_type,
            transaction_id=transaction_id,
            next_mask=next_mask,
            description=description,
        )
        await self._session.main_region.circuit.send_reliable(self._craft_update_message(item))
        return item

    async def update(self, node: InventoryNodeBase, data: dict) -> None:
        await super().update(node, data)
        await self._session.main_region.circuit.send_reliable(self._craft_update_message(node))

    async def move(self, node: InventoryNodeBase, new_parent: UUID | InventoryCategory) -> None:
        await super().move(node, new_parent)
        await self._session.main_region.circuit.send_reliable(self._craft_update_message(node))

    async def copy(self, node: InventoryNodeBase, destination: UUID | InventoryCategory, contents: bool = True)\
            -> InventoryCategory | InventoryItem:
        ret_node = await super().copy(node, destination, contents)
        # Tell the viewer about the newly-created copy, not the source node
        await self._session.main_region.circuit.send_reliable(self._craft_update_message(ret_node))
        return ret_node

    def _craft_removal_message(self, node: InventoryNodeBase) -> Message:
        is_folder = True
        if isinstance(node, InventoryItem):
            is_folder = False

        msg = Message(
            "RemoveInventoryFolder" if is_folder else "RemoveInventoryItem",
            Block("AgentData", AgentID=self._session.agent_id, SessionID=self._session.id),
            direction=Direction.IN,
        )
        if is_folder:
            msg.add_block(Block("FolderData", FolderID=node.node_id))
        else:
            msg.add_block(Block("InventoryData", ItemID=node.node_id))
        return msg

    def _craft_update_message(self, node: InventoryNodeBase):
        msg = Message(
            "BulkUpdateInventory",
            Block("AgentData", AgentID=self._session.agent_id, TransactionID=UUID.random()),
            direction=Direction.IN,
        )

        if isinstance(node, InventoryItem):
            msg.add_block(node.to_inventory_data("ItemData"))
        elif isinstance(node, InventoryCategory):
            msg.add_block(node.to_folder_data())
        return msg
|
||||
|
||||
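Editor's note: the defer-until-loaded pattern above is small enough to demonstrate standalone. A minimal sketch (names here are illustrative, not Hippolyzer API): calls made before an asyncio.Event is set get queued, and a waiter coroutine replays them once loading finishes.

import asyncio
import functools
import logging


class DeferredCalls:
    def __init__(self):
        self.loaded = asyncio.Event()
        self._deferred = []

    def defer_until_loaded(self, func):
        # Wrap a method so invocations queue up until `loaded` is set
        @functools.wraps(func)
        def wrapped(*args):
            if not self.loaded.is_set():
                self._deferred.append((func, args))
            else:
                func(*args)
        return wrapped

    async def replay_after_loaded(self):
        # Run alongside the loader; drain the queue once loading completes
        await self.loaded.wait()
        deferred, self._deferred = self._deferred, []
        for func, args in deferred:
            try:
                func(*args)
            except Exception:
                logging.exception("Failed to apply deferred call")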
@@ -161,6 +161,8 @@ class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
            region.mark_dead()
        elif message.name == "RegionHandshake":
            region.name = str(message["RegionInfo"][0]["SimName"])
        elif message.name == "AgentDataUpdate" and self.session:
            self.session.active_group = message["AgentData"]["ActiveGroupID"]

        # Send the message if it wasn't explicitly dropped or sent before
        if not message.finalized:
@@ -7,6 +7,7 @@ import copy
import fnmatch
import gzip
import io
import json
import logging
import pickle
import re
@@ -16,10 +17,14 @@ import weakref
from defusedxml import minidom

from hippolyzer.lib.base import serialization as se, llsd
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
from hippolyzer.lib.base.helpers import bytes_escape
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
    EnumFieldSpecifier, MatchResult
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
@@ -503,6 +508,8 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
                raise
            elif any(content_type.startswith(x) for x in ("application/xml", "text/xml")):
                beautified = self._format_xml(message.content)
            elif "json" in content_type:
                beautified = json.dumps(json.loads(message.content), indent=2)
        except:
            LOG.exception("Failed to beautify message")

@@ -614,6 +621,19 @@ class EQMessageLogEntry(AbstractMessageLogEntry):
        return "EQ"

    def request(self, beautify=False, replacements=None):
        # TODO: This is a bit of a hack! Templated messages can be sent over the EQ, so let's
        # display them as template messages if that's what they are.
        if self.event['message'] in DEFAULT_TEMPLATE_DICT.message_templates:
            msg = LLSDMessageSerializer().deserialize(self.event)
            msg.synthetic = True
            msg.send_flags = PacketFlags.EQ
            msg.direction = Direction.IN
            # Annoyingly, templated messages sent over the EQ can have extra fields not specified
            # in the template, and this is often the case. ParcelProperties has fields that aren't
            # in the template. Luckily, we don't really care about extra fields, we just may not
            # be able to automatically decode U32 and friends without the hint from the template
            # that that is what they are.
            return HumanMessageSerializer.to_human_string(msg, replacements, beautify)
        return f'EQ {self.event["message"]}\n\n{self._format_llsd(self.event["body"])}'

    @property
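Editor's note: the templated-EQ special case above is self-contained enough to restate as a sketch. Hedged: the calls and their argument order are copied from the diff itself; the `replacements=None, beautify=True` defaults are illustrative, not the project's exact signature.

from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT


def format_eq_event(event: dict) -> str:
    # An EQ event is an LLSD map with "message" and "body" keys. If the
    # message name exists in the UDP template dict, decode it as a templated
    # message so it pretty-prints like any other LLUDP message.
    if event["message"] in DEFAULT_TEMPLATE_DICT.message_templates:
        msg = LLSDMessageSerializer().deserialize(event)
        return HumanMessageSerializer.to_human_string(msg, None, True)
    # Otherwise fall back to dumping the raw LLSD body
    return f'EQ {event["message"]}: {event["body"]!r}'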
@@ -48,6 +48,7 @@ class ProxyObjectManager(ClientObjectManager):
            "RequestMultipleObjects",
            self._handle_request_multiple_objects,
        )
        region.http_message_handler.subscribe("RenderMaterials", self._handle_render_materials)

    def load_cache(self):
        if not self.may_use_vo_cache or self.cache_loaded:
@@ -100,6 +101,13 @@ class ProxyObjectManager(ClientObjectManager):
        # Remove any queued cache misses that the viewer just requested for itself
        self.queued_cache_misses -= {b["ID"] for b in msg["ObjectData"]}

    def _handle_render_materials(self, flow: HippoHTTPFlow):
        if flow.response.status_code != 200:
            return
        if flow.request.method not in ("GET", "POST"):
            return
        self._process_materials_response(flow.response.content)


class ProxyWorldObjectManager(ClientWorldObjectManager):
    _session: Session
18
hippolyzer/lib/proxy/parcel_manager.py
Normal file
@@ -0,0 +1,18 @@
from typing import *

from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.client.parcel_manager import ParcelManager
if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion


class ProxyParcelManager(ParcelManager):
    def __init__(self, region: "ProxiedRegion"):
        super().__init__(proxify(region))
        # Handle ParcelProperties messages that we didn't specifically ask for
        self._region.message_handler.subscribe("ParcelProperties", self._handle_parcel_properties)

    def _handle_parcel_properties(self, msg: Message):
        self._process_parcel_properties(msg)
        return None
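Editor's note: `proxify()` is used throughout these constructors, presumably so long-lived managers hold something weak-reference-like rather than keeping a dead region alive. That behavior is an assumption about the helper; the standard-library analogue is:

import weakref


class Region:
    def __init__(self, name: str):
        self.name = name


region = Region("Test")
proxy = weakref.proxy(region)   # usable like `region`, but not a strong reference
print(proxy.name)               # -> "Test"
del region                      # once the real object is gone...
# print(proxy.name)             # ...this would raise ReferenceError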
@@ -21,6 +21,7 @@ from hippolyzer.lib.proxy.object_manager import ProxyObjectManager
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.proxy.asset_uploader import ProxyAssetUploader
from hippolyzer.lib.proxy.parcel_manager import ProxyParcelManager

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.sessions import Session
@@ -67,6 +68,7 @@ class ProxiedRegion(BaseClientRegion):
        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
        self.asset_uploader = ProxyAssetUploader(proxify(self))
        self.parcel_manager = ProxyParcelManager(proxify(self))
        self._recalc_caps()

    @property
@@ -34,6 +34,7 @@ if TYPE_CHECKING:

class Session(BaseClientSession):
    regions: MutableSequence[ProxiedRegion]
    inventory: ProxyInventoryManager
    region_by_handle: Callable[[int], Optional[ProxiedRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[ProxiedRegion]]
    main_region: Optional[ProxiedRegion]
@@ -16,6 +16,7 @@ from hippolyzer.lib.base.events import Event
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_gridxy
from .connection import VivoxConnection, VivoxMessage
from ..base.helpers import create_logged_task

LOG = logging.getLogger(__name__)
RESP_LOG = logging.getLogger(__name__ + ".responses")
@@ -79,7 +80,7 @@ class VoiceClient:
        self._pos = Vector3(0, 0, 0)

        self.vivox_conn: Optional[VivoxConnection] = None
-       self._poll_task = asyncio.create_task(self._poll_messages())
+       self._poll_task = create_logged_task(self._poll_messages(), "Poll Vivox messages")
        self.event_handler: MessageHandler[VivoxMessage, str] = MessageHandler(take_by_default=False)

        self.event_handler.subscribe(
@@ -352,7 +353,7 @@ class VoiceClient:

        RESP_LOG.debug("%s %s %s %r" % ("Request", request_id, msg_type, data))

-       asyncio.create_task(self.vivox_conn.send_request(request_id, msg_type, data))
+       create_logged_task(self.vivox_conn.send_request(request_id, msg_type, data), "Send Vivox message")
        future = asyncio.Future()
        self._pending_req_futures[request_id] = future
        return future
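Editor's note: this changeset repeatedly swaps bare `asyncio.create_task()` for `create_logged_task()`. The motivation is that exceptions in fire-and-forget tasks are silently dropped if nothing awaits the task. The real helper lives in `hippolyzer.lib.base.helpers`; one plausible implementation, with the `(coro, name, logger)` signature inferred from the call sites above, looks like:

import asyncio
import logging
from typing import Coroutine, Optional


def create_logged_task(
    coro: Coroutine,
    name: str,
    log: Optional[logging.Logger] = None,
) -> asyncio.Task:
    # Schedule the coroutine, and log any exception it raises instead of
    # letting it vanish when nobody keeps a reference or awaits the task.
    log = log or logging.getLogger(__name__)

    def _log_failure(task: asyncio.Task) -> None:
        if not task.cancelled() and task.exception() is not None:
            log.error("Task %r failed", name, exc_info=task.exception())

    task = asyncio.create_task(coro, name=name)
    task.add_done_callback(_log_failure)
    return task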
76
pyproject.toml
Normal file
@@ -0,0 +1,76 @@
[build-system]
requires = ["setuptools>=64", "setuptools-scm>=8"]
build-backend = "setuptools.build_meta"

[project]
name = "hippolyzer"
dynamic = ["version"]
description = "Analysis tools for SL-compatible virtual worlds"
readme = "README.md"
license = "LGPL-3.0-only"
requires-python = ">=3.12"
authors = [
    { name = "Salad Dais", email = "83434023+SaladDais@users.noreply.github.com" },
]
classifiers = [
    "Operating System :: MacOS",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: POSIX",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Testing",
    "Topic :: System :: Networking :: Monitoring",
]
dependencies = [
    "aiohttp<4.0.0",
    "defusedxml",
    "gltflib",
    "Glymur<0.9.7",
    "idna<3,>=2.5",
    "lazy-object-proxy",
    "llsd<1.1.0",
    "numpy<2.0",
    "pycollada",
    "recordclass>=0.23.1,<0.24",
    "transformations",
]

[project.optional-dependencies]
proxy = [
    "arpeggio",
    "mitmproxy>=11.0.0,<12",
    "outleap<1.0",
    "ptpython<4.0",
    "Werkzeug<4.0",
]
gui = [
    "hippolyzer[proxy]",
    "pyside6-essentials",
    "qasync",
]

[tool.setuptools.packages.find]
where = ["."]
include = ["hippolyzer*"]
namespaces = false

[project.scripts]
hippolyzer-cli = "hippolyzer.apps.proxy:main"
hippolyzer-gui = "hippolyzer.apps.proxy_gui:gui_main"

[project.urls]
Homepage = "https://github.com/SaladDais/Hippolyzer/"

[tool.black]
line-length = 160

[tool.pytest.ini_options]
minversion = "6.0"

[tool.isort]
profile = "black"

[tool.setuptools_scm]
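Editor's note: with `dynamic = ["version"]` and setuptools-scm, there is no hardcoded `version = ...` left anywhere; the version is derived from git tags at build time. Installed code can recover it from package metadata. A minimal sketch:

from importlib.metadata import PackageNotFoundError, version

try:
    # e.g. "0.15.0" -- whatever setuptools-scm derived from the git tag
    print(version("hippolyzer"))
except PackageNotFoundError:
    print("hippolyzer is not installed in this environment")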
131
requirements.txt
@@ -1,70 +1,81 @@
-aiohttp==3.8.3
-aiosignal==1.2.0
+aiohappyeyeballs==2.6.1
+aiohttp==3.11.18
+aioquic==1.2.0
+aiosignal==1.3.2
appdirs==1.4.4
-Arpeggio==1.10.2
-asgiref==3.4.1
-async-timeout==4.0.1
-attrs==21.2.0
-blinker==1.4
-Brotli==1.0.9
-certifi==2022.12.7
-cffi==1.15.0
-charset-normalizer==2.0.9
-click==8.0.3
-cryptography==36.0.2
+argon2-cffi==23.1.0
+argon2-cffi-bindings==21.2.0
+Arpeggio==2.0.2
+asgiref==3.8.1
+attrs==25.3.0
+blinker==1.9.0
+Brotli==1.1.0
+certifi==2025.4.26
+cffi==1.17.1
+click==8.2.0
+cryptography==44.0.3
+dataclasses-json==0.6.7
defusedxml==0.7.1
-Flask==2.0.2
-frozenlist==1.3.3
+Flask==3.1.0
+frozenlist==1.6.0
gltflib==1.0.13
Glymur==0.9.6
-h11==0.12.0
+h11==0.14.0
h2==4.1.0
-hpack==4.0.0
-hyperframe==6.0.1
+hpack==4.1.0
+hyperframe==6.1.0
idna==2.10
-itsdangerous==2.0.1
-jedi==0.18.1
-Jinja2==3.0.3
-kaitaistruct==0.9
-lazy-object-proxy==1.6.0
+itsdangerous==2.2.0
+jedi==0.19.2
+Jinja2==3.1.6
+kaitaistruct==0.10
+lazy-object-proxy==1.11.0
ldap3==2.9.1
-llsd~=1.0.0
-lxml==4.9.2
-MarkupSafe==2.0.1
-mitmproxy==8.0.0
-msgpack==1.0.3
-multidict==5.2.0
-numpy==1.24.2
-outleap~=0.4.1
-parso==0.8.3
+llsd==1.0.0
+lxml==5.4.0
+MarkupSafe==3.0.2
+marshmallow==3.26.1
+mitmproxy==11.1.3
+mitmproxy_linux==0.11.5
+mitmproxy_rs==0.11.5
+msgpack==1.1.0
+multidict==6.4.4
+mypy_extensions==1.1.0
+numpy==1.26.4
+outleap==0.7.1
+packaging==25.0
+parso==0.8.4
passlib==1.7.4
-prompt-toolkit==3.0.23
-protobuf==3.18.1
-ptpython==3.0.20
+prompt_toolkit==3.0.51
+propcache==0.3.1
+ptpython==3.0.30
publicsuffix2==2.20191221
-pyasn1==0.4.8
-pycparser==2.21
-pycollada==0.7.2
-Pygments==2.10.0
-pyOpenSSL==22.0.0
-pyparsing==2.4.7
-pyperclip==1.8.2
-PySide6-Essentials==6.4.2
-qasync==0.22.0
-recordclass==0.18.2
-requests==2.26.0
-ruamel.yaml==0.17.21
-ruamel.yaml.clib==0.2.7
-shiboken6==6.4.2
-six==1.16.0
+pyasn1==0.6.1
+pyasn1_modules==0.4.2
+pycollada==0.9
+pycparser==2.22
+Pygments==2.19.1
+pylsqpack==0.3.22
+pyOpenSSL==25.0.0
+pyparsing==3.2.1
+pyperclip==1.9.0
+PySide6_Essentials==6.9.0
+python-dateutil==2.9.0.post0
+qasync==0.27.1
+recordclass==0.23.1
+ruamel.yaml==0.18.10
+service-identity==24.2.0
+setuptools==80.7.1
+shiboken6==6.9.0
+six==1.17.0
sortedcontainers==2.4.0
-tornado==6.1
-transformations==2021.6.6
-typing-extensions==4.0.1
-urllib3==1.26.7
-urwid==2.1.2
-wcwidth==0.2.5
-Werkzeug==2.0.2
-wsproto==1.0.0
-yarl==1.8.2
-zstandard<0.18.0
+tornado==6.4.2
+transformations==2025.1.1
+typing-inspect==0.9.0
+typing_extensions==4.13.2
+urwid==2.6.16
+wcwidth==0.2.13
+Werkzeug==3.1.3
+wsproto==1.2.0
+yarl==1.20.0
+zstandard==0.23.0
@@ -10,3 +10,10 @@ universal = 1
max-line-length = 160
exclude = build/*, .eggs/*
ignore = F405, F403, E501, F841, E722, W503, E741, E731

[options.extras_require]
test =
    pytest
    aioresponses
    pytest-cov
    flake8
119
setup.py
@@ -1,117 +1,6 @@
-"""
-Copyright 2008, Linden Research, Inc.
-See NOTICE.md for previous contributors
-Copyright 2021, Salad Dais
-All Rights Reserved.
+#!/usr/bin/env python3
+
-This program is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation; either
-version 3 of the License, or (at your option) any later version.
+from setuptools import setup
+
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with this program; if not, write to the Free Software Foundation,
-Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-"""
-
-from os import path
-
-from setuptools import setup, find_packages
-
-here = path.abspath(path.dirname(__file__))
-
-version = '0.14.0'
-
-with open(path.join(here, 'README.md')) as readme_fh:
-    readme = readme_fh.read()
-
-setup(
-    name='hippolyzer',
-    version=version,
-    description="Analysis tools for SL-compatible virtual worlds",
-    long_description=readme,
-    long_description_content_type="text/markdown",
-    classifiers=[
-        "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
-        "Operating System :: MacOS",
-        "Operating System :: POSIX",
-        "Operating System :: Microsoft :: Windows",
-        "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: Implementation :: CPython",
-        "Topic :: System :: Networking :: Monitoring",
-        "Topic :: Software Development :: Libraries :: Python Modules",
-        "Topic :: Software Development :: Testing",
-    ],
-    author='Salad Dais',
-    author_email='83434023+SaladDais@users.noreply.github.com',
-    url='https://github.com/SaladDais/Hippolyzer/',
-    license='LGPLv3',
-    packages=find_packages(include=["hippolyzer", "hippolyzer.*"]),
-    package_data={
-        'hippolyzer': [
-            'apps/message_builder.ui',
-            'apps/proxy_mainwindow.ui',
-            'apps/filter_dialog.ui',
-            'apps/addon_dialog.ui',
-            'lib/base/message/data/message_template.msg',
-            'lib/base/message/data/message.xml',
-            'lib/base/network/data/ca-bundle.crt',
-            'lib/base/data/static_data.db2',
-            'lib/base/data/static_index.db2',
-            'lib/base/data/avatar_lad.xml',
-            'lib/base/data/male_collada_joints.xml',
-            'lib/base/data/avatar_skeleton.xml',
-            'lib/base/data/LICENSE-artwork.txt',
-        ],
-    },
-    entry_points={
-        'console_scripts': {
-            'hippolyzer-gui = hippolyzer.apps.proxy_gui:gui_main',
-            'hippolyzer-cli = hippolyzer.apps.proxy:main',
-        }
-    },
-    zip_safe=False,
-    python_requires='>=3.8',
-    install_requires=[
-        'llsd<1.1.0',
-        'defusedxml',
-        'aiohttp<4.0.0',
-        # Newer recordclasses break!
-        'recordclass>0.15,<0.18.3',
-        'lazy-object-proxy',
-        # requests breaks with newer idna
-        'idna<3,>=2.5',
-        # Needed for mesh format conversion tooling
-        'pycollada',
-        'transformations',
-        'gltflib',
-        # JP2 codec
-        'Glymur<0.9.7',
-        'numpy<2.0',
-
-        # Proxy-specific stuff
-        'outleap<1.0',
-        'arpeggio',
-        # 7.x will be a major change.
-        'mitmproxy>=8.0.0,<8.1',
-        'Werkzeug<3.0',
-        # For REPLs
-        'ptpython<4.0',
-        # These could be in extras_require if you don't want a GUI.
-        'pyside6-essentials',
-        'qasync',
-    ],
-    tests_require=[
-        "pytest",
-        "aioresponses",
-    ],
-)
+if __name__ == "__main__":
+    setup()
@@ -1,3 +1,5 @@
import glob

import setuptools  # noqa

import os
@@ -32,20 +34,20 @@ TO_DELETE = [
    "lib/aiohttp/_http_writer.c",
    "lib/aiohttp/_websocket.c",
-   # Improve this to work with different versions.
-   "lib/aiohttp/python39.dll",
-   "lib/lazy_object_proxy/python39.dll",
-   "lib/lxml/python39.dll",
-   "lib/markupsafe/python39.dll",
-   "lib/multidict/python39.dll",
-   "lib/numpy/core/python39.dll",
-   "lib/numpy/fft/python39.dll",
-   "lib/numpy/linalg/python39.dll",
-   "lib/numpy/random/python39.dll",
-   "lib/python39.dll",
-   "lib/recordclass/python39.dll",
-   "lib/regex/python39.dll",
+   "lib/aiohttp/python3*.dll",
+   "lib/lazy_object_proxy/python3*.dll",
+   "lib/lxml/python3*.dll",
+   "lib/markupsafe/python3*.dll",
+   "lib/multidict/python3*.dll",
+   "lib/numpy/core/python3*.dll",
+   "lib/numpy/fft/python3*.dll",
+   "lib/numpy/linalg/python3*.dll",
+   "lib/numpy/random/python3*.dll",
+   "lib/python3*.dll",
+   "lib/recordclass/python3*.dll",
+   "lib/regex/python3*.dll",
    "lib/test",
-   "lib/yarl/python39.dll",
+   "lib/yarl/python3*.dll",
]

COPY_TO_ZIP = [
@@ -77,11 +79,12 @@ class FinalizeCXFreezeCommand(Command):
        if path.name.startswith("exe.") and path.is_dir():
            for cleanse_suffix in TO_DELETE:
                cleanse_path = path / cleanse_suffix
-               shutil.rmtree(cleanse_path, ignore_errors=True)
-               try:
-                   os.unlink(cleanse_path)
-               except:
-                   pass
+               for globbed in glob.glob(str(cleanse_path)):
+                   shutil.rmtree(globbed, ignore_errors=True)
+                   try:
+                       os.unlink(globbed)
+                   except:
+                       pass
        for to_copy in COPY_TO_ZIP:
            shutil.copy(BASE_DIR / to_copy, path / to_copy)
        shutil.copytree(BASE_DIR / "addon_examples", path / "addon_examples")
@@ -95,6 +98,7 @@ options = {
    "passlib",
    "_cffi_backend",
    "hippolyzer",
    "mitmproxy_windows",
    ],
    # exclude packages that are not really needed
    "excludes": [
@@ -152,6 +152,15 @@ class TestDatatypes(unittest.TestCase):
    def test_str_llsd_serialization(self):
        self.assertEqual(b"'foo\\nbar'", llsd.format_notation("foo\nbar"))

    def test_int_enum_llsd_serialization(self):
        class SomeIntEnum(IntEnum):
            FOO = 4

        orig = SomeIntEnum.FOO
        val = llsd.parse_xml(llsd.format_xml(orig))
        self.assertIsInstance(val, int)
        self.assertEqual(orig, val)

    def test_jank_stringy_bytes(self):
        val = JankStringyBytes(b"foo\x00")
        self.assertTrue("o" in val)
@@ -163,3 +172,6 @@ class TestDatatypes(unittest.TestCase):
        self.assertNotEqual(b"foo", val)
        self.assertEqual(b"foo", JankStringyBytes(b"foo"))
        self.assertEqual("foo", JankStringyBytes(b"foo"))
        self.assertFalse(JankStringyBytes(b""))
        self.assertFalse(JankStringyBytes(b"\x00"))
        self.assertTrue(JankStringyBytes(b"\x01"))
@@ -49,3 +49,15 @@ class TestEvents(unittest.IsolatedAsyncioTestCase):
        await called.wait()
        mock.assert_called_with("foo")
        self.assertNotIn(_mock_wrapper, [x[0] for x in self.event.subscribers])

    async def test_multiple_subscribers(self):
        called = asyncio.Event()
        called2 = asyncio.Event()

        self.event.subscribe(lambda *args: called.set())
        self.event.subscribe(lambda *args: called2.set())

        self.event.notify(None)

        self.assertTrue(called.is_set())
        self.assertTrue(called2.is_set())
@@ -1,8 +1,10 @@
import copy
import datetime as dt
import unittest

from hippolyzer.lib.base.datatypes import *
-from hippolyzer.lib.base.inventory import InventoryModel
+from hippolyzer.lib.base.inventory import InventoryModel, SaleType, InventoryItem
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS

SIMPLE_INV = """\tinv_object\t0
@@ -11,6 +13,8 @@ SIMPLE_INV = """\tinv_object\t0
\t\tparent_id\t00000000-0000-0000-0000-000000000000
\t\ttype\tcategory
\t\tname\tContents|
\t\tmetadata\t<llsd><undef /></llsd>|
\t}
\tinv_item\t0
\t{
@@ -39,10 +43,59 @@ SIMPLE_INV = """\tinv_object\t0
\t}
\t\tname\tNew Script|
\t\tdesc\t2020-04-20 04:20:39 lsl2 script|
\t\tmetadata\t<llsd><map><key>experience</key><uuid>a2e76fcd-9360-4f6d-a924-000000000003</uuid></map></llsd>|
\t\tcreation_date\t1587367239
\t}
"""

SIMPLE_INV_PARSED = [
    {
        'name': 'Contents',
        'obj_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
        'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
        'type': 'category'
    },
    {
        'asset_id': UUID('00000000-0000-0000-0000-000000000000'),
        'created_at': 1587367239,
        'desc': '2020-04-20 04:20:39 lsl2 script',
        'flags': b'\x00\x00\x00\x00',
        'inv_type': 'script',
        'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
        'name': 'New Script',
        'metadata': {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")},
        'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
        'permissions': {
            'base_mask': 2147483647,
            'creator_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
            'everyone_mask': 0,
            'group_id': UUID('00000000-0000-0000-0000-000000000000'),
            'group_mask': 0,
            'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
            'next_owner_mask': 581632,
            'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
            'owner_mask': 2147483647,
        },
        'sale_info': {
            'sale_price': 10,
            'sale_type': 'not'
        },
        'type': 'lsltext'
    }
]
INV_CATEGORY = """\tinv_category\t0
\t{
\t\tcat_id\tf4d91477-def1-487a-b4f3-6fa201c17376
\t\tparent_id\t00000000-0000-0000-0000-000000000000
\t\ttype\tlsltext
\t\tpref_type\tlsltext
\t\tname\tScripts|
\t\towner_id\ta2e76fcd-9360-4f6d-a924-000000000003
\t}
"""


class TestLegacyInv(unittest.TestCase):
    def setUp(self) -> None:
@@ -52,15 +105,27 @@ class TestLegacyInv(unittest.TestCase):
        self.assertTrue(UUID('f4d91477-def1-487a-b4f3-6fa201c17376') in self.model.nodes)
        self.assertIsNotNone(self.model.root)

    def test_parse_category(self):
        model = InventoryModel.from_str(INV_CATEGORY)
        self.assertEqual(UUID('f4d91477-def1-487a-b4f3-6fa201c17376'), model.root.node_id)

    def test_serialize(self):
        self.model = InventoryModel.from_str(SIMPLE_INV)
        new_model = InventoryModel.from_str(self.model.to_str())
        self.assertEqual(self.model, new_model)

    def test_serialize_category(self):
        model = InventoryModel.from_str(INV_CATEGORY)
        new_model = InventoryModel.from_str(model.to_str())
        self.assertEqual(model, new_model)

    def test_category_legacy_serialization(self):
        self.assertEqual(INV_CATEGORY, InventoryModel.from_str(INV_CATEGORY).to_str())

    def test_item_access(self):
        item = self.model.nodes[UUID('dd163122-946b-44df-99f6-a6030e2b9597')]
        self.assertEqual(item.name, "New Script")
-       self.assertEqual(item.sale_info.sale_type, "not")
+       self.assertEqual(item.sale_info.sale_type, SaleType.NOT)
        self.assertDictEqual(item.metadata, {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")})
        self.assertEqual(item.model, self.model)

    def test_access_children(self):
@@ -95,43 +160,27 @@ class TestLegacyInv(unittest.TestCase):
        self.assertEqual(item, item_copy)

    def test_llsd_serialization(self):
        self.assertEqual(self.model.to_llsd(), SIMPLE_INV_PARSED)

    def test_llsd_date_parsing(self):
        model = InventoryModel.from_llsd(SIMPLE_INV_PARSED)
        item: InventoryItem = model.nodes.get(UUID("dd163122-946b-44df-99f6-a6030e2b9597"))  # type: ignore
        self.assertEqual(item.creation_date, dt.datetime(2020, 4, 20, 7, 20, 39, tzinfo=dt.timezone.utc))

    def test_llsd_serialization_ais(self):
        model = InventoryModel.from_str(INV_CATEGORY)
        self.assertEqual(
-           self.model.to_llsd(),
            [
                {
-                   'name': 'Contents',
-                   'obj_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
+                   'agent_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
+                   'category_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
+                   'name': 'Scripts',
                    'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
-                   'type': 'category'
                },
-               {
-                   'asset_id': UUID('00000000-0000-0000-0000-000000000000'),
-                   'created_at': 1587367239,
-                   'desc': '2020-04-20 04:20:39 lsl2 script',
-                   'flags': b'\x00\x00\x00\x00',
-                   'inv_type': 'script',
-                   'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
-                   'name': 'New Script',
-                   'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
-                   'permissions': {
-                       'base_mask': 2147483647,
-                       'creator_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
-                       'everyone_mask': 0,
-                       'group_id': UUID('00000000-0000-0000-0000-000000000000'),
-                       'group_mask': 0,
-                       'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
-                       'next_owner_mask': 581632,
-                       'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
-                       'owner_mask': 2147483647,
-                       'is_owner_group': 0,
-                   },
-                   'sale_info': {
-                       'sale_price': 10,
-                       'sale_type': 'not'
-                   },
-                   'type': 'lsltext'
+                   'type_default': 10,
+                   'version': -1
                }
-           ]
+           ],
+           model.to_llsd("ais")
        )

    def test_llsd_legacy_equality(self):
@@ -140,6 +189,9 @@ class TestLegacyInv(unittest.TestCase):
        new_model.root.name = "foo"
        self.assertNotEqual(self.model, new_model)

    def test_legacy_serialization(self):
        self.assertEqual(SIMPLE_INV, self.model.to_str())

    def test_difference_added(self):
        new_model = InventoryModel.from_llsd(self.model.to_llsd())
        diff = self.model.get_differences(new_model)
@@ -272,6 +324,270 @@
parameters 82
textures 0
"""

# TODO: Move appearance-related stuff elsewhere.

GIRL_NEXT_DOOR_APPEARANCE_MSG = Message(
    'AvatarAppearance',
    Block('Sender', ID=UUID(int=1), IsTrial=0),
    # We don't care about the value of this.
    Block('ObjectData', TextureEntry=b""),
    Block('VisualParam', ParamValue=9),
    Block('VisualParam', ParamValue=30),
    Block('VisualParam', ParamValue=71),
    Block('VisualParam', ParamValue=32),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=132),
    Block('VisualParam', ParamValue=10),
    Block('VisualParam', ParamValue=76),
    Block('VisualParam', ParamValue=84),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=43),
    Block('VisualParam', ParamValue=83),
    Block('VisualParam', ParamValue=113),
    Block('VisualParam', ParamValue=68),
    Block('VisualParam', ParamValue=73),
    Block('VisualParam', ParamValue=43),
    Block('VisualParam', ParamValue=35),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=7),
    Block('VisualParam', ParamValue=132),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=76),
    Block('VisualParam', ParamValue=91),
    Block('VisualParam', ParamValue=129),
    Block('VisualParam', ParamValue=106),
    Block('VisualParam', ParamValue=76),
    Block('VisualParam', ParamValue=58),
    Block('VisualParam', ParamValue=99),
    Block('VisualParam', ParamValue=73),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=203),
    Block('VisualParam', ParamValue=48),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=150),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=114),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=76),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=40),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=140),
    Block('VisualParam', ParamValue=86),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=99),
    Block('VisualParam', ParamValue=84),
    Block('VisualParam', ParamValue=53),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=66),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=100),
    Block('VisualParam', ParamValue=216),
    Block('VisualParam', ParamValue=214),
    Block('VisualParam', ParamValue=204),
    Block('VisualParam', ParamValue=204),
    Block('VisualParam', ParamValue=204),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=89),
    Block('VisualParam', ParamValue=109),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=61),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=115),
    Block('VisualParam', ParamValue=76),
    Block('VisualParam', ParamValue=91),
    Block('VisualParam', ParamValue=158),
    Block('VisualParam', ParamValue=102),
    Block('VisualParam', ParamValue=109),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=193),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=132),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=68),
    Block('VisualParam', ParamValue=35),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=97),
    Block('VisualParam', ParamValue=92),
    Block('VisualParam', ParamValue=79),
    Block('VisualParam', ParamValue=107),
    Block('VisualParam', ParamValue=160),
    Block('VisualParam', ParamValue=112),
    Block('VisualParam', ParamValue=63),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=159),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=73),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=102),
    Block('VisualParam', ParamValue=158),
    Block('VisualParam', ParamValue=145),
    Block('VisualParam', ParamValue=153),
    Block('VisualParam', ParamValue=163),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=122),
    Block('VisualParam', ParamValue=43),
    Block('VisualParam', ParamValue=94),
    Block('VisualParam', ParamValue=135),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=214),
    Block('VisualParam', ParamValue=204),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=56),
    Block('VisualParam', ParamValue=30),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=204),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=112),
    Block('VisualParam', ParamValue=127),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=100),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=84),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=94),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=255),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=0),
    Block('VisualParam', ParamValue=25),
    Block('VisualParam', ParamValue=23),
    Block('VisualParam', ParamValue=51),
    Block('VisualParam', ParamValue=1),
    Block('VisualParam', ParamValue=127),
    Block('AppearanceData', AppearanceVersion=1, CofVersion=100, Flags=0),
    Block('AppearanceHover', HoverHeight=Vector3(0.0, 0.0, 0.0))
)

class TestWearable(unittest.TestCase):
    def test_parse(self):
@@ -287,3 +603,17 @@ class TestWearable(unittest.TestCase):
    def test_visual_params(self):
        param = VISUAL_PARAMS.by_name("Eyelid_Inner_Corner_Up")
        self.assertEqual(param.value_max, 1.2)

    def test_message_equivalent(self):
        wearable = Wearable.from_str(GIRL_NEXT_DOOR_SHAPE)
        parsed = VISUAL_PARAMS.parse_appearance_message(GIRL_NEXT_DOOR_APPEARANCE_MSG)

        for i, (param_id, param_val) in enumerate(parsed.items()):
            param = VISUAL_PARAMS.by_id(param_id)
            if param.wearable != "shape":
                continue
            # A parameter may legitimately be missing from the shape depending on its age,
            # just assume it's the default value.
            expected_val = wearable.parameters.get(param_id, param.value_default)
            # This seems like quite a large delta. Maybe we should be using different quantization here.
            self.assertAlmostEqual(expected_val, param_val, delta=0.015)
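Editor's note on the 0.015 delta the test tolerates: AvatarAppearance carries each visual param as a single U8, so a float spanning [value_min, value_max] is quantized into 255 steps. A quick back-of-the-envelope check (the range below is illustrative; real ranges come from avatar_lad.xml):

value_min, value_max = -0.5, 1.5        # illustrative param range
step = (value_max - value_min) / 255    # one U8 quantization step
print(step)                             # ~0.0078 -> a 0.015 delta allows roughly two steps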
@@ -181,6 +181,8 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
        self.message_handler.handle(msg)

    async def test_subscription(self):
        called = asyncio.Event()
        called2 = asyncio.Event()
        with self.message_handler.subscribe_async(
            message_names=("Foo",),
            predicate=lambda m: m["Bar"]["Baz"] == 1,
@@ -192,6 +194,10 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
            msg3 = Message("Foo", Block("Bar", Baz=1, Biz=3))
            self._fake_received_message(msg1)
            self._fake_received_message(msg2)

            self.message_handler.subscribe("Foo", lambda *args: called.set())
            self.message_handler.subscribe("Foo", lambda *args: called2.set())

            self._fake_received_message(msg3)
            received = []
            while True:
@@ -199,14 +205,15 @@ class TestMessageHandlers(unittest.IsolatedAsyncioTestCase):
                    received.append(await asyncio.wait_for(get_msg(), 0.001))
                except asyncio.exceptions.TimeoutError:
                    break
-           self.assertEqual(len(foo_handlers), 1)
+           self.assertEqual(len(foo_handlers), 3)
            self.assertListEqual(received, [msg1, msg3])
            # The message should have been take()n, making a copy
            self.assertIsNot(msg1, received[0])
            # take() was called, so this should have been marked queued
            self.assertTrue(msg1.queued)
        # Leaving the block should have unsubscribed automatically
-       self.assertEqual(len(foo_handlers), 0)
+       self.assertEqual(len(foo_handlers), 2)
        self.assertTrue(called.is_set())

    async def test_subscription_no_take(self):
        with self.message_handler.subscribe_async(("Foo",), take=False) as get_msg:
@@ -50,6 +50,8 @@ OBJECT_UPDATE = binascii.unhexlify(''.join(OBJECT_UPDATE.split()))

COARSE_LOCATION_UPDATE = b'\x00\x00\x00\x00E\x00\xff\x06\x00\xff\xff\xff\xff\x00'

UNKNOWN_PACKET = b'\x00\x00\x00\x00E\x00\xff\xf0\x00\xff\xff\xff\xff\x00'


class TestPacketDecode(unittest.TestCase):

@@ -110,3 +112,12 @@ class TestPacketDecode(unittest.TestCase):
        parsed = deserializer.deserialize(message)
        logging.debug("Parsed blocks: %r " % (list(parsed.blocks.keys()),))
        self.assertEqual(message, serializer.serialize(parsed))

    def test_unknown_packet_roundtrips(self):
        message = UNKNOWN_PACKET
        deserializer = UDPMessageDeserializer(settings=self.settings)
        serializer = UDPMessageSerializer()
        parsed = deserializer.deserialize(message)
        logging.debug("Parsed blocks: %r " % (list(parsed.blocks.keys()),))
        self.assertEqual("UnknownMessage:240", parsed.name)
        self.assertEqual(message, serializer.serialize(parsed))
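Editor's note: the expected name "UnknownMessage:240" falls out of the LLUDP message-number encoding. After the flags byte, 4-byte sequence number, and extra-header byte count, a leading 0xFF escapes to the medium-or-lower frequency range; here the next byte is the message number. A hedged byte-by-byte decode of the fixture (layout matching the COARSE_LOCATION_UPDATE fixture above, where 0xff 0x06 is the medium-frequency CoarseLocationUpdate):

UNKNOWN_PACKET = b'\x00\x00\x00\x00E\x00\xff\xf0\x00\xff\xff\xff\xff\x00'

flags = UNKNOWN_PACKET[0]                          # 0x00: not zero-coded, not reliable
seq = int.from_bytes(UNKNOWN_PACKET[1:5], "big")   # 0x45 == 69
extra = UNKNOWN_PACKET[5]                          # 0x00: no extra header bytes
assert UNKNOWN_PACKET[6] == 0xFF                   # escape: not a high-frequency message
msg_num = UNKNOWN_PACKET[7]                        # 0xF0 == 240, unassigned in the template
print(f"UnknownMessage:{msg_num}")                 # the name the deserializer synthesizes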
@@ -20,7 +20,7 @@ class TestSkeleton(unittest.TestCase):
        self.assertEqual(113, self.skeleton["mKneeLeft"].index)

    def test_get_joint_parent(self):
-       self.assertEqual("mChest", self.skeleton["mNeck"].parent().name)
+       self.assertEqual("mChest", self.skeleton["mNeck"].parent.name)

    def test_get_joint_matrix(self):
        expected_mat = np.array([
@@ -30,3 +30,9 @@ class TestSkeleton(unittest.TestCase):
            [0., 0., 0., 1.]
        ])
        np.testing.assert_equal(expected_mat, self.skeleton["mNeck"].matrix)

    def test_get_inverse_joint(self):
        self.assertEqual("R_CLAVICLE", self.skeleton["L_CLAVICLE"].inverse.name)
        self.assertEqual(None, self.skeleton["mChest"].inverse)
        self.assertEqual("mHandMiddle1Right", self.skeleton["mHandMiddle1Left"].inverse.name)
        self.assertEqual("RIGHT_HANDLE", self.skeleton["LEFT_HANDLE"].inverse.name)
@@ -27,7 +27,7 @@ from hippolyzer.lib.base.message.data import msg_tmpl
from hippolyzer.lib.base.message.template import MessageTemplate, MessageTemplateBlock, MessageTemplateVariable
from hippolyzer.lib.base.message.template_dict import TemplateDictionary
from hippolyzer.lib.base.message.template_parser import MessageTemplateParser
-from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgTrust, MsgEncoding, \
+from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgEncoding, \
    MsgDeprecation, MsgBlockType, MsgType


@@ -45,8 +45,8 @@ class TestDictionary(unittest.TestCase):
        msg_dict = TemplateDictionary(self.template_list)
        packet = msg_dict.get_template_by_name('ConfirmEnableSimulator')
        assert packet is not None, "get_packet failed"
-       assert packet.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE, "Incorrect frequency"
-       assert packet.msg_num == 8, "Incorrect message number for ConfirmEnableSimulator"
+       assert packet.frequency == MsgFrequency.MEDIUM, "Incorrect frequency"
+       assert packet.num == 8, "Incorrect message number for ConfirmEnableSimulator"

    def test_get_packet_pair(self):
        msg_dict = TemplateDictionary(self.template_list)
@@ -76,29 +76,29 @@ class TestTemplates(unittest.TestCase):
        template = self.msg_dict['CompletePingCheck']
        name = template.name
        freq = template.frequency
-       num = template.msg_num
-       trust = template.msg_trust
-       enc = template.msg_encoding
+       num = template.num
+       trust = template.trusted
+       enc = template.encoding
        assert name == 'CompletePingCheck', "Expected: CompletePingCheck Returned: " + name
-       assert freq == MsgFrequency.HIGH_FREQUENCY_MESSAGE, "Expected: High Returned: " + freq
+       assert freq == MsgFrequency.HIGH, "Expected: High Returned: " + freq
        assert num == 2, "Expected: 2 Returned: " + str(num)
-       assert trust == MsgTrust.LL_NOTRUST, "Expected: NotTrusted Returned: " + trust
-       assert enc == MsgEncoding.LL_UNENCODED, "Expected: Unencoded Returned: " + enc
+       assert not trust, "Expected: NotTrusted Returned: " + trust
+       assert enc == MsgEncoding.UNENCODED, "Expected: Unencoded Returned: " + enc

    def test_deprecated(self):
        template = self.msg_dict['ObjectPosition']
-       dep = template.msg_deprecation
-       assert dep == MsgDeprecation.LL_DEPRECATED, "Expected: Deprecated Returned: " + str(dep)
+       dep = template.deprecation
+       assert dep == MsgDeprecation.DEPRECATED, "Expected: Deprecated Returned: " + str(dep)

    def test_template_fixed(self):
        template = self.msg_dict['PacketAck']
-       num = template.msg_num
+       num = template.num
        assert num == 251, "Expected: 251 Returned: " + str(num)

    def test_blacklisted(self):
        template = self.msg_dict['TeleportFinish']
-       self.assertEqual(template.msg_deprecation,
-                        MsgDeprecation.LL_UDPBLACKLISTED)
+       self.assertEqual(template.deprecation,
+                        MsgDeprecation.UDPBLACKLISTED)

    def test_block(self):
        block = self.msg_dict['OpenCircuit'].get_block('CircuitInfo')
@@ -167,7 +167,7 @@ class TestTemplates(unittest.TestCase):

        frequency_counter = {"low": 0, 'medium': 0, "high": 0, 'fixed': 0}
        for template in list(self.msg_dict.message_templates.values()):
-           frequency_counter[template.get_frequency_as_string()] += 1
+           frequency_counter[template.frequency.name.lower()] += 1
        self.assertEqual(low_count, frequency_counter["low"])
        self.assertEqual(medium_count, frequency_counter["medium"])
        self.assertEqual(high_count, frequency_counter["high"])
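Editor's note: the `get_frequency_as_string()` helper can disappear because an enum member already carries its own name; lowering it yields the same dict key. A minimal illustration (the numeric values below are made up; only the `.name` behavior matters):

from enum import IntEnum


class MsgFrequency(IntEnum):
    FIXED = 0
    LOW = 1
    MEDIUM = 2
    HIGH = 3


assert MsgFrequency.MEDIUM.name.lower() == "medium"  # same key the counter uses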
@@ -4,6 +4,7 @@ import unittest
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.templates import (
@@ -61,7 +62,7 @@ class XferManagerTests(BaseTransferTests):
        ))

    async def test_small_xfer_upload(self):
-       asyncio.create_task(self._handle_vfile_upload())
+       _ = create_logged_task(self._handle_vfile_upload())
        await asyncio.wait_for(self.xfer_manager.upload_asset(
            AssetType.BODYPART, self.SMALL_PAYLOAD
        ), timeout=0.1)
@@ -69,7 +70,7 @@ class XferManagerTests(BaseTransferTests):

    async def test_large_xfer_upload(self):
        # Larger payloads take a different path
-       asyncio.create_task(self._handle_vfile_upload())
+       _ = create_logged_task(self._handle_vfile_upload())
        await asyncio.wait_for(self.xfer_manager.upload_asset(
            AssetType.BODYPART, self.LARGE_PAYLOAD
        ), timeout=0.1)
@@ -125,7 +126,7 @@ class TestTransferManager(BaseTransferTests):
            packet_num += 1

    async def test_simple_transfer(self):
-       asyncio.create_task(self._handle_covenant_download())
+       _ = create_logged_task(self._handle_covenant_download())
        transfer: Transfer = await asyncio.wait_for(self.transfer_manager.request(
            source_type=TransferSourceType.SIM_ESTATE,
            params=TransferRequestParamsSimEstate(
@@ -0,0 +1,39 @@
from typing import Mapping, Optional

import multidict

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.test_utils import MockHandlingCircuit
from hippolyzer.lib.client.hippo_client import ClientSettings
from hippolyzer.lib.client.object_manager import ClientWorldObjectManager
from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession, BaseClientSessionManager


class MockClientRegion(BaseClientRegion):
    def __init__(self, caps_urls: Optional[dict] = None):
        super().__init__()
        self.handle = None
        self.circuit_addr = ("127.0.0.1", 1)
        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
        self.circuit = MockHandlingCircuit(self.message_handler)
        self._name = "Test"
        self.cap_urls = multidict.MultiDict()
        if caps_urls:
            self.cap_urls.update(caps_urls)
        self.caps_client = CapsClient(self.cap_urls)

    def session(self):
        return MockClientSession(UUID.ZERO, UUID.ZERO, UUID.ZERO, 0, None)

    def update_caps(self, caps: Mapping[str, str]) -> None:
        pass


class MockClientSession(BaseClientSession):
    def __init__(self, id, secure_session_id, agent_id, circuit_code,
                 session_manager: Optional[BaseClientSessionManager]):
        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager)
        self.objects = ClientWorldObjectManager(self, ClientSettings(), None)
@@ -14,7 +14,7 @@ from hippolyzer.lib.base.message.message_handler import MessageHandler
|
||||
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, UDPPacket, Direction
-from hippolyzer.lib.base.test_utils import MockTransport, MockConnectionHolder
+from hippolyzer.lib.base.test_utils import MockTransport, MockConnectionHolder, soon
from hippolyzer.lib.client.hippo_client import HippoClient, HippoClientProtocol

@@ -72,10 +72,6 @@ class MockHippoClient(HippoClient):
        return MockServerTransport(self.server), protocol


-async def _soon(get_msg) -> Message:
-    return await asyncio.wait_for(get_msg(), timeout=1.0)
-
-
class TestHippoClient(unittest.IsolatedAsyncioTestCase):
    FAKE_LOGIN_URI = "http://127.0.0.1:1/login.cgi"
    FAKE_LOGIN_RESP = {

@@ -130,8 +126,8 @@ class TestHippoClient(unittest.IsolatedAsyncioTestCase):
        with self.server_handler.subscribe_async(
            ("*",),
        ) as get_msg:
-            assert (await _soon(get_msg)).name == "UseCircuitCode"
-            assert (await _soon(get_msg)).name == "CompleteAgentMovement"
+            assert (await soon(get_msg())).name == "UseCircuitCode"
+            assert (await soon(get_msg())).name == "CompleteAgentMovement"
            self.server.circuit.send(Message(
                'RegionHandshake',
                Block('RegionInfo', fill_missing=True),

@@ -139,8 +135,8 @@ class TestHippoClient(unittest.IsolatedAsyncioTestCase):
                Block('RegionInfo3', fill_missing=True),
                Block('RegionInfo4', fill_missing=True),
            ))
-            assert (await _soon(get_msg)).name == "RegionHandshakeReply"
-            assert (await _soon(get_msg)).name == "AgentThrottle"
+            assert (await soon(get_msg())).name == "RegionHandshakeReply"
+            assert (await soon(get_msg())).name == "AgentThrottle"
        await login_task

    async def test_login(self):

@@ -149,21 +145,21 @@ class TestHippoClient(unittest.IsolatedAsyncioTestCase):
            ("*",),
        ) as get_msg:
            self.client.logout()
-            assert (await _soon(get_msg)).name == "LogoutRequest"
+            assert (await soon(get_msg())).name == "LogoutRequest"

    async def test_eq(self):
        await self._log_client_in(self.client)
        with self.client.session.message_handler.subscribe_async(
            ("ViewerFrozenMessage", "NotTemplated"),
        ) as get_msg:
-            assert (await _soon(get_msg)).name == "ViewerFrozenMessage"
-            msg = await _soon(get_msg)
+            assert (await soon(get_msg())).name == "ViewerFrozenMessage"
+            msg = await soon(get_msg())
            assert msg.name == "NotTemplated"
            assert msg["EventData"]["foo"]["bar"] == 1

    async def test_inventory_manager(self):
        await self._log_client_in(self.client)
-        self.assertEqual(self.client.session.inventory_manager.model.root.node_id, UUID(int=4))
+        self.assertEqual(self.client.session.inventory.model.root.node_id, UUID(int=4))

    async def test_resend_suppression(self):
        """Make sure the client only handles the first seen copy of a reliable message"""

@@ -179,5 +175,5 @@ class TestHippoClient(unittest.IsolatedAsyncioTestCase):
        self.server_transport.send_packet(packet)

        self.server_circuit.send(Message("AgentDataUpdate", Block("AgentData", fill_missing=True)))
-        assert (await _soon(get_msg)).name == "ChatFromSimulator"
-        assert (await _soon(get_msg)).name == "AgentDataUpdate"
+        assert (await soon(get_msg())).name == "ChatFromSimulator"
+        assert (await soon(get_msg())).name == "AgentDataUpdate"
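The `_soon` helper removed above was just `asyncio.wait_for` with a one-second timeout; these hunks switch the tests to the shared `soon` helper now exported by `hippolyzer.lib.base.test_utils`, which takes the awaitable itself (`soon(get_msg())`) rather than a zero-argument callable. The shared helper's body isn't shown in this compare; a minimal sketch consistent with how the call sites use it:

import asyncio
from typing import Awaitable, TypeVar

_T = TypeVar("_T")


async def soon(awaitable: Awaitable[_T], timeout: float = 1.0) -> _T:
    # Sketch only: await the given awaitable, failing fast if it stalls,
    # mirroring the deleted _soon() but taking the awaitable directly.
    return await asyncio.wait_for(awaitable, timeout=timeout)

Taking an awaitable instead of a callable also lets the same helper wrap already-running tasks, which the new parcel manager test below relies on (`await soon(req_task)`).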
61 tests/client/test_inventory_manager.py Normal file
@@ -0,0 +1,61 @@
import unittest

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.client.inventory_manager import InventoryManager
from tests.client import MockClientRegion

CREATE_FOLDER_PAYLOAD = {
    '_base_uri': 'slcap://InventoryAPIv3',
    '_created_categories': [
        UUID(int=2),
    ],
    '_created_items': [],
    '_embedded': {
        'categories': {
            f'{UUID(int=2)}': {
                '_embedded': {'categories': {}, 'items': {}, 'links': {}},
                '_links': {
                    'parent': {'href': f'/category/{UUID(int=1)}'},
                    'self': {'href': f'/category/{UUID(int=2)}'}
                },
                'agent_id': f'{UUID(int=9)}',
                'category_id': f'{UUID(int=2)}',
                'name': 'New Folder',
                'parent_id': f'{UUID(int=1)}',
                'type_default': -1,
                'version': 1
            }
        },
        'items': {}, 'links': {}
    },
    '_links': {
        'categories': {'href': f'/category/{UUID(int=1)}/categories'},
        'category': {'href': f'/category/{UUID(int=1)}', 'name': 'self'},
        'children': {'href': f'/category/{UUID(int=1)}/children'},
        'items': {'href': f'/category/{UUID(int=1)}/items'},
        'links': {'href': f'/category/{UUID(int=1)}/links'},
        'parent': {'href': '/category/00000000-0000-0000-0000-000000000000'},
        'self': {'href': f'/category/{UUID(int=1)}/children'}
    },
    '_updated_category_versions': {str(UUID(int=1)): 27},
    'agent_id': UUID(int=9),
    'category_id': UUID(int=1),
    'name': 'My Inventory',
    'parent_id': UUID.ZERO,
    'type_default': 8,
    'version': 27,
}


class TestInventoryManager(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self):
        self.region = MockClientRegion()
        self.session = self.region.session()
        self.inv_manager = InventoryManager(self.session)
        self.model = self.inv_manager.model
        self.handler = self.region.message_handler

    def test_create_folder_response(self):
        self.inv_manager.process_aisv3_response(CREATE_FOLDER_PAYLOAD)
        self.assertIsNotNone(self.model.get_category(UUID(int=1)))
        self.assertIsNotNone(self.model.get_category(UUID(int=2)))
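The fixture above mirrors an AISv3 (`slcap://InventoryAPIv3`) response: a HAL-style document where the acted-on category sits at the top level, children are nested under `_embedded`, and `_created_categories`/`_updated_category_versions` tell the client what changed. The test asserts that both the root (`UUID(int=1)`) and the newly created child (`UUID(int=2)`) land in the model, which implies the handler recurses through `_embedded`; a hypothetical traversal in that spirit (not hippolyzer's actual `process_aisv3_response`):

def iter_categories(payload: dict):
    # Yield (category_id, parent_id, name) for this category and every
    # category nested under its _embedded key, recursively.
    yield payload['category_id'], payload['parent_id'], payload['name']
    for child in payload.get('_embedded', {}).get('categories', {}).values():
        yield from iter_categories(child)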
69 tests/client/test_material_manager.py Normal file
@@ -0,0 +1,69 @@
import unittest
from typing import Any

import aioresponses

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base import llsd
from hippolyzer.lib.client.object_manager import ClientObjectManager

from . import MockClientRegion


class MaterialManagerTest(unittest.IsolatedAsyncioTestCase):
    FAKE_CAPS = {
        "RenderMaterials": "http://127.0.0.1:8023"
    }

    GET_RENDERMATERIALS_BODY = [
        {'ID': UUID(int=1).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 1, 'EnvIntensity': 0,
                      'NormMap': UUID(int=4), 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': 10000, 'NormRotation': 0, 'SpecColor': [255, 255, 255, 255],
                      'SpecExp': 51, 'SpecMap': UUID(int=5), 'SpecOffsetX': 0,
                      'SpecOffsetY': 0, 'SpecRepeatX': 10000, 'SpecRepeatY': 10000, 'SpecRotation': 0}},
        {'ID': UUID(int=2).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 0, 'EnvIntensity': 0,
                      'NormMap': UUID(int=6), 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': -10000, 'NormRotation': 0,
                      'SpecColor': [255, 255, 255, 255], 'SpecExp': 51,
                      'SpecMap': UUID(int=7), 'SpecOffsetX': 0, 'SpecOffsetY': 0,
                      'SpecRepeatX': 10000, 'SpecRepeatY': -10000, 'SpecRotation': 0}},
        {'ID': UUID(int=3).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 1, 'EnvIntensity': 50,
                      'NormMap': UUID.ZERO, 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': 10000, 'NormRotation': 0, 'SpecColor': [255, 255, 255, 255],
                      'SpecExp': 200, 'SpecMap': UUID(int=8), 'SpecOffsetX': 0,
                      'SpecOffsetY': 0, 'SpecRepeatX': 10000, 'SpecRepeatY': 10000, 'SpecRotation': 0}},
    ]

    def _make_rendermaterials_resp(self, resp: Any) -> bytes:
        return llsd.format_xml({"Zipped": llsd.zip_llsd(resp)})

    async def asyncSetUp(self):
        self.aio_mock = aioresponses.aioresponses()
        self.aio_mock.start()
        # Requesting all materials
        self.aio_mock.get(
            self.FAKE_CAPS['RenderMaterials'],
            body=self._make_rendermaterials_resp(self.GET_RENDERMATERIALS_BODY)
        )
        # Specific material request
        self.aio_mock.post(
            self.FAKE_CAPS['RenderMaterials'],
            body=self._make_rendermaterials_resp([self.GET_RENDERMATERIALS_BODY[0]])
        )
        self.region = MockClientRegion(self.FAKE_CAPS)
        self.manager = ClientObjectManager(self.region)

    async def asyncTearDown(self):
        self.aio_mock.stop()

    async def test_fetch_all_materials(self):
        await self.manager.request_all_materials()
        self.assertListEqual([UUID(int=1), UUID(int=2), UUID(int=3)], list(self.manager.state.materials.keys()))

    async def test_fetch_some_materials(self):
        mats = await self.manager.request_materials((UUID(int=1),))
        self.assertListEqual([UUID(int=1)], list(mats.keys()))
        self.assertListEqual([UUID(int=1)], list(self.manager.state.materials.keys()))
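`_make_rendermaterials_resp` captures the RenderMaterials capability's wire format: the material list is serialized as binary LLSD, zlib-compressed, and wrapped in an outer LLSD-XML map under the single key `Zipped`. Decoding is the mirror image; a sketch using the standard `llsd` package names (`parse_xml`/`parse_binary` — whether hippolyzer's `llsd` wrapper exposes a ready-made inverse of `zip_llsd` isn't shown in this compare):

import zlib
import llsd


def parse_rendermaterials_body(body: bytes) -> list:
    # Outer layer: an LLSD-XML map with one binary member named "Zipped".
    wrapper = llsd.parse_xml(body)
    # Inner layer: zlib-compressed binary LLSD holding the material array.
    return llsd.parse_binary(zlib.decompress(wrapper["Zipped"]))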
333 tests/client/test_parcel_manager.py Normal file
@@ -0,0 +1,333 @@
import asyncio
import collections
import unittest
from typing import Dict

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.templates import ParcelGridInfo, ParcelGridType, ParcelGridFlags, \
    ParcelPropertiesBitmapSerializer
from hippolyzer.lib.base.test_utils import soon
from hippolyzer.lib.client.parcel_manager import ParcelManager

from . import MockClientRegion

OVERLAY_CHUNKS = (
    b'\xc2\x82\x82\xc2\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82'
    b'\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82'
    b'\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\xc2B\x02\x02B\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x82B\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\xc2\x82\x82\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',

    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',

    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',

    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
    b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',
)
BITMAPS = (
    b'\x07\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00',

    b'\xf8\xff\xff\xff\xff\xff\xff\x7f\xf8\xff\xff\xff\xff\xff\xff\xff\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
    b'\xff\xff\xff\xff\xff\xff\xff\xff',

    b'\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00',
)


class TestParcelOverlay(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self):
        self.region = MockClientRegion()
        self.parcel_manager = ParcelManager(self.region)
        self.handler = self.region.message_handler
        self.test_msgs = []
        for i, chunk in enumerate(OVERLAY_CHUNKS):
            self.test_msgs.append(Message(
                'ParcelOverlay',
                Block('ParcelData', SequenceID=i, Data=chunk),
            ))

    def test_low_level_parse(self):
        spec = se.BitfieldDataclass(ParcelGridInfo)
        reader = se.BufferReader("<", OVERLAY_CHUNKS[0])
        self.assertEqual(
            ParcelGridInfo(ParcelGridType.GROUP, ParcelGridFlags.SOUTH_LINE | ParcelGridFlags.WEST_LINE),
            reader.read(spec),
        )
        self.assertEqual(
            ParcelGridInfo(ParcelGridType.GROUP, ParcelGridFlags.SOUTH_LINE),
            reader.read(spec),
        )

    def _get_parcel_areas(self) -> Dict[int, int]:
        c = collections.Counter()
        for parcel_idx in self.parcel_manager.parcel_indices.flatten():
            c[parcel_idx] += self.parcel_manager.GRID_STEP
        return dict(c.items())

    async def test_handle_overlay(self):
        self.assertFalse(self.parcel_manager.overlay_complete.is_set())
        for msg in self.test_msgs:
            self.handler.handle(msg)
        self.assertTrue(self.parcel_manager.overlay_complete.is_set())
        self.assertDictEqual({1: 36, 2: 16344, 3: 4}, self._get_parcel_areas())

    async def test_request_parcel_properties(self):
        for msg in self.test_msgs:
            self.handler.handle(msg)
        req_task = asyncio.create_task(self.parcel_manager.request_dirty_parcels())
        # HACK: Wait for requests to be sent out
        await asyncio.sleep(0.01)

        for i in range(3):
            self.handler.handle(Message(
                "ParcelProperties",
                Block(
                    "ParcelData",
                    LocalID=i + 1,
                    SequenceID=i + 1,
                    Name=str(i + 1),
                    GroupID=UUID.ZERO,
                    ParcelFlags=0,
                    Bitmap=BITMAPS[i],
                ),
            ))
        await soon(req_task)
        self.assertEqual(3, len(self.parcel_manager.parcels))
        self.assertEqual("1", self.parcel_manager.parcels[0].name)

    async def test_parcel_bitmap_equivalence(self):
        for msg in self.test_msgs:
            self.handler.handle(msg)

        serializer = ParcelPropertiesBitmapSerializer()
        bitmaps = [serializer.deserialize(None, x) for x in BITMAPS]

        for y in range(ParcelManager.GRID_STEP):
            for x in range(ParcelManager.GRID_STEP):
                parcel_idx = self.parcel_manager.parcel_indices[y, x] - 1
                for i, bitmap in enumerate(bitmaps):
                    bmp_set = bitmap[y, x]
                    if bmp_set and parcel_idx != i:
                        raise AssertionError(f"Parcel {parcel_idx} unexpectedly set in Bitmap {i} at {y, x}")
                    elif not bmp_set and parcel_idx == i:
                        raise AssertionError(f"Parcel {parcel_idx} not set in Bitmap {i} at {y, x}")
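For context on the fixtures: the simulator sends a region's parcel overlay as four `ParcelOverlay` messages, each carrying a quarter of a one-byte-per-cell grid, with `SequenceID` giving the chunk's position (4 chunks x 1024 bytes = a 64x64 grid). The low bits of each cell byte encode the ownership type and the high bits the border flags, which is exactly what the `se.BitfieldDataclass(ParcelGridInfo)` parse above demonstrates (0xc2 -> GROUP with SOUTH_LINE | WEST_LINE, 0x82 -> GROUP with SOUTH_LINE). A toy reassembler showing the shape of the bookkeeping (the real `ParcelManager` isn't part of this diff):

import asyncio


class OverlayAssembler:
    CHUNK_COUNT = 4  # matches the four SequenceIDs built in asyncSetUp above

    def __init__(self):
        self._chunks = [None] * self.CHUNK_COUNT
        self.overlay_complete = asyncio.Event()

    def add_chunk(self, sequence_id: int, data: bytes) -> None:
        # Slot each chunk by SequenceID; arrival order isn't guaranteed.
        self._chunks[sequence_id] = data
        if all(c is not None for c in self._chunks):
            self.overlay_complete.set()

    @property
    def overlay(self) -> bytes:
        return b"".join(self._chunks)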
@@ -12,6 +12,7 @@ from yarl import URL

from hippolyzer.apps.proxy import run_http_proxy_process
from hippolyzer.lib.base.datatypes import Vector3
+from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager

@@ -164,7 +165,7 @@ class TestMITMProxy(BaseProxyTest):
    def test_mitmproxy_works(self):
        async def _request_example_com():
            # Pump callbacks from mitmproxy
-            asyncio.create_task(self.http_event_manager.run())
+            _ = create_logged_task(self.http_event_manager.run())
            try:
                async with self.caps_client.get("http://example.com/", timeout=0.5) as resp:
                    self.assertIn(b"Example Domain", await resp.read())
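This hunk, and several in the object manager tests below, replace bare `asyncio.create_task(...)` with `create_logged_task(...)` from `hippolyzer.lib.base.helpers`. The helper's body isn't part of this compare; the usual motivation, and a plausible minimal version, is that a fire-and-forget `asyncio.Task` can be garbage-collected while still running and will silently swallow its exception unless something awaits it or inspects it in a done-callback:

import asyncio
import logging
from typing import Any, Coroutine, Optional, Set

LOG = logging.getLogger(__name__)
_BACKGROUND_TASKS: Set[asyncio.Task] = set()  # strong refs so tasks aren't GC'd mid-flight


def create_logged_task(coro: Coroutine[Any, Any, Any], name: Optional[str] = None) -> asyncio.Task:
    # Sketch only: like asyncio.create_task(), but failures get logged
    # instead of disappearing when nobody awaits the task.
    task = asyncio.create_task(coro, name=name)
    _BACKGROUND_TASKS.add(task)

    def _log_if_failed(done: asyncio.Task) -> None:
        _BACKGROUND_TASKS.discard(done)
        if not done.cancelled() and done.exception() is not None:
            LOG.error("Task %r died with an exception", done, exc_info=done.exception())

    task.add_done_callback(_log_if_failed)
    return task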
@@ -21,6 +21,9 @@ from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.test_utils import BaseProxyTest


+UNKNOWN_PACKET = b'\x00\x00\x00\x00E\x00\xff\xf0\x00\xff\xff\xff\xff\x00'
+
+
class MockAddon(BaseAddon):
    def __init__(self):
        self.events = []
@@ -242,6 +245,21 @@ class LLUDPIntegrationTests(BaseProxyTest):
        self.assertEqual(entry.name, "UndoLand")
        self.assertEqual(entry.message.dropped, True)

+    async def test_logging_unknown_message(self):
+        message_logger = SimpleMessageLogger()
+        self.session_manager.message_logger = message_logger
+        self._setup_default_circuit()
+        self.protocol.datagram_received(UNKNOWN_PACKET, self.region_addr)
+        await self._wait_drained()
+        entries = message_logger.entries
+        self.assertEqual(len(entries), 1)
+        entry: LLUDPMessageLogEntry = entries[0]  # type: ignore
+        # Freezing shouldn't affect this
+        entry.freeze()
+        self.assertEqual(entry.name, "UnknownMessage:240")
+        self.assertEqual(entry.message.dropped, False)
+        self.assertEqual(entry.message.unknown_message, True)
+
    async def test_session_message_handler(self):
        self._setup_default_circuit()
        obj_update = self._make_objectupdate_compressed(1234)
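The `UNKNOWN_PACKET` fixture added above decodes as a well-formed LLUDP datagram whose message number simply isn't in the message template, which is where the asserted name `UnknownMessage:240` comes from. An annotated breakdown of the same bytes (standard LLUDP framing; the trailing body bytes are arbitrary filler):

UNKNOWN_PACKET = (
    b'\x00'              # packet flags: not zerocoded, not reliable
    b'\x00\x00\x00E'     # sequence number, big-endian (0x45)
    b'\x00'              # extra header byte count
    b'\xff\xf0'          # 0xFF prefix + 0xF0: medium-frequency message number 240
    b'\x00\xff\xff\xff\xff\x00'  # opaque message body
)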
@@ -6,6 +6,7 @@ from typing import *
from unittest import mock

from hippolyzer.lib.base.datatypes import *
+from hippolyzer.lib.base.helpers import create_logged_task
from hippolyzer.lib.base.message.message import Block, Message as Message
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
@@ -620,7 +621,7 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio
        async def _create_after():
            await asyncio.sleep(0.001)
            self._create_object(region_handle=123, local_id=child.ParentID)
-        asyncio.create_task(_create_after())
+        _ = create_logged_task(_create_after())

        await self.session.objects.load_ancestors(child)
        await self.session.objects.load_ancestors(parentless)
@@ -667,7 +668,7 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio

    async def test_handle_object_update_event(self):
        with self.session.objects.events.subscribe_async(
-            message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+            message_names=(ObjectUpdateType.UPDATE,),
            predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated,
        ) as get_events:
            self._create_object(local_id=999)
@@ -676,7 +677,7 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio

    async def test_handle_object_update_predicate(self):
        with self.session.objects.events.subscribe_async(
-            message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+            message_names=(ObjectUpdateType.UPDATE,),
        ) as get_events:
            self._create_object(local_id=999)
            evt = await asyncio.wait_for(get_events(), 1.0)
@@ -684,10 +685,10 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio

    async def test_handle_object_update_events_two_subscribers(self):
        with self.session.objects.events.subscribe_async(
-            message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+            message_names=(ObjectUpdateType.UPDATE,),
        ) as get_events:
            with self.session.objects.events.subscribe_async(
-                message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+                message_names=(ObjectUpdateType.UPDATE,),
            ) as get_events2:
                self._create_object(local_id=999)
                evt = await asyncio.wait_for(get_events(), 1.0)
@@ -697,10 +698,10 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio

    async def test_handle_object_update_events_two_subscribers_timeout(self):
        with self.session.objects.events.subscribe_async(
-            message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+            message_names=(ObjectUpdateType.UPDATE,),
        ) as get_events:
            with self.session.objects.events.subscribe_async(
-                message_names=(ObjectUpdateType.OBJECT_UPDATE,),
+                message_names=(ObjectUpdateType.UPDATE,),
            ) as get_events2:
                self._create_object(local_id=999)
                evt = asyncio.wait_for(get_events(), 0.01)