130 Commits

Author SHA1 Message Date
Salad Dais
e6ac99458f v0.14.0 2023-12-20 01:38:31 +00:00
Salad Dais
92cadf26e9 Support inventory cache v3 2023-12-20 01:21:54 +00:00
Salad Dais
305038a31d Add HippoClient.main_caps_client convenience property 2023-12-20 00:58:12 +00:00
Salad Dais
bd67d6f19f Split out RLV handling 2023-12-20 00:49:16 +00:00
Salad Dais
81eae4edbf Make default log level less insane 2023-12-19 18:43:08 +00:00
Salad Dais
776ef71574 Fix participant removal on session close 2023-12-19 18:41:46 +00:00
Salad Dais
31125ca489 Defer returning from join_session() until we're a participant 2023-12-19 06:38:35 +00:00
Salad Dais
29ab108764 Store capture and render device info for voice 2023-12-19 05:30:21 +00:00
Salad Dais
61820f1670 Better handling of client start locations 2023-12-19 04:24:47 +00:00
Salad Dais
7fafb8b5ae message_handler -> event_handler 2023-12-19 01:31:49 +00:00
Salad Dais
28e84c0c5a Clean up session joining code 2023-12-18 23:32:57 +00:00
Salad Dais
e629214bef Switch voice stuff to use MessageHandler for events 2023-12-18 23:18:25 +00:00
Salad Dais
5e9433b4a4 3d_position -> 3d_pos 2023-12-18 21:34:39 +00:00
Salad Dais
5f2082c6e9 Minor cleanup of asyncio usage 2023-12-18 21:32:25 +00:00
Salad Dais
12c0deadee Add tests for setting voice region pos 2023-12-18 21:16:35 +00:00
Salad Dais
6da766ef22 Add test for joining voice session 2023-12-18 20:11:21 +00:00
Salad Dais
f278a4bfcf Use asyncio.Event when events should be re-awaitable 2023-12-18 18:34:14 +00:00
Salad Dais
631fe91049 Correct coveragerc exclude_lines 2023-12-18 07:27:35 +00:00
Salad Dais
159f39227a Add more voice client tests 2023-12-18 07:08:37 +00:00
Salad Dais
670acef0b4 Add tests for voice connector setup 2023-12-18 06:10:51 +00:00
Salad Dais
1165769aca Start writing voice client tests 2023-12-18 05:34:33 +00:00
Salad Dais
613dd32a40 Add tests for voice stuff 2023-12-18 03:29:40 +00:00
Salad Dais
d7a88f904e Add voice-related tooling 2023-12-18 02:02:39 +00:00
Salad Dais
a8344a231b Make hippolyzer events awaitable 2023-12-17 23:37:10 +00:00
Salad Dais
11043e365a On second thought, don't handle EnableSimulator at all 2023-12-16 21:51:56 +00:00
Salad Dais
ad34ba78ea Handle EnableSimulator correctly in client 2023-12-16 20:53:38 +00:00
Salad Dais
f9b4ae1308 Get rid of decorator so we don't mess up type signature 2023-12-16 20:34:10 +00:00
Salad Dais
7fee8f6bfe Fix Python 3.8 2023-12-16 20:08:09 +00:00
Salad Dais
2e0ca3649c Use Future instead of Event for connected signal 2023-12-16 17:29:35 +00:00
Salad Dais
e0d44741e9 Better teleport request handling 2023-12-16 04:44:49 +00:00
Salad Dais
008d59c7d6 Fix Python 3.8 2023-12-15 21:34:45 +00:00
Salad Dais
ed03b0d49f Add a teleport method to client 2023-12-15 21:32:45 +00:00
Salad Dais
4cc1513e58 Correct type signatures in MessageHandler 2023-12-15 19:07:17 +00:00
Salad Dais
c768aeaf40 Be smarter about clearing out ObjectManagers 2023-12-15 17:18:35 +00:00
Salad Dais
42ebb0e915 Fix multi-region connections 2023-12-15 17:08:00 +00:00
Salad Dais
31ba9635eb WIP multi-region support for client 2023-12-15 00:55:14 +00:00
Salad Dais
dc58512ee6 Better handle sim disconnects in client 2023-12-14 23:22:32 +00:00
Salad Dais
4a58731441 Make client circuits easier to work with 2023-12-14 12:33:23 +00:00
Salad Dais
c2b92d2d7d Add test for non-templated EQ events 2023-12-14 10:10:41 +00:00
Salad Dais
640b384d27 Add tests for resend suppression 2023-12-14 09:31:19 +00:00
Salad Dais
a2ef3d9f8e More client refactoring 2023-12-14 09:14:07 +00:00
Salad Dais
0456b4b62d Make main region caps less annoying to work with 2023-12-14 02:19:11 +00:00
Salad Dais
92c9c82e73 Move some things from session to region 2023-12-14 02:08:12 +00:00
Salad Dais
c5ed1cff24 Handle non-templated EQ events in client 2023-12-14 01:23:57 +00:00
Salad Dais
0710735546 Make client handle ping checks 2023-12-13 22:01:34 +00:00
Salad Dais
7869df224e Simplify chat client example 2023-12-13 20:42:21 +00:00
Salad Dais
6f6274ec7d Add client example 2023-12-13 19:19:14 +00:00
Salad Dais
40da130066 Update docs related to client 2023-12-13 17:57:48 +00:00
Salad Dais
5947d52c8d Add inventory manager to client 2023-12-13 17:52:03 +00:00
Salad Dais
e4b73a7196 Don't take by default in client messagehandlers 2023-12-13 04:18:49 +00:00
Salad Dais
1ded1180dc Clean up client tests 2023-12-13 04:10:43 +00:00
Salad Dais
5517d60e7a Use correct user-agent for hippolyzer client 2023-12-12 22:20:39 +00:00
Salad Dais
ed7e42625e Add Hippolyzer proxy support to client 2023-12-12 22:15:28 +00:00
Salad Dais
d5cde896fb Add tests for client EQ handling 2023-12-12 21:47:34 +00:00
Salad Dais
007c79f4a7 Add basic EQ handling to client 2023-12-12 21:17:47 +00:00
Salad Dais
f1b523b5de Support client seed cap, support async message handlers 2023-12-11 21:47:15 +00:00
Salad Dais
c42e0d7291 Make client login testable 2023-12-11 19:08:01 +00:00
Salad Dais
1ee1b9acc6 Basic working client 2023-12-10 23:55:19 +00:00
Salad Dais
9904633a99 More client work 2023-12-10 23:26:28 +00:00
Salad Dais
c8791db75e Start adding client-related lib files 2023-12-10 19:52:24 +00:00
Salad Dais
21d1c7ebfe v0.13.4 2023-12-07 18:47:43 +00:00
Salad Dais
996a43be5b Add option to allow insecure upstream SSL connections 2023-12-07 18:44:10 +00:00
Salad Dais
9e8127e577 Don't use asyncio.get_running_loop() 2023-12-06 20:35:55 +00:00
Salad Dais
cfcd324a11 Pin to Werkzeug under 3.0 2023-12-06 20:35:39 +00:00
Salad Dais
6872634bf4 Be more resilient when faced with no cap_data 2023-12-06 20:35:18 +00:00
Salad Dais
091090c6fd Reparent avatars correctly when recalculating linksets 2023-12-03 23:51:11 +00:00
Salad Dais
bd4fff4200 Add support for PBR / reflection probes 2023-12-03 23:50:32 +00:00
Salad Dais
52dfd0be05 v0.13.3 2023-10-10 23:23:57 +00:00
Salad Dais
60f1737115 Appease new flake8 rules 2023-10-10 23:20:43 +00:00
Salad Dais
7a5d6baf02 Make failing to load invcache non-fatal 2023-10-10 23:15:15 +00:00
Salad Dais
44a332a77b Handle failing to load an addon correctly 2023-10-10 23:14:59 +00:00
Salad Dais
beb0a2d6a4 v0.13.2 2023-07-06 21:49:35 +00:00
Salad Dais
9be66df52b Add AgentFOV to default message ignorelist
It's incredibly spammy when the mesh upload preview is open
2023-07-06 21:48:46 +00:00
Salad Dais
da0117db1b v0.13.1 2023-07-05 20:29:40 +00:00
Salad Dais
4dbf01a604 Blacklist new versions of recordclass 2023-07-05 20:27:05 +00:00
Salad Dais
36858ed3e2 Fix flake error 2023-06-18 18:37:14 +00:00
Salad Dais
370c586582 Decode more flags fields 2023-06-18 18:33:52 +00:00
Salad Dais
fdfffd96c9 Fix UUID serialization with invalid AIS LLSD payloads 2023-06-18 18:33:26 +00:00
Salad Dais
6da9f58b23 Pass original Message through to objectupdate hooks 2023-06-18 18:29:51 +00:00
Salad Dais
12e3912a37 Update README.md
This isn't even in there anymore!
2023-02-07 19:43:51 +00:00
Salad Dais
8147e7e1d7 Remove stylesheet from message builder 2023-02-07 19:43:29 +00:00
Salad Dais
19dba6651c v0.13.0 2023-02-07 19:36:22 +00:00
Salad Dais
274f96c710 Run CI tests on Python 3.11 instead of 3.10 2023-02-07 18:49:14 +00:00
Salad Dais
09e1d0b6fc Remove custom stylesheet for HTTP request / response panes 2023-02-07 18:49:14 +00:00
dependabot[bot]
f4fb68e310 Bump certifi from 2021.10.8 to 2022.12.7 (#34)
Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.10.8 to 2022.12.7.
- [Release notes](https://github.com/certifi/python-certifi/releases)
- [Commits](https://github.com/certifi/python-certifi/compare/2021.10.08...2022.12.07)

---
updated-dependencies:
- dependency-name: certifi
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-07 13:39:26 -04:00
Salad Dais
8edf7ae89b Rough cut of Python 3.11 support 2023-02-07 17:35:44 +00:00
Salad Dais
b6458e9eb7 Add mute enum definitions 2022-11-15 06:24:28 +00:00
Salad Dais
375af1e7f6 Improvements to Object and Skeleton APIs 2022-11-14 21:54:52 +00:00
Salad Dais
76d0a72590 Fix ObjectUpdateBlame addon example always requesting 2022-11-01 23:06:17 +00:00
Salad Dais
3255556835 Add CreationDate SubfieldSerializer 2022-11-01 08:18:40 +00:00
Salad Dais
d19122c039 Fix copy/paste error in puppetry addon 2022-10-27 16:10:05 +00:00
Salad Dais
5692f7b8b6 Add WIP puppetry code 2022-10-19 02:11:04 +00:00
Salad Dais
21cea0f009 Claim LEAP client when session is first created 2022-10-19 02:06:35 +00:00
Salad Dais
193d762132 Give each addon a separate addon_ctx bucket
This fixes addons being able to accidentally stomp all over each
others' state just because they happened to use the same name for
a SessionProperty.
2022-10-18 22:40:15 +00:00
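This per-addon bucketing shows up in the example-addon diffs further down the page, where `session.addon_ctx` is indexed by `__name__`. A minimal sketch of the idea (assuming `addon_ctx` behaves as a mapping from module name to a private dict):

# Each addon module indexes into its own bucket, so two addons may both use
# a "counter" key without stomping on each other's state.
ctx = session.addon_ctx[__name__]
ctx.setdefault("counter", 0)
ctx["counter"] += 1
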
Salad Dais
227fbf7a2e Improve avatar skeleton implementation 2022-10-18 19:39:39 +00:00
Salad Dais
25a397bcc5 add LEAP client connection addon hook 2022-10-17 21:28:11 +00:00
Salad Dais
b0dca80b87 Simplify MetaBaseAddon 2022-10-15 22:56:32 +00:00
Salad Dais
ea475b528f v0.12.2 2022-10-14 06:17:07 +00:00
Salad Dais
2036e3c5b3 Add LEAP / outleap support 2022-10-14 06:11:51 +00:00
Salad Dais
584d9f11e8 Use llsd package instead of llbase.llsd 2022-10-14 03:47:48 +00:00
Salad Dais
df020281f1 Remove send_message() alias 2022-09-28 11:46:24 +00:00
Salad Dais
78c1b8869e Remove LEAP-related code
It lives in https://github.com/SaladDais/outleap now.
Hippolyzer-specific integration will be added back in later.
2022-09-19 04:37:31 +00:00
Salad Dais
87d5e8340b Split LEAPProtocol out of LEAPClient 2022-09-18 18:05:16 +00:00
Salad Dais
e6423d2f43 More work on LEAP API wrappers 2022-09-18 07:49:18 +00:00
Salad Dais
fac44a12b0 Update cap templates 2022-09-18 05:05:00 +00:00
Salad Dais
99ca7b1674 Allow paths for text_input() 2022-09-18 05:04:36 +00:00
Salad Dais
e066724a2f Add API wrappers for LLUI and LLWindow LEAP APIs 2022-09-18 03:28:20 +00:00
Salad Dais
dce032de31 Get both scoped and unscoped LEAP listeners working 2022-09-17 22:30:47 +00:00
Salad Dais
2f578b2bc4 More LEAP work 2022-09-17 08:50:52 +00:00
Salad Dais
0c1656e6ab Start of basic LEAP client / forwarding agent 2022-09-16 09:06:01 +00:00
Salad Dais
2b6d8a70f4 v0.12.1 2022-09-12 14:30:18 +00:00
Salad Dais
1a308e9671 Mesh serialization clarifications 2022-09-12 14:17:33 +00:00
Salad Dais
7b21e5634c Slightly faster weights (de)serialization 2022-09-10 00:04:49 +00:00
Salad Dais
e4548a285d Serialize LLMesh internals with NumPy
Easy 2x speedup! Still need to do the vertex weights, but those
have irregular alignment.
2022-09-08 23:44:53 +00:00
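The win here comes from dequantizing whole vertex arrays at once rather than unpacking vertex-by-vertex with struct. A hedged sketch of the technique (helper name and the scalar domain are mine; LLMesh actually uses per-axis domains):

import numpy as np

def dequantize_positions(raw: bytes, lo: float, hi: float) -> np.ndarray:
    """Dequantize packed little-endian uint16 position triples into [lo, hi]."""
    quantized = np.frombuffer(raw, dtype="<u2").reshape((-1, 3))
    # One vectorized pass over the whole vertex buffer, no per-vertex struct calls
    return lo + (quantized.astype(np.float32) / 65535.0) * (hi - lo)
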
Salad Dais
72e926f04c Better bind shape application 2022-09-08 18:58:28 +00:00
Salad Dais
d9fa14b17c Faster vec3 normalization 2022-09-08 18:27:01 +00:00
Salad Dais
33c5abaaf4 Clarify glTF comments 2022-09-08 17:17:54 +00:00
Salad Dais
2dfd61fcc5 Only calculate inverse transpose bind shape matrix once 2022-09-08 05:48:52 +00:00
Salad Dais
eb58e747ce Fix glTF skinning implementation
Still a little funky, but things display correctly now.
2022-09-08 00:32:10 +00:00
Salad Dais
1d221a2289 glTF: Apply bone scale and rotation to inverse bind matrices instead
Blender can't do anything intelligent with them. Fixes major display
issues for collision volume scaling. Figure out how to round-trip
correctly on export later.
2022-09-02 06:47:09 +00:00
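The transform shuffle described here can be sketched for a single joint as follows (the helper is hypothetical and ignores the knock-on effect on child bones, which the real change has to account for):

import numpy as np
import transformations

def strip_bone_to_translation(bone_matrix: np.ndarray, inv_bind: np.ndarray):
    """Keep only translation on the bone node; fold its rotation and scale into
    the inverse bind matrix so world = bone @ inv_bind is unchanged."""
    _, _, _, translate, _ = transformations.decompose_matrix(bone_matrix)
    stripped = transformations.compose_matrix(translate=translate)
    # inv_bind must pick up whatever the bone node no longer applies
    fixup = np.linalg.inv(stripped) @ bone_matrix
    return stripped, fixup @ inv_bind
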
Salad Dais
2ffd0458d0 More glTF cleanup 2022-09-01 20:20:02 +00:00
Salad Dais
25f533a31b glTF fixups, parse skeleton definition from avatar_skeleton.xml 2022-09-01 16:57:36 +00:00
Salad Dais
570dbce181 Add WIP glTF conversion code
Related to #24
2022-08-29 14:10:56 +00:00
Salad Dais
ccb63e971b Reorganize collada code a bit 2022-08-29 13:49:55 +00:00
Salad Dais
8be4bce8bc Make mesh uploader handle multi-faced meshes 2022-08-22 01:15:35 +00:00
Salad Dais
e945706d2b Don't hardcode VisualParams path 2022-08-21 04:52:30 +00:00
Salad Dais
6c748a6ab2 More collada notes 2022-08-21 04:52:05 +00:00
Salad Dais
6abc7ca7d2 Fix colladatools log call 2022-08-19 16:57:31 +00:00
Salad Dais
c57e0e467c Better handle dynamically-imported hot_reload()s 2022-08-19 16:54:42 +00:00
Salad Dais
e46b4adad2 Update collada notes 2022-08-18 15:44:23 +00:00
84 changed files with 3952 additions and 489 deletions


@@ -9,4 +9,4 @@ exclude_lines =
     def __repr__
     raise AssertionError
     assert False
-    pass
+    ^\s*pass\b


@@ -1,6 +1,12 @@
 name: Run Python Tests
-on: [push, pull_request]
+on:
+  push:
+    paths-ignore:
+      - '*.md'
+  pull_request:
+    paths-ignore:
+      - '*.md'
 jobs:
   build:
@@ -8,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.10"]
+        python-version: ["3.8", "3.11"]
     steps:
     - uses: actions/checkout@v2
@@ -20,10 +26,11 @@ jobs:
     - name: Install dependencies
       run: |
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
        pip install -r requirements.txt
        pip install -r requirements-test.txt
+        sudo apt-get install libopenjp2-7
        pip install -e .
    - name: Run Flake8
      run: |
        flake8 .


@@ -48,8 +48,7 @@ A proxy is provided with both a CLI and Qt-based interface. The proxy applicatio
 custom SOCKS 5 UDP proxy, as well as an HTTP proxy based on [mitmproxy](https://mitmproxy.org/).
 Multiple clients are supported at a time, and UDP messages may be injected in either
-direction. The proxy UI was inspired by the Message Log and Message Builder as present in
-the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
+direction.
 
 ### Proxy Setup
@@ -403,9 +402,13 @@ above is your only option.
 ### Should I use this library to make an SL client in Python?
 
-No. If you just want to write a client in Python, you should instead look at using
+Probably not. If you just want to write a client in Python, you should instead look at using
 [libremetaverse](https://github.com/cinderblocks/libremetaverse/) via pythonnet.
-I removed the client-related code inherited from PyOGP because libremetaverse's was simply better.
+I removed the client-related code inherited from PyOGP because libremetaverse's was simply better
+for general use.
 <https://github.com/CasperTech/node-metaverse/> also looks like a good, modern wrapper if you
 prefer TypeScript.
+
+There is, however, a very low-level `HippoClient` class provided for testing, but it's unlikely
+to be what you want for writing a general-purpose bot.
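As a rough illustration of the low-level `HippoClient` mentioned here, based on the full chat-bot example that appears further down this page (credentials and defaults assumed):

import asyncio
from contextlib import aclosing

from hippolyzer.lib.client.hippo_client import HippoClient

async def amain():
    client = HippoClient()
    async with aclosing(client):  # make sure the client is cleanly shut down
        await client.login(username="First Last", password="hunter2", start_location="last")
        await client.send_chat("Hello World!")

asyncio.run(amain())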


@@ -114,7 +114,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
         region.objects.request_missing_objects()
 
     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if self.blueish_model is None:
             return


@@ -6,7 +6,7 @@ from hippolyzer.lib.proxy.sessions import Session
 def handle_lludp_message(session: Session, region: ProxiedRegion, message: Message):
     # addon_ctx will persist across addon reloads, use for storing data that
     # needs to survive across calls to this function
-    ctx = session.addon_ctx
+    ctx = session.addon_ctx[__name__]
     if message.name == "ChatFromViewer":
         chat = message["ChatData"]["Message"]
         if chat == "COUNT":


@@ -152,7 +152,7 @@ class DeformerAddon(BaseAddon):
         local_anim.LocalAnimAddon.apply_local_anim(session, region, "deformer_addon", anim_data)
 
     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # An object in-world can also tell the client how to deform itself via
         # RLV-style commands.
@@ -160,9 +160,9 @@ class DeformerAddon(BaseAddon):
         if param != "force":
             return
-        if cmd == "stop_deforming":
+        if behaviour == "stop_deforming":
             self.deform_joints.clear()
-        elif cmd == "deform_joints":
+        elif behaviour == "deform_joints":
             self.deform_joints.clear()
             for joint_data in options:
                 joint_split = joint_data.split("|")


@@ -0,0 +1,50 @@
"""
Example of how to control a viewer over LEAP

Must launch the viewer with `outleap-agent` LEAP script.
See https://github.com/SaladDais/outleap/ for more info on LEAP / outleap.
"""
import outleap
from outleap.scripts.inspector import LEAPInspectorGUI

from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager

# Path found using `outleap-inspector`
FPS_PATH = outleap.UIPath("/main_view/menu_stack/status_bar_container/status/time_and_media_bg/FPSText")


class LEAPExampleAddon(BaseAddon):
    async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
        # You can do things as soon as the LEAP client connects, like if you want to automate
        # login or whatever.
        viewer_control_api = outleap.LLViewerControlAPI(leap_client)
        # Ask for a config value and print it in the viewer logs
        print(await viewer_control_api.get("Global", "StatsPilotFile"))

    @handle_command()
    async def show_ui_inspector(self, session: Session, _region: ProxiedRegion):
        """Spawn a GUI for inspecting the UI state"""
        if not session.leap_client:
            show_message("No LEAP client connected?")
            return
        LEAPInspectorGUI(session.leap_client).show()

    @handle_command()
    async def say_fps(self, session: Session, _region: ProxiedRegion):
        """Say your current FPS in chat"""
        if not session.leap_client:
            show_message("No LEAP client connected?")
            return
        window_api = outleap.LLWindowAPI(session.leap_client)
        fps = (await window_api.get_info(path=FPS_PATH))['value']
        send_chat(f"LEAP says I'm running at {fps} FPS!")


addons = [LEAPExampleAddon()]


@@ -114,15 +114,15 @@ class LocalAnimAddon(BaseAddon):
             await asyncio.sleep(1.0)
 
     def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
-                           cmd: str, options: List[str], param: str):
+                           behaviour: str, options: List[str], param: str):
         # We only handle commands
         if param != "force":
             return
-        if cmd == "stop_local_anim":
+        if behaviour == "stop_local_anim":
             self.apply_local_anim(session, region, options[0], new_data=None)
             return True
-        elif cmd == "start_local_anim":
+        elif behaviour == "start_local_anim":
             self.apply_local_anim_from_file(session, region, options[0])
             return True


@@ -230,7 +230,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         show_message("Mangled upload request")
 
     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
         if obj.LocalID not in self.local_mesh_target_locals:
             return
         if "Name" not in updated_props or obj.Name is None:


@@ -10,6 +10,7 @@ before you start tracking can help too.
 from typing import *
 
 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.objects import Object
 from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message, SessionProperty
@@ -20,7 +21,7 @@ from hippolyzer.lib.proxy.sessions import Session
 class ObjectUpdateBlameAddon(BaseAddon):
     update_blame_counter: Counter[UUID] = SessionProperty(Counter)
-    track_update_blame: bool = SessionProperty(False)
+    should_track_update_blame: bool = SessionProperty(False)
 
     @handle_command()
     async def precache_objects(self, _session: Session, region: ProxiedRegion):
@@ -38,11 +39,11 @@ class ObjectUpdateBlameAddon(BaseAddon):
     @handle_command()
     async def track_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = True
+        self.should_track_update_blame = True
 
     @handle_command()
     async def untrack_update_blame(self, _session: Session, _region: ProxiedRegion):
-        self.track_update_blame = False
+        self.should_track_update_blame = False
 
     @handle_command()
     async def clear_update_blame(self, _session: Session, _region: ProxiedRegion):
@@ -57,8 +58,8 @@ class ObjectUpdateBlameAddon(BaseAddon):
             print(f"{obj_id} ({name!r}): {count}")
 
     def handle_object_updated(self, session: Session, region: ProxiedRegion,
-                              obj: Object, updated_props: Set[str]):
-        if not self.track_update_blame:
+                              obj: Object, updated_props: Set[str], msg: Optional[Message]):
+        if not self.should_track_update_blame:
             return
         if region != session.main_region:
             return


@@ -0,0 +1,111 @@
"""
Control a puppetry-enabled viewer and make your neck spin like crazy

It currently requires a custom rebased Firestorm with puppetry applied on top,
and patches applied on top to make startup LEAP scripts be treated as puppetry modules.
Basically, you probably don't want to use this yet. But hey, Puppetry is still only
on the beta grid anyway.
"""
import asyncio
import enum
import logging
import math
from typing import *

import outleap

from hippolyzer.lib.base.datatypes import Quaternion
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.sessions import Session

LOG = logging.getLogger(__name__)


class BodyPartMask(enum.IntFlag):
    """Which joints to send the viewer as part of "move" puppetry command"""
    HEAD = 1 << 0
    FACE = 1 << 1
    LHAND = 1 << 2
    RHAND = 1 << 3
    FINGERS = 1 << 4


def register_puppetry_command(func: Callable[[dict], Awaitable[None]]):
    """Register a method as handling inbound puppetry commands from the viewer"""
    func._puppetry_command = True
    return func


class PuppetryExampleAddon(BaseAddon):
    server_skeleton: Dict[str, Dict[str, Any]] = SessionProperty(dict)
    camera_num: int = SessionProperty(0)
    parts_active: BodyPartMask = SessionProperty(lambda: BodyPartMask(0x1F))
    puppetry_api: Optional[outleap.LLPuppetryAPI] = SessionProperty(None)
    leap_client: Optional[outleap.LEAPClient] = SessionProperty(None)

    def handle_session_init(self, session: Session):
        if not session.leap_client:
            return
        self.puppetry_api = outleap.LLPuppetryAPI(session.leap_client)
        self.leap_client = session.leap_client
        self._schedule_task(self._serve())
        self._schedule_task(self._exorcist(session))

    @register_puppetry_command
    async def enable_parts(self, args: dict):
        if (new_mask := args.get("parts_mask")) is not None:
            self.parts_active = BodyPartMask(new_mask)

    @register_puppetry_command
    async def set_camera(self, args: dict):
        if (camera_num := args.get("camera_num")) is not None:
            self.camera_num = camera_num

    @register_puppetry_command
    async def stop(self, _args: dict):
        LOG.info("Viewer asked us to stop puppetry")

    @register_puppetry_command
    async def log(self, _args: dict):
        # Intentionally ignored, we don't care about things the viewer
        # asked us to log
        pass

    @register_puppetry_command
    async def set_skeleton(self, args: dict):
        # Don't really care about what the viewer thinks the view of the skeleton is.
        # Just log store it.
        self.server_skeleton = args

    async def _serve(self):
        """Handle inbound puppetry commands from viewer in a loop"""
        async with self.leap_client.listen_scoped("puppetry.controller") as listener:
            while True:
                msg = await listener.get()
                cmd = msg["command"]
                handler = getattr(self, cmd, None)
                if handler is None or not hasattr(handler, "_puppetry_command"):
                    LOG.warning(f"Unknown puppetry command {cmd!r}: {msg!r}")
                    continue
                await handler(msg.get("args", {}))

    async def _exorcist(self, session):
        """Do the Linda Blair thing with your neck"""
        spin_rad = 0.0
        while True:
            await asyncio.sleep(0.05)
            if not session.main_region:
                continue
            # Wrap spin_rad around if necessary
            while spin_rad > math.pi:
                spin_rad -= math.pi * 2
            # LEAP wants rot as a quaternion with just the imaginary parts.
            neck_rot = Quaternion.from_euler(0, 0, spin_rad).data(3)
            self.puppetry_api.move({
                "mNeck": {"no_constraint": True, "local_rot": neck_rot},
            })
            spin_rad += math.pi / 25


addons = [PuppetryExampleAddon()]


@@ -13,7 +13,7 @@ def _to_spongecase(val):
 
 def handle_lludp_message(session: Session, _region: ProxiedRegion, message: Message):
-    ctx = session.addon_ctx
+    ctx = session.addon_ctx[__name__]
     ctx.setdefault("spongecase", False)
     if message.name == "ChatFromViewer":
         chat = message["ChatData"]["Message"]


@@ -7,6 +7,8 @@ in the appropriate format.
 from pathlib import Path
 from typing import *
 
+from hippolyzer.lib.base.mesh import LLMeshSerializer
+from hippolyzer.lib.base.serialization import BufferReader
 from hippolyzer.lib.base.templates import AssetType
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon
@@ -38,8 +40,11 @@ class UploaderAddon(BaseAddon):
         try:
             if asset_type == AssetType.MESH:
                 # Kicking off a mesh upload works a little differently internally
+                # Half-parse the mesh so that we can figure out how many faces it has
+                reader = BufferReader("!", file_body)
+                mesh = reader.read(LLMeshSerializer(parse_segment_contents=False))
                 upload_token = await region.asset_uploader.initiate_mesh_upload(
-                    name, file_body, flags=flags
+                    name, mesh, flags=flags
                 )
             else:
                 upload_token = await region.asset_uploader.initiate_asset_upload(


@@ -0,0 +1,48 @@
"""
A simple client that just says hello to people
"""
import asyncio
from contextlib import aclosing
import os

from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import ChatType, ChatSourceType
from hippolyzer.lib.client.hippo_client import HippoClient


async def amain():
    client = HippoClient()

    async def _respond_to_chat(message: Message):
        if message["ChatData"]["SourceID"] == client.session.agent_id:
            return
        if message["ChatData"]["SourceType"] != ChatSourceType.AGENT:
            return
        if "hello" not in str(message["ChatData"]["Message"]).lower():
            return
        await client.send_chat(f'Hello {message["ChatData"]["FromName"]}!', chat_type=ChatType.SHOUT)

    async with aclosing(client):
        await client.login(
            username=os.environ["HIPPO_USERNAME"],
            password=os.environ["HIPPO_PASSWORD"],
            start_location=os.environ.get("HIPPO_START_LOCATION", "last"),
        )
        print("I'm here")
        await client.send_chat("Hello World!", chat_type=ChatType.SHOUT)
        client.session.message_handler.subscribe("ChatFromSimulator", _respond_to_chat)

        # Example of how to work with caps
        async with client.main_caps_client.get("SimulatorFeatures") as features_resp:
            print("Features:", await features_resp.read_llsd())

        while True:
            try:
                await asyncio.sleep(0.001)
            except (KeyboardInterrupt, asyncio.CancelledError):
                await client.send_chat("Goodbye World!", chat_type=ChatType.SHOUT)
                return


if __name__ == "__main__":
    asyncio.run(amain())
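Assuming the environment variables referenced above, this example can be run as something like `HIPPO_USERNAME='First Last' HIPPO_PASSWORD=hunter2 python chat_client.py` (the script filename is hypothetical); it shouts a greeting, answers anyone who says hello, and shouts a goodbye on Ctrl-C.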


@@ -191,7 +191,7 @@
        </size>
       </property>
       <property name="styleSheet">
-       <string notr="true">color: rgb(80, 0, 0)</string>
+       <string notr="true"/>
       </property>
       <property name="tabChangesFocus">
        <bool>true</bool>


@@ -9,13 +9,14 @@ from typing import Optional
 import mitmproxy.ctx
 import mitmproxy.exceptions
+import outleap
 
 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
 from hippolyzer.lib.proxy.ca_utils import setup_ca
 from hippolyzer.lib.proxy.commands import handle_command
-from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
+from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
 from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
 from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
 from hippolyzer.lib.base.message.message import Message
@@ -84,12 +85,12 @@ class REPLAddon(BaseAddon):
         AddonManager.spawn_repl()
 
 
-def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext):
+def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext, ssl_insecure=False):
     mitm_loop = asyncio.new_event_loop()
     asyncio.set_event_loop(mitm_loop)
 
     async def mitmproxy_loop():
-        mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
+        mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context, ssl_insecure=ssl_insecure)
         gc.freeze()
         await mitmproxy_master.run()
@@ -97,7 +98,7 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo
 
 def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
-                extra_addon_paths: Optional[list] = None, proxy_host=None):
+                extra_addon_paths: Optional[list] = None, proxy_host=None, ssl_insecure=False):
     extra_addons = extra_addons or []
     extra_addon_paths = extra_addon_paths or []
     extra_addons.append(SelectionManagerAddon())
@@ -112,6 +113,7 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     udp_proxy_port = session_manager.settings.SOCKS_PROXY_PORT
     http_proxy_port = session_manager.settings.HTTP_PROXY_PORT
+    leap_port = session_manager.settings.LEAP_PORT
 
     if proxy_host is None:
         proxy_host = session_manager.settings.PROXY_BIND_ADDR
@@ -121,17 +123,13 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     # TODO: argparse
     if len(sys.argv) == 3:
         if sys.argv[1] == "--setup-ca":
-            try:
-                mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
-            except mitmproxy.exceptions.MitmproxyException:
-                # Proxy already running, create the master so we don't try to bind to a port
-                mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
+            mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
             setup_ca(sys.argv[2], mitmproxy_master)
             return sys.exit(0)
 
     http_proc = multiprocessing.Process(
         target=run_http_proxy_process,
-        args=(proxy_host, http_proxy_port, flow_context),
+        args=(proxy_host, http_proxy_port, flow_context, ssl_insecure),
         daemon=True,
     )
     http_proc.start()
@@ -143,6 +141,10 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     coro = asyncio.start_server(server.handle_connection, proxy_host, udp_proxy_port)
     async_server = loop.run_until_complete(coro)
 
+    leap_server = outleap.LEAPBridgeServer(session_manager.leap_client_connected)
+    coro = asyncio.start_server(leap_server.handle_connection, proxy_host, leap_port)
+    async_leap_server = loop.run_until_complete(coro)
+
     event_manager = MITMProxyEventManager(session_manager, flow_context)
     loop.create_task(event_manager.run())
@@ -169,6 +171,8 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
     # Close the server
     print("Closing SOCKS server")
     async_server.close()
+    print("Shutting down LEAP server")
+    async_leap_server.close()
     print("Shutting down addons")
     AddonManager.shutdown()
     print("Waiting for SOCKS server to close")


@@ -39,10 +39,11 @@ from hippolyzer.lib.base.settings import SettingDescriptor
 from hippolyzer.lib.base.ui_helpers import loadUi
 import hippolyzer.lib.base.serialization as se
 from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
+from hippolyzer.lib.client.state import BaseClientSessionManager
 from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
 from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
 from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
-from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
+from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
 from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
     import_log_entries, export_log_entries
 from hippolyzer.lib.proxy.region import ProxiedRegion
@@ -71,6 +72,7 @@ class GUISessionManager(SessionManager, QtCore.QObject):
     regionRemoved = QtCore.Signal(ProxiedRegion)
 
     def __init__(self, settings):
+        BaseClientSessionManager.__init__(self)
         SessionManager.__init__(self, settings)
         QtCore.QObject.__init__(self)
         self.all_regions = []
@@ -231,7 +233,8 @@ class MessageLogWindow(QtWidgets.QMainWindow):
                      "AvatarRenderInfo FirestormBridge ObjectAnimation ParcelDwellRequest ParcelAccessListRequest " \
                      "ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
                      "ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
-                     "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
+                     "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
+                     "AgentFOV".split(" ")
     DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"
 
     textRequest: QtWidgets.QTextEdit
@@ -274,9 +277,11 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)
         self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
+        self.actionProxySSLInsecure.setChecked(self.settings.SSL_INSECURE)
         self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
         self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
         self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
+        self.actionProxySSLInsecure.triggered.connect(self._setProxySSLInsecure)
         self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
         self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
         self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)
@@ -457,7 +462,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         if clicked_btn is not yes_btn:
             return
 
-        master = create_proxy_master("127.0.0.1", -1, HTTPFlowContext())
+        master = create_http_proxy("127.0.0.1", -1, HTTPFlowContext())
         dirs = setup_ca_everywhere(master)
 
         msg = QtWidgets.QMessageBox()
@@ -473,6 +478,12 @@ class MessageLogWindow(QtWidgets.QMainWindow):
         msg.setText("Remote accessibility setting changes will take effect on next run")
         msg.exec()
 
+    def _setProxySSLInsecure(self, checked: bool):
+        self.sessionManager.settings.SSL_INSECURE = checked
+        msg = QtWidgets.QMessageBox()
+        msg.setText("SSL security setting changes will take effect on next run")
+        msg.exec()
+
     def _setUseViewerObjectCache(self, checked: bool):
         self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked
@@ -936,6 +947,7 @@ def gui_main():
         session_manager=window.sessionManager,
         extra_addon_paths=window.getAddonList(),
         proxy_host=http_host,
+        ssl_insecure=settings.SSL_INSECURE,
     )


@@ -193,7 +193,7 @@
        </size>
       </property>
       <property name="styleSheet">
-       <string notr="true">color: rgb(80, 0, 0)</string>
+       <string notr="true"/>
      </property>
      <property name="tabChangesFocus">
       <bool>true</bool>
@@ -213,7 +213,7 @@
     </widget>
     <widget class="QPlainTextEdit" name="textResponse">
      <property name="styleSheet">
-      <string notr="true">color: rgb(0, 0, 80)</string>
+      <string notr="true"/>
      </property>
      <property name="tabChangesFocus">
       <bool>true</bool>
@@ -245,7 +245,7 @@
      <x>0</x>
      <y>0</y>
      <width>700</width>
-     <height>22</height>
+     <height>29</height>
     </rect>
    </property>
   <widget class="QMenu" name="menuFile">
@@ -268,6 +268,7 @@
    <addaction name="actionProxyRemotelyAccessible"/>
    <addaction name="actionUseViewerObjectCache"/>
    <addaction name="actionRequestMissingObjects"/>
+   <addaction name="actionProxySSLInsecure"/>
  </widget>
  <addaction name="menuFile"/>
@@ -342,6 +343,17 @@
    <string>Export Log Entries</string>
   </property>
  </action>
+ <action name="actionProxySSLInsecure">
+  <property name="checkable">
+   <bool>true</bool>
+  </property>
+  <property name="text">
+   <string>Allow Insecure SSL Connections</string>
+  </property>
+  <property name="toolTip">
+   <string>Allow invalid SSL certificates from upstream connections</string>
+  </property>
+ </action>
 </widget>
 <resources/>
 <connections/>


@@ -15,7 +15,7 @@ import logging
 import os.path
 import secrets
 import sys
-from typing import Dict, List, Optional, Union, Sequence
+from typing import Dict, Optional
 
 import collada
 import collada.source
@@ -24,23 +24,20 @@ from lxml import etree
 import numpy as np
 import transformations
 
 from hippolyzer.lib.base.datatypes import Vector3
 from hippolyzer.lib.base.helpers import get_resource_filename
 from hippolyzer.lib.base.serialization import BufferReader
-from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict
+from hippolyzer.lib.base.mesh import (
+    LLMeshSerializer,
+    MeshAsset,
+    positions_from_domain,
+    SkinSegmentDict,
+    llsd_to_mat4,
+)
 
 LOG = logging.getLogger(__name__)
 DIR = os.path.dirname(os.path.realpath(__file__))
 
-def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
-    return np.array(mat).reshape((4, 4), order='F')
-
-def mat4_to_llsd(mat: np.ndarray) -> List[float]:
-    return list(mat.flatten(order='F'))
-
 def mat4_to_collada(mat: np.ndarray) -> np.ndarray:
     return mat.flatten(order='C')
@@ -98,7 +95,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
         reflective=0.0,
         shadingtype="blinn",
         shininess=0.0,
-        diffuse=(0.0, 0.0, 0.0),
+        diffuse=(1.0, 1.0, 1.0),
     )
     mat = collada.material.Material(f"material{sub_uniq}", f"material{sub_uniq}", effect)
@@ -190,6 +187,8 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
     if should_skin:
         # We need a skeleton per _mesh asset_ because you could have incongruous skeletons
         # within the same linkset.
+        # TODO: can we maintain some kind of skeleton cache, where if this skeleton has no conflicts
+        #  with another skeleton in the cache, we just use that skeleton and add any additional joints?
         skel_root = load_skeleton_nodes()
         transform_skeleton(skel_root, dae, skin_seg)
         skel = collada.scene.Node.load(dae, skel_root, {})
@@ -211,7 +210,6 @@ def load_skeleton_nodes() -> etree.ElementBase:
 
 def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_seg: SkinSegmentDict,
                        include_unreferenced_bones=False):
     """Update skeleton XML nodes to account for joint translations in the mesh"""
-    # TODO: Use translation component only.
     joint_nodes: Dict[str, collada.scene.Node] = {}
     for skel_node in skel_root.iter():
         # xpath is loathsome so this is easier.
@@ -262,48 +260,61 @@ def _create_mat4_source(name: str, data: np.ndarray, semantic: str):
     return source
 
-def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
+def fix_weird_bind_matrices(skin_seg: SkinSegmentDict) -> None:
     """
-    Fix weird-looking bind matrices to have normal scaling and rotations
+    Fix weird-looking bind matrices to have sensible scaling and rotations
 
     Not sure why these even happen (weird mesh authoring programs?)
-    Sometimes get enormous inverse bind matrices (each component 10k+) and tiny
+    Sometimes we get enormous inverse bind matrices (each component 10k+) and tiny
     bind shape matrix components. This detects inverse bind shape matrices
     with weird scales and tries to set them to what they "should" be without
    the weird inverted scaling.
    """
-    scale_fixup = Vector3(1, 1, 1)
-    angle_fixup = Vector3(0, 0, 0)
-    have_fixups = False
-    # Totally non-scientific method of detecting odd bind matrices based on squinting very,
-    # very hard at a random sample of assets.
-    for joint_name, joint_inv in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
-        if not joint_name.startswith("m"):
-            # We can't make very good guesses based on collision volume scales and rotations,
-            # skip anything but the "m" joints.
-            continue
-        joint_mat = llsd_to_mat4(joint_inv)
-        joint_scale, _, joint_angle, _, _ = transformations.decompose_matrix(joint_mat)
-        # If the scale component of an mJointName joint isn't roughly <1,1,1>, we likely have
-        # scaling applied to the inverse bind matrices rather than the bind matrix. Figure out
-        # what the fixup should be so that we can reverse it.
-        if abs(3.0 - sum(joint_scale)) > 0.5:
-            scale_fixup = Vector3(1, 1, 1) / Vector3(*joint_scale)
-            have_fixups = True
-        # I wouldn't expect mJointName joints to be rotated at all in their inverse bind matrices.
-        # Is this a rotation that should've been applied to the bind shape matrix instead?
-        # In any event, all joints are likely rotated by this amount, so calculate the inverse.
-        if abs(sum(joint_angle)) > 0.05:
-            angle_fixup = -Vector3(*joint_angle)
-            have_fixups = True
-    if have_fixups:
-        LOG.warning("Detected weird matrices in mesh!", scale_fixup, angle_fixup)
-        # The magnitude of the scales in the inverse bind matrices look very strange.
-        # The bind matrix itself is probably messed up as well, try to fix it.
-        # TODO: DON'T MESS WITH INVERSE TRANSLATION!!!! Only bind shape gets its translation scaled.
-        # TODO: put this back in, the previous logic was totally wrong-headed..
-        pass
+    # Sometimes we get mesh assets that have the vertex data naturally in y-up orientation,
+    # and get re-oriented to z-up not through the bind shape matrix, but through the
+    # transforms in the inverse bind matrices!
+    #
+    # Blender, for one, does not like this very much, and generally won't generate mesh
+    # assets like this, as explained here https://developer.blender.org/T38660.
+    # In vanilla Blender, these mesh assets will show up scaled and rotated _only_ according
+    # to the bind shape matrix, which may end up with the model 25 meters tall and sitting
+    # on its side.
+    #
+    # https://avalab.org/avastar/292/knowledge/compare-workbench/, while somewhat outdated,
+    # has some information on rest pose vs default pose and scaling that I believe is relevant.
+    # https://github.com/KhronosGroup/glTF-Blender-IO/issues/994 as well.
+    #
+    # While trying to figure out what was going on, I searched for something like
+    # "inverse bind matrix scale collada", "bind pose scale blender", etc. Pretty much every
+    # result was either a bug filed by, or a question asked by the creator of Avastar, or an SL user.
+    # I think that says a lot about how annoying it is to author mesh for SL in particular.
+    #
+    # I spent a good month or so tearing my hair out over this wondering how these values could
+    # even be possible. I wasn't sure how I should write mesh import code if I don't understand
+    # how to interpret existing data, or how it even ended up the way it did. Turns out I wasn't
+    # misinterpreting the data, the data really is just weird.
+    #
+    # I'd also had the idea that you could sniff which body a given rigged asset was meant
+    # for by doing trivial matching on the inverse bind matrices, but obviously that isn't true!
+    #
+    # Basically:
+    # 1) Maya is evil and generates evil, this evil bleeds into SL's assets through transforms.
+    # 2) Blender is also evil, but in a manner that doesn't agree with Maya's evil.
+    # 3) Collada was a valiant effort, but is evil in practice. Seemingly simple Collada
+    #    files are interpreted completely differently by Blender, Maya, and sometimes SL.
+    # 4) Those three evils collude to make an interop nightmare for everyone like "oh my rigger
+    #    rigs using Maya and now my model is huge and all my normals are fucked on reimport"
+    # 5) Yes, there's still good reasons to be using Avastar in 2022 even though nobody authoring
+    #    rigged mesh for any other use has to use something similar.
+    if not skin_seg['joint_names']:
+        return
+    # TODO: calculate the correct inverse bind matrix scale & rotations from avatar_skeleton.xml
+    #  definitions. If the rotation and scale factors are the same across all inverse bind matrices then
+    #  they can be moved over to the bind shape matrix to keep Blender happy.
+    #  Maybe add a scaled / rotated empty as a parent for the armature instead?
+    return
 
 def main():


@@ -39,12 +39,13 @@ class _IterableStub:
     __iter__: Callable
 
-class TupleCoord(recordclass.datatuple, _IterableStub):  # type: ignore
-    __options__ = {
-        "fast_new": False,
-    }
+RAD_TO_DEG = 180 / math.pi
+
+
+class TupleCoord(recordclass.RecordClass, _IterableStub):
+    def __init__(self, *args):
+        # Only to help typing, doesn't actually do anything.
+        # All the important stuff happens in `__new__()`
+        pass
 
     @classmethod
@@ -364,7 +365,7 @@ def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int
     return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
 
-class TaggedUnion(recordclass.datatuple):  # type: ignore
+class TaggedUnion(recordclass.RecordClass):
     tag: Any
     value: Any
@@ -372,5 +373,5 @@ class TaggedUnion(recordclass.datatuple): # type: ignore
 __all__ = [
     "Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
     "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
-    "IntEnum", "IntFlag", "flags_to_pod", "Pretty"
+    "IntEnum", "IntFlag", "flags_to_pod", "Pretty", "RAD_TO_DEG"
 ]
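For reference, the replacement base class is used like a mutable, typed namedtuple. A tiny sketch assuming recordclass's documented `RecordClass` behavior (the `Vec2` type here is illustrative, not hippolyzer's):

from recordclass import RecordClass

class Vec2(RecordClass):
    x: float
    y: float

v = Vec2(1.0, 2.0)
v.x = 5.0   # mutable in place, unlike a namedtuple
x, y = v    # still iterates/unpacks like a tuple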


@@ -18,7 +18,7 @@ You should have received a copy of the GNU Lesser General Public License
 along with this program; if not, write to the Free Software Foundation,
 Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 """
+import asyncio
 from logging import getLogger
 
 logger = getLogger('utilities.events')
@@ -54,22 +54,29 @@ class Event:
     def notify(self, args):
         for handler in self.subscribers[:]:
-            instance, inner_args, kwargs, one_shot, predicate = handler
+            handler, inner_args, kwargs, one_shot, predicate = handler
             if predicate and not predicate(args):
                 continue
             if one_shot:
-                self.unsubscribe(instance, *inner_args, **kwargs)
-            if instance(args, *inner_args, **kwargs):
-                self.unsubscribe(instance, *inner_args, **kwargs)
+                self.unsubscribe(handler, *inner_args, **kwargs)
+            if asyncio.iscoroutinefunction(handler):
+                # Note that unsubscription may be delayed due to asyncio scheduling :)
+                async def _run_handler_wrapper():
+                    unsubscribe = await handler(args, *inner_args, **kwargs)
+                    if unsubscribe:
+                        _ = self.unsubscribe(handler, *inner_args, **kwargs)
+                asyncio.create_task(_run_handler_wrapper())
+            else:
+                if handler(args, *inner_args, **kwargs) and not one_shot:
+                    self.unsubscribe(handler, *inner_args, **kwargs)
 
-    def get_subscriber_count(self):
+    def __len__(self):
         return len(self.subscribers)
 
     def clear_subscribers(self):
         self.subscribers.clear()
         return self
 
     __iadd__ = subscribe
     __isub__ = unsubscribe
     __call__ = notify
-    __len__ = get_subscriber_count
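A usage sketch for the coroutine-handler support added above (the import path and `Event()` construction are assumptions based on this diff):

import asyncio
from hippolyzer.lib.base.events import Event  # assumed module path

async def on_fired(args):
    print("fired with", args)
    return True  # a truthy return unsubscribes, same as for sync handlers

async def main():
    event = Event()
    event += on_fired      # __iadd__ = subscribe
    event("some payload")  # __call__ = notify; the coroutine handler is scheduled as a task
    await asyncio.sleep(0)  # give the scheduled handler a chance to run

asyncio.run(main())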


@@ -176,7 +176,7 @@ class MessageTemplateNotFound(MessageSystemError):
         self.template = template
 
     def __str__(self):
-        return "No message template found, context: '%s'" % self.context
+        return "No message template found for %s, context: '%s'" % (self.template, self.context)
 
 class MessageTemplateParsingError(MessageSystemError):


@@ -0,0 +1,528 @@
"""
WIP LLMesh -> glTF converter, for testing eventual glTF -> LLMesh conversion logic.
"""
# TODO:
# * Simple tests
# * Round-tripping skinning data from Blender-compatible glTF back to LLMesh (maybe through rig retargeting?)
# * Panda3D-glTF viewer for LLMesh? The glTFs seem to work fine in Panda3D-glTF's `gltf-viewer`.
# * Check if skew and projection components of transform matrices are ignored in practice as the spec requires.
# I suppose this would render some real assets impossible to represent with glTF.
import dataclasses
import math
import pprint
import sys
import uuid
from pathlib import Path
from typing import *
import gltflib
import numpy as np
import transformations
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.mesh import (
LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict, VertexWeight, llsd_to_mat4
)
from hippolyzer.lib.base.mesh_skeleton import AVATAR_SKELETON
from hippolyzer.lib.base.serialization import BufferReader
class IdentityList(list):
"""
List, but does index() by object identity, not equality
GLTF references objects by their index within some list, but we prefer to pass around
actual object references internally. If we don't do this, then when we try and get
a GLTF reference to a given object via `.index()` then we could end up actually getting
a reference to some other object that just happens to be equal. This was causing issues
with all primitives ending up with the same material, due to the default material's value
being the same across all primitives.
"""
def index(self, value, start: Optional[int] = None, stop: Optional[int] = None) -> int:
view = self[start:stop]
for i, x in enumerate(view):
if x is value:
if start:
return i + start
return i
raise ValueError(value)
def sl_to_gltf_coords(coords):
"""
SL (X, Y, Z) -> GL (X, Z, Y), as GLTF commandeth
Note that this will only work when reordering axes, flipping an axis is more complicated.
"""
return coords[0], coords[2], coords[1], *coords[3:]
def sl_to_gltf_uv(uv):
"""Flip the V coordinate of a UV to match glTF convention"""
return [uv[0], -uv[1]]
def sl_mat4_to_gltf(mat: np.ndarray) -> List[float]:
"""
Convert an SL Mat4 to the glTF coordinate system
This should only be done immediately before storing the matrix in a glTF structure!
"""
# TODO: This is probably not correct. We definitely need to flip Z but there's
# probably a better way to do it.
decomp = [sl_to_gltf_coords(x) for x in transformations.decompose_matrix(mat)]
trans = decomp[3]
decomp[3] = (trans[0], trans[1], -trans[2])
return list(transformations.compose_matrix(*decomp).flatten(order='F'))
# Mat3 to convert points from SL coordinate space to GLTF coordinate space
POINT_TO_GLTF_MAT = transformations.compose_matrix(angles=(-(math.pi / 2), 0, 0))[:3, :3]
def sl_vec3_array_to_gltf(vec_list: np.ndarray) -> np.ndarray:
new_array = []
for x in vec_list:
new_array.append(POINT_TO_GLTF_MAT.dot(x))
return np.array(new_array)
def sl_weights_to_gltf(sl_weights: List[List[VertexWeight]]) -> Tuple[np.ndarray, np.ndarray]:
"""Convert SL Weights to separate JOINTS_0 and WEIGHTS_0 vec4 arrays"""
joints = np.zeros((len(sl_weights), 4), dtype=np.uint8)
weights = np.zeros((len(sl_weights), 4), dtype=np.float32)
for i, vert_weights in enumerate(sl_weights):
# We need to re-normalize these since the quantization can mess them up
collected_weights = []
for j, vert_weight in enumerate(vert_weights):
joints[i, j] = vert_weight.joint_idx
collected_weights.append(vert_weight.weight)
weight_sum = sum(collected_weights)
if weight_sum:
for j, weight in enumerate(collected_weights):
weights[i, j] = weight / weight_sum
return joints, weights
def normalize_vec3(a):
norm = np.linalg.norm(a)
if norm == 0:
return a
return a / norm
def apply_bind_shape_matrix(bind_shape_matrix: np.ndarray, verts: np.ndarray, norms: np.ndarray) \
-> Tuple[np.ndarray, np.ndarray]:
"""
Apply the bind shape matrix to the mesh data
glTF expects all verts and normals to be in armature-local space so that mesh data can be shared
between differently-oriented armatures. Or something.
# https://github.com/KhronosGroup/glTF-Blender-IO/issues/566#issuecomment-523119339
glTF also doesn't have a concept of a "bind shape matrix" like Collada does
per its skinning docs, so we have to mix it into the mesh data manually.
See https://github.com/KhronosGroup/glTF-Tutorials/blob/master/gltfTutorial/gltfTutorial_020_Skins.md
"""
scale, _, angles, translation, _ = transformations.decompose_matrix(bind_shape_matrix)
scale_mat = transformations.compose_matrix(scale=scale)[:3, :3]
rot_mat = transformations.euler_matrix(*angles)[:3, :3]
rot_scale_mat = scale_mat @ np.linalg.inv(rot_mat)
# Apply the SRT transform to each vert
verts = (verts @ rot_scale_mat) + translation
# Our scale is unlikely to be uniform, so we have to fix up our normals as well.
# https://paroj.github.io/gltut/Illumination/Tut09%20Normal%20Transformation.html
inv_transpose_mat = np.transpose(np.linalg.inv(bind_shape_matrix)[:3, :3])
new_norms = [normalize_vec3(inv_transpose_mat @ norm) for norm in norms]
return verts, np.array(new_norms)
@dataclasses.dataclass
class JointContext:
node: gltflib.Node
# Original matrix for the bone, may have custom translation, but otherwise the same.
orig_matrix: np.ndarray
# xform that must be applied to inverse bind matrices to account for the changed bone
fixup_matrix: np.ndarray
JOINT_CONTEXT_DICT = Dict[str, JointContext]
class GLTFBuilder:
def __init__(self, blender_compatibility=False):
self.scene = gltflib.Scene(nodes=IdentityList())
self.model = gltflib.GLTFModel(
asset=gltflib.Asset(version="2.0"),
accessors=IdentityList(),
nodes=IdentityList(),
materials=IdentityList(),
buffers=IdentityList(),
bufferViews=IdentityList(),
meshes=IdentityList(),
skins=IdentityList(),
scenes=IdentityList((self.scene,)),
extensionsUsed=["KHR_materials_specular"],
scene=0,
)
self.gltf = gltflib.GLTF(
model=self.model,
resources=IdentityList(),
)
self.blender_compatibility = blender_compatibility
def add_nodes_from_llmesh(self, mesh: MeshAsset, name: str, mesh_transform: Optional[np.ndarray] = None):
"""Build a glTF version of a mesh asset, appending it and its armature to the scene root"""
# TODO: mesh data instancing?
# consider https://github.com/KhronosGroup/glTF-Blender-IO/issues/1634.
if mesh_transform is None:
mesh_transform = np.identity(4)
skin_seg: Optional[SkinSegmentDict] = mesh.segments.get('skin')
skin = None
if skin_seg:
mesh_transform = llsd_to_mat4(skin_seg['bind_shape_matrix'])
joint_ctxs = self.add_joints(skin_seg)
# Give our armature a root node and parent the pelvis to it
armature_node = self.add_node("Armature")
self.scene.nodes.append(self.model.nodes.index(armature_node))
armature_node.children.append(self.model.nodes.index(joint_ctxs['mPelvis'].node))
skin = self.add_skin("Armature", joint_ctxs, skin_seg)
skin.skeleton = self.model.nodes.index(armature_node)
primitives = []
# Just the high LOD for now
for submesh in mesh.segments['high_lod']:
verts = np.array(positions_from_domain(submesh['Position'], submesh['PositionDomain']))
norms = np.array(submesh['Normal'])
tris = np.array(submesh['TriangleList'])
joints = np.array([])
weights = np.array([])
range_uv = np.array([])
if "TexCoord0" in submesh:
range_uv = np.array(positions_from_domain(submesh['TexCoord0'], submesh['TexCoord0Domain']))
if 'Weights' in submesh:
joints, weights = sl_weights_to_gltf(submesh['Weights'])
if skin:
# Convert verts and norms to armature-local space
verts, norms = apply_bind_shape_matrix(mesh_transform, verts, norms)
primitives.append(self.add_primitive(
tris=tris,
positions=verts,
normals=norms,
uvs=range_uv,
joints=joints,
weights=weights,
))
mesh_node = self.add_node(
name,
self.add_mesh(name, primitives),
transform=mesh_transform,
)
if skin:
# Node translation isn't relevant, we're going to use the bind matrices
# If you pull this into Blender you may want to untick "Guess Original Bind Pose",
# it guesses that based on the inverse bind matrices which may have Maya poisoning.
# TODO: Maybe we could automatically undo that by comparing expected bone scale and rot
# to scale and rot in the inverse bind matrices, and applying fixups to the
# bind shape matrix and inverse bind matrices?
mesh_node.matrix = None
mesh_node.skin = self.model.skins.index(skin)
self.scene.nodes.append(self.model.nodes.index(mesh_node))
def add_node(
self,
name: str,
mesh: Optional[gltflib.Mesh] = None,
transform: Optional[np.ndarray] = None,
) -> gltflib.Node:
node = gltflib.Node(
name=name,
mesh=self.model.meshes.index(mesh) if mesh else None,
matrix=sl_mat4_to_gltf(transform) if transform is not None else None,
children=[],
)
self.model.nodes.append(node)
return node
def add_mesh(
self,
name: str,
primitives: List[gltflib.Primitive],
) -> gltflib.Mesh:
for i, prim in enumerate(primitives):
# Give the materials a name relating to what "face" they belong to
self.model.materials[prim.material].name = f"{name}.{i:03}"
mesh = gltflib.Mesh(name=name, primitives=primitives)
self.model.meshes.append(mesh)
return mesh
def add_primitive(
self,
tris: np.ndarray,
positions: np.ndarray,
normals: np.ndarray,
uvs: np.ndarray,
weights: np.ndarray,
joints: np.ndarray,
) -> gltflib.Primitive:
# Make a Material for the primitive. Materials pretty much _are_ the primitives in
# LLMesh, so just make them both in one go. We need a unique material for each primitive.
material = gltflib.Material(
pbrMetallicRoughness=gltflib.PBRMetallicRoughness(
baseColorFactor=[1.0, 1.0, 1.0, 1.0],
metallicFactor=0.0,
roughnessFactor=0.0,
),
extensions={
"KHR_materials_specular": {
"specularFactor": 0.0,
"specularColorFactor": [0, 0, 0]
},
}
)
self.model.materials.append(material)
attributes = gltflib.Attributes(
POSITION=self.maybe_add_vec_array(sl_vec3_array_to_gltf(positions), gltflib.AccessorType.VEC3),
NORMAL=self.maybe_add_vec_array(sl_vec3_array_to_gltf(normals), gltflib.AccessorType.VEC3),
TEXCOORD_0=self.maybe_add_vec_array(np.array([sl_to_gltf_uv(uv) for uv in uvs]), gltflib.AccessorType.VEC2),
JOINTS_0=self.maybe_add_vec_array(joints, gltflib.AccessorType.VEC4, gltflib.ComponentType.UNSIGNED_BYTE),
WEIGHTS_0=self.maybe_add_vec_array(weights, gltflib.AccessorType.VEC4),
)
return gltflib.Primitive(
attributes=attributes,
indices=self.model.accessors.index(self.add_scalars(tris)),
material=self.model.materials.index(material),
mode=gltflib.PrimitiveMode.TRIANGLES,
)
def add_scalars(self, scalars: np.ndarray) -> gltflib.Accessor:
"""
Add a potentially multidimensional array of scalars, returning the accessor
Generally only used for triangle indices
"""
scalar_bytes = scalars.astype(np.uint32).flatten().tobytes()
buffer_view = self.add_buffer_view(scalar_bytes, None)
accessor = gltflib.Accessor(
bufferView=self.model.bufferViews.index(buffer_view),
componentType=gltflib.ComponentType.UNSIGNED_INT,
count=scalars.size, # use the flattened size!
type=gltflib.AccessorType.SCALAR.value, # type: ignore
min=[int(scalars.min())], # type: ignore
max=[int(scalars.max())], # type: ignore
)
self.model.accessors.append(accessor)
return accessor
def maybe_add_vec_array(
self,
vecs: np.ndarray,
vec_type: gltflib.AccessorType,
component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT,
) -> Optional[int]:
if not vecs.size:
return None
accessor = self.add_vec_array(vecs, vec_type, component_type)
return self.model.accessors.index(accessor)
def add_vec_array(
self,
vecs: np.ndarray,
vec_type: gltflib.AccessorType,
component_type: gltflib.ComponentType = gltflib.ComponentType.FLOAT
) -> gltflib.Accessor:
"""
Add a two-dimensional array of vecs (positions, normals, weights, UVs), returning the accessor
Vec type may be a vec2, vec3, or vec4.
"""
# Pretty much all of these are float32 except the ones that aren't
dtype = np.float32
if component_type == gltflib.ComponentType.UNSIGNED_BYTE:
dtype = np.uint8
vec_data = vecs.astype(dtype).tobytes()
buffer_view = self.add_buffer_view(vec_data, target=None)
accessor = gltflib.Accessor(
bufferView=self.model.bufferViews.index(buffer_view),
componentType=component_type,
count=len(vecs),
type=vec_type.value, # type: ignore
min=vecs.min(axis=0).tolist(), # type: ignore
max=vecs.max(axis=0).tolist(), # type: ignore
)
self.model.accessors.append(accessor)
return accessor
def add_buffer_view(self, data: bytes, target: Optional[gltflib.BufferTarget]) -> gltflib.BufferView:
"""Create a buffer view and associated buffer and resource for a blob of data"""
resource = gltflib.FileResource(filename=f"res-{uuid.uuid4()}.bin", data=data)
self.gltf.resources.append(resource)
buffer = gltflib.Buffer(uri=resource.filename, byteLength=len(resource.data))
self.model.buffers.append(buffer)
buffer_view = gltflib.BufferView(
buffer=self.model.buffers.index(buffer),
byteLength=buffer.byteLength,
byteOffset=0,
target=target
)
self.model.bufferViews.append(buffer_view)
return buffer_view
def add_joints(self, skin: SkinSegmentDict) -> JOINT_CONTEXT_DICT:
# There may be some joints not present in the mesh that we need to add to reach the mPelvis root
required_joints = set()
for joint_name in skin['joint_names']:
joint_node = AVATAR_SKELETON[joint_name]
required_joints.add(joint_node)
required_joints.update(joint_node.ancestors)
# If this is present, it may override the joint positions from the skeleton definition
if 'alt_inverse_bind_matrix' in skin:
joint_overrides = dict(zip(skin['joint_names'], skin['alt_inverse_bind_matrix']))
else:
joint_overrides = {}
built_joints: JOINT_CONTEXT_DICT = {}
for joint in required_joints:
joint_matrix = joint.matrix
# Do we have a joint position override that would affect joint_matrix?
override = joint_overrides.get(joint.name)
if override:
decomp = list(transformations.decompose_matrix(joint_matrix))
# We specifically only want the translation from the override!
translation = transformations.translation_from_matrix(llsd_to_mat4(override))
# Only do it if the difference is over 0.1mm though
if Vector3.dist(Vector3(*translation), joint.translation) > 0.0001:
decomp[3] = translation
joint_matrix = transformations.compose_matrix(*decomp)
# Do we need to mess with the bone's matrices to make Blender cooperate?
orig_matrix = joint_matrix
fixup_matrix = np.identity(4)
if self.blender_compatibility:
joint_matrix, fixup_matrix = self._fix_blender_joint(joint_matrix)
# TODO: populate "extras" here with the metadata the Blender collada stuff uses to store
# "bind_mat" and "rest_mat" so we can go back to our original matrices when exporting
# from blender to .dae!
gltf_joint = self.add_node(joint.name, transform=joint_matrix)
# Store the node along with any fixups we may need to apply to the bind matrices later
built_joints[joint.name] = JointContext(gltf_joint, orig_matrix, fixup_matrix)
# Add each joint to the child list of their respective parent
for joint_name, joint_ctx in built_joints.items():
if parent := AVATAR_SKELETON[joint_name].parent:
built_joints[parent().name].node.children.append(self.model.nodes.index(joint_ctx.node))
return built_joints
def _fix_blender_joint(self, joint_matrix: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""
Split a joint matrix into a joint matrix and fixup matrix
If we don't account for weird scaling on the collision volumes, then
Blender freaks out. This is an issue in blender where it doesn't
apply the inverse bind matrices relative to the scale and rotation of
the bones themselves, as it should per the glTF spec. Blender's glTF loader
tries to recover from this by applying certain transforms as a pose, but
the damage has been done by that point. Nobody else really runs into
this because they have the good sense to not use some nightmare abomination
rig with scaling and rotation on the skeleton like SL does.
Blender will _only_ correctly handle the translation component of the joint;
any other transforms need to be mixed into the inverse bind matrices themselves.
There's no internal concept of bone scale or rot in Blender right now.
Should investigate an Avastar-style approach of optionally retargeting
to a Blender-compatible rig with translation-only bones, and modify
the bind matrices to accommodate. The glTF importer supports metadata through
the "extras" fields, so we can potentially abuse the "bind_mat" metadata field
that Blender already uses for the "Keep Bind Info" Collada import / export hack.
For context:
* https://github.com/KhronosGroup/glTF-Blender-IO/issues/1305
* https://developer.blender.org/T38660 (these are Collada, but still relevant)
* https://developer.blender.org/T29246
* https://developer.blender.org/T50412
* https://developer.blender.org/T53620 (FBX but still relevant)
"""
scale, shear, angles, translate, projection = transformations.decompose_matrix(joint_matrix)
joint_matrix = transformations.compose_matrix(translate=translate)
fixup_matrix = transformations.compose_matrix(scale=scale, angles=angles)
return joint_matrix, fixup_matrix
def add_skin(self, name: str, joint_nodes: JOINT_CONTEXT_DICT, skin_seg: SkinSegmentDict) -> gltflib.Skin:
joints_arr = []
for joint_name in skin_seg['joint_names']:
joint_ctx = joint_nodes[joint_name]
joints_arr.append(self.model.nodes.index(joint_ctx.node))
inv_binds = []
for joint_name, inv_bind in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
joint_ctx = joint_nodes[joint_name]
inv_bind = joint_ctx.fixup_matrix @ llsd_to_mat4(inv_bind)
inv_binds.append(sl_mat4_to_gltf(inv_bind))
inv_binds_data = np.array(inv_binds, dtype=np.float32).tobytes()
buffer_view = self.add_buffer_view(inv_binds_data, target=None)
accessor = gltflib.Accessor(
bufferView=self.model.bufferViews.index(buffer_view),
componentType=gltflib.ComponentType.FLOAT,
count=len(inv_binds),
type=gltflib.AccessorType.MAT4.value, # type: ignore
)
self.model.accessors.append(accessor)
accessor_idx = self.model.accessors.index(accessor)
skin = gltflib.Skin(name=name, joints=joints_arr, inverseBindMatrices=accessor_idx)
self.model.skins.append(skin)
return skin
def finalize(self):
"""Clean up the mesh to pass the glTF smell test, should be done last"""
def _nullify_empty_lists(dc):
for field in dataclasses.fields(dc):
# Empty lists should be replaced with None
if getattr(dc, field.name) == []:
setattr(dc, field.name, None)
for node in self.model.nodes:
_nullify_empty_lists(node)
_nullify_empty_lists(self.model)
return self.gltf
def main():
# Take an llmesh file as an argument and spit out basename-converted.gltf
with open(sys.argv[1], "rb") as f:
reader = BufferReader("<", f.read())
filename = Path(sys.argv[1]).stem
mesh: MeshAsset = reader.read(LLMeshSerializer(parse_segment_contents=True))
builder = GLTFBuilder(blender_compatibility=True)
builder.add_nodes_from_llmesh(mesh, filename)
gltf = builder.finalize()
pprint.pprint(gltf.model)
gltf.export_glb(sys.argv[1].rsplit(".", 1)[0] + "-converted.gltf")
if __name__ == "__main__":
main()

View File

@@ -147,7 +147,7 @@ def get_resource_filename(resource_filename: str):
return pkg_resources.resource_filename("hippolyzer", resource_filename)
def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[_T, None, None]:
def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[Sequence[_T], None, None]:
while chunkable:
yield chunkable[:chunk_size]
chunkable = chunkable[chunk_size:]
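For illustration, a minimal sketch of the behavior the corrected annotation describes (the fix itself is type-only; runtime behavior is unchanged):

# Each yielded value is a Sequence[_T] chunk, not a single _T.
chunks = list(to_chunks([1, 2, 3, 4, 5], 2))
assert chunks == [[1, 2], [3, 4], [5]]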

View File

@@ -22,6 +22,7 @@ from hippolyzer.lib.base.legacy_schema import (
SchemaFieldSerializer,
SchemaHexInt,
SchemaInt,
SchemaLLSD,
SchemaMultilineStr,
SchemaParsingError,
SchemaStr,
@@ -385,6 +386,7 @@ class InventoryObject(InventoryContainerBase):
ID_ATTR: ClassVar[str] = "obj_id"
obj_id: UUID = schema_field(SchemaUUID)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
__hash__ = InventoryNodeBase.__hash__
@@ -399,6 +401,7 @@ class InventoryCategory(InventoryContainerBase):
pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
owner_id: UUID = schema_field(SchemaUUID)
version: int = schema_field(SchemaInt)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
__hash__ = InventoryNodeBase.__hash__
@@ -419,6 +422,7 @@ class InventoryItem(InventoryNodeBase):
sale_info: InventorySaleInfo = schema_field(InventorySaleInfo)
asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None)
__hash__ = InventoryNodeBase.__hash__

View File

@@ -14,6 +14,8 @@ import re
from io import StringIO
from typing import *
import hippolyzer.lib.base.llsd as llsd
from hippolyzer.lib.base.datatypes import UUID
LOG = logging.getLogger(__name__)
@@ -110,6 +112,17 @@ class SchemaUUID(SchemaFieldSerializer[UUID]):
return str(val)
class SchemaLLSD(SchemaFieldSerializer[_T]):
"""Arbitrary LLSD embedded in a field"""
@classmethod
def deserialize(cls, val: str) -> _T:
return llsd.parse_notation(val.encode("utf8"))
@classmethod
def serialize(cls, val: _T) -> str:
return llsd.format_notation(val).decode("utf8")
def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False) -> dataclasses.Field: # noqa
"""Describe a field in the inventory schema and the shape of its value"""

View File

@@ -1,14 +1,19 @@
import calendar
import datetime
import struct
import typing
import uuid
import zlib
from llbase.llsd import *
from llsd import *
# So we can directly reference the original wrapper funcs where necessary
import llbase.llsd
import llsd as base_llsd
from llsd.base import is_string, is_unicode
from hippolyzer.lib.base.datatypes import *
class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
class HippoLLSDBaseFormatter(base_llsd.base.LLSDBaseFormatter):
UUID: callable
ARRAY: callable
@@ -24,25 +29,25 @@ class HippoLLSDBaseFormatter(llbase.llsd.LLSDBaseFormatter):
return self.ARRAY(v.data())
class HippoLLSDXMLFormatter(llbase.llsd.LLSDXMLFormatter, HippoLLSDBaseFormatter):
class HippoLLSDXMLFormatter(base_llsd.serde_xml.LLSDXMLFormatter, HippoLLSDBaseFormatter):
def __init__(self):
super().__init__()
class HippoLLSDXMLPrettyFormatter(llbase.llsd.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
def __init__(self):
super().__init__()
def format_pretty_xml(val: typing.Any):
def format_pretty_xml(val: typing.Any) -> bytes:
return HippoLLSDXMLPrettyFormatter().format(val)
def format_xml(val: typing.Any):
def format_xml(val: typing.Any) -> bytes:
return HippoLLSDXMLFormatter().format(val)
class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBaseFormatter):
class HippoLLSDNotationFormatter(base_llsd.serde_notation.LLSDNotationFormatter, HippoLLSDBaseFormatter):
def __init__(self):
super().__init__()
@@ -53,11 +58,11 @@ class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBas
return super().STRING(v).replace(b"\n", b"\\n")
def format_notation(val: typing.Any):
def format_notation(val: typing.Any) -> bytes:
return HippoLLSDNotationFormatter().format(val)
def format_binary(val: typing.Any, with_header=True):
def format_binary(val: typing.Any, with_header=True) -> bytes:
val = _format_binary_recurse(val)
if with_header:
return b'<?llsd/binary?>\n' + val
@@ -84,7 +89,7 @@ def _format_binary_recurse(something) -> bytes:
return b'1'
else:
return b'0'
elif is_integer(something):
elif isinstance(something, int):
try:
return b'i' + struct.pack('!i', something)
except (OverflowError, struct.error) as exc:
@@ -129,7 +134,7 @@ def _format_binary_recurse(something) -> bytes:
(type(something), something))
class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
class HippoLLSDBinaryParser(base_llsd.serde_binary.LLSDBinaryParser):
def __init__(self):
super().__init__()
self._dispatch[ord('u')] = lambda: UUID(bytes=self._getc(16))
@@ -162,11 +167,11 @@ def parse_binary(data: bytes):
def parse_xml(data: bytes):
return llbase.llsd.parse_xml(data)
return base_llsd.parse_xml(data)
def parse_notation(data: bytes):
return llbase.llsd.parse_notation(data)
return base_llsd.parse_notation(data)
def zip_llsd(val: typing.Any):
@@ -189,6 +194,6 @@ def parse(data: bytes):
else:
return parse_notation(data)
except KeyError as e:
raise llbase.llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
raise base_llsd.LLSDParseError('LLSD could not be parsed: %s' % (e,))
except TypeError as e:
raise llbase.llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))
raise base_llsd.LLSDParseError('Input stream not of type bytes. %s' % (e,))

View File

@@ -11,15 +11,25 @@ from typing import *
import zlib
from copy import deepcopy
import numpy as np
import recordclass
from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.datatypes import Vector3, Vector2, UUID, TupleCoord
from hippolyzer.lib.base.llsd import zip_llsd, unzip_llsd
from hippolyzer.lib.base.serialization import ParseContext
LOG = logging.getLogger(__name__)
def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
return np.array(mat).reshape((4, 4), order='F')
def mat4_to_llsd(mat: np.ndarray) -> List[float]:
return list(mat.flatten(order='F'))
@dataclasses.dataclass
class MeshAsset:
header: MeshHeaderDict = dataclasses.field(default_factory=dict)
@@ -168,7 +178,7 @@ class DomainDict(TypedDict):
Min: List[float]
class VertexWeight(recordclass.datatuple): # type: ignore
class VertexWeight(recordclass.RecordClass):
"""Vertex weight for a specific joint on a specific vertex"""
# index of the joint within the joint_names list in the skin segment
joint_idx: int
@@ -255,7 +265,6 @@ def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
class VertexWeights(se.SerializableBase):
"""Serializer for a list of joint weights on a single vertex"""
INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
INFLUENCE_LIMIT = 4
INFLUENCE_TERM = 0xFF
@@ -266,18 +275,30 @@ class VertexWeights(se.SerializableBase):
for val in vals:
joint_idx, influence = val
writer.write(se.U8, joint_idx)
writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
writer.write(se.U16, round(influence * 0xFFff), ctx=ctx)
if len(vals) != cls.INFLUENCE_LIMIT:
writer.write(se.U8, cls.INFLUENCE_TERM)
@classmethod
def deserialize(cls, reader: se.Reader, ctx=None):
# NOTE: normally you'd want to do something like arrange this into a nicely
# aligned byte array with zero padding so that you could vectorize the decoding.
# In cases where having a vertex with no weights is semantically equivalent to
# having a vertex _with_ weights of a value of 0.0, that's fine. This isn't the case
# in LL's implementation of mesh:
#
# https://bitbucket.org/lindenlab/viewer/src/d31a83fb946c49a38376ea3b312b5380d0c8c065/indra/llmath/llvolume.cpp#lines-2560:2628
#
# Consider the difference between handling of b"\x00\x00\x00\xFF" and b"\xFF" with the above logic.
# To simplify round-tripping while preserving those semantics, we don't do a vectorized decode.
# I had a vectorized numpy version, but those requirements made everything a bit of a mess.
influence_list = []
for _ in range(cls.INFLUENCE_LIMIT):
joint_idx = reader.read(se.U8)
joint_idx = reader.read_bytes(1)[0]
if joint_idx == cls.INFLUENCE_TERM:
break
influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
weight = reader.read(se.U16, ctx=ctx) / 0xFFff
influence_list.append(VertexWeight(joint_idx, weight))
return influence_list
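A rough standalone sketch of the wire format implied by the code above (little-endian U16 influences are assumed, matching the "<" readers used elsewhere in this module; INFLUENCE_LIMIT is 4 and INFLUENCE_TERM is 0xFF):

import struct

def encode_weights(weights):  # weights: [(joint_idx, influence), ...], at most 4
    buf = b""
    for joint_idx, influence in weights:
        # U8 joint index, then the influence quantized to a U16
        buf += struct.pack("<BH", joint_idx, round(influence * 0xFFff))
    if len(weights) != 4:
        buf += b"\xFF"  # terminator only when fewer than 4 influences
    return buf

# Per the LL decoder behavior referenced above, b"\xff" (no weights) is *not*
# equivalent to b"\x00\x00\x00\xff" (a single zero weight on joint 0).
assert encode_weights([]) == b"\xff"
assert encode_weights([(0, 0.0)]) == b"\x00\x00\x00\xff"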
@@ -312,16 +333,46 @@ class SegmentSerializer:
return new_segment
class VecListAdapter(se.Adapter):
def __init__(self, child_spec: se.SERIALIZABLE_TYPE, vec_type: Type):
super().__init__(child_spec)
self.vec_type = vec_type
def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
return val
def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
new_vals = []
for elem in val:
new_vals.append(self.vec_type(*elem))
return new_vals
LE_U16: np.dtype = np.dtype(np.uint16).newbyteorder('<') # noqa
LOD_SEGMENT_SERIALIZER = SegmentSerializer({
# 16-bit indices to the verts making up the tri. Imposes a 16-bit
# upper limit on verts in any given material in the mesh.
"TriangleList": se.Collection(None, se.Collection(3, se.U16)),
"TriangleList": se.ExprAdapter(
se.NumPyArray(se.BytesGreedy(), LE_U16, 3),
decode_func=lambda x: x.tolist(),
),
# These are used to interpolate between values in their respective domains
# Each position represents a single vert.
"Position": se.Collection(None, se.Vector3U16(0.0, 1.0)),
"TexCoord0": se.Collection(None, se.Vector2U16(0.0, 1.0)),
# Normals have a static domain between -1 and 1, so just use that.
"Normal": se.Collection(None, se.Vector3U16(-1.0, 1.0)),
"Position": VecListAdapter(
se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), 0.0, 1.0),
Vector3,
),
"TexCoord0": VecListAdapter(
se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 2), 0.0, 1.0),
Vector2,
),
# Normals have a static domain between -1 and 1, so we just use that rather than 0.0 - 1.0.
"Normal": VecListAdapter(
se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), LE_U16, 3), -1.0, 1.0),
Vector3,
),
"Weights": se.Collection(None, VertexWeights)
})

View File

@@ -0,0 +1,121 @@
from __future__ import annotations
import dataclasses
import weakref
from typing import *
import transformations
from lxml import etree
from hippolyzer.lib.base.datatypes import Vector3, RAD_TO_DEG
from hippolyzer.lib.base.helpers import get_resource_filename
MAYBE_JOINT_REF = Optional[Callable[[], "JointNode"]]
SKELETON_REF = Optional[Callable[[], "Skeleton"]]
@dataclasses.dataclass
class JointNode:
name: str
parent: MAYBE_JOINT_REF
skeleton: SKELETON_REF
translation: Vector3
pivot: Vector3 # pivot point for the joint, generally the same as translation
rotation: Vector3 # Euler rotation in degrees
scale: Vector3
type: str # bone or collision_volume
def __hash__(self):
return hash((self.name, self.type))
@property
def matrix(self):
return transformations.compose_matrix(
scale=tuple(self.scale),
angles=tuple(self.rotation / RAD_TO_DEG),
translate=tuple(self.translation),
)
@property
def index(self) -> int:
bone_idx = 0
for node in self.skeleton().joint_dict.values():
if node.type != "bone":
continue
if self is node:
return bone_idx
bone_idx += 1
raise KeyError(f"{self.name!r} doesn't exist in skeleton")
@property
def ancestors(self) -> Sequence[JointNode]:
joint_node = self
ancestors = []
while joint_node.parent:
joint_node = joint_node.parent()
ancestors.append(joint_node)
return ancestors
@property
def children(self) -> Sequence[JointNode]:
children = []
for node in self.skeleton().joint_dict.values():
if node.parent and node.parent() == self:
children.append(node)
return children
@property
def descendents(self) -> Set[JointNode]:
descendents = set()
ancestors = {self}
last_ancestors = set()
while last_ancestors != ancestors:
last_ancestors = ancestors
for node in self.skeleton().joint_dict.values():
if node.parent and node.parent() in ancestors:
ancestors.add(node)
descendents.add(node)
return descendents
class Skeleton:
def __init__(self, root_node: etree.ElementBase):
self.joint_dict: Dict[str, JointNode] = {}
self._parse_node_children(root_node, None)
def __getitem__(self, item: str) -> JointNode:
return self.joint_dict[item]
def _parse_node_children(self, node: etree.ElementBase, parent: MAYBE_JOINT_REF):
name = node.get('name')
joint = JointNode(
name=name,
parent=parent,
skeleton=weakref.ref(self),
translation=_get_vec_attr(node, "pos", Vector3()),
pivot=_get_vec_attr(node, "pivot", Vector3()),
rotation=_get_vec_attr(node, "rot", Vector3()),
scale=_get_vec_attr(node, "scale", Vector3(1, 1, 1)),
type=node.tag,
)
self.joint_dict[name] = joint
for child in node.iterchildren():
self._parse_node_children(child, weakref.ref(joint))
def _get_vec_attr(node, attr_name: str, default: Vector3) -> Vector3:
attr_val = node.get(attr_name, None)
if not attr_val:
return default
return Vector3(*(float(x) for x in attr_val.split(" ") if x))
def load_avatar_skeleton() -> Skeleton:
skel_path = get_resource_filename("lib/base/data/avatar_skeleton.xml")
with open(skel_path, 'r') as f:
skel_root = etree.fromstring(f.read())
return Skeleton(skel_root.getchildren()[0])
AVATAR_SKELETON = load_avatar_skeleton()
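A sketch of how the pieces above compose, assuming the AVATAR_SKELETON singleton: a joint's world-space matrix is its ancestors' local matrices applied root-first, then its own.

import numpy as np

def world_matrix(joint: JointNode) -> np.ndarray:
    mat = np.identity(4)
    for ancestor in reversed(joint.ancestors):  # mPelvis first
        mat = mat @ ancestor.matrix
    return mat @ joint.matrix

chest_world = world_matrix(AVATAR_SKELETON["mChest"])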

View File

@@ -6,6 +6,7 @@ import copy
import dataclasses
import datetime as dt
import logging
from collections import deque
from typing import *
from typing import Optional
@@ -25,16 +26,23 @@ class ReliableResendInfo:
class Circuit:
def __init__(self, near_host: Optional[ADDR_TUPLE], far_host: ADDR_TUPLE, transport):
def __init__(
self,
near_host: Optional[ADDR_TUPLE],
far_host: ADDR_TUPLE,
transport: Optional[AbstractUDPTransport] = None,
):
self.near_host: Optional[ADDR_TUPLE] = near_host
self.host: ADDR_TUPLE = far_host
self.is_alive = True
self.transport: Optional[AbstractUDPTransport] = transport
self.transport = transport
self.serializer = UDPMessageSerializer()
self.last_packet_at = dt.datetime.now()
self.packet_id_base = 0
self.unacked_reliable: Dict[Tuple[Direction, int], ReliableResendInfo] = {}
self.resend_every: float = 3.0
# Reliable messages that we've already seen and handled, for resend suppression
self.seen_reliable: deque[int] = deque(maxlen=1_000)
def _send_prepared_message(self, message: Message, transport=None):
try:
@@ -44,6 +52,11 @@ class Circuit:
raise
return self.send_datagram(serialized, message.direction, transport=transport)
def disconnect(self):
self.packet_id_base = 0
self.unacked_reliable.clear()
self.is_alive = False
def send_datagram(self, data: bytes, direction: Direction, transport=None):
self.last_packet_at = dt.datetime.now()
src_addr, dst_addr = self.host, self.near_host
@@ -66,6 +79,7 @@ class Circuit:
# If it was queued, it's not anymore
message.queued = False
message.finalized = True
return True
def send(self, message: Message, transport=None) -> UDPPacket:
if self.prepare_message(message):
@@ -77,9 +91,6 @@ class Circuit:
)
return self._send_prepared_message(message, transport)
# Temporary alias
send_message = send
def send_reliable(self, message: Message, transport=None) -> asyncio.Future:
"""send() wrapper that always sends reliably and allows `await`ing ACK receipt"""
if not message.synthetic:
@@ -123,6 +134,13 @@ class Circuit:
message.direction = direction
self.send(message)
def track_reliable(self, packet_id: int) -> bool:
"""Tracks a reliable packet, returning if it's a new message"""
if packet_id in self.seen_reliable:
return False
self.seen_reliable.append(packet_id)
return True
def __repr__(self):
return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)

View File

@@ -78,7 +78,7 @@ class TemplateDataPacker:
MsgType.MVT_S8: _make_struct_spec('b'),
MsgType.MVT_U8: _make_struct_spec('B'),
MsgType.MVT_BOOL: _make_struct_spec('B'),
MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: x.bytes),
MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: UUID(x).bytes),
MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
MsgType.MVT_IP_PORT: _make_struct_spec('!H'),
MsgType.MVT_U16: _make_struct_spec('<H'),

View File

@@ -222,7 +222,7 @@ class Message:
def add_blocks(self, block_list):
# can have a list of blocks if it is multiple or variable
for block in block_list:
if type(block) == list:
if type(block) is list:
for bl in block:
self.add_block(bl)
else:

View File

@@ -20,7 +20,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from logging import getLogger
from llbase import llsd
import llsd
from hippolyzer.lib.base.message.data import msg_details

View File

@@ -31,7 +31,8 @@ _T = TypeVar("_T")
_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
MESSAGE_NAMES = Iterable[_K]
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]]` apparently?
MESSAGE_NAMES = Iterable[Union[_K, str]]
class MessageHandler(Generic[_T, _K]):
@@ -43,10 +44,9 @@ class MessageHandler(Generic[_T, _K]):
LOG.debug('Creating a monitor for %s' % message_name)
return self.handlers.setdefault(message_name, Event())
def subscribe(self, message_name: _K, handler: MESSAGE_HANDLER) -> Event:
def subscribe(self, message_name: Union[_K, Literal["*"]], handler: MESSAGE_HANDLER):
notifier = self.register(message_name)
notifier.subscribe(handler)
return notifier
def _subscribe_all(self, message_names: MESSAGE_NAMES, handler: MESSAGE_HANDLER,
predicate: Optional[PREDICATE] = None) -> List[Event]:
@@ -145,7 +145,7 @@ class MessageHandler(Generic[_T, _K]):
# Always try to call wildcard handlers
self._handle_type('*', message)
def _handle_type(self, name: _K, message: _T):
def _handle_type(self, name: Union[_K, Literal["*"]], message: _T):
handler = self.handlers.get(name)
if not handler:
return
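A usage sketch of the widened subscribe() signature, assuming MessageHandler's default constructor:

handler: MessageHandler[Message, str] = MessageHandler()
handler.subscribe("*", lambda msg: print("saw", msg.name))           # wildcard: every message
handler.subscribe("ChatFromSimulator", lambda msg: print("chat!"))   # a single message name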

View File

@@ -126,7 +126,7 @@ class UDPMessageDeserializer:
frequency, num = _parse_msg_num(reader)
current_template = self.template_dict.get_template_by_pair(frequency, num)
if current_template is None:
raise exc.MessageTemplateNotFound("deserializing data")
raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
msg.name = current_template.name
# extra field, see note regarding msg.offset
@@ -157,7 +157,6 @@ class UDPMessageDeserializer:
reader.seek(current_template.get_msg_freq_num_len() + msg.offset)
for tmpl_block in current_template.blocks:
LOG.debug("Parsing %s:%s" % (msg.name, tmpl_block.name))
# EOF?
if not len(reader):
# Seems like even some "Single" blocks are optional?
@@ -180,7 +179,6 @@ class UDPMessageDeserializer:
for i in range(repeat_count):
current_block = Block(tmpl_block.name)
LOG.debug("Adding block %s" % current_block.name)
msg.add_block(current_block)
for tmpl_variable in tmpl_block.variables:

View File

@@ -82,8 +82,9 @@ CAPS_DICT = Union[
class CapsClient:
def __init__(self, caps: Optional[CAPS_DICT] = None):
def __init__(self, caps: Optional[CAPS_DICT] = None, session: Optional[aiohttp.ClientSession] = None) -> None:
self._caps = caps
self._session = session
def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
return cap_or_url, headers, proxy, ssl
@@ -117,6 +118,7 @@ class CapsClient:
session_owned = False
# Use an existing session if we have one to take advantage of connection pooling
# otherwise create one
session = session or self._session
if session is None:
session_owned = True
session = aiohttp.ClientSession(

View File

@@ -46,6 +46,9 @@ class UDPPacket:
return self.dst_addr
return self.src_addr
def __repr__(self):
return f"<{self.__class__.__name__} src_addr={self.src_addr!r} dst_addr={self.dst_addr!r} data={self.data!r}>"
class AbstractUDPTransport(abc.ABC):
__slots__ = ()

View File

@@ -35,12 +35,7 @@ import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as tmpls
class Object(recordclass.datatuple): # type: ignore
__options__ = {
"use_weakref": True,
}
__weakref__: Any
class Object(recordclass.RecordClass, use_weakref=True): # type: ignore
LocalID: Optional[int] = None
State: Optional[int] = None
FullID: Optional[UUID] = None
@@ -199,6 +194,28 @@ class Object(recordclass.datatuple): # type: ignore
del val["Parent"]
return val
@property
def Ancestors(self) -> List[Object]:
obj = self
ancestors = []
while obj.Parent:
obj = obj.Parent
ancestors.append(obj)
return ancestors
@property
def Descendents(self) -> List[Object]:
new_children = [self]
descendents = []
while new_children:
to_check = new_children[:]
new_children.clear()
for obj in to_check:
for child in obj.Children:
new_children.append(child)
descendents.append(child)
return descendents
def handle_to_gridxy(handle: int) -> Tuple[int, int]:
return (handle >> 32) // 256, (handle & 0xFFffFFff) // 256
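A worked example for handle_to_gridxy() above: a region handle packs the region's global coordinates in meters as (x << 32) | y, and grid coordinates are those values divided by the 256m region size.

handle = (1000 * 256) << 32 | (1000 * 256)   # region at grid (1000, 1000)
assert handle_to_gridxy(handle) == (1000, 1000)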

View File

@@ -10,6 +10,7 @@ from io import SEEK_CUR, SEEK_SET, SEEK_END, RawIOBase, BufferedIOBase
from typing import *
import lazy_object_proxy
import numpy as np
import hippolyzer.lib.base.llsd as llsd
import hippolyzer.lib.base.datatypes as dtypes
@@ -838,7 +839,7 @@ class QuantizedFloat(QuantizedFloatBase):
super().__init__(prim_spec, zero_median=False)
self.lower = lower
self.upper = upper
# We know the range in `QuantizedFloat` when it's constructed, so we can infer
# whether or not we should round towards zero in __init__
max_error = (upper - lower) * self.step_mag
midpoint = (upper + lower) / 2.0
@@ -1610,7 +1611,9 @@ class BitfieldDataclass(DataclassAdapter):
class ExprAdapter(Adapter):
def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable, encode_func: Callable):
_ID = lambda x: x
def __init__(self, child_spec: SERIALIZABLE_TYPE, decode_func: Callable = _ID, encode_func: Callable = _ID):
super().__init__(child_spec)
self._decode_func = decode_func
self._encode_func = encode_func
@@ -1659,6 +1662,62 @@ class BinaryLLSD(SerializableBase):
writer.write_bytes(llsd.format_binary(val, with_header=False))
class NumPyArray(Adapter):
"""
A 2-dimensional, dynamic-length array of data from numpy. Greedy.
Unlike most other serializers, your endianness _must_ be specified in the dtype!
"""
__slots__ = ['dtype', 'elems']
def __init__(self, child_spec: Optional[SERIALIZABLE_TYPE], dtype: np.dtype, elems: int):
super().__init__(child_spec)
self.dtype = dtype
self.elems = elems
def _pick_dtype(self, endian: str) -> np.dtype:
return self.dtype.newbyteorder('>') if endian != "<" else self.dtype
def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
num_elems = len(val) // self.dtype.itemsize
num_ndims = num_elems // self.elems
buf_array = np.frombuffer(val, dtype=self.dtype, count=num_elems)
return buf_array.reshape((num_ndims, self.elems))
def encode(self, val, ctx: Optional[ParseContext]) -> Any:
val: np.ndarray = np.array(val, dtype=self.dtype).flatten()
return val.tobytes()
class QuantizedNumPyArray(Adapter):
"""Like QuantizedFloat. Only works correctly for unsigned types, no zero midpoint rounding!"""
def __init__(self, child_spec: NumPyArray, lower: float, upper: float):
super().__init__(child_spec)
self.dtype = child_spec.dtype
self.lower = lower
self.upper = upper
self.step_mag = 1.0 / ((2 ** (self.dtype.itemsize * 8)) - 1)
def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
val = np.array(val, dtype=np.float64)
val = np.clip(val, self.lower, self.upper)
delta = self.upper - self.lower
if delta == 0.0:
return np.zeros(val.shape, dtype=self.dtype)
val -= self.lower
val /= delta
val /= self.step_mag
return np.rint(val).astype(self.dtype)
def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
val = val.astype(np.float64)
val *= self.step_mag
val *= self.upper - self.lower
val += self.lower
return val
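A round-trip sketch of the adapters above, assuming the little-endian LE_U16 dtype defined in the mesh module:

quant = QuantizedNumPyArray(NumPyArray(se.BytesGreedy(), LE_U16, 3), -1.0, 1.0)
codes = np.array([[0, 0x7FFF, 0xFFFF]], dtype=LE_U16)
decoded = quant.decode(codes, None)    # approx. [[-1.0, 0.0, 1.0]]
recoded = quant.encode(decoded, None)  # back to [[0, 0x7FFF, 0xFFFF]]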
def subfield_serializer(msg_name, block_name, var_name):
def f(orig_cls):
global SUBFIELD_SERIALIZERS
@@ -1858,7 +1917,7 @@ class IntEnumSubfieldSerializer(AdapterInstanceSubfieldSerializer):
val = super().deserialize(ctx_obj, val, pod=pod)
# Don't pretend we were able to deserialize this if we
# had to fall through to the `int` case.
if pod and type(val) == int:
if pod and type(val) is int:
return UNSERIALIZABLE
return val

View File

@@ -4,14 +4,17 @@ Serialization templates for structures used in LLUDP and HTTP bodies.
import abc
import collections
import copy
import dataclasses
import datetime
import enum
import math
import zlib
from typing import *
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.namevalue import NameValuesSerializer
@@ -1249,7 +1252,7 @@ class TextureEntryCollection:
vals = getattr(self, key)
# First, give all faces the default value for this key
for te in as_dicts:
te[key] = vals[None]
te[key] = copy.copy(vals[None])
# Walk over the exception cases and replace the default value
for face_nums, val in vals.items():
# Default case already handled
@@ -1258,7 +1261,7 @@ class TextureEntryCollection:
for face_num in face_nums:
if face_num >= num_faces:
raise ValueError(f"Bad value for num_faces? {face_num} >= {num_faces}")
as_dicts[face_num][key] = val
as_dicts[face_num][key] = copy.copy(val)
return [TextureEntry(**x) for x in as_dicts]
@classmethod
@@ -1458,6 +1461,8 @@ class ExtraParamType(IntEnum):
RESERVED = 0x50
MESH = 0x60
EXTENDED_MESH = 0x70
RENDER_MATERIAL = 0x80
REFLECTION_PROBE = 0x90
class ExtendedMeshFlags(IntFlag):
@@ -1480,6 +1485,13 @@ class SculptTypeData:
Mirror: bool = se.bitfield_field(bits=1, adapter=se.BoolAdapter())
class ReflectionProbeFlags(IntFlag):
# use a box influence volume
BOX_VOLUME = 0x1
# render dynamic objects (avatars) into this Reflection Probe
DYNAMIC = 0x2
EXTRA_PARAM_TEMPLATES = {
ExtraParamType.FLEXIBLE: se.Template({
"Tension": se.BitField(se.U8, {"Tension": 6, "Softness1": 2}),
@@ -1511,6 +1523,15 @@ EXTRA_PARAM_TEMPLATES = {
ExtraParamType.EXTENDED_MESH: se.Template({
"Flags": se.IntFlag(ExtendedMeshFlags, se.U32),
}),
ExtraParamType.RENDER_MATERIAL: se.Collection(se.U8, se.Template({
"TEIdx": se.U8,
"TEID": se.UUID,
})),
ExtraParamType.REFLECTION_PROBE: se.Template({
"Ambiance": se.F32,
"ClipDistance": se.F32,
"Flags": se.IntFlag(ReflectionProbeFlags, se.U8),
}),
}
@@ -1856,6 +1877,8 @@ class AvatarPropertiesFlags(IntFlag):
@se.flag_field_serializer("AvatarGroupsReply", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarDataUpdate", "AgentDataData", "GroupPowers")
@se.flag_field_serializer("GroupProfileReply", "GroupData", "PowersMask")
@se.flag_field_serializer("GroupRoleDataReply", "RoleData", "Powers")
class GroupPowerFlags(IntFlag):
MEMBER_INVITE = 1 << 1 # Invite member
MEMBER_EJECT = 1 << 2 # Eject member from group
@@ -1945,6 +1968,15 @@ class GroupPowerFlags(IntFlag):
GROUP_BAN_ACCESS = 1 << 51 # Allows access to ban / un-ban agents from a group.
@se.flag_field_serializer("GrantUserRights", "Rights", "RelatedRights")
@se.flag_field_serializer("ChangeUserRights", "Rights", "RelatedRights")
class UserRelatedRights(IntFlag):
"""See lluserrelations.h for definitions"""
ONLINE_STATUS = 1
MAP_LOCATION = 1 << 1
MODIFY_OBJECTS = 1 << 2
@se.flag_field_serializer("RequestObjectPropertiesFamily", "ObjectData", "RequestFlags")
@se.flag_field_serializer("ObjectPropertiesFamily", "ObjectData", "RequestFlags")
class ObjectPropertiesFamilyRequestFlags(IntFlag):
@@ -2025,6 +2057,50 @@ class ScriptPermissions(IntFlag):
CHANGE_ENVIRONMENT = 1 << 18
@se.enum_field_serializer("UpdateMuteListEntry", "MuteData", "MuteType")
class MuteType(IntEnum):
BY_NAME = 0
AGENT = 1
OBJECT = 2
GROUP = 3
# Voice, presumably.
EXTERNAL = 4
@se.flag_field_serializer("UpdateMuteListEntry", "MuteData", "MuteFlags")
class MuteFlags(IntFlag):
# For backwards compatibility (since any mute list entries that were created before the flags existed
# will have a flags field of 0), some flags are "inverted".
# Note that it's possible, through flags, to completely disable an entry in the mute list.
# The code should detect this case and remove the mute list entry instead.
TEXT_CHAT = 1 << 0
VOICE_CHAT = 1 << 1
PARTICLES = 1 << 2
OBJECT_SOUNDS = 1 << 3
@property
def DEFAULT(self):
return 0x0
@property
def ALL(self):
return 0xF
class CreationDateAdapter(se.Adapter):
def decode(self, val: Any, ctx: Optional[se.ParseContext], pod: bool = False) -> Any:
return datetime.datetime.fromtimestamp(val / 1_000_000).isoformat()
def encode(self, val: Any, ctx: Optional[se.ParseContext]) -> Any:
return int(datetime.datetime.fromisoformat(val).timestamp() * 1_000_000)
@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
class CreationDateSerializer(se.AdapterSubfieldSerializer):
ADAPTER = CreationDateAdapter(None)
ORIG_INLINE = True
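A round-trip sketch for CreationDateAdapter above; CreationDate is microseconds since the epoch, and fromtimestamp() yields local time:

adapter = CreationDateAdapter(None)
iso = adapter.decode(1_600_000_000_000_000, None)    # e.g. '2020-09-13T12:26:40'
assert adapter.encode(iso, None) == 1_600_000_000_000_000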
@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
@classmethod
@@ -2055,3 +2131,69 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
# 15 bit window size, gzip wrapped
deser["navmesh_data"] = zlib.decompress(deser["navmesh_data"], wbits=15 | 32)
return deser
# Beta puppetry stuff, subject to change!
class PuppetryEventMask(enum.IntFlag):
POSITION = 1 << 0
POSITION_IN_PARENT_FRAME = 1 << 1
ROTATION = 1 << 2
ROTATION_IN_PARENT_FRAME = 1 << 3
SCALE = 1 << 4
DISABLE_CONSTRAINT = 1 << 7
class PuppetryOption(se.OptionalFlagged):
def __init__(self, flag_val, spec):
super().__init__("mask", se.IntFlag(PuppetryEventMask, se.U8), flag_val, spec)
# Range to use for puppetry's quantized floats when converting to<->from U16
LL_PELVIS_OFFSET_RANGE = (-5.0, 5.0)
@dataclasses.dataclass
class PuppetryJointData:
# Where does this number come from? `avatar_skeleton.xml`?
joint_id: int = se.dataclass_field(se.S16)
# Determines which fields will follow
mask: PuppetryEventMask = se.dataclass_field(se.IntFlag(PuppetryEventMask, se.U8))
rotation: Optional[Quaternion] = se.dataclass_field(
# These are very odd scales for a quantized quaternion, but that's what they are.
PuppetryOption(PuppetryEventMask.ROTATION, se.PackedQuat(se.Vector3U16(*LL_PELVIS_OFFSET_RANGE))),
)
position: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.POSITION, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)
scale: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.SCALE, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)
@dataclasses.dataclass
class PuppetryEventData:
time: int = se.dataclass_field(se.S32)
# Must be set manually due to below issue
num_joints: int = se.dataclass_field(se.U16)
# This field is packed in the least helpful way possible. The length field
# is in between the collection count and the collection data, but the length
# field essentially only tells you how many bytes until the end of the buffer
# proper, which you already know from msgsystem. Why is this here?
joints: List[PuppetryJointData] = se.dataclass_field(se.TypedByteArray(
se.U32,
# Just treat contents as a greedy collection, tries to keep reading until EOF
se.Collection(None, se.Dataclass(PuppetryJointData)),
))
@se.subfield_serializer("AgentAnimation", "PhysicalAvatarEventList", "TypeData")
@se.subfield_serializer("AvatarAnimation", "PhysicalAvatarEventList", "TypeData")
class PuppetryEventDataSerializer(se.SimpleSubfieldSerializer):
# You can have multiple joint events packed in right after the other, implicitly.
# They may _or may not_ be split into separate PhysicalAvatarEventList blocks?
# This doesn't seem to be handled specifically in the decoder, is this a
# serialization bug in the viewer?
TEMPLATE = se.Collection(None, se.Dataclass(PuppetryEventData))
EMPTY_IS_NONE = True

View File

@@ -0,0 +1,41 @@
import asyncio
from typing import Any, Optional, List, Tuple
from hippolyzer.lib.base.message.circuit import Circuit, ConnectionHolder
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, ADDR_TUPLE, UDPPacket
class MockTransport(AbstractUDPTransport):
def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
pass
def abort(self) -> None:
pass
def close(self) -> None:
pass
def __init__(self):
super().__init__()
self.packets: List[Tuple[bytes, Tuple[str, int]]] = []
def send_packet(self, packet: UDPPacket) -> None:
self.packets.append((packet.data, packet.dst_addr))
class MockHandlingCircuit(Circuit):
def __init__(self, handler: MessageHandler[Message, str]):
super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
self.handler = handler
def _send_prepared_message(self, message: Message, transport=None):
loop = asyncio.get_event_loop_policy().get_event_loop()
loop.call_soon(self.handler.handle, message)
class MockConnectionHolder(ConnectionHolder):
def __init__(self, circuit, message_handler):
self.circuit = circuit
self.message_handler = message_handler

View File

@@ -35,9 +35,8 @@ class VisualParam:
class VisualParams(List[VisualParam]):
def __init__(self):
def __init__(self, lad_path):
super().__init__()
lad_path = get_resource_filename("lib/base/data/avatar_lad.xml")
with open(lad_path, "rb") as f:
doc = parse_etree(f)
for param in doc.findall(".//param"):
@@ -59,8 +58,11 @@ class VisualParams(List[VisualParam]):
def by_wearable(self, wearable: str) -> List[VisualParam]:
return [x for x in self if x.wearable == wearable]
def by_id(self, vparam_id: int) -> VisualParam:
return [x for x in self if x.id == vparam_id][0]
VISUAL_PARAMS = VisualParams()
VISUAL_PARAMS = VisualParams(get_resource_filename("lib/base/data/avatar_lad.xml"))
@dataclasses.dataclass

View File

@@ -1,4 +1,4 @@
from typing import NamedTuple, Union, Optional
from typing import NamedTuple, Union, Optional, List
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
@@ -18,6 +18,11 @@ class UploadToken(NamedTuple):
payload: bytes
class MeshUploadDetails(NamedTuple):
mesh_bytes: bytes
num_faces: int
class AssetUploader:
def __init__(self, region: BaseClientRegion):
self._region = region
@@ -69,20 +74,15 @@ class AssetUploader:
"""
pass
# The mesh upload flow is a little special, so it gets its own methods
async def initiate_mesh_upload(self, name: str, mesh: Union[bytes, MeshAsset],
# The mesh upload flow is a little special, so it gets its own method
async def initiate_mesh_upload(self, name: str, mesh: Union[MeshUploadDetails, MeshAsset],
flags: Optional[int] = None) -> UploadToken:
"""
Very basic LL-serialized mesh uploader
Currently only handles a single mesh with a single face and no associated textures.
"""
if isinstance(mesh, MeshAsset):
writer = se.BufferWriter("!")
writer.write(LLMeshSerializer(), mesh)
mesh = writer.copy_buffer()
mesh = MeshUploadDetails(writer.copy_buffer(), len(mesh.segments['high_lod']))
asset_resources = self._build_asset_resources(name, mesh)
asset_resources = self._build_asset_resources(name, [mesh])
payload = {
'asset_resources': asset_resources,
'asset_type': 'mesh',
@@ -102,26 +102,26 @@ class AssetUploader:
upload_body = llsd.format_xml(asset_resources)
return UploadToken(resp_payload["upload_price"], resp_payload["uploader"], upload_body)
def _build_asset_resources(self, name: str, mesh: bytes) -> dict:
def _build_asset_resources(self, name: str, meshes: List[MeshUploadDetails]) -> dict:
instances = []
for mesh in meshes:
instances.append({
'face_list': [{
'diffuse_color': [1.0, 1.0, 1.0, 1.0],
'fullbright': False
}] * mesh.num_faces,
'material': 3,
'mesh': 0,
'mesh_name': name,
'physics_shape_type': 2,
'position': [0.0, 0.0, 0.0],
'rotation': [0.7071067690849304, 0.0, 0.0, 0.7071067690849304],
'scale': [1.0, 1.0, 1.0]
})
return {
'instance_list': [
{
'face_list': [
{
'diffuse_color': [1.0, 1.0, 1.0, 1.0],
'fullbright': False
}
],
'material': 3,
'mesh': 0,
'mesh_name': name,
'physics_shape_type': 2,
'position': [0.0, 0.0, 0.0],
'rotation': [0.7071067690849304, 0.0, 0.0, 0.7071067690849304],
'scale': [1.0, 1.0, 1.0]
}
],
'mesh_list': [mesh],
'instance_list': instances,
'mesh_list': [mesh.mesh_bytes for mesh in meshes],
'metric': 'MUT_Unspecified',
'texture_list': []
}
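A usage sketch of the new multi-face-aware flow (inside an async context, with `region` an already-connected client region and `serialized_llmesh` pre-serialized mesh bytes; both names are placeholders):

details = MeshUploadDetails(mesh_bytes=serialized_llmesh, num_faces=3)
token = await region.asset_uploader.initiate_mesh_upload("my mesh", details)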

View File

@@ -0,0 +1,731 @@
from __future__ import annotations
import asyncio
import hashlib
from importlib.metadata import version
import logging
import uuid
import weakref
import xmlrpc.client
from typing import *
import aiohttp
import multidict
from hippolyzer.lib.base.datatypes import Vector3, StringEnum
from hippolyzer.lib.base.helpers import proxify, get_resource_filename
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_dot_xml import MessageDotXML
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.caps_client import CapsClient, CAPS_DICT
from hippolyzer.lib.base.network.transport import ADDR_TUPLE, Direction, SocketUDPTransport, AbstractUDPTransport
from hippolyzer.lib.base.settings import Settings, SettingDescriptor
from hippolyzer.lib.base.templates import RegionHandshakeReplyFlags, ChatType
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion, BaseClientSessionManager
LOG = logging.getLogger(__name__)
class StartLocation(StringEnum):
LAST = "last"
HOME = "home"
class ClientSettings(Settings):
# Off by default for now, the cert validation is a big mess due to LL using an internal CA.
SSL_VERIFY: bool = SettingDescriptor(False)
SSL_CERT_PATH: str = SettingDescriptor(get_resource_filename("lib/base/network/data/ca-bundle.crt"))
USER_AGENT: str = SettingDescriptor(f"Hippolyzer/v{version('hippolyzer')}")
class HippoCapsClient(CapsClient):
def __init__(
self,
settings: ClientSettings,
caps: Optional[CAPS_DICT] = None,
session: Optional[aiohttp.ClientSession] = None,
) -> None:
super().__init__(caps, session)
self._settings = settings
def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
headers["User-Agent"] = self._settings.USER_AGENT
return cap_or_url, headers, proxy, self._settings.SSL_VERIFY
class HippoClientProtocol(asyncio.DatagramProtocol):
def __init__(self, session: HippoClientSession):
self.session = proxify(session)
self.message_xml = MessageDotXML()
self.deserializer = UDPMessageDeserializer(
settings=self.session.session_manager.settings,
)
def datagram_received(self, data, source_addr: ADDR_TUPLE):
region = self.session.region_by_circuit_addr(source_addr)
if not region:
logging.warning("Received packet from invalid address %s", source_addr)
return
message = self.deserializer.deserialize(data)
message.direction = Direction.IN
message.sender = source_addr
if not self.message_xml.validate_udp_msg(message.name):
LOG.warning(
f"Received {message.name!r} over UDP, when it should come over the event queue. Discarding."
)
raise PermissionError(f"UDPBanned message {message.name}")
region.circuit.collect_acks(message)
should_handle = True
if message.reliable:
# This is a bit crap. We send an ACK immediately through a PacketAck.
# This is pretty wasteful, we should batch them up and send them on a timer.
# We should ACK even if it's a resend of something we've already handled, maybe
# they never got the ACK.
region.circuit.send_acks((message.packet_id,))
should_handle = region.circuit.track_reliable(message.packet_id)
try:
if should_handle:
self.session.message_handler.handle(message)
except:
LOG.exception("Failed in region message handler")
region.message_handler.handle(message)
class HippoClientRegion(BaseClientRegion):
def __init__(self, circuit_addr, seed_cap: str, session: HippoClientSession, handle=None):
super().__init__()
self.caps = multidict.MultiDict()
self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
self.circuit_addr = circuit_addr
self.handle = handle
if seed_cap:
self.caps["Seed"] = seed_cap
self.session: Callable[[], HippoClientSession] = weakref.ref(session)
self.caps_client = HippoCapsClient(session.session_manager.settings, self.caps, session.http_session)
self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
self.asset_uploader = AssetUploader(proxify(self))
self.objects = ClientObjectManager(self)
self._llsd_serializer = LLSDMessageSerializer()
self._eq_task: Optional[asyncio.Task] = None
self.connected: asyncio.Future = asyncio.Future()
self.message_handler.subscribe("StartPingCheck", self._handle_ping_check)
def update_caps(self, caps: Mapping[str, str]) -> None:
self.caps.update(caps)
@property
def cap_urls(self) -> multidict.MultiDict:
return self.caps.copy()
async def connect(self, main_region: bool = False):
# Disconnect first if we're already connected
if self.circuit and self.circuit.is_alive:
self.disconnect()
if self.connected.done():
self.connected = asyncio.Future()
try:
# TODO: What happens if a circuit code is invalid, again? Does it just refuse to ACK?
await self.circuit.send_reliable(
Message(
"UseCircuitCode",
Block(
"CircuitCode",
Code=self.session().circuit_code,
SessionID=self.session().id,
ID=self.session().agent_id,
),
)
)
self.circuit.is_alive = True
# Clear out any old caps urls except the seed URL, we're about to fetch new caps.
seed_url = self.caps["Seed"]
self.caps.clear()
self.caps["Seed"] = seed_url
# Kick this off and await it later
seed_resp_fut = self.caps_client.post("Seed", llsd=list(self.session().session_manager.SUPPORTED_CAPS))
# Register first so we can handle it even if the ack happens after the message is sent
region_handshake_fut = self.message_handler.wait_for(("RegionHandshake",))
# If we're connecting to the main region, it won't even send us a RegionHandshake until we
# first send a CompleteAgentMovement.
if main_region:
await self.complete_agent_movement()
self.name = str((await region_handshake_fut)["RegionInfo"][0]["SimName"])
self.session().objects.track_region_objects(self.handle)
await self.circuit.send_reliable(
Message(
"RegionHandshakeReply",
Block("AgentData", AgentID=self.session().agent_id, SessionID=self.session().id),
Block(
"RegionInfo",
Flags=(
RegionHandshakeReplyFlags.SUPPORTS_SELF_APPEARANCE
| RegionHandshakeReplyFlags.VOCACHE_IS_EMPTY
)
)
)
)
await self.circuit.send_reliable(
Message(
"AgentThrottle",
Block(
"AgentData",
AgentID=self.session().agent_id,
SessionID=self.session().id,
CircuitCode=self.session().circuit_code,
),
Block(
"Throttle",
GenCounter=0,
# Reasonable defaults, I guess
Throttles_=[207360.0, 165376.0, 33075.19921875, 33075.19921875, 682700.75, 682700.75, 269312.0],
)
)
)
async with seed_resp_fut as seed_resp:
seed_resp.raise_for_status()
self.update_caps(await seed_resp.read_llsd())
self._eq_task = asyncio.create_task(self._poll_event_queue())
except Exception as e:
# Let consumers who were `await`ing the connected signal know there was an error
if not self.connected.done():
self.connected.set_exception(e)
raise
self.connected.set_result(None)
def disconnect(self) -> None:
"""Simulator has gone away, disconnect. Should be synchronous"""
if self._eq_task is not None:
self._eq_task.cancel()
self._eq_task = None
self.circuit.disconnect()
self.objects.clear()
if self.connected.done():
self.connected = asyncio.Future()
# TODO: cancel XFers and Transfers and whatnot
async def complete_agent_movement(self) -> None:
await self.circuit.send_reliable(
Message(
"CompleteAgentMovement",
Block(
"AgentData",
AgentID=self.session().agent_id,
SessionID=self.session().id,
CircuitCode=self.session().circuit_code
),
)
)
self.session().main_region = self
async def _poll_event_queue(self):
ack: Optional[int] = None
while True:
payload = {"ack": ack, "done": False}
async with self.caps_client.post("EventQueueGet", llsd=payload) as resp:
if resp.status != 200:
await asyncio.sleep(0.1)
continue
polled = await resp.read_llsd()
for event in polled["events"]:
if self._llsd_serializer.can_handle(event["message"]):
msg = self._llsd_serializer.deserialize(event)
else:
# If this isn't a templated message (like some EQ-only events are),
# then we wrap it in a synthetic `Message` so that the API for handling
# both EQ-only and templated message events can be the same. Ick.
msg = Message(event["message"])
if isinstance(event["body"], dict):
msg.add_block(Block("EventData", **event["body"]))
else:
# Shouldn't be any events that have anything other than a dict
# as a body, but just to be sure...
msg.add_block(Block("EventData", Data=event["body"]))
msg.synthetic = True
msg.sender = self.circuit_addr
msg.direction = Direction.IN
self.session().message_handler.handle(msg)
self.message_handler.handle(msg)
ack = polled["id"]
await asyncio.sleep(0.001)
async def _handle_ping_check(self, message: Message):
self.circuit.send(
Message(
"CompletePingCheck",
Block("PingID", PingID=message["PingID"]["PingID"]),
)
)
class HippoClientSession(BaseClientSession):
"""Represents a client's view of a remote session"""
REGION_CLS = HippoClientRegion
region_by_handle: Callable[[int], Optional[HippoClientRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[HippoClientRegion]]
regions: List[HippoClientRegion]
session_manager: HippoClient
def __init__(self, id, secure_session_id, agent_id, circuit_code, session_manager: Optional[HippoClient] = None,
login_data=None):
super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager, login_data=login_data)
self.http_session = session_manager.http_session
self.objects = ClientWorldObjectManager(proxify(self), session_manager.settings, None)
self.inventory_manager = InventoryManager(proxify(self))
self.transport: Optional[SocketUDPTransport] = None
self.protocol: Optional[HippoClientProtocol] = None
self.message_handler.take_by_default = False
for msg_name in ("DisableSimulator", "CloseCircuit"):
self.message_handler.subscribe(msg_name, lambda msg: self.unregister_region(msg.sender))
for msg_name in ("TeleportFinish", "CrossedRegion", "EstablishAgentCommunication"):
self.message_handler.subscribe(msg_name, self._handle_register_region_message)
def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
handle: Optional[int] = None) -> HippoClientRegion:
return super().register_region(circuit_addr, seed_url, handle) # type:ignore
def unregister_region(self, circuit_addr: ADDR_TUPLE) -> None:
for i, region in enumerate(self.regions):
if region.circuit_addr == circuit_addr:
self.regions[i].disconnect()
del self.regions[i]
return
raise KeyError(f"No such region for {circuit_addr!r}")
def open_circuit(self, circuit_addr: ADDR_TUPLE):
for region in self.regions:
if region.circuit_addr == circuit_addr:
valid_circuit = False
if not region.circuit or not region.circuit.is_alive:
region.circuit = Circuit(("127.0.0.1", 0), circuit_addr, self.transport)
region.circuit.is_alive = False
valid_circuit = True
if region.circuit and region.circuit.is_alive:
# Whatever, already open
logging.debug("Tried to re-open circuit for %r" % (circuit_addr,))
valid_circuit = True
return valid_circuit
return False
def _handle_register_region_message(self, msg: Message):
# Handle events that inform us about new regions
sim_addr, sim_handle, sim_seed = None, None, None
moving_to_region = False
# Sim is asking us to talk to a neighbour
if msg.name == "EstablishAgentCommunication":
ip_split = msg["EventData"]["sim-ip-and-port"].split(":")
sim_addr = (ip_split[0], int(ip_split[1]))
sim_seed = msg["EventData"]["seed-capability"]
# We teleported or crossed regions, opening comms to the new sim
elif msg.name in ("TeleportFinish", "CrossedRegion"):
sim_block = msg.get_block("RegionData", msg.get_block("Info"))[0]
sim_addr = (sim_block["SimIP"], sim_block["SimPort"])
sim_handle = sim_block["RegionHandle"]
sim_seed = sim_block["SeedCapability"]
moving_to_region = True
# Sim telling us about a neighbour
# elif msg.name == "EnableSimulator":
# sim_block = msg["SimulatorInfo"][0]
# sim_addr = (sim_block["IP"], sim_block["Port"])
# sim_handle = sim_block["Handle"]
# TODO: EnableSimulator is a little weird. It creates a region and establishes a
# circuit, but with no seed cap. The viewer will send UseCircuitCode and all that,
# but it's totally workable to just wait for an EstablishAgentCommunication to do that,
# since that's when the region actually shows up. I guess EnableSimulator just gives the
# viewer some lead time to set up the circuit before the region is actually shown through
# EstablishAgentCommunication? Either way, messing around with regions that don't have seed
# caps is annoying, so let's just not do it.
# Register a region if this message was telling us about a new one
if sim_addr is not None:
region = self.register_region(sim_addr, handle=sim_handle, seed_url=sim_seed)
# Mind you, we can't actually connect without a sim seed; when we receive an
# EnableSimulator we have to wait for the EstablishAgentCommunication to actually connect.
need_connect = not (region.circuit and region.circuit.is_alive)
self.open_circuit(sim_addr)
if need_connect:
asyncio.create_task(region.connect(main_region=moving_to_region))
elif moving_to_region:
# No need to connect, but we do need to complete agent movement.
asyncio.create_task(region.complete_agent_movement())
class HippoClient(BaseClientSessionManager):
"""A simple client, only connects to one region at a time currently."""
SUPPORTED_CAPS: Set[str] = {
"AbuseCategories",
"AcceptFriendship",
"AcceptGroupInvite",
"AgentPreferences",
"AgentProfile",
"AgentState",
"AttachmentResources",
"AvatarPickerSearch",
"AvatarRenderInfo",
"CharacterProperties",
"ChatSessionRequest",
"CopyInventoryFromNotecard",
"CreateInventoryCategory",
"DeclineFriendship",
"DeclineGroupInvite",
"DispatchRegionInfo",
"DirectDelivery",
"EnvironmentSettings",
"EstateAccess",
"DispatchOpenRegionSettings",
"EstateChangeInfo",
"EventQueueGet",
"ExtEnvironment",
"FetchLib2",
"FetchLibDescendents2",
"FetchInventory2",
"FetchInventoryDescendents2",
"IncrementCOFVersion",
"InventoryAPIv3",
"LibraryAPIv3",
"InterestList",
"InventoryThumbnailUpload",
"GetDisplayNames",
"GetExperiences",
"AgentExperiences",
"FindExperienceByName",
"GetExperienceInfo",
"GetAdminExperiences",
"GetCreatorExperiences",
"ExperiencePreferences",
"GroupExperiences",
"UpdateExperience",
"IsExperienceAdmin",
"IsExperienceContributor",
"RegionExperiences",
"ExperienceQuery",
"GetMesh",
"GetMesh2",
"GetMetadata",
"GetObjectCost",
"GetObjectPhysicsData",
"GetTexture",
"GroupAPIv1",
"GroupMemberData",
"GroupProposalBallot",
"HomeLocation",
"LandResources",
"LSLSyntax",
"MapLayer",
"MapLayerGod",
"MeshUploadFlag",
"NavMeshGenerationStatus",
"NewFileAgentInventory",
"ObjectAnimation",
"ObjectMedia",
"ObjectMediaNavigate",
"ObjectNavMeshProperties",
"ParcelPropertiesUpdate",
"ParcelVoiceInfoRequest",
"ProductInfoRequest",
"ProvisionVoiceAccountRequest",
"ReadOfflineMsgs",
"RegionObjects",
"RemoteParcelRequest",
"RenderMaterials",
"RequestTextureDownload",
"ResourceCostSelected",
"RetrieveNavMeshSrc",
"SearchStatRequest",
"SearchStatTracking",
"SendPostcard",
"SendUserReport",
"SendUserReportWithScreenshot",
"ServerReleaseNotes",
"SetDisplayName",
"SimConsoleAsync",
"SimulatorFeatures",
"StartGroupProposal",
"TerrainNavMeshProperties",
"TextureStats",
"UntrustedSimulatorMessage",
"UpdateAgentInformation",
"UpdateAgentLanguage",
"UpdateAvatarAppearance",
"UpdateGestureAgentInventory",
"UpdateGestureTaskInventory",
"UpdateNotecardAgentInventory",
"UpdateNotecardTaskInventory",
"UpdateScriptAgent",
"UpdateScriptTask",
"UpdateSettingsAgentInventory",
"UpdateSettingsTaskInventory",
"UploadAgentProfileImage",
"UploadBakedTexture",
"UserInfo",
"ViewerAsset",
"ViewerBenefits",
"ViewerMetrics",
"ViewerStartAuction",
"ViewerStats",
}
DEFAULT_OPTIONS = {
"inventory-root",
"inventory-skeleton",
"inventory-lib-root",
"inventory-lib-owner",
"inventory-skel-lib",
"initial-outfit",
"gestures",
"display_names",
"event_notifications",
"classified_categories",
"adult_compliant",
"buddy-list",
"newuser-config",
"ui-config",
"advanced-mode",
"max-agent-groups",
"map-server-url",
"voice-config",
"tutorial_setting",
"login-flags",
"global-textures",
# Not an official option, just so this can be tracked.
"pyogp-client",
}
DEFAULT_LOGIN_URI = "https://login.agni.lindenlab.com/cgi-bin/login.cgi"
def __init__(self, options: Optional[Set[str]] = None):
self._username: Optional[str] = None
self._password: Optional[str] = None
self._mac = uuid.getnode()
self._options = options if options is not None else self.DEFAULT_OPTIONS
self.http_session: Optional[aiohttp.ClientSession] = aiohttp.ClientSession(trust_env=True)
self.session: Optional[HippoClientSession] = None
self.settings = ClientSettings()
self._resend_task: Optional[asyncio.Task] = None
@property
def main_region(self) -> Optional[HippoClientRegion]:
if not self.session:
return None
return self.session.main_region
@property
def main_circuit(self) -> Optional[Circuit]:
if not self.main_region:
return None
return self.main_region.circuit
@property
def main_caps_client(self) -> Optional[CapsClient]:
if not self.main_region:
return None
return self.main_region.caps_client
async def aclose(self):
try:
self.logout()
finally:
if self.http_session:
await self.http_session.close()
self.http_session = None
def __del__(self):
# Make sure we don't leak resources if someone was lazy.
try:
self.logout()
finally:
if self.http_session:
try:
asyncio.create_task(self.http_session.close())
except:
pass
self.http_session = None
async def _create_transport(self) -> Tuple[AbstractUDPTransport, HippoClientProtocol]:
loop = asyncio.get_event_loop_policy().get_event_loop()
transport, protocol = await loop.create_datagram_endpoint(
lambda: HippoClientProtocol(self.session),
local_addr=('0.0.0.0', 0))
transport = SocketUDPTransport(transport)
return transport, protocol
async def login(
self,
username: str,
password: str,
login_uri: Optional[str] = None,
agree_to_tos: bool = False,
start_location: Union[StartLocation, str, None] = StartLocation.LAST
):
if self.session:
raise RuntimeError("Already logged in!")
if not login_uri:
login_uri = self.DEFAULT_LOGIN_URI
if start_location is None:
start_location = StartLocation.LAST
# This isn't a symbolic start location and isn't a URI, must be a sim name.
if start_location not in iter(StartLocation) and not start_location.startswith("uri:"):
start_location = f"uri:{start_location}&128&128&128"
split_username = username.split(" ")
if len(split_username) < 2:
first_name = split_username[0]
last_name = "Resident"
else:
first_name, last_name = split_username
payload = {
"address_size": 64,
"agree_to_tos": int(agree_to_tos),
"channel": "Hippolyzer",
"extended_errors": 1,
"first": first_name,
"last": last_name,
"host_id": "",
"id0": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
"mac": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
"mfa_hash": "",
"passwd": "$1$" + hashlib.md5(str(password).encode("ascii")).hexdigest(),
# TODO: actually get these
"platform": "lnx",
"platform_string": "Linux 6.6",
# TODO: What is this?
"platform_version": "2.38.0",
"read_critical": 0,
"start": str(start_location),
"token": "",
"version": version("hippolyzer"),
"options": list(self._options),
}
async with self.http_session.post(
login_uri,
data=xmlrpc.client.dumps((payload,), "login_to_simulator"),
headers={"Content-Type": "text/xml", "User-Agent": self.settings.USER_AGENT},
ssl=self.settings.SSL_VERIFY,
) as resp:
resp.raise_for_status()
login_data = xmlrpc.client.loads((await resp.read()).decode("utf8"))[0][0]
self.session = HippoClientSession.from_login_data(login_data, self)
self.session.transport, self.session.protocol = await self._create_transport()
self._resend_task = asyncio.create_task(self._attempt_resends())
assert self.session.open_circuit(self.session.regions[-1].circuit_addr)
region = self.session.regions[-1]
await region.connect(main_region=True)
def logout(self):
if not self.session:
return
if self._resend_task:
self._resend_task.cancel()
self._resend_task = None
if self.main_circuit and self.main_circuit.is_alive:
# Don't need to send reliably, there's a good chance the server won't ACK anyway.
self.main_circuit.send(
Message(
"LogoutRequest",
Block("AgentData", AgentID=self.session.agent_id, SessionID=self.session.id),
)
)
session = self.session
self.session = None
for region in session.regions:
region.disconnect()
session.transport.close()
def send_chat(self, message: Union[bytes, str], channel: int = 0, chat_type=ChatType.NORMAL) -> asyncio.Future:
return self.main_circuit.send_reliable(Message(
"ChatFromViewer",
Block("AgentData", SessionID=self.session.id, AgentID=self.session.agent_id),
Block("ChatData", Message=message, Channel=channel, Type=chat_type),
))
def teleport(self, region_handle: int, local_pos=Vector3(0, 0, 0)) -> asyncio.Future:
"""Synchronously requests a teleport, returning a Future for teleport completion"""
teleport_fut = asyncio.Future()
# Send request synchronously, await asynchronously.
send_fut = self.main_circuit.send_reliable(
Message(
'TeleportLocationRequest',
Block('AgentData', AgentID=self.session.agent_id, SessionID=self.session.id),
Block('Info', RegionHandle=region_handle, Position=local_pos, fill_missing=True),
)
)
async def _handle_teleport():
# Subscribe first, we may receive an event before we receive the packet ACK.
with self.session.message_handler.subscribe_async(
("TeleportLocal", "TeleportFailed", "TeleportFinish"),
) as get_tp_done_msg:
try:
await send_fut
except Exception as e:
# Pass along error if we failed to send reliably.
teleport_fut.set_exception(e)
return
# Wait for a message that says we're done with the teleport
msg = await get_tp_done_msg()
if msg.name == "TeleportFailed":
teleport_fut.set_exception(RuntimeError("Failed to teleport"))
elif msg.name == "TeleportLocal":
# Within the sim, nothing else we need to do
teleport_fut.set_result(None)
elif msg.name == "TeleportFinish":
# Non-local TP, wait until we receive the AgentMovementComplete to
# set the finished signal.
# Region should be registered by this point, wait for it to connect
try:
# just fail if it takes longer than 30 seconds for the handshake to complete
await asyncio.wait_for(self.session.region_by_handle(region_handle).connected, 30)
except Exception as e:
teleport_fut.set_exception(e)
return
teleport_fut.set_result(None)
asyncio.create_task(_handle_teleport())
return teleport_fut
async def _attempt_resends(self):
while True:
if self.session is None:
break
for region in self.session.regions:
if not region.circuit or not region.circuit.is_alive:
continue
region.circuit.resend_unacked()
await asyncio.sleep(0.5)
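
For reference, a minimal usage sketch of the client above. The import path and the credentials are assumptions; send_chat() returns a future that resolves once the server ACKs the reliable message, and teleport() returns one for teleport completion:

import asyncio

from hippolyzer.lib.client.hippo_client import HippoClient  # assumed module path

async def main():
    client = HippoClient()
    try:
        await client.login("Example Resident", "hunter2")  # placeholder credentials
        await client.send_chat("Hello from Hippolyzer!")
        # e.g. teleport by region handle, built here from made-up global coordinates:
        await client.teleport((256_000 << 32) | 256_000)
    finally:
        await client.aclose()

asyncio.run(main())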

View File

@@ -34,7 +34,7 @@ class InventoryManager:
# completion from the inventory cache. This matches indra's behavior.
version=InventoryCategory.VERSION_NONE,
type="category",
pref_type=skel_cat.get("type_default", -1),
pref_type=skel_cat.get("type_default", "-1"),
owner_id=self._session.agent_id,
))
@@ -99,7 +99,7 @@ class InventoryManager:
if first_line:
# First line is the file header
first_line = False
if node_llsd['inv_cache_version'] != 2:
if node_llsd['inv_cache_version'] not in (2, 3):
raise ValueError(f"Unknown cache version: {node_llsd!r}")
continue

View File

@@ -27,9 +27,11 @@ from hippolyzer.lib.base.objects import (
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer
if TYPE_CHECKING:
from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession
LOG = logging.getLogger(__name__)
OBJECT_OR_LOCAL = Union[Object, int]
@@ -48,7 +50,7 @@ class ClientObjectManager:
Object manager for a specific region
"""
__slots__ = ("_region", "_world_objects", "state")
__slots__ = ("_region", "_world_objects", "state", "__weakref__")
def __init__(self, region: BaseClientRegion):
self._region: BaseClientRegion = proxify(region)
@@ -71,7 +73,7 @@ class ClientObjectManager:
if self._region.handle is not None:
# We're tracked by the world object manager, tell it to untrack
# any objects that we owned
self._world_objects.clear_region_objects(self._region.handle)
self._world_objects.untrack_region_objects(self._region.handle)
def lookup_localid(self, localid: int) -> Optional[Object]:
return self.state.lookup_localid(localid)
@@ -238,12 +240,14 @@ class ClientWorldObjectManager:
if self._get_region_manager(handle) is None:
self._region_managers[handle] = proxify(self._session.region_by_handle(handle).objects)
def clear_region_objects(self, handle: int):
def untrack_region_objects(self, handle: int):
"""Handle signal that a region object manager was just cleared"""
# Make sure they're gone from our lookup table
for obj in tuple(self._fullid_lookup.values()):
if obj.RegionHandle == handle:
del self._fullid_lookup[obj.FullID]
if handle in self._region_managers:
del self._region_managers[handle]
self._rebuild_avatar_objects()
def _get_region_manager(self, handle: int) -> Optional[ClientObjectManager]:
@@ -288,16 +292,17 @@ class ClientWorldObjectManager:
obj = obj.Parent
def clear(self):
for handle in tuple(self._region_managers.keys()):
self.untrack_region_objects(handle)
self._avatars.clear()
for region_mgr in self._region_managers.values():
region_mgr.clear()
if self._fullid_lookup:
LOG.warning(f"Had {len(self._fullid_lookup)} objects not tied to a region manager!")
self._fullid_lookup.clear()
self._rebuild_avatar_objects()
self._region_managers.clear()
def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType):
def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType,
msg: Optional[Message]):
old_parent_id = obj.ParentID
new_parent_id = new_properties.get("ParentID", obj.ParentID)
old_local_id = obj.LocalID
@@ -340,23 +345,23 @@ class ClientWorldObjectManager:
LOG.warning(f"Tried to move object {obj!r} to unknown region {new_region_handle}")
if obj.PCode == PCode.AVATAR:
# `Avatar` instances are handled separately. Update all Avatar objects so
# we can deal with the RegionHandle change.
# `Avatar` instances are handled separately. Update all Avatar objects,
# so we can deal with the RegionHandle change.
self._rebuild_avatar_objects()
elif new_parent_id != old_parent_id:
# Parent ID changed, but we're in the same region
new_region_state.handle_object_reparented(obj, old_parent_id=old_parent_id)
if actually_updated_props and new_region_state is not None:
self._run_object_update_hooks(obj, actually_updated_props, update_type)
self._run_object_update_hooks(obj, actually_updated_props, update_type, msg)
def _track_new_object(self, region: RegionObjectsState, obj: Object):
def _track_new_object(self, region: RegionObjectsState, obj: Object, msg: Message):
region.track_object(obj)
self._fullid_lookup[obj.FullID] = obj
if obj.PCode == PCode.AVATAR:
self._avatar_objects[obj.FullID] = obj
self._rebuild_avatar_objects()
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE)
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE, msg)
def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
obj = region_state.lookup_localid(local_id)
@@ -408,11 +413,11 @@ class ClientWorldObjectManager:
# our view of the world then we want to move it to this region.
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state is None:
continue
self._track_new_object(region_state, Object(**object_data))
self._track_new_object(region_state, Object(**object_data), msg)
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
def _handle_terse_object_update(self, msg: Message):
@@ -432,7 +437,7 @@ class ClientWorldObjectManager:
# Need the Object as context because decoding state requires PCode.
state_deserializer = ObjectStateSerializer.deserialize
object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state:
region_state.missing_locals.add(object_data["LocalID"])
@@ -460,7 +465,7 @@ class ClientWorldObjectManager:
self._update_existing_object(obj, {
"UpdateFlags": update_flags,
"RegionHandle": handle,
}, ObjectUpdateType.OBJECT_UPDATE)
}, ObjectUpdateType.OBJECT_UPDATE, msg)
continue
cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
@@ -468,7 +473,7 @@ class ClientWorldObjectManager:
cached_obj = normalize_object_update_compressed_data(cached_obj_data)
cached_obj["UpdateFlags"] = update_flags
cached_obj["RegionHandle"] = handle
self._track_new_object(region_state, Object(**cached_obj))
self._track_new_object(region_state, Object(**cached_obj), msg)
continue
# Don't know about it and wasn't cached.
@@ -499,11 +504,11 @@ class ClientWorldObjectManager:
LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state is None:
continue
self._track_new_object(region_state, Object(**object_data))
self._track_new_object(region_state, Object(**object_data), msg)
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
def _handle_object_properties_generic(self, packet: Message):
@@ -516,7 +521,7 @@ class ClientWorldObjectManager:
obj = self.lookup_fullid(block["ObjectID"])
if obj:
seen_locals.append(obj.LocalID)
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES)
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES, packet)
else:
LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}")
packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
@@ -563,11 +568,16 @@ class ClientWorldObjectManager:
LOG.debug(f"Received ObjectCost for unknown {object_id}")
continue
obj.ObjectCosts.update(object_costs)
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS)
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS, None)
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
msg: Optional[Message]):
region_state = self._get_region_state(obj.RegionHandle)
region_state.resolve_futures(obj, update_type)
if region_state:
region_state.resolve_futures(obj, update_type)
else:
LOG.warning(f"{obj} not tied to a region state")
if obj.PCode == PCode.AVATAR and "NameValue" in updated_props:
if obj.NameValue:
self.name_cache.update(obj.FullID, obj.NameValue.to_dict())
@@ -756,7 +766,8 @@ class RegionObjectsState:
def handle_object_reparented(self, obj: Object, old_parent_id: int):
"""Recreate any links to ancestor Objects for obj due to parent changes"""
self._unparent_object(obj, old_parent_id)
self._parent_object(obj, insert_at_head=True)
# Avatars get sent to the _end_ of the child list when reparented
self._parent_object(obj, insert_at_head=obj.PCode != PCode.AVATAR)
def collect_orphans(self, parent_localid: int) -> Sequence[int]:
"""Take ownership of any orphan IDs belonging to parent_localid"""

View File

@@ -0,0 +1,51 @@
from typing import NamedTuple, List, Sequence
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import ChatType
class RLVCommand(NamedTuple):
behaviour: str
param: str
options: List[str]
class RLVParser:
@staticmethod
def is_rlv_message(msg: Message) -> bool:
chat: str = msg["ChatData"]["Message"]
chat_type: int = msg["ChatData"]["ChatType"]
return bool(chat) and chat.startswith("@") and chat_type == ChatType.OWNER
@staticmethod
def parse_chat(chat: str) -> List[RLVCommand]:
assert chat.startswith("@")
chat = chat.lstrip("@")
commands = []
for command_str in chat.split(","):
if not command_str:
continue
# RLV-style command, `<cmd>(:<option1>;<option2>)?(=<param>)?`
# Roughly (?<behaviour>[^:=]+)(:(?<option>[^=]*))?=(?<param>\w+)
options, _, param = command_str.partition("=")
behaviour, _, options = options.partition(":")
# TODO: Not always correct, commands can specify their own parsing for the option field
# maybe special-case these?
options = options.split(";") if options else []
commands.append(RLVCommand(behaviour, param, options))
return commands
@staticmethod
def format_chat(commands: Sequence[RLVCommand]) -> str:
assert commands
chat = ""
for command in commands:
if chat:
chat += ","
chat += command.behaviour
if command.options:
chat += ":" + ";".join(command.options)
if command.param:
chat += "=" + command.param
return "@" + chat

View File

@@ -4,17 +4,21 @@ Base classes for common session-related state shared between clients and proxies
from __future__ import annotations
import abc
import logging
import weakref
from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import ConnectionHolder
import multidict
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.message.circuit import ConnectionHolder, Circuit
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.base.objects import handle_to_global_pos
if TYPE_CHECKING:
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
class BaseClientRegion(ConnectionHolder, abc.ABC):
@@ -24,6 +28,53 @@ class BaseClientRegion(ConnectionHolder, abc.ABC):
session: Callable[[], BaseClientSession]
objects: ClientObjectManager
caps_client: CapsClient
cap_urls: multidict.MultiDict[str]
circuit_addr: ADDR_TUPLE
circuit: Optional[Circuit]
_name: Optional[str]
def __init__(self):
self._name = None
self.circuit = None
@abc.abstractmethod
def update_caps(self, caps: Mapping[str, str]) -> None:
pass
@property
def name(self):
if self._name:
return self._name
return "Pending %r" % (self.circuit_addr,)
@name.setter
def name(self, val):
self._name = val
@property
def global_pos(self) -> Vector3:
if self.handle is None:
raise ValueError("Can't determine global region position without handle")
return handle_to_global_pos(self.handle)
@property
def is_alive(self):
if not self.circuit:
return False
return self.circuit.is_alive
def mark_dead(self):
logging.info("Marking %r dead" % self)
if self.circuit:
self.circuit.is_alive = False
self.objects.clear()
def __repr__(self):
return "<%s %s (%r)>" % (self.__class__.__name__, self.name, self.handle)
class BaseClientSessionManager:
pass
class BaseClientSession(abc.ABC):
@@ -32,8 +83,99 @@ class BaseClientSession(abc.ABC):
agent_id: UUID
secure_session_id: UUID
message_handler: MessageHandler[Message, str]
regions: Sequence[BaseClientRegion]
regions: MutableSequence[BaseClientRegion]
region_by_handle: Callable[[int], Optional[BaseClientRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
objects: ClientWorldObjectManager
login_data: Dict[str, Any]
REGION_CLS: Type[BaseClientRegion]
def __init__(self, id, secure_session_id, agent_id, circuit_code,
session_manager: Optional[BaseClientSessionManager], login_data=None):
self.login_data = login_data or {}
self.pending = True
self.id: UUID = id
self.secure_session_id: UUID = secure_session_id
self.agent_id: UUID = agent_id
self.circuit_code = circuit_code
self.global_caps = {}
self.session_manager = session_manager
self.regions = []
self._main_region = None
self.message_handler: MessageHandler[Message, str] = MessageHandler()
super().__init__()
@classmethod
def from_login_data(cls, login_data, session_manager):
sess = cls(
id=UUID(login_data["session_id"]),
secure_session_id=UUID(login_data["secure_session_id"]),
agent_id=UUID(login_data["agent_id"]),
circuit_code=int(login_data["circuit_code"]),
session_manager=session_manager,
login_data=login_data,
)
appearance_service = login_data.get("agent_appearance_service")
map_image_service = login_data.get("map-server-url")
if appearance_service:
sess.global_caps["AppearanceService"] = appearance_service
if map_image_service:
sess.global_caps["MapImageService"] = map_image_service
# Login data also has details about the initial sim
sess.register_region(
circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
handle=(login_data["region_x"] << 32) | login_data["region_y"],
seed_url=login_data["seed_capability"],
)
return sess
def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
handle: Optional[int] = None) -> BaseClientRegion:
if not any((circuit_addr, seed_url)):
raise ValueError("One of circuit_addr and seed_url must be defined!")
for region in self.regions:
if region.circuit_addr == circuit_addr:
if seed_url and region.cap_urls.get("Seed") != seed_url:
region.update_caps({"Seed": seed_url})
if handle:
region.handle = handle
return region
if seed_url and region.cap_urls.get("Seed") == seed_url:
return region
if not circuit_addr:
raise ValueError("Can't create region without circuit addr!")
logging.info("Registering region for %r" % (circuit_addr,))
region = self.REGION_CLS(circuit_addr, seed_url, self, handle=handle)
self.regions.append(region)
return region
@property
def main_region(self) -> Optional[BaseClientRegion]:
if self._main_region and self._main_region() in self.regions:
return self._main_region()
return None
@main_region.setter
def main_region(self, val: BaseClientRegion):
self._main_region = weakref.ref(val)
def transaction_to_assetid(self, transaction_id: UUID):
return UUID.combine(transaction_id, self.secure_session_id)
def region_by_circuit_addr(self, circuit_addr) -> Optional[BaseClientRegion]:
for region in self.regions:
if region.circuit_addr == circuit_addr and region.circuit:
return region
return None
def region_by_handle(self, handle: int) -> Optional[BaseClientRegion]:
for region in self.regions:
if region.handle == handle:
return region
return None
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.id)

View File

@@ -1,7 +1,6 @@
from __future__ import annotations
from typing import *
import abc
import copy
import dataclasses
@@ -9,6 +8,8 @@ import multiprocessing
import pickle
import warnings
import outleap
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.objects import Object
@@ -116,16 +117,14 @@ class MetaBaseAddon(abc.ABCMeta):
Won't work as you expect!
"""
def __setattr__(self, key: str, value):
# TODO: Keep track of AddonProperties in __new__ or something?
try:
existing = object.__getattribute__(self, key)
if existing and isinstance(existing, BaseAddonProperty):
existing.__set__(self, value)
return
except AttributeError:
# If the attribute doesn't exist then it's fine to use the base setattr.
super().__setattr__(key, value)
return
if existing and isinstance(existing, BaseAddonProperty):
existing.__set__(self, value)
return
pass
super().__setattr__(key, value)
@@ -173,7 +172,7 @@ class BaseAddon(metaclass=MetaBaseAddon):
pass
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
pass
def handle_object_killed(self, session: Session, region: ProxiedRegion, obj: Object):
@@ -189,13 +188,16 @@ class BaseAddon(metaclass=MetaBaseAddon):
pass
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
cmd: str, options: List[str], param: str):
behaviour: str, options: List[str], param: str):
pass
def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,
session: Optional[Session], region: Optional[ProxiedRegion]):
pass
async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
pass
_T = TypeVar("_T")
_U = TypeVar("_U", "Session", "SessionManager")
@@ -209,13 +211,17 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
session_manager.addon_ctx dict, without any namespacing. Can be accessed either
through `AddonClass.property_name` or `addon_instance.property_name`.
"""
__slots__ = ("name", "default")
__slots__ = ("name", "default", "_owner")
def __init__(self, default=dataclasses.MISSING):
self.default = default
self._owner = None
def __set_name__(self, owner, name: str):
self.name = name
# Keep track of which addon "owns" this property so that we can shove
# the data in a bucket specific to that addon name.
self._owner = owner
def _make_default(self) -> _T:
if self.default is not dataclasses.MISSING:
@@ -233,18 +239,20 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
if ctx_obj is None:
raise AttributeError(
f"{self.__class__} {self.name} accessed outside proper context")
addon_state = ctx_obj.addon_ctx[self._owner.__name__]
# Set a default if we have one, otherwise let the keyerror happen.
# Maybe we should do this at addon initialization instead of on get.
if self.name not in ctx_obj.addon_ctx:
if self.name not in addon_state:
default = self._make_default()
if default is not dataclasses.MISSING:
ctx_obj.addon_ctx[self.name] = default
addon_state[self.name] = default
else:
raise AttributeError(f"{self.name} is not set")
return ctx_obj.addon_ctx[self.name]
return addon_state[self.name]
def __set__(self, _obj, value: _T) -> None:
self._get_context_obj().addon_ctx[self.name] = value
addon_state = self._get_context_obj().addon_ctx[self._owner.__name__]
addon_state[self.name] = value
class SessionProperty(BaseAddonProperty[_T, "Session"]):

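A sketch of what the per-addon namespacing above buys: two addons may now declare same-named properties without clobbering each other, since each property's data lives in a bucket keyed by its owning addon's class name. The addon below is hypothetical:

class CounterAddon(BaseAddon):
    update_count = SessionProperty(default=0)

    def handle_object_updated(self, session, region, obj, updated_props, msg):
        # Reads and writes land in session.addon_ctx["CounterAddon"]["update_count"],
        # so another addon's "update_count" can't collide with this one.
        CounterAddon.update_count += 1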
View File

@@ -15,10 +15,13 @@ import time
from types import ModuleType
from typing import *
import outleap
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.client.rlv import RLVParser
from hippolyzer.lib.proxy import addon_ctx
from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope, TaskScheduler
@@ -172,7 +175,10 @@ class AddonManager:
def load_addon_from_path(cls, path, reload=False, raise_exceptions=True):
path = pathlib.Path(path).absolute()
mod_name = "hippolyzer.user_addon_%s" % path.stem
cls.BASE_ADDON_SPECS.append(importlib.util.spec_from_file_location(mod_name, path))
spec = importlib.util.spec_from_file_location(mod_name, path)
if not spec:
raise ValueError(f"Unable to load {path}")
cls.BASE_ADDON_SPECS.append(spec)
addon_dir = os.path.realpath(pathlib.Path(path).parent.absolute())
if addon_dir not in sys.path:
@@ -199,9 +205,9 @@ class AddonManager:
@classmethod
def _check_hotreloads(cls):
"""Mark addons that rely on changed files for reloading"""
for filename, importers in cls.HOTRELOAD_IMPORTERS.items():
mtime = get_mtime(filename)
if not mtime or mtime == cls.FILE_MTIMES.get(filename, None):
for file_path, importers in cls.HOTRELOAD_IMPORTERS.items():
mtime = get_mtime(file_path)
if not mtime or mtime == cls.FILE_MTIMES.get(file_path, None):
continue
# Mark anything that imported this as dirty too, handling circular
@@ -220,10 +226,15 @@ class AddonManager:
_dirty_importers(importers)
if file_path not in cls.BASE_ADDON_SPECS:
# Make sure we won't reload importers in a loop if this is actually something
# that was dynamically imported, where `hot_reload()` might not be called again!
cls.FILE_MTIMES[file_path] = mtime
@classmethod
def hot_reload(cls, mod: Any, require_addons_loaded=False):
# Solely to trick the type checker because ModuleType doesn't apply where it should
# and Protocols aren't well supported yet.
# and Protocols aren't well-supported yet.
imported_mod: ModuleType = mod
imported_file = imported_mod.__file__
# Mark the caller as having imported (and being dependent on) `module`
@@ -338,11 +349,11 @@ class AddonManager:
cls.SCHEDULER.kill_matching_tasks(lifetime_mask=TaskLifeScope.ADDON, creator=addon)
@classmethod
def _call_all_addon_hooks(cls, hook_name, *args, **kwargs):
def _call_all_addon_hooks(cls, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
for module in cls.FRESH_ADDON_MODULES.values():
if not module:
continue
ret = cls._call_module_hooks(module, hook_name, *args, **kwargs)
ret = cls._call_module_hooks(module, hook_name, *args, call_async=call_async, **kwargs)
if ret:
return ret
@@ -373,15 +384,15 @@ class AddonManager:
return commands
@classmethod
def _call_module_hooks(cls, module, hook_name, *args, **kwargs):
def _call_module_hooks(cls, module, hook_name, *args, call_async=False, **kwargs):
for addon in cls._get_module_addons(module):
ret = cls._try_call_hook(addon, hook_name, *args, **kwargs)
ret = cls._try_call_hook(addon, hook_name, *args, call_async=call_async, **kwargs)
if ret:
return ret
return cls._try_call_hook(module, hook_name, *args, **kwargs)
return cls._try_call_hook(module, hook_name, *args, call_async=call_async, **kwargs)
@classmethod
def _try_call_hook(cls, addon, hook_name, *args, **kwargs):
def _try_call_hook(cls, addon, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
if cls._SUBPROCESS:
return
@@ -391,6 +402,20 @@ class AddonManager:
if not hook_func:
return
try:
if call_async:
old_hook_func = hook_func
# Wrapper so we can invoke an async hook synchronously.
def _wrapper(*w_args, **w_kwargs):
cls.SCHEDULER.schedule_task(
old_hook_func(*w_args, **w_kwargs),
scope=TaskLifeScope.ADDON,
creator=addon,
)
# Fall through to any other handlers as well,
# async handlers don't chain.
return None
hook_func = _wrapper
return hook_func(*args, **kwargs)
except:
logging.exception("Exploded in %r's %s hook" % (addon, hook_name))
@@ -428,32 +453,30 @@ class AddonManager:
raise
return True
if message.name == "ChatFromSimulator" and "ChatData" in message:
chat: str = message["ChatData"]["Message"]
chat_type: int = message["ChatData"]["ChatType"]
# RLV-style OwnerSay?
if chat and chat.startswith("@") and chat_type == 8:
if RLVParser.is_rlv_message(message):
# RLV allows putting multiple commands into one message, blindly splitting on ",".
chat = chat.lstrip("@")
all_cmds_handled = True
for command_str in chat.split(","):
if not command_str:
continue
# RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
options, _, param = command_str.partition("=")
cmd, _, options = options.partition(":")
# TODO: Not always correct, commands can specify their own parsing for the option field
options = options.split(";") if options else []
source = message["ChatData"]["SourceID"]
chat: str = message["ChatData"]["Message"]
source = message["ChatData"]["SourceID"]
for command in RLVParser.parse_chat(chat):
try:
with addon_ctx.push(session, region):
handled = cls._call_all_addon_hooks("handle_rlv_command",
session, region, source, cmd, options, param)
handled = cls._call_all_addon_hooks(
"handle_rlv_command",
session,
region,
source,
command.behaviour,
command.options,
command.param,
)
if handled:
region.circuit.drop_message(message)
else:
all_cmds_handled = False
except:
LOG.exception(f"Failed while handling command {command_str!r}")
LOG.exception(f"Failed while handling command {command!r}")
all_cmds_handled = False
if not cls._SWALLOW_ADDON_EXCEPTIONS:
raise
@@ -540,9 +563,9 @@ class AddonManager:
@classmethod
def handle_object_updated(cls, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
with addon_ctx.push(session, region):
return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props)
return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props, msg)
@classmethod
def handle_object_killed(cls, session: Session, region: ProxiedRegion, obj: Object):
@@ -572,3 +595,7 @@ class AddonManager:
with addon_ctx.push(session, region):
return cls._call_all_addon_hooks("handle_proxied_packet", session_manager,
packet, session, region)
@classmethod
def handle_leap_client_added(cls, session_manager: SessionManager, leap_client: outleap.LEAPClient):
return cls._call_all_addon_hooks("handle_leap_client_added", session_manager, leap_client, call_async=True)

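To make the RLV dispatch above concrete, a hypothetical addon hook; per the handler code, a truthy return marks the command handled and the OwnerSay message is dropped before it reaches the viewer:

class RLVDebugAddon(BaseAddon):
    def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
                           behaviour: str, options: List[str], param: str):
        LOG.info("RLV from %s: %s options=%r param=%r", source, behaviour, options, param)
        # Falsy return: other addons (and the viewer) still see the command.
        return None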
View File

@@ -42,7 +42,7 @@ class MITMProxyEventManager:
"UpdateNotecardAgentInventory", "UpdateNotecardTaskInventory",
"UpdateScriptAgent", "UpdateScriptTask",
"UpdateSettingsAgentInventory", "UpdateSettingsTaskInventory",
"UploadBakedTexture",
"UploadBakedTexture", "UploadAgentProfileImage",
}
def __init__(self, session_manager: SessionManager, flow_context: HTTPFlowContext):
@@ -224,6 +224,11 @@ class MITMProxyEventManager:
status = flow.response.status_code
cap_data: Optional[CapData] = flow.metadata["cap_data"]
if not cap_data:
# Make sure there's always cap data attached to the flow, even if it's
# empty. Some consumers expect it to always be there, when it might not
# be if the proxy barfed while handling the request.
cap_data = flow.metadata["cap_data"] = CapData()
if status == 200 and cap_data and cap_data.cap_name == "FirestormBridge":
# Fake FirestormBridge cap based on a bridge-like response coming from

View File

@@ -236,7 +236,7 @@ class SLMITMMaster(mitmproxy.master.Master):
)
def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: no cover
def create_http_proxy(host, port, flow_context: HTTPFlowContext, ssl_insecure=False): # pragma: no cover
opts = mitmproxy.options.Options()
master = SLMITMMaster(flow_context, opts)
@@ -251,10 +251,6 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: n
ssl_verify_upstream_trusted_ca=ca_bundle,
listen_host=host,
listen_port=port,
ssl_insecure=ssl_insecure,
)
return master
def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext): # pragma: no cover
master = create_proxy_master(bind_host, port, flow_context)
return master

View File

@@ -1,4 +1,5 @@
import datetime as dt
import logging
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.client.inventory_manager import InventoryManager
@@ -25,4 +26,7 @@ class ProxyInventoryManager(InventoryManager):
newest_cache = inv_cache_path
if newest_cache:
self.load_cache(newest_cache)
try:
self.load_cache(newest_cache)
except:
logging.exception("Failed to load invcache")

View File

@@ -133,8 +133,9 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
region_mgr.queued_cache_misses |= missing_locals
region_mgr.request_missed_cached_objects_soon()
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
super()._run_object_update_hooks(obj, updated_props, update_type)
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
msg: Optional[Message]):
super()._run_object_update_hooks(obj, updated_props, update_type, msg)
region = self._session.region_by_handle(obj.RegionHandle)
if self._settings.ALLOW_AUTO_REQUEST_OBJECTS:
if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:
@@ -145,7 +146,7 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
# have no way to get a sitting agent's true region location, even if it's ourselves.
region.objects.queued_cache_misses.add(obj.ParentID)
region.objects.request_missed_cached_objects_soon()
AddonManager.handle_object_updated(self._session, region, obj, updated_props)
AddonManager.handle_object_updated(self._session, region, obj, updated_props, msg)
def _run_kill_object_hooks(self, obj: Object):
super()._run_kill_object_hooks(obj)

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import logging
import hashlib
import uuid
import weakref
@@ -9,12 +8,11 @@ import urllib.parse
import multidict
from hippolyzer.lib.base.datatypes import Vector3, UUID
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.client.state import BaseClientRegion
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
@@ -44,14 +42,15 @@ class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):
class ProxiedRegion(BaseClientRegion):
circuit: Optional[ProxiedCircuit]
def __init__(self, circuit_addr, seed_cap: str, session: Session, handle=None):
super().__init__()
# A client may make a Seed request twice, and may get back two (valid!) sets of
# Cap URIs. We need to be able to look up both, so MultiDict is necessary.
self.handle: Optional[int] = handle
self._name: Optional[str] = None
# TODO: when does this change?
self.cache_id: Optional[UUID] = None
self.circuit: Optional[ProxiedCircuit] = None
self.circuit_addr = circuit_addr
self.caps = CapsMultiDict()
# Reverse lookup for URL -> cap data
@@ -71,31 +70,9 @@ class ProxiedRegion(BaseClientRegion):
self._recalc_caps()
@property
def name(self):
if self._name:
return self._name
return "Pending %r" % (self.circuit_addr,)
@name.setter
def name(self, val):
self._name = val
@property
def cap_urls(self) -> multidict.MultiDict[str, str]:
def cap_urls(self) -> multidict.MultiDict[str]:
return multidict.MultiDict((x, y[1]) for x, y in self.caps.items())
@property
def global_pos(self) -> Vector3:
if self.handle is None:
raise ValueError("Can't determine global region position without handle")
return handle_to_global_pos(self.handle)
@property
def is_alive(self):
if not self.circuit:
return False
return self.circuit.is_alive
def update_caps(self, caps: Mapping[str, str]):
for cap_name, cap_url in caps.items():
if isinstance(cap_url, str) and cap_url.startswith('http'):
@@ -158,15 +135,9 @@ class ProxiedRegion(BaseClientRegion):
return None
def mark_dead(self):
logging.info("Marking %r dead" % self)
if self.circuit:
self.circuit.is_alive = False
self.objects.clear()
super().mark_dead()
self.eq_manager.clear()
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
class EventQueueManager:
def __init__(self, region: ProxiedRegion):
@@ -189,7 +160,7 @@ class EventQueueManager:
# over the EQ. That will allow us to shove our own event onto the response once it comes in,
# otherwise we have to wait until the EQ legitimately returns 200 due to a new event.
# May or may not work in OpenSim.
circuit.send_message(Message(
circuit.send(Message(
'PlacesQuery',
Block('AgentData', AgentID=session.agent_id, SessionID=session.id, QueryID=UUID()),
Block('TransactionData', TransactionID=UUID()),

View File

@@ -1,19 +1,21 @@
from __future__ import annotations
import collections
import dataclasses
import datetime
import functools
import logging
import multiprocessing
import weakref
from typing import *
from weakref import ref
from outleap import LEAPClient
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.client.state import BaseClientSession, BaseClientSessionManager
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
@@ -31,28 +33,34 @@ if TYPE_CHECKING:
class Session(BaseClientSession):
def __init__(self, session_id, secure_session_id, agent_id, circuit_code,
regions: MutableSequence[ProxiedRegion]
region_by_handle: Callable[[int], Optional[ProxiedRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[ProxiedRegion]]
main_region: Optional[ProxiedRegion]
REGION_CLS = ProxiedRegion
def __init__(self, id, secure_session_id, agent_id, circuit_code,
session_manager: Optional[SessionManager], login_data=None):
self.login_data = login_data or {}
self.pending = True
self.id: UUID = session_id
self.secure_session_id: UUID = secure_session_id
self.agent_id: UUID = agent_id
self.circuit_code = circuit_code
self.global_caps = {}
super().__init__(
id=id,
secure_session_id=secure_session_id,
agent_id=agent_id,
circuit_code=circuit_code,
session_manager=session_manager,
login_data=login_data,
)
# Bag of arbitrary data addons can use to persist data across addon reloads
self.addon_ctx = {}
self.session_manager: SessionManager = session_manager or None
# Each addon name gets its own separate dict within this dict
self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
self.session_manager: SessionManager = session_manager
self.selected: SelectionModel = SelectionModel()
self.regions: List[ProxiedRegion] = []
self.started_at = datetime.datetime.now()
self.message_handler: MessageHandler[Message, str] = MessageHandler()
self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
self.inventory = ProxyInventoryManager(proxify(self))
self.leap_client: Optional[LEAPClient] = None
# Base path of a newview type cache directory for this session
self.cache_dir: Optional[str] = None
self._main_region = None
@property
def global_addon_ctx(self):
@@ -60,77 +68,13 @@ class Session(BaseClientSession):
return {}
return self.session_manager.addon_ctx
@classmethod
def from_login_data(cls, login_data, session_manager):
sess = Session(
session_id=UUID(login_data["session_id"]),
secure_session_id=UUID(login_data["secure_session_id"]),
agent_id=UUID(login_data["agent_id"]),
circuit_code=int(login_data["circuit_code"]),
session_manager=session_manager,
login_data=login_data,
)
appearance_service = login_data.get("agent_appearance_service")
map_image_service = login_data.get("map-server-url")
if appearance_service:
sess.global_caps["AppearanceService"] = appearance_service
if map_image_service:
sess.global_caps["MapImageService"] = map_image_service
# Login data also has details about the initial sim
sess.register_region(
circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
handle=(login_data["region_x"] << 32) | login_data["region_y"],
seed_url=login_data["seed_capability"],
)
return sess
@property
def main_region(self) -> Optional[ProxiedRegion]:
if self._main_region and self._main_region() in self.regions:
return self._main_region()
return None
@main_region.setter
def main_region(self, val: ProxiedRegion):
self._main_region = weakref.ref(val)
def register_region(self, circuit_addr: Optional[Tuple[str, int]] = None,
def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None,
seed_url: Optional[str] = None,
handle: Optional[int] = None) -> ProxiedRegion:
if not any((circuit_addr, seed_url)):
raise ValueError("One of circuit_addr and seed_url must be defined!")
for region in self.regions:
if region.circuit_addr == circuit_addr:
if seed_url and region.cap_urls.get("Seed") != seed_url:
region.update_caps({"Seed": seed_url})
if handle:
region.handle = handle
return region
if seed_url and region.cap_urls.get("Seed") == seed_url:
return region
if not circuit_addr:
raise ValueError("Can't create region without circuit addr!")
logging.info("Registering region for %r" % (circuit_addr,))
region = ProxiedRegion(circuit_addr, seed_url, self, handle=handle)
self.regions.append(region)
region: ProxiedRegion = super().register_region(circuit_addr, seed_url, handle) # type: ignore
AddonManager.handle_region_registered(self, region)
return region
def region_by_circuit_addr(self, circuit_addr) -> Optional[ProxiedRegion]:
for region in self.regions:
if region.circuit_addr == circuit_addr and region.circuit:
return region
return None
def region_by_handle(self, handle: int) -> Optional[ProxiedRegion]:
for region in self.regions:
if region.handle == handle:
return region
return None
def open_circuit(self, near_addr, circuit_addr, transport):
for region in self.regions:
if region.circuit_addr == circuit_addr:
@@ -170,23 +114,19 @@ class Session(BaseClientSession):
return CapData(cap_name, ref(region), ref(self), base_url, cap_type)
return None
def transaction_to_assetid(self, transaction_id: UUID):
return UUID.combine(transaction_id, self.secure_session_id)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.id)
class SessionManager:
class SessionManager(BaseClientSessionManager):
def __init__(self, settings: ProxySettings):
BaseClientSessionManager.__init__(self)
self.settings: ProxySettings = settings
self.sessions: List[Session] = []
self.shutdown_signal = multiprocessing.Event()
self.flow_context = HTTPFlowContext()
self.asset_repo = HTTPAssetRepo()
self.message_logger: Optional[BaseMessageLogger] = None
self.addon_ctx: Dict[str, Any] = {}
self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
self.name_cache = ProxyNameCache()
self.pending_leap_clients: List[LEAPClient] = []
def create_session(self, login_data) -> Session:
session = Session.from_login_data(login_data, self)
@@ -195,6 +135,15 @@ class SessionManager:
session.http_message_handler,
)
self.sessions.append(session)
# TODO: less crap way of tying a LEAP client to a session
while self.pending_leap_clients:
leap_client = self.pending_leap_clients.pop(-1)
# Client may have gone bad since it connected
if not leap_client.connected:
continue
logging.info("Assigned LEAP client to session")
session.leap_client = leap_client
break
logging.info("Created %r" % session)
return session
@@ -209,6 +158,8 @@ class SessionManager:
def close_session(self, session: Session):
logging.info("Closed %r" % session)
session.objects.clear()
if session.leap_client:
session.leap_client.disconnect()
self.sessions.remove(session)
def resolve_cap(self, url: str) -> Optional["CapData"]:
@@ -218,6 +169,10 @@ class SessionManager:
return cap_data
return CapData()
async def leap_client_connected(self, leap_client: LEAPClient):
self.pending_leap_clients.append(leap_client)
AddonManager.handle_leap_client_added(self, leap_client)
@dataclasses.dataclass
class SelectionModel:

View File

@@ -25,6 +25,7 @@ class EnvSettingDescriptor(SettingDescriptor):
class ProxySettings(Settings):
SOCKS_PROXY_PORT: int = EnvSettingDescriptor(9061, "HIPPO_UDP_PORT", int)
HTTP_PROXY_PORT: int = EnvSettingDescriptor(9062, "HIPPO_HTTP_PORT", int)
LEAP_PORT: int = EnvSettingDescriptor(9063, "HIPPO_LEAP_PORT", int)
PROXY_BIND_ADDR: str = EnvSettingDescriptor("127.0.0.1", "HIPPO_BIND_HOST", str)
REMOTELY_ACCESSIBLE: bool = SettingDescriptor(False)
USE_VIEWER_OBJECT_CACHE: bool = SettingDescriptor(False)
@@ -34,3 +35,4 @@ class ProxySettings(Settings):
AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
FILTERS: Dict[str, str] = SettingDescriptor(dict)
SSL_INSECURE: bool = SettingDescriptor(False)

View File

@@ -83,7 +83,7 @@ class SOCKS5Server:
try:
# UDP Associate
if cmd == 3:
loop = asyncio.get_running_loop()
loop = asyncio.get_event_loop_policy().get_event_loop()
transport, protocol = await loop.create_datagram_endpoint(
self._udp_protocol_creator(writer.get_extra_info("peername")),
local_addr=('0.0.0.0', 0))

View File

@@ -65,7 +65,7 @@ class TaskScheduler:
task.cancel()
try:
event_loop = asyncio.get_running_loop()
event_loop = asyncio.get_event_loop_policy().get_event_loop()
await_all = asyncio.gather(*(task for task_data, task in self.tasks))
event_loop.run_until_complete(await_all)
except RuntimeError:

View File

@@ -108,4 +108,7 @@ CAP_TEMPLATES: List[CAPTemplate] = [
CAPTemplate(cap_name='ViewerBenefits', method='GET', body=b'', query=set(), path=''),
CAPTemplate(cap_name='SetDisplayName', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>display_name</key>\n <array>\n <string>OLD_DISPLAY_NAME</string>\n <string>NEW_DISPLAY_NAME</string>\n </array>\n </map>\n</llsd>\n', query=set(), path=''),
CAPTemplate(cap_name='ObjectMediaNavigate', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>current_url</key>\n <string></string>\n <key>object_id</key>\n <uuid><!HIPPOREPL[[SELECTED_FULL]]></uuid>\n <key>texture_index</key>\n <integer></integer>\n </map>\n</llsd>\n', query=set(), path=''),
CAPTemplate(cap_name='AgentProfile', method='GET', body=b'', query=set(), path='/<SOME_ID>'),
CAPTemplate(cap_name='InterestList', method='POST', body=b'<?xml version="1.0" ?>\n<llsd>\n<map>\n <key>mode</key>\n <string>360</string>\n </map>\n</llsd>', query=set(), path='/'),
CAPTemplate(cap_name='RegionObjects', method='GET', body=b'', query=set(), path=''),
]

View File

@@ -1,11 +1,11 @@
import asyncio
import unittest
from typing import Any, Optional, List, Tuple
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
from hippolyzer.lib.base.network.transport import UDPPacket, AbstractUDPTransport, ADDR_TUPLE
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.base.test_utils import MockTransport
from hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager
@@ -63,21 +63,3 @@ class BaseProxyTest(unittest.IsolatedAsyncioTestCase):
def _msg_to_datagram(self, msg: Message, src, dst, socks_header=True):
packet = self._msg_to_packet(msg, src, dst)
return SOCKS5UDPTransport.serialize(packet, force_socks_header=socks_header)
class MockTransport(AbstractUDPTransport):
def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
pass
def abort(self) -> None:
pass
def close(self) -> None:
pass
def __init__(self):
super().__init__()
self.packets: List[Tuple[bytes, Tuple[str, int]]] = []
def send_packet(self, packet: UDPPacket) -> None:
self.packets.append((packet.data, packet.dst_addr))

View File

@@ -139,7 +139,7 @@ class ViewerObjectCache:
return RegionViewerObjectCache.from_file(objects_file)
class ViewerObjectCacheEntry(recordclass.datatuple): # type: ignore
class ViewerObjectCacheEntry(recordclass.dataobject): # type: ignore
local_id: int
crc: int
data: bytes

View File

@@ -0,0 +1,484 @@
from __future__ import annotations
import asyncio
import base64
import json
import logging
import random
import subprocess
import tempfile
import urllib.parse
import uuid
from typing import Optional, Union, Any, Dict
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.events import Event
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_gridxy
from .connection import VivoxConnection, VivoxMessage
LOG = logging.getLogger(__name__)
RESP_LOG = logging.getLogger(__name__ + ".responses")
def launch_slvoice(voice_path, args, env=None):
return subprocess.Popen([voice_path] + args, env=env)
def uuid_to_vivox(val):
return (b"x" + base64.b64encode(uuid.UUID(val).bytes, b"-_")).decode("utf8")
def uuid_to_vivox_uri(val):
return "sip:%s@bhr.vivox.com" % uuid_to_vivox(val)
def vivox_to_uuid(val):
# Pull the base64-encoded UUID out of the URI
val = val.split(":")[-1].split("@")[0][1:]
return str(uuid.UUID(bytes=base64.b64decode(val, b"-_")))
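# Round-trip sanity check for the helpers above (the agent ID is a made-up example):
#   agent_id = "01234567-89ab-cdef-0123-456789abcdef"
#   assert vivox_to_uuid(uuid_to_vivox_uri(agent_id)) == agent_id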
class VoiceClient:
SERVER_URL = "https://www.bhr.vivox.com/api2/"
def __init__(self, host: str, port: int):
self._host = host
self._port = port
self.logged_in = asyncio.Event()
self.ready = asyncio.Event()
self.session_ready = asyncio.Event()
self.session_added = Event()
self.channel_info_updated = Event()
self.participant_added = Event()
self.participant_updated = Event()
self.participant_removed = Event()
self.capture_devices_received = Event()
self.render_devices_received = Event()
self.render_devices = {}
self.capture_devices = {}
self._pending_req_futures: dict[str, asyncio.Future] = {}
self._connector_handle: Optional[str] = None
self._session_handle: Optional[str] = None
self._session_group_handle: Optional[str] = None
self._account_handle: Optional[str] = None
self._account_uri: Optional[str] = None
self._username: Optional[str] = None
self._password: Optional[str] = None
self._display_name: Optional[str] = None
self._uri: Optional[str] = None
self._participants: Dict[str, dict] = {}
self._mic_muted = False
self._region_global_x = 0
self._region_global_y = 0
self._pos = Vector3(0, 0, 0)
self.vivox_conn: Optional[VivoxConnection] = None
self._poll_task = asyncio.create_task(self._poll_messages())
self.event_handler: MessageHandler[VivoxMessage, str] = MessageHandler(take_by_default=False)
self.event_handler.subscribe(
"VoiceServiceConnectionStateChangedEvent",
self._handle_voice_service_connection_state_changed
)
self.event_handler.subscribe("AccountLoginStateChangeEvent", self._handle_account_login_state_change)
self.event_handler.subscribe("SessionAddedEvent", self._handle_session_added)
self.event_handler.subscribe("SessionRemovedEvent", self._handle_session_removed)
self.event_handler.subscribe("ParticipantAddedEvent", self._handle_participant_added)
self.event_handler.subscribe("ParticipantUpdatedEvent", self._handle_participant_updated)
self.event_handler.subscribe("ParticipantRemovedEvent", self._handle_participant_removed)
@property
def username(self):
return self._username
@property
def password(self):
return self._password
@property
def display_name(self):
return self._display_name
@property
def global_pos(self):
return self._pos
@property
def region_pos(self):
return self._global_to_region(self.global_pos)
@property
def uri(self):
return self._uri
@property
def participants(self):
# TODO: wrap in something to make immutable
return self._participants
def close(self):
if self.vivox_conn is not None:
self.vivox_conn.close()
self._poll_task.cancel()
self._poll_task = None
async def aclose(self):
if self._account_handle:
await self.logout()
self.close()
@classmethod
async def simple_init(
cls,
voice_path: str,
host: Optional[str] = None,
port: Optional[int] = None,
env: Optional[dict] = None
):
"""Simple initializer for standing up a client"""
if not host:
host = "127.0.0.1"
if not port:
port = random.randrange(40000, 60000)
str_addr = "%s:%s" % (host, port)
launch_slvoice(voice_path, ["-i", str_addr, "-m", "component"], env=env)
# HACK: wait for the process to start listening
await asyncio.sleep(0.2)
client = cls(host, port)
await client.create_vivox_connection()
await client.ready.wait()
return client
async def create_vivox_connection(self):
reader, writer = await asyncio.open_connection(host=self._host, port=self._port)
self.vivox_conn = VivoxConnection(reader, writer)
async def create_connector(self):
# TODO: Move all this extra crap out of here
devices = (await self.send_message("Aux.GetCaptureDevices.1", {}))["Results"]
self.capture_devices_received.notify(devices)
self.capture_devices.clear()
self.capture_devices.update(devices)
devices = (await self.send_message("Aux.GetRenderDevices.1", {}))["Results"]
self.render_devices_received.notify(devices)
self.render_devices.clear()
self.render_devices.update(devices)
await self.set_speakers_muted(False)
await self.set_speaker_volume(62)
await self.set_mic_muted(True)
await self.set_mic_volume(50)
connector_resp = await self.send_message("Connector.Create.1", {
"ClientName": "V2 SDK",
"AccountManagementServer": self.SERVER_URL,
"Mode": "Normal",
"MinimumPort": 30000,
"MaximumPort": 50000,
"Logging": {
"Folder": tempfile.gettempdir(),
"FileNamePrefix": "VivConnector",
"FileNameSuffix": ".log",
"LogLevel": 1
},
"Application": "",
"MaxCalls": 12,
})
self._connector_handle = connector_resp['Results']['ConnectorHandle']
self.ready.set()
async def login(self, username: Union[uuid.UUID, str], password: str):
# If we were given a UUID (or a UUID-shaped string), convert it to Vivox format
if isinstance(username, uuid.UUID) or len(username) == 36:
username = uuid_to_vivox(str(username))
self._username = username
self._password = password
if not self._connector_handle:
raise Exception("Need a connector handle to log in")
if self._account_handle:
await self.logout()
resp = await self.send_message("Account.Login.1", {
"ConnectorHandle": self._connector_handle,
"AccountName": username,
"AccountPassword": password,
"AudioSessionAnswerMode": "VerifyAnswer",
"EnableBuddiesAndPresence": "false",
"BuddyManagementMode": "Application",
"ParticipantPropertyFrequency": 5,
})
if resp["ReturnCode"] != 0:
raise Exception(resp)
self._display_name = urllib.parse.unquote(resp["Results"]["DisplayName"])
self._account_uri = resp["Results"]["Uri"]
await self.logged_in.wait()
return resp
async def logout(self):
if self._session_handle:
await self.leave_session()
if self._account_handle:
await self.send_message("Account.Logout.1", {
"AccountHandle": self._account_handle,
})
self._account_handle = None
self._account_uri = None
self.logged_in.clear()
async def join_session(self, uri: str, region_handle: Optional[int] = None):
if self._session_handle:
await self.leave_session()
self.set_ref_region(region_handle)
self._uri = uri
await self.send_message("Session.Create.1", {
"AccountHandle": self._account_handle,
"URI": uri,
"ConnectAudio": "true",
"ConnectText": "false",
"VoiceFontID": 0,
"Name": ""
})
# wait until we're actually added
await self.session_ready.wait()
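# (SessionAddedEvent sets the session handles; session_ready only fires once
# our own ParticipantAddedEvent arrives, see the handlers below.)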
async def leave_session(self):
await self.send_message("SessionGroup.Terminate.1", {
"SessionGroupHandle": self._session_group_handle,
})
self.session_ready.clear()
# TODO: refactor into a collection
for participant in self._participants.values():
self.participant_removed.notify(participant)
self._participants.clear()
self._session_handle = None
self._session_group_handle = None
self._region_global_x = 0
self._region_global_y = 0
self._uri = None
def set_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
"""Set global 3D position, in Vivox coordinates"""
self._pos = pos
future = self.send_message("Session.Set3DPosition.1", {
"SessionHandle": self._session_handle,
"SpeakerPosition": self._build_position_dict(pos),
"ListenerPosition": self._build_position_dict(pos, vel=vel),
})
self._channel_info_updated()
return future
def set_region_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
"""Set 3D position, in region-local coordinates"""
vel = Vector3(vel[0], vel[2], -vel[1])
return self.set_3d_pos(self._region_to_global(pos), vel=vel)
def set_speakers_muted(self, val: bool):
return self.send_message("Connector.MuteLocalSpeaker.1", {
"Value": json.dumps(val),
"ConnectorHandle": self._connector_handle
})
def set_mic_muted(self, val: bool):
self._mic_muted = val
return self.send_message("Connector.MuteLocalMic.1", {
"Value": json.dumps(val),
"ConnectorHandle": self._connector_handle
})
def set_mic_volume(self, vol: int):
return self.send_message("Connector.SetLocalMicVolume.1", {
"Value": vol,
"ConnectorHandle": self._connector_handle
})
def set_speaker_volume(self, vol: int):
return self.send_message("Connector.SetLocalSpeakerVolume.1", {
"Value": vol,
"ConnectorHandle": self._connector_handle
})
def set_capture_device(self, device: str):
return self.send_message("Aux.SetCaptureDevice.1", {
"CaptureDeviceSpecifier": device,
})
def set_participant_volume(self, participant: str, vol: int):
return self.send_message("Session.SetParticipantVolumeForMe.1", {
"SessionHandle": self._session_handle,
"ParticipantURI": participant,
"Volume": vol,
})
async def get_channel_info(self, uri: str) -> dict:
return await self.send_message("Account.ChannelGetInfo.1", {
"AccountHandle": self._account_handle,
"URI": uri
})
def send_web_call(self, rel_path: str, params: dict) -> asyncio.Future[dict]:
"""Make a call to a Vivox Web API"""
return self.send_message("Account.WebCall.1", {
"AccountHandle": self._account_handle,
"RelativePath": rel_path,
"Parameters": params,
})
def send_message(self, msg_type: str, data: Any) -> asyncio.Future[dict]:
request_id = self._make_request_id()
# This is apparently what the viewer does; it's not clear whether
# request_id has any semantic significance.
if msg_type == "Session.Create.1":
request_id = data["URI"]
RESP_LOG.debug("%s %s %s %r" % ("Request", request_id, msg_type, data))
asyncio.create_task(self.vivox_conn.send_request(request_id, msg_type, data))
future = asyncio.Future()
self._pending_req_futures[request_id] = future
return future
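# Typical usage (as in create_connector() above): the returned future resolves
# with the response body once _poll_messages() matches the request ID, e.g.:
#     devices = (await self.send_message("Aux.GetRenderDevices.1", {}))["Results"]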
def send_raw(self, data: bytes):
return self.vivox_conn.send_raw(data)
def set_ref_region(self, region_handle: Optional[int]):
"""Set reference position for region-local coordinates"""
if region_handle is not None:
self._region_global_x, self._region_global_y = handle_to_gridxy(region_handle)
else:
self._region_global_x, self._region_global_y = (0, 0)
self._channel_info_updated()
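# For reference: region handles pack the region's global meter coordinates,
# so e.g. handle_to_gridxy(256) yields grid (0, 1), a region whose south-west
# corner sits at global (0, 256). This matches the voice client tests.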
async def _poll_messages(self):
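# Spin until create_vivox_connection() has attached a connection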
while not self.vivox_conn:
await asyncio.sleep(0.001)
async for msg in self.vivox_conn.read_messages():
try:
RESP_LOG.debug(repr(msg))
if msg.type == "Event":
self.event_handler.handle(msg)
elif msg.type == "Response":
# Might not have this request ID if it was sent directly via the socket
if msg.request_id in self._pending_req_futures:
self._pending_req_futures[msg.request_id].set_result(msg.data)
del self._pending_req_futures[msg.request_id]
except Exception:
LOG.exception("Error in response handler?")
async def _handle_voice_service_connection_state_changed(self, _msg: VivoxMessage):
await self.create_connector()
def _handle_account_login_state_change(self, msg: VivoxMessage):
if msg.data.get('StatusString') == "OK" and msg.data['State'] == '1':
self._account_handle = msg.data['AccountHandle']
self.logged_in.set()
else:
self.logged_in.clear()
self._account_uri = None
self._account_handle = None
def _handle_session_added(self, msg: VivoxMessage):
self._session_handle = msg.data["SessionHandle"]
self._session_group_handle = msg.data["SessionGroupHandle"]
self.session_added.notify(self._session_handle)
# We still have to wait for ourselves to be added as a participant;
# _handle_participant_added() sets the session_ready event once we are.
def _handle_session_removed(self, _msg: VivoxMessage):
self._session_handle = None
# We often don't get all the `ParticipantRemoved`s before the session dies,
# so clear out the participant list ourselves.
for participant in tuple(self._participants.keys()):
self._remove_participant(participant)
self.session_ready.clear()
def _handle_participant_added(self, msg: VivoxMessage):
self._participants[msg.data["ParticipantUri"]] = msg.data
self.participant_added.notify(msg.data)
if msg.data["ParticipantUri"] == self._account_uri and not self.session_ready.is_set():
self.session_ready.set()
def _handle_participant_updated(self, msg: VivoxMessage):
participant_uri = msg.data["ParticipantUri"]
if participant_uri in self._participants:
participant = self._participants[participant_uri]
participant.update(msg.data)
self.participant_updated.notify(participant)
def _handle_participant_removed(self, msg: VivoxMessage):
self._remove_participant(msg.data["ParticipantUri"])
def _remove_participant(self, participant_uri: str):
if participant_uri in self._participants:
participant = self._participants[participant_uri]
del self._participants[participant_uri]
self.participant_removed.notify(participant)
def _global_to_region(self, pos: Vector3):
x = pos.X - self._region_global_x * 256
z = pos.Z + self._region_global_y * 256
# Vivox uses a different coordinate system than SL, Y is up!
return Vector3(x, -z, pos.Y)
def _region_to_global(self, pos: Vector3):
x = pos.X + self._region_global_x * 256
y = pos.Y + self._region_global_y * 256
return Vector3(x, pos.Z, -y)
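# Worked example of the two conversions above, using the numbers from the
# voice client tests (a sketch; assumes an already-constructed client):
def _example_region_round_trip(client: VoiceClient):
    client.set_ref_region(256)  # grid (0, 1)
    g = client._region_to_global(Vector3(1, 2, 3))
    assert (g.X, g.Y, g.Z) == (1, 3, -258)  # Y-up, Z negated, 256m Y offset
    r = client._global_to_region(g)
    assert (r.X, r.Y, r.Z) == (1, 2, 3)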
def _build_position_dict(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> dict:
return {
"Position": {
"X": pos.X,
"Y": pos.Y,
"Z": pos.Z,
},
"Velocity": {
"X": vel.X,
"Y": vel.Y,
"Z": vel.Z,
},
"AtOrientation": {
"X": "1.29938e-05",
"Y": 0,
"Z": -1,
},
"UpOrientation": {
"X": 0,
"Y": 1,
"Z": 0,
},
"LeftOrientation": {
"X": -1,
"Y": 0,
"Z": "-1.29938e-05",
}
}
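# Note: the At/Up/Left orientation vectors (including the odd epsilons) are
# fixed values, presumably mirroring what the SL viewer itself sends.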
def _channel_info_updated(self):
pos = self.global_pos
if self._region_global_x is not None:
pos = self.region_pos
self.channel_info_updated.notify(pos)
def _make_request_id(self):
return str(uuid.uuid4())

View File

@@ -0,0 +1,156 @@
# TODO: some fancy parser that parses everything into
# dicts or objects using schemas.
from __future__ import annotations
import asyncio
import weakref
from typing import Any, Optional, Coroutine, NamedTuple
import defusedxml.lxml
import lxml.etree
class VivoxMessage(NamedTuple):
type: str
name: str
request_id: Optional[str]
data: dict
def xml_to_dict(element):
return element.tag, dict(map(xml_to_dict, element)) or element.text
def buildxml(r, d, list_elem_name='i'):
if isinstance(d, dict):
for k, v in d.items():
s = lxml.etree.SubElement(r, k)
buildxml(s, v, list_elem_name)
elif isinstance(d, (list, tuple, set)):
for v in d:
if isinstance(v, lxml.etree._Element): # noqa
s = r
else:
s = lxml.etree.SubElement(r, list_elem_name)
buildxml(s, v, list_elem_name)
elif isinstance(d, str):
r.text = d
elif isinstance(d, lxml.etree._Element): # noqa
r.append(d)
elif d is None:
r.text = ""
else:
r.text = str(d)
return r
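# Small illustrative use of buildxml() (matches the connection tests):
# dicts become child elements and scalars become element text.
def _example_buildxml():
    elem = lxml.etree.Element("Request")
    buildxml(elem, {"baz": 1})
    assert lxml.etree.tostring(elem) == b"<Request><baz>1</baz></Request>"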
_VIVOX_NS = b' xmlns="http://www.vivox.com"' # noqa
def _remove_vivox_ns(data):
return data.replace(_VIVOX_NS, b"").strip()
def _clean_message(msg_action: str, parsed, dict_msg: dict):
# TODO: refactor this into some XML -> dict schema, some XML is ambiguous
if msg_action == "Aux.GetCaptureDevices.1":
devices = []
for device in parsed.find('Results/CaptureDevices'):
devices.append(xml_to_dict(device)[1])
dict_msg["Results"]["CaptureDevices"] = devices
if msg_action == "Account.WebCall.1":
results = dict_msg["Results"]
content_type = results.get("ContentType") or ""
if content_type.startswith("text/xml"):
xml_content = _remove_vivox_ns(results["Content"].encode("utf8"))
parsed_content = defusedxml.lxml.fromstring(xml_content)
body = parsed_content.xpath("//body")[0]
results["Content"] = body
if "ReturnCode" in dict_msg:
dict_msg["ReturnCode"] = int(dict_msg["ReturnCode"])
return dict_msg
def _build_webcall_params(params: dict) -> list:
params_list = []
elem = lxml.etree.Element('base')
for name, val in params.items():
params_list.append({"Name": name, "Value": val})
buildxml(elem, params_list, 'Parameter')
return list(elem)
class VivoxConnection:
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, owned=True):
self._reader: Optional[asyncio.StreamReader] = reader
self._writer: Optional[asyncio.StreamWriter] = writer
self._owned = owned
def close(self):
if self._owned and self._writer:
self._writer.close()
self._writer = None
self._reader = None
def __del__(self):
self.close()
async def read_messages(self):
# TODO: handle interrupted read
while self._reader and not self._reader.at_eof() and not self._writer.is_closing():
yield await self.read_message()
async def read_message(self):
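# Messages are framed as XML documents terminated by a "\n\n\n" sentinel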
msg = await self._reader.readuntil(b"\n\n\n")
return self.parse(msg[:-3])
def parse(self, raw_msg) -> VivoxMessage:
parsed_msg = defusedxml.lxml.fromstring(raw_msg.decode("utf8"))
msg_type = parsed_msg.tag
request_id = parsed_msg.attrib.get("requestId", None)
# There may be no params; just use an empty dict in that case
dict_msg = xml_to_dict(parsed_msg)[1] or {}
if msg_type == "Event":
msg_action = parsed_msg.attrib.get("type")
elif msg_type == "Response":
msg_action = parsed_msg.attrib.get("action")
# This is pretty useless; get rid of it because it gunks up repr()s.
if 'InputXml' in dict_msg:
del dict_msg['InputXml']
dict_msg = _clean_message(msg_action, parsed_msg, dict_msg)
elif msg_type == "Request":
msg_action = parsed_msg.attrib.get("action")
else:
raise Exception("Unknown Vivox message type %r?" % msg_type)
return VivoxMessage(msg_type, msg_action, request_id, dict_msg)
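# Minimal sketch of parse() on a hypothetical event: it only touches the raw
# bytes, so a throwaway connection with no streams is enough here.
def _example_parse():
    conn = VivoxConnection(None, None)  # parse() never touches the streams
    msg = conn.parse(b'<Event type="Foo"><Bar>1</Bar></Event>')
    assert msg == VivoxMessage("Event", "Foo", None, {"Bar": "1"})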
def send_raw(self, buf: bytes) -> Coroutine[Any, Any, None]:
self._writer.write(buf + b"\n\n\n")
drain_coro = self._writer.drain()
# Don't whine if this isn't awaited; we may not always want to flush immediately.
weakref.finalize(drain_coro, drain_coro.close)
return drain_coro
def send_request(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
if action == "Account.WebCall.1":
data = dict(data)
data["Parameters"] = _build_webcall_params(data["Parameters"])
return self._send_request_response("Request", request_id, action, data)
def send_response(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
return self._send_request_response("Response", request_id, action, data)
def _send_request_response(self, msg_type: str, request_id: str, action: str, data: Any):
elem = lxml.etree.Element(msg_type)
elem.attrib["requestId"] = request_id
elem.attrib["action"] = action
serialized = lxml.etree.tostring(buildxml(elem, data))
return self.send_raw(serialized)
def send_event(self, event_type: str, data: Any) -> Coroutine[Any, Any, None]:
elem = lxml.etree.Element("Event")
elem.attrib["type"] = event_type
serialized = lxml.etree.tostring(buildxml(elem, data))
return self.send_raw(serialized)

View File

@@ -1,4 +1,4 @@
aiohttp==3.8.1
aiohttp==3.8.3
aiosignal==1.2.0
appdirs==1.4.4
Arpeggio==1.10.2
@@ -7,14 +7,15 @@ async-timeout==4.0.1
attrs==21.2.0
blinker==1.4
Brotli==1.0.9
certifi==2021.10.8
certifi==2022.12.7
cffi==1.15.0
charset-normalizer==2.0.9
click==8.0.3
cryptography==36.0.2
defusedxml==0.7.1
Flask==2.0.2
frozenlist==1.2.0
frozenlist==1.3.3
gltflib==1.0.13
Glymur==0.9.6
h11==0.12.0
h2==4.1.0
@@ -27,13 +28,14 @@ Jinja2==3.0.3
kaitaistruct==0.9
lazy-object-proxy==1.6.0
ldap3==2.9.1
llbase==1.2.11
lxml==4.6.4
llsd~=1.0.0
lxml==4.9.2
MarkupSafe==2.0.1
mitmproxy==8.0.0
msgpack==1.0.3
multidict==5.2.0
numpy==1.21.4
numpy==1.24.2
outleap~=0.4.1
parso==0.8.3
passlib==1.7.4
prompt-toolkit==3.0.23
@@ -47,13 +49,13 @@ Pygments==2.10.0
pyOpenSSL==22.0.0
pyparsing==2.4.7
pyperclip==1.8.2
PySide6==6.2.2
PySide6-Essentials==6.4.2
qasync==0.22.0
recordclass==0.14.3
recordclass==0.18.2
requests==2.26.0
ruamel.yaml==0.17.16
ruamel.yaml.clib==0.2.6
shiboken6==6.2.2
ruamel.yaml==0.17.21
ruamel.yaml.clib==0.2.7
shiboken6==6.4.2
six==1.16.0
sortedcontainers==2.4.0
tornado==6.1
@@ -64,5 +66,5 @@ urwid==2.1.2
wcwidth==0.2.5
Werkzeug==2.0.2
wsproto==1.0.0
yarl==1.7.2
zstandard==0.15.2
yarl==1.8.2
zstandard<0.18.0

View File

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
version = '0.12.0'
version = '0.14.0'
with open(path.join(here, 'README.md')) as readme_fh:
readme = readme_fh.read()
@@ -45,6 +45,7 @@ setup(
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Software Development :: Libraries :: Python Modules",
@@ -75,33 +76,39 @@ setup(
entry_points={
'console_scripts': {
'hippolyzer-gui = hippolyzer.apps.proxy_gui:gui_main',
'hippolyzer-cli = hippolyzer.apps.proxy:main'
'hippolyzer-cli = hippolyzer.apps.proxy:main',
}
},
zip_safe=False,
python_requires='>=3.8',
install_requires=[
'llbase>=1.2.5',
'llsd<1.1.0',
'defusedxml',
'aiohttp<4.0.0',
'recordclass<0.15',
# Newer recordclasses break!
'recordclass>0.15,<0.18.3',
'lazy-object-proxy',
'arpeggio',
# requests breaks with newer idna
'idna<3,>=2.5',
# 7.x will be a major change.
'mitmproxy>=8.0.0,<8.1',
# For REPLs
'ptpython<4.0',
# JP2 codec
'Glymur<0.9.7',
'numpy<2.0',
# These could be in extras_require if you don't want a GUI.
'pyside6',
'qasync',
# Needed for mesh format conversion tooling
'pycollada',
'transformations',
'gltflib',
# JP2 codec
'Glymur<0.9.7',
'numpy<2.0',
# Proxy-specific stuff
'outleap<1.0',
'arpeggio',
# 7.x will be a major change.
'mitmproxy>=8.0.0,<8.1',
'Werkzeug<3.0',
# For REPLs
'ptpython<4.0',
# These could be in extras_require if you don't want a GUI.
'pyside6-essentials',
'qasync',
],
tests_require=[
"pytest",

View File

@@ -3,6 +3,7 @@ import setuptools # noqa
import os
import shutil
from distutils.core import Command
from importlib.metadata import version
from pathlib import Path
from cx_Freeze import setup, Executable
@@ -113,7 +114,7 @@ executables = [
setup(
name="hippolyzer_gui",
version="0.9.0",
version=version("hippolyzer"),
description="Hippolyzer GUI",
options=options,
executables=executables,

tests/base/test_events.py Normal file
View File

@@ -0,0 +1,51 @@
import asyncio
import unittest
from unittest.mock import MagicMock
from hippolyzer.lib.base.events import Event
class TestEvents(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
self.event = Event()
async def test_trigger_sync(self):
mock = MagicMock(return_value=False)
self.event.subscribe(mock)
self.event.notify("foo")
mock.assert_called_with("foo")
self.assertIn(mock, [x[0] for x in self.event.subscribers])
async def test_trigger_sync_unsub(self):
mock = MagicMock(return_value=True)
self.event.subscribe(mock)
self.event.notify("foo")
mock.assert_called_with("foo")
self.assertNotIn(mock, [x[0] for x in self.event.subscribers])
async def test_trigger_async(self):
called = asyncio.Event()
mock = MagicMock()
async def _mock_wrapper(*args, **kwargs):
called.set()
mock(*args, **kwargs)
self.event.subscribe(_mock_wrapper)
self.event.notify("foo")
await called.wait()
mock.assert_called_with("foo")
self.assertIn(_mock_wrapper, [x[0] for x in self.event.subscribers])
async def test_trigger_async_unsub(self):
called = asyncio.Event()
mock = MagicMock()
async def _mock_wrapper(*args, **kwargs):
called.set()
mock(*args, **kwargs)
return True
self.event.subscribe(_mock_wrapper)
self.event.notify("foo")
await called.wait()
mock.assert_called_with("foo")
self.assertNotIn(_mock_wrapper, [x[0] for x in self.event.subscribers])

View File

@@ -40,6 +40,8 @@ class TestMesh(unittest.TestCase):
writer.write(serializer, reader.read(serializer))
second_buf = writer.copy_buffer()
self.assertEqual(first_buf, second_buf)
# Dates may not round-trip correctly, but length should always be the same
self.assertEqual(len(first_buf), len(self.slm_bytes))
def test_serialize_raw_segments(self):
serializer = LLMeshSerializer(include_raw_segments=True)

View File

@@ -89,7 +89,7 @@ class _MutableMultiDictTests:
d = create_instance()
s = pickle.dumps(d, protocol)
ud = pickle.loads(s)
assert type(ud) == type(d)
assert type(ud) is type(d)
assert ud == d
alternative = pickle.dumps(create_instance("werkzeug"), protocol)
assert pickle.loads(alternative) == d

View File

@@ -6,6 +6,8 @@ import uuid
from io import BytesIO
from typing import Optional
import numpy as np
from hippolyzer.lib.base.datatypes import *
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.llanim import Animation, Joint, RotKeyframe
@@ -693,6 +695,46 @@ class NameValueSerializationTests(BaseSerializationTest):
deser.to_dict()
class NumPySerializationTests(BaseSerializationTest):
def setUp(self) -> None:
super().setUp()
self.writer.endianness = "<"
def test_simple(self):
quant_spec = se.Vector3U16(0.0, 1.0)
self.writer.write(quant_spec, Vector3(0, 0.1, 0))
self.writer.write(quant_spec, Vector3(1, 1, 1))
reader = self._get_reader()
np_spec = se.NumPyArray(se.BytesGreedy(), np.dtype(np.uint16), 3)
np_val = reader.read(np_spec)
expected_arr = np.array([[0, 6554, 0], [0xFFFF, 0xFFFF, 0xFFFF]], dtype=np.uint16)
np.testing.assert_array_equal(expected_arr, np_val)
# Make sure writing the array back works correctly
orig_buf = self.writer.copy_buffer()
self.writer.clear()
self.writer.write(np_spec, expected_arr)
self.assertEqual(orig_buf, self.writer.copy_buffer())
def test_quantization(self):
quant_spec = se.Vector3U16(0.0, 1.0)
self.writer.write(quant_spec, Vector3(0, 0.1, 0))
self.writer.write(quant_spec, Vector3(1, 1, 1))
reader = self._get_reader()
np_spec = se.QuantizedNumPyArray(se.NumPyArray(se.BytesGreedy(), np.dtype(np.uint16), 3), 0.0, 1.0)
np_val = reader.read(np_spec)
expected_arr = np.array([[0, 0.1, 0], [1, 1, 1]], dtype=np.float64)
np.testing.assert_array_almost_equal(expected_arr, np_val, decimal=5)
# Make sure writing the array back works correctly
orig_buf = self.writer.copy_buffer()
self.writer.clear()
self.writer.write(np_spec, expected_arr)
self.assertEqual(orig_buf, self.writer.copy_buffer())
class AnimSerializationTests(BaseSerializationTest):
SIMPLE_ANIM = b'\x01\x00\x00\x00\x01\x00\x00\x00H\x11\xd1?\x00\x00\x00\x00\x00H\x11\xd1?\x00\x00\x00\x00' \
b'\xcd\xccL>\x9a\x99\x99>\x01\x00\x00\x00\x02\x00\x00\x00mNeck\x00\x01\x00\x00\x00\x03\x00' \

View File

@@ -23,13 +23,7 @@ import unittest
from hippolyzer.lib.base.settings import Settings
class TestEvents(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
class TestSettings(unittest.TestCase):
def test_base_settings(self):
settings = Settings()
self.assertEqual(settings.ENABLE_DEFERRED_PACKET_PARSING, True)

View File

@@ -0,0 +1,32 @@
import unittest
import numpy as np
from hippolyzer.lib.base.mesh_skeleton import load_avatar_skeleton
class TestSkeleton(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.skeleton = load_avatar_skeleton()
def test_get_joint(self):
node = self.skeleton["mNeck"]
self.assertEqual("mNeck", node.name)
self.assertEqual(self.skeleton, node.skeleton())
def test_get_joint_index(self):
self.assertEqual(7, self.skeleton["mNeck"].index)
self.assertEqual(113, self.skeleton["mKneeLeft"].index)
def test_get_joint_parent(self):
self.assertEqual("mChest", self.skeleton["mNeck"].parent().name)
def test_get_joint_matrix(self):
expected_mat = np.array([
[1., 0., 0., -0.01],
[0., 1., 0., 0.],
[0., 0., 1., 0.251],
[0., 0., 0., 1.]
])
np.testing.assert_equal(expected_mat, self.skeleton["mNeck"].matrix)

View File

@@ -6,7 +6,6 @@ from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.templates import (
AssetType,
EstateAssetType,
@@ -16,26 +15,10 @@ from hippolyzer.lib.base.templates import (
TransferTargetType,
TransferStatus,
)
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.base.transfer_manager import TransferManager, Transfer
from hippolyzer.lib.base.xfer_manager import XferManager
class MockHandlingCircuit(ProxiedCircuit):
def __init__(self, handler: MessageHandler[Message, str]):
super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
self.handler = handler
def _send_prepared_message(self, message: Message, transport=None):
loop = asyncio.get_event_loop_policy().get_event_loop()
loop.call_soon(self.handler.handle, message)
class MockConnectionHolder(ConnectionHolder):
def __init__(self, circuit, message_handler):
self.circuit = circuit
self.message_handler = message_handler
from hippolyzer.lib.base.test_utils import MockHandlingCircuit, MockConnectionHolder
class BaseTransferTests(unittest.IsolatedAsyncioTestCase):

tests/client/__init__.py Normal file
View File

View File

@@ -0,0 +1,183 @@
import asyncio
import copy
import unittest
import xmlrpc.client
from typing import Tuple, Optional
import aioresponses
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, UDPPacket, Direction
from hippolyzer.lib.base.test_utils import MockTransport, MockConnectionHolder
from hippolyzer.lib.client.hippo_client import HippoClient, HippoClientProtocol
class MockServer(MockConnectionHolder):
def __init__(self, circuit, message_handler):
super().__init__(circuit, message_handler)
self.deserializer = UDPMessageDeserializer()
self.protocol: Optional[HippoClientProtocol] = None
def process_inbound(self, packet: UDPPacket):
"""Process a packet that the client sent to us"""
message = self.deserializer.deserialize(packet.data)
message.direction = Direction.IN
if message.reliable:
self.circuit.send_acks((message.packet_id,))
self.circuit.collect_acks(message)
if message.name != "PacketAck":
self.message_handler.handle(message)
class PacketForwardingTransport(MockTransport):
def __init__(self):
super().__init__()
self.protocol: Optional[HippoClientProtocol] = None
def send_packet(self, packet: UDPPacket):
super().send_packet(packet)
self.protocol.datagram_received(packet.data, packet.src_addr)
class MockServerTransport(MockTransport):
"""Used for the client to send packets out"""
def __init__(self, server: MockServer):
super().__init__()
self._server = server
def send_packet(self, packet: UDPPacket) -> None:
super().send_packet(packet)
# Directly pass the packet to the server
packet = copy.copy(packet)
packet.direction = Direction.IN
# Delay calling so the client can do its ACK bookkeeping first
asyncio.get_event_loop().call_soon(lambda: self._server.process_inbound(packet))
class MockHippoClient(HippoClient):
def __init__(self, server: MockServer):
super().__init__()
self.server = server
async def _create_transport(self) -> Tuple[AbstractUDPTransport, HippoClientProtocol]:
protocol = HippoClientProtocol(self.session)
# TODO: This isn't great, but whatever.
self.server.circuit.transport.protocol = protocol
return MockServerTransport(self.server), protocol
async def _soon(get_msg) -> Message:
return await asyncio.wait_for(get_msg(), timeout=1.0)
class TestHippoClient(unittest.IsolatedAsyncioTestCase):
FAKE_LOGIN_URI = "http://127.0.0.1:1/login.cgi"
FAKE_LOGIN_RESP = {
"session_id": str(UUID(int=1)),
"secure_session_id": str(UUID(int=2)),
"agent_id": str(UUID(int=3)),
"circuit_code": 123,
"sim_ip": "127.0.0.1",
"sim_port": 2,
"region_x": 0,
"region_y": 123,
"seed_capability": "https://127.0.0.1:4/foo",
"inventory-skeleton": [
{'name': 'My Inventory', 'folder_id': str(UUID(int=4)),
'parent_id': '00000000-0000-0000-0000-000000000000', 'type_default': 8, 'version': 200}
]
}
FAKE_SEED_RESP = {
"EventQueueGet": "https://127.0.0.1:5/",
}
FAKE_EQ_RESP = {
"id": 1,
"events": [
{"message": "ViewerFrozenMessage", "body": {"FrozenData": [{"Data": False}]}},
{"message": "NotTemplated", "body": {"foo": {"bar": True}}},
],
}
async def asyncSetUp(self):
self.server_handler: MessageHandler[Message, str] = MessageHandler()
self.server_transport = PacketForwardingTransport()
self.server_circuit = Circuit(("127.0.0.1", 2), ("127.0.0.1", 99), self.server_transport)
self.server = MockServer(self.server_circuit, self.server_handler)
self.aio_mock = aioresponses.aioresponses()
self.aio_mock.start()
self.aio_mock.post(
self.FAKE_LOGIN_URI,
body=xmlrpc.client.dumps((self.FAKE_LOGIN_RESP,), None, True)
)
self.aio_mock.post(self.FAKE_LOGIN_RESP['seed_capability'], body=llsd.format_xml(self.FAKE_SEED_RESP))
self.aio_mock.post(self.FAKE_SEED_RESP['EventQueueGet'], body=llsd.format_xml(self.FAKE_EQ_RESP), repeat=True)
self.client = MockHippoClient(self.server)
async def asyncTearDown(self):
try:
await self.client.aclose()
finally:
self.aio_mock.stop()
async def _log_client_in(self, client: MockHippoClient):
login_task = asyncio.create_task(client.login("foo", "bar", login_uri=self.FAKE_LOGIN_URI))
with self.server_handler.subscribe_async(
("*",),
) as get_msg:
assert (await _soon(get_msg)).name == "UseCircuitCode"
assert (await _soon(get_msg)).name == "CompleteAgentMovement"
self.server.circuit.send(Message(
'RegionHandshake',
Block('RegionInfo', fill_missing=True),
Block('RegionInfo2', fill_missing=True),
Block('RegionInfo3', fill_missing=True),
Block('RegionInfo4', fill_missing=True),
))
assert (await _soon(get_msg)).name == "RegionHandshakeReply"
assert (await _soon(get_msg)).name == "AgentThrottle"
await login_task
async def test_login(self):
await self._log_client_in(self.client)
with self.server_handler.subscribe_async(
("*",),
) as get_msg:
self.client.logout()
assert (await _soon(get_msg)).name == "LogoutRequest"
async def test_eq(self):
await self._log_client_in(self.client)
with self.client.session.message_handler.subscribe_async(
("ViewerFrozenMessage", "NotTemplated"),
) as get_msg:
assert (await _soon(get_msg)).name == "ViewerFrozenMessage"
msg = await _soon(get_msg)
assert msg.name == "NotTemplated"
assert msg["EventData"]["foo"]["bar"] == 1
async def test_inventory_manager(self):
await self._log_client_in(self.client)
self.assertEqual(self.client.session.inventory_manager.model.root.node_id, UUID(int=4))
async def test_resend_suppression(self):
"""Make sure the client only handles the first seen copy of a reliable message"""
await self._log_client_in(self.client)
with self.client.session.message_handler.subscribe_async(
("ChatFromSimulator", "AgentDataUpdate"),
) as get_msg:
msg = Message("ChatFromSimulator", Block("ChatData", fill_missing=True))
msg.send_flags |= PacketFlags.RELIABLE
# Fake re-sending the message
packet = self.server_circuit.send(msg)
self.server_transport.send_packet(packet)
self.server_circuit.send(Message("AgentDataUpdate", Block("AgentData", fill_missing=True)))
assert (await _soon(get_msg)).name == "ChatFromSimulator"
assert (await _soon(get_msg)).name == "AgentDataUpdate"

tests/client/test_rlv.py Normal file
View File

@@ -0,0 +1,36 @@
import unittest
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ChatType
from hippolyzer.lib.client.rlv import RLVParser, RLVCommand
class TestRLV(unittest.TestCase):
def test_is_rlv_command(self):
msg = Message(
"ChatFromSimulator",
Block("ChatData", Message="@foobar", ChatType=ChatType.OWNER)
)
self.assertTrue(RLVParser.is_rlv_message(msg))
msg["ChatData"]["ChatType"] = ChatType.NORMAL
self.assertFalse(RLVParser.is_rlv_message(msg))
def test_rlv_parse_single_command(self):
cmd = RLVParser.parse_chat("@foo:bar;baz=quux")[0]
self.assertEqual("foo", cmd.behaviour)
self.assertListEqual(["bar", "baz"], cmd.options)
self.assertEqual("quux", cmd.param)
def test_rlv_parse_multiple_commands(self):
cmds = RLVParser.parse_chat("@foo:bar;baz=quux,bazzy")
self.assertEqual("foo", cmds[0].behaviour)
self.assertListEqual(["bar", "baz"], cmds[0].options)
self.assertEqual("quux", cmds[0].param)
self.assertEqual("bazzy", cmds[1].behaviour)
def test_rlv_format_commands(self):
chat = RLVParser.format_chat([
RLVCommand("foo", "quux", ["bar", "baz"]),
RLVCommand("bazzy", "", [])
])
self.assertEqual("@foo:bar;baz=quux,bazzy", chat)

View File

@@ -88,12 +88,12 @@ class AddonIntegrationTests(BaseProxyTest):
self._setup_default_circuit()
self._fake_command("foobar baz")
await self._wait_drained()
self.assertEqual(self.session.addon_ctx["bazquux"], "baz")
self.assertEqual(self.session.addon_ctx["MockAddon"]["bazquux"], "baz")
# In session context these should be equivalent
with addon_ctx.push(new_session=self.session):
self.assertEqual(self.session.addon_ctx["bazquux"], self.addon.bazquux)
self.assertEqual(self.session.addon_ctx["another"], "baz")
self.assertEqual(self.session.addon_ctx["MockAddon"]["bazquux"], self.addon.bazquux)
self.assertEqual(self.session.addon_ctx["MockAddon"]["another"], "baz")
# Outside session context it should raise
with self.assertRaises(AttributeError):
@@ -104,7 +104,7 @@ class AddonIntegrationTests(BaseProxyTest):
self.session.addon_ctx.clear()
with addon_ctx.push(new_session=self.session):
# This has no default so should fail
# This has no default so it should fail
with self.assertRaises(AttributeError):
_something = self.addon.bazquux
# This has a default
@@ -144,9 +144,9 @@ class AddonIntegrationTests(BaseProxyTest):
AddonManager.load_addon_from_path(str(self.parent_path), reload=True)
# Wait for the init hooks to run
await asyncio.sleep(0.001)
self.assertFalse("quux" in self.session_manager.addon_ctx)
self.assertFalse("quux" in self.session_manager.addon_ctx["ParentAddon"])
parent_addon_mod = AddonManager.FRESH_ADDON_MODULES['hippolyzer.user_addon_parent_addon']
self.assertEqual(0, parent_addon_mod.ParentAddon.quux)
self.assertEqual(0, self.session_manager.addon_ctx["quux"])
self.assertEqual(0, self.session_manager.addon_ctx["ParentAddon"]["quux"])
parent_addon_mod.ParentAddon.quux = 1
self.assertEqual(1, self.session_manager.addon_ctx["quux"])
self.assertEqual(1, self.session_manager.addon_ctx["ParentAddon"]["quux"])

View File

@@ -36,7 +36,7 @@ class MockAddon(BaseAddon):
return True
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
self.events.append(("object_update", session.id, region.circuit_addr, obj.LocalID, updated_props))

View File

@@ -48,7 +48,7 @@ class ObjectTrackingAddon(BaseAddon):
super().__init__()
self.events = []
def handle_object_updated(self, session, region, obj: Object, updated_props: Set[str]):
def handle_object_updated(self, session, region, obj: Object, updated_props: Set[str], msg: Optional[Message]):
self.events.append(("update", obj, updated_props))
def handle_object_killed(self, session, region, obj: Object):

tests/voice/__init__.py Normal file
View File

tests/voice/test_voice.py Normal file
View File

@@ -0,0 +1,289 @@
from typing import *
import asyncio
import unittest
from unittest import mock
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.voice.client import VoiceClient
from hippolyzer.lib.voice.connection import VivoxConnection
def _make_transport(write_func):
transport = mock.Mock()
transport.write.side_effect = write_func
transport.is_closing.return_value = False
return transport
def _make_protocol(transport: Any):
protocol = mock.Mock(transport=transport)
protocol._drain_helper = mock.AsyncMock()
return protocol
class TestVivoxConnection(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
self._writer_buf = bytearray()
self._transport = _make_transport(self._writer_buf.extend)
self._protocol = _make_protocol(self._transport)
self.reader = asyncio.StreamReader()
self.writer = asyncio.StreamWriter(self._transport, self._protocol, self.reader, asyncio.get_event_loop())
self.vivox_connection = VivoxConnection(self.reader, self.writer)
async def test_read_request(self):
self.reader.feed_data(
b'<Request requestId="foobar" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
)
self.reader.feed_eof()
msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
self.assertEqual("Request", msg_type)
self.assertEqual("Aux.GetRenderDevices.1", msg_action)
self.assertEqual("foobar", request_id)
self.assertDictEqual({"Foo": "1"}, body)
async def test_read_response(self):
self.reader.feed_data(
b'<Response requestId="foobar" action="Connector.SetLocalMicVolume.1"><ReturnCode>0</ReturnCode>'
b'<Results><StatusCode>0</StatusCode><StatusString /></Results>'
b'<InputXml><Request/></InputXml></Response>\n\n\n'
)
self.reader.feed_eof()
msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
self.assertEqual("Response", msg_type)
self.assertEqual("Connector.SetLocalMicVolume.1", msg_action)
self.assertEqual("foobar", request_id)
self.assertDictEqual(
{'ReturnCode': 0, 'Results': {'StatusCode': '0', 'StatusString': None}},
body,
)
async def test_read_event(self):
self.reader.feed_data(
b'<Event type="MediaStreamUpdatedEvent"><SessionGroupHandle>4</SessionGroupHandle><SessionHandle>7'
b'</SessionHandle><StatusCode>0</StatusCode><StatusString/>'
b'<State>6</State><StateDescription>Connecting</StateDescription><Incoming>false</Incoming>'
b'<DurableMediaId/></Event>\n\n\n'
)
self.reader.feed_eof()
msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
self.assertEqual("Event", msg_type)
self.assertEqual("MediaStreamUpdatedEvent", msg_action)
self.assertEqual(None, request_id)
self.assertDictEqual(
{
'DurableMediaId': None,
'Incoming': 'false',
'SessionGroupHandle': '4',
'SessionHandle': '7',
'State': '6',
'StateDescription': 'Connecting',
'StatusCode': '0',
'StatusString': None,
},
body,
)
async def test_read_messages(self):
self.reader.feed_data(
b'<Request requestId="foobar" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
b'<Request requestId="quux" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
)
self.reader.feed_eof()
i = 0
async for msg in self.vivox_connection.read_messages():
if i == 0:
self.assertEqual("foobar", msg.request_id)
else:
self.assertEqual("quux", msg.request_id)
self.assertEqual("Request", msg.type)
self.assertEqual("Aux.GetRenderDevices.1", msg.name)
self.assertDictEqual({"Foo": "1"}, msg.data)
i += 1
async def test_send_message(self):
await self.vivox_connection.send_request("foo", "bar", {"baz": 1})
self.assertEqual(
b'<Request requestId="foo" action="bar"><baz>1</baz></Request>\n\n\n',
self._writer_buf
)
class TestVoiceClient(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
self._client_transport = _make_transport(
lambda *args: asyncio.get_event_loop().call_soon(self.server_reader.feed_data, *args)
)
self._client_protocol = _make_protocol(self._client_transport)
self.client_reader = asyncio.StreamReader()
self.client_writer = asyncio.StreamWriter(
self._client_transport,
self._client_protocol,
self.client_reader,
asyncio.get_event_loop()
)
self._server_transport = _make_transport(
lambda *args: asyncio.get_event_loop().call_soon(self.client_reader.feed_data, *args)
)
self._server_protocol = _make_protocol(self._server_transport)
self.server_reader = asyncio.StreamReader()
self.server_writer = asyncio.StreamWriter(
self._server_transport,
self._server_protocol,
self.server_reader,
asyncio.get_event_loop()
)
self.client_connection = VivoxConnection(self.client_reader, self.client_writer)
self.server_connection = VivoxConnection(self.server_reader, self.server_writer)
self.client = VoiceClient("127.0.0.1", 0)
self.client.vivox_conn = self.client_connection
def _make_request_id():
_make_request_id.i += 1
return str(_make_request_id.i)
_make_request_id.i = 0
self.client._make_request_id = _make_request_id
async def _expect_message(self, name: str):
msg = await self.server_connection.read_message()
self.assertEqual(name, msg.name)
return msg
async def _handle_message(self, name: str):
msg = await self._expect_message(name)
await self.server_connection.send_response(msg.request_id, msg.name, {
"ReturnCode": 0,
"Results": {}
})
return msg
async def _do_connector_setup(self):
async def _serve_connector_setup():
await self.server_connection.send_event(
"VoiceServiceConnectionStateChangedEvent",
{
"Connected": 1,
"Platform": "Linux",
"Version": 1,
"DataDirectory": "/tmp/whatever",
}
)
msg = await self.server_connection.read_message()
self.assertEqual('Aux.GetCaptureDevices.1', msg.name)
await self.server_connection.send_response(msg.request_id, msg.name, {
"ReturnCode": 0,
"Results": {
"StatusCode": 0,
"StatusString": None,
"CaptureDevices": []
}
})
msg = await self.server_connection.read_message()
self.assertEqual('Aux.GetRenderDevices.1', msg.name)
await self.server_connection.send_response(msg.request_id, msg.name, {
"ReturnCode": 0,
"Results": {
"StatusCode": 0,
"StatusString": None,
"RenderDevices": []
}
})
await self._handle_message("Connector.MuteLocalSpeaker.1")
await self._handle_message("Connector.SetLocalSpeakerVolume.1")
await self._handle_message("Connector.MuteLocalMic.1")
await self._handle_message("Connector.SetLocalMicVolume.1")
msg = await self.server_connection.read_message()
self.assertEqual('Connector.Create.1', msg.name)
await self.server_connection.send_response(msg.request_id, msg.name, {
"ReturnCode": 0,
"Results": {
"ConnectorHandle": 2,
}
})
serve_connector_task = asyncio.create_task(_serve_connector_setup())
await asyncio.wait_for(serve_connector_task, 0.5)
await asyncio.wait_for(self.client.ready.wait(), 0.5)
async def _do_login(self):
async def _serve_login():
msg = await self._expect_message("Account.Login.1")
self.assertEqual("foo", msg.data["AccountName"])
await self.server_connection.send_event("AccountLoginStateChangeEvent", {
"AccountHandle": 2,
"StatusCode": 200,
"StatusString": "OK",
"State": 1,
})
await self.server_connection.send_response(msg.request_id, msg.name, {
"ReturnCode": 0,
"Results": {
"StatusCode": 0,
"StatusString": None,
"AccountHandle": 2,
"DisplayName": "foo",
"Uri": "uri:baz@foo",
}
})
login_task = asyncio.create_task(_serve_login())
await asyncio.wait_for(self.client.login("foo", "bar"), 0.5)
await asyncio.wait_for(login_task, 0.5)
async def _join_session(self):
async def _serve_session():
await self._handle_message("Session.Create.1")
await self.server_connection.send_event("SessionAddedEvent", {
"SessionHandle": 4,
"SessionGroupHandle": 5,
})
await self.server_connection.send_event("ParticipantAddedEvent", {
"ParticipantUri": "uri:baz@foo",
})
serve_session_task = asyncio.create_task(_serve_session())
await asyncio.wait_for(self.client.join_session("uri:foo@bar", region_handle=256), 0.5)
self.assertIn("uri:baz@foo", self.client.participants)
await asyncio.wait_for(serve_session_task, 0.5)
async def test_create_connector(self):
await self._do_connector_setup()
async def test_login(self):
await self._do_connector_setup()
await self._do_login()
async def test_create_session(self):
await self._do_connector_setup()
await self._do_login()
await self._join_session()
async def test_set_position(self):
await self._do_connector_setup()
await self._do_login()
await self._join_session()
handle_3d_pos_task = asyncio.create_task(self._handle_message("Session.Set3DPosition.1"))
await self.client.set_region_3d_pos(Vector3(1, 2, 3))
msg = await handle_3d_pos_task
self.assertDictEqual(
{'X': '1.0', 'Y': '3.0', 'Z': '-258.0'},
msg.data["SpeakerPosition"]["Position"],
)
self.assertAlmostEqual(self.client.region_pos.X, 1.0)
self.assertAlmostEqual(self.client.region_pos.Y, 2.0)
self.assertAlmostEqual(self.client.region_pos.Z, 3.0)
self.assertAlmostEqual(self.client.global_pos.X, 1.0)
self.assertAlmostEqual(self.client.global_pos.Y, 3.0)
self.assertAlmostEqual(self.client.global_pos.Z, -258.0)

View File

@@ -0,0 +1,36 @@
"""
Connect to a voice session at 0, 0, 0 for 20 seconds, then exit.
"""
import asyncio
from contextlib import aclosing
import os
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.voice.client import VoiceClient
VOICE_PATH = os.environ["SLVOICE_PATH"]
async def amain():
client = await VoiceClient.simple_init(VOICE_PATH)
async with aclosing(client):
print("Capture Devices:", client.capture_devices)
print("Render Devices:", client.render_devices)
await client.set_mic_muted(True)
await client.set_mic_volume(60)
print(await client.login(os.environ["SLVOICE_USERNAME"], os.environ["SLVOICE_PASSWORD"]))
await client.join_session(os.environ["SLVOICE_URI"], int(os.environ["SLVOICE_HANDLE"]))
await client.set_region_3d_pos(Vector3(0, 0, 0))
print(client.region_pos)
# leave running for 20 seconds, then exit
await asyncio.sleep(20.0)
print("Bye!")
if __name__ == "__main__":
asyncio.run(amain())