37 Commits

Author SHA1 Message Date
Salad Dais
21d1c7ebfe v0.13.4 2023-12-07 18:47:43 +00:00
Salad Dais
996a43be5b Add option to allow insecure upstream SSL connections 2023-12-07 18:44:10 +00:00
Salad Dais
9e8127e577 Don't use asyncio.get_running_loop() 2023-12-06 20:35:55 +00:00
Salad Dais
cfcd324a11 Pin to Werkzeug under 3.0 2023-12-06 20:35:39 +00:00
Salad Dais
6872634bf4 Be more resilient when faced with no cap_data 2023-12-06 20:35:18 +00:00
Salad Dais
091090c6fd Reparent avatars correctly when recalculating linksets 2023-12-03 23:51:11 +00:00
Salad Dais
bd4fff4200 Add support for PBR / reflection probes 2023-12-03 23:50:32 +00:00
Salad Dais
52dfd0be05 v0.13.3 2023-10-10 23:23:57 +00:00
Salad Dais
60f1737115 Appease new flake8 rules 2023-10-10 23:20:43 +00:00
Salad Dais
7a5d6baf02 Make failing to load invcache non-fatal 2023-10-10 23:15:15 +00:00
Salad Dais
44a332a77b Handle failing to load an addon correctly 2023-10-10 23:14:59 +00:00
Salad Dais
beb0a2d6a4 v0.13.2 2023-07-06 21:49:35 +00:00
Salad Dais
9be66df52b Add AgentFOV to default message ignorelist
It's incredibly spammy when the mesh upload preview is open
2023-07-06 21:48:46 +00:00
Salad Dais
da0117db1b v0.13.1 2023-07-05 20:29:40 +00:00
Salad Dais
4dbf01a604 Blacklist new versions of recordclass 2023-07-05 20:27:05 +00:00
Salad Dais
36858ed3e2 Fix flake error 2023-06-18 18:37:14 +00:00
Salad Dais
370c586582 Decode more flags fields 2023-06-18 18:33:52 +00:00
Salad Dais
fdfffd96c9 Fix UUID serialization with invalid AIS LLSD payloads 2023-06-18 18:33:26 +00:00
Salad Dais
6da9f58b23 Pass original Message through to objectupdate hooks 2023-06-18 18:29:51 +00:00
Salad Dais
12e3912a37 Update README.md
This isn't even in there anymore!
2023-02-07 19:43:51 +00:00
Salad Dais
8147e7e1d7 Remove stylesheet from message builder 2023-02-07 19:43:29 +00:00
Salad Dais
19dba6651c v0.13.0 2023-02-07 19:36:22 +00:00
Salad Dais
274f96c710 Run CI tests on Python 3.11 instead of 3.10 2023-02-07 18:49:14 +00:00
Salad Dais
09e1d0b6fc Remove custom stylesheet for HTTP request / response panes 2023-02-07 18:49:14 +00:00
dependabot[bot]
f4fb68e310 Bump certifi from 2021.10.8 to 2022.12.7 (#34)
Bumps [certifi](https://github.com/certifi/python-certifi) from 2021.10.8 to 2022.12.7.
- [Release notes](https://github.com/certifi/python-certifi/releases)
- [Commits](https://github.com/certifi/python-certifi/compare/2021.10.08...2022.12.07)

---
updated-dependencies:
- dependency-name: certifi
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-07 13:39:26 -04:00
Salad Dais
8edf7ae89b Rough cut of Python 3.11 support 2023-02-07 17:35:44 +00:00
Salad Dais
b6458e9eb7 Add mute enum definitions 2022-11-15 06:24:28 +00:00
Salad Dais
375af1e7f6 Improvements to Object and Skeleton APIs 2022-11-14 21:54:52 +00:00
Salad Dais
76d0a72590 Fix ObjectUpdateBlame addon example always requesting 2022-11-01 23:06:17 +00:00
Salad Dais
3255556835 Add CreationDate SubfieldSerializer 2022-11-01 08:18:40 +00:00
Salad Dais
d19122c039 Fix copy/paste error in puppetry addon 2022-10-27 16:10:05 +00:00
Salad Dais
5692f7b8b6 Add WIP puppetry code 2022-10-19 02:11:04 +00:00
Salad Dais
21cea0f009 Claim LEAP client when session is first created 2022-10-19 02:06:35 +00:00
Salad Dais
193d762132 Give each addon a separate addon_ctx bucket
This fixes addons being able to accidentally stomp all over each
others' state just because they happened to use the same name for
a SessionProperty.
2022-10-18 22:40:15 +00:00
Salad Dais
227fbf7a2e Improve avatar skeleton implementation 2022-10-18 19:39:39 +00:00
Salad Dais
25a397bcc5 add LEAP client connection addon hook 2022-10-17 21:28:11 +00:00
Salad Dais
b0dca80b87 Simplify MetaBaseAddon 2022-10-15 22:56:32 +00:00
46 changed files with 599 additions and 185 deletions

View File

@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.10"]
python-version: ["3.8", "3.11"]
steps:
- uses: actions/checkout@v2

View File

@@ -48,8 +48,7 @@ A proxy is provided with both a CLI and Qt-based interface. The proxy applicatio
custom SOCKS 5 UDP proxy, as well as an HTTP proxy based on [mitmproxy](https://mitmproxy.org/).
Multiple clients are supported at a time, and UDP messages may be injected in either
direction. The proxy UI was inspired by the Message Log and Message Builder as present in
the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
direction.
### Proxy Setup

View File

@@ -114,7 +114,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
region.objects.request_missing_objects()
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
if self.blueish_model is None:
return

View File

@@ -6,7 +6,7 @@ from hippolyzer.lib.proxy.sessions import Session
def handle_lludp_message(session: Session, region: ProxiedRegion, message: Message):
# addon_ctx will persist across addon reloads, use for storing data that
# needs to survive across calls to this function
ctx = session.addon_ctx
ctx = session.addon_ctx[__name__]
if message.name == "ChatFromViewer":
chat = message["ChatData"]["Message"]
if chat == "COUNT":

View File

@@ -11,7 +11,7 @@ from outleap.scripts.inspector import LEAPInspectorGUI
from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.sessions import Session, SessionManager
# Path found using `outleap-inspector`
@@ -19,6 +19,13 @@ FPS_PATH = outleap.UIPath("/main_view/menu_stack/status_bar_container/status/tim
class LEAPExampleAddon(BaseAddon):
async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
# You can do things as soon as the LEAP client connects, like if you want to automate
# login or whatever.
viewer_control_api = outleap.LLViewerControlAPI(leap_client)
# Ask for a config value and print it in the viewer logs
print(await viewer_control_api.get("Global", "StatsPilotFile"))
@handle_command()
async def show_ui_inspector(self, session: Session, _region: ProxiedRegion):
"""Spawn a GUI for inspecting the UI state"""

View File

@@ -230,7 +230,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
show_message("Mangled upload request")
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
if obj.LocalID not in self.local_mesh_target_locals:
return
if "Name" not in updated_props or obj.Name is None:

View File

@@ -10,6 +10,7 @@ before you start tracking can help too.
from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.base.templates import PCode
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message, SessionProperty
@@ -20,7 +21,7 @@ from hippolyzer.lib.proxy.sessions import Session
class ObjectUpdateBlameAddon(BaseAddon):
update_blame_counter: Counter[UUID] = SessionProperty(Counter)
track_update_blame: bool = SessionProperty(False)
should_track_update_blame: bool = SessionProperty(False)
@handle_command()
async def precache_objects(self, _session: Session, region: ProxiedRegion):
@@ -38,11 +39,11 @@ class ObjectUpdateBlameAddon(BaseAddon):
@handle_command()
async def track_update_blame(self, _session: Session, _region: ProxiedRegion):
self.track_update_blame = True
self.should_track_update_blame = True
@handle_command()
async def untrack_update_blame(self, _session: Session, _region: ProxiedRegion):
self.track_update_blame = False
self.should_track_update_blame = False
@handle_command()
async def clear_update_blame(self, _session: Session, _region: ProxiedRegion):
@@ -57,8 +58,8 @@ class ObjectUpdateBlameAddon(BaseAddon):
print(f"{obj_id} ({name!r}): {count}")
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
if not self.track_update_blame:
obj: Object, updated_props: Set[str], msg: Optional[Message]):
if not self.should_track_update_blame:
return
if region != session.main_region:
return

View File

@@ -0,0 +1,111 @@
"""
Control a puppetry-enabled viewer and make your neck spin like crazy
It currently requires a custom rebased Firestorm with puppetry applied on top,
and patches applied on top to make startup LEAP scripts be treated as puppetry modules.
Basically, you probably don't want to use this yet. But hey, Puppetry is still only
on the beta grid anyway.
"""
import asyncio
import enum
import logging
import math
from typing import *
import outleap
from hippolyzer.lib.base.datatypes import Quaternion
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.sessions import Session
LOG = logging.getLogger(__name__)
class BodyPartMask(enum.IntFlag):
"""Which joints to send the viewer as part of "move" puppetry command"""
HEAD = 1 << 0
FACE = 1 << 1
LHAND = 1 << 2
RHAND = 1 << 3
FINGERS = 1 << 4
def register_puppetry_command(func: Callable[[dict], Awaitable[None]]):
"""Register a method as handling inbound puppetry commands from the viewer"""
func._puppetry_command = True
return func
class PuppetryExampleAddon(BaseAddon):
server_skeleton: Dict[str, Dict[str, Any]] = SessionProperty(dict)
camera_num: int = SessionProperty(0)
parts_active: BodyPartMask = SessionProperty(lambda: BodyPartMask(0x1F))
puppetry_api: Optional[outleap.LLPuppetryAPI] = SessionProperty(None)
leap_client: Optional[outleap.LEAPClient] = SessionProperty(None)
def handle_session_init(self, session: Session):
if not session.leap_client:
return
self.puppetry_api = outleap.LLPuppetryAPI(session.leap_client)
self.leap_client = session.leap_client
self._schedule_task(self._serve())
self._schedule_task(self._exorcist(session))
@register_puppetry_command
async def enable_parts(self, args: dict):
if (new_mask := args.get("parts_mask")) is not None:
self.parts_active = BodyPartMask(new_mask)
@register_puppetry_command
async def set_camera(self, args: dict):
if (camera_num := args.get("camera_num")) is not None:
self.camera_num = camera_num
@register_puppetry_command
async def stop(self, _args: dict):
LOG.info("Viewer asked us to stop puppetry")
@register_puppetry_command
async def log(self, _args: dict):
# Intentionally ignored, we don't care about things the viewer
# asked us to log
pass
@register_puppetry_command
async def set_skeleton(self, args: dict):
# Don't really care about what the viewer thinks the view of the skeleton is.
# Just log store it.
self.server_skeleton = args
async def _serve(self):
"""Handle inbound puppetry commands from viewer in a loop"""
async with self.leap_client.listen_scoped("puppetry.controller") as listener:
while True:
msg = await listener.get()
cmd = msg["command"]
handler = getattr(self, cmd, None)
if handler is None or not hasattr(handler, "_puppetry_command"):
LOG.warning(f"Unknown puppetry command {cmd!r}: {msg!r}")
continue
await handler(msg.get("args", {}))
async def _exorcist(self, session):
"""Do the Linda Blair thing with your neck"""
spin_rad = 0.0
while True:
await asyncio.sleep(0.05)
if not session.main_region:
continue
# Wrap spin_rad around if necessary
while spin_rad > math.pi:
spin_rad -= math.pi * 2
# LEAP wants rot as a quaternion with just the imaginary parts.
neck_rot = Quaternion.from_euler(0, 0, spin_rad).data(3)
self.puppetry_api.move({
"mNeck": {"no_constraint": True, "local_rot": neck_rot},
})
spin_rad += math.pi / 25
addons = [PuppetryExampleAddon()]

View File

@@ -13,7 +13,7 @@ def _to_spongecase(val):
def handle_lludp_message(session: Session, _region: ProxiedRegion, message: Message):
ctx = session.addon_ctx
ctx = session.addon_ctx[__name__]
ctx.setdefault("spongecase", False)
if message.name == "ChatFromViewer":
chat = message["ChatData"]["Message"]

View File

@@ -191,7 +191,7 @@
</size>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(80, 0, 0)</string>
<string notr="true"/>
</property>
<property name="tabChangesFocus">
<bool>true</bool>

View File

@@ -16,7 +16,7 @@ from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.ca_utils import setup_ca
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
from hippolyzer.lib.base.message.message import Message
@@ -85,12 +85,12 @@ class REPLAddon(BaseAddon):
AddonManager.spawn_repl()
def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext):
def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext, ssl_insecure=False):
mitm_loop = asyncio.new_event_loop()
asyncio.set_event_loop(mitm_loop)
async def mitmproxy_loop():
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context, ssl_insecure=ssl_insecure)
gc.freeze()
await mitmproxy_master.run()
@@ -98,7 +98,7 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo
def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
extra_addon_paths: Optional[list] = None, proxy_host=None):
extra_addon_paths: Optional[list] = None, proxy_host=None, ssl_insecure=False):
extra_addons = extra_addons or []
extra_addon_paths = extra_addon_paths or []
extra_addons.append(SelectionManagerAddon())
@@ -123,17 +123,13 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
# TODO: argparse
if len(sys.argv) == 3:
if sys.argv[1] == "--setup-ca":
try:
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
except mitmproxy.exceptions.MitmproxyException:
# Proxy already running, create the master so we don't try to bind to a port
mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
setup_ca(sys.argv[2], mitmproxy_master)
return sys.exit(0)
http_proc = multiprocessing.Process(
target=run_http_proxy_process,
args=(proxy_host, http_proxy_port, flow_context),
args=(proxy_host, http_proxy_port, flow_context, ssl_insecure),
daemon=True,
)
http_proc.start()

View File

@@ -42,7 +42,7 @@ from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
import_log_entries, export_log_entries
from hippolyzer.lib.proxy.region import ProxiedRegion
@@ -231,7 +231,8 @@ class MessageLogWindow(QtWidgets.QMainWindow):
"AvatarRenderInfo FirestormBridge ObjectAnimation ParcelDwellRequest ParcelAccessListRequest " \
"ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
"ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
"ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
"ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
"AgentFOV".split(" ")
DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"
textRequest: QtWidgets.QTextEdit
@@ -274,9 +275,11 @@ class MessageLogWindow(QtWidgets.QMainWindow):
self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)
self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
self.actionProxySSLInsecure.setChecked(self.settings.SSL_INSECURE)
self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
self.actionProxySSLInsecure.triggered.connect(self._setProxySSLInsecure)
self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)
@@ -457,7 +460,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
if clicked_btn is not yes_btn:
return
master = create_proxy_master("127.0.0.1", -1, HTTPFlowContext())
master = create_http_proxy("127.0.0.1", -1, HTTPFlowContext())
dirs = setup_ca_everywhere(master)
msg = QtWidgets.QMessageBox()
@@ -473,6 +476,12 @@ class MessageLogWindow(QtWidgets.QMainWindow):
msg.setText("Remote accessibility setting changes will take effect on next run")
msg.exec()
def _setProxySSLInsecure(self, checked: bool):
self.sessionManager.settings.SSL_INSECURE = checked
msg = QtWidgets.QMessageBox()
msg.setText("SSL security setting changes will take effect on next run")
msg.exec()
def _setUseViewerObjectCache(self, checked: bool):
self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked
@@ -936,6 +945,7 @@ def gui_main():
session_manager=window.sessionManager,
extra_addon_paths=window.getAddonList(),
proxy_host=http_host,
ssl_insecure=settings.SSL_INSECURE,
)

View File

@@ -193,7 +193,7 @@
</size>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(80, 0, 0)</string>
<string notr="true"/>
</property>
<property name="tabChangesFocus">
<bool>true</bool>
@@ -213,7 +213,7 @@
</widget>
<widget class="QPlainTextEdit" name="textResponse">
<property name="styleSheet">
<string notr="true">color: rgb(0, 0, 80)</string>
<string notr="true"/>
</property>
<property name="tabChangesFocus">
<bool>true</bool>
@@ -245,7 +245,7 @@
<x>0</x>
<y>0</y>
<width>700</width>
<height>22</height>
<height>29</height>
</rect>
</property>
<widget class="QMenu" name="menuFile">
@@ -268,6 +268,7 @@
<addaction name="actionProxyRemotelyAccessible"/>
<addaction name="actionUseViewerObjectCache"/>
<addaction name="actionRequestMissingObjects"/>
<addaction name="actionProxySSLInsecure"/>
</widget>
<addaction name="menuFile"/>
</widget>
@@ -342,6 +343,17 @@
<string>Export Log Entries</string>
</property>
</action>
<action name="actionProxySSLInsecure">
<property name="checkable">
<bool>true</bool>
</property>
<property name="text">
<string>Allow Insecure SSL Connections</string>
</property>
<property name="toolTip">
<string>Allow invalid SSL certificates from upstream connections</string>
</property>
</action>
</widget>
<resources/>
<connections/>

View File

@@ -42,12 +42,10 @@ class _IterableStub:
RAD_TO_DEG = 180 / math.pi
class TupleCoord(recordclass.datatuple, _IterableStub): # type: ignore
__options__ = {
"fast_new": False,
}
class TupleCoord(recordclass.RecordClass, _IterableStub):
def __init__(self, *args):
# Only to help typing, doesn't actually do anything.
# All the important stuff happens in `__new__()`
pass
@classmethod
@@ -367,7 +365,7 @@ def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int
return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
class TaggedUnion(recordclass.datatuple): # type: ignore
class TaggedUnion(recordclass.RecordClass):
tag: Any
value: Any

View File

@@ -176,7 +176,7 @@ class MessageTemplateNotFound(MessageSystemError):
self.template = template
def __str__(self):
return "No message template found, context: '%s'" % self.context
return "No message template found for %s, context: '%s'" % (self.template, self.context)
class MessageTemplateParsingError(MessageSystemError):

View File

@@ -385,9 +385,12 @@ class GLTFBuilder:
return buffer_view
def add_joints(self, skin: SkinSegmentDict) -> JOINT_CONTEXT_DICT:
joints: JOINT_CONTEXT_DICT = {}
# There may be some joints not present in the mesh that we need to add to reach the mPelvis root
required_joints = AVATAR_SKELETON.get_required_joints(skin['joint_names'])
required_joints = set()
for joint_name in skin['joint_names']:
joint_node = AVATAR_SKELETON[joint_name]
required_joints.add(joint_node)
required_joints.update(joint_node.ancestors)
# If this is present, it may override the joint positions from the skeleton definition
if 'alt_inverse_bind_matrix' in skin:
@@ -395,12 +398,12 @@ class GLTFBuilder:
else:
joint_overrides = {}
for joint_name in required_joints:
joint = AVATAR_SKELETON[joint_name]
built_joints: JOINT_CONTEXT_DICT = {}
for joint in required_joints:
joint_matrix = joint.matrix
# Do we have a joint position override that would affect joint_matrix?
override = joint_overrides.get(joint_name)
override = joint_overrides.get(joint.name)
if override:
decomp = list(transformations.decompose_matrix(joint_matrix))
# We specifically only want the translation from the override!
@@ -419,16 +422,16 @@ class GLTFBuilder:
# TODO: populate "extras" here with the metadata the Blender collada stuff uses to store
# "bind_mat" and "rest_mat" so we can go back to our original matrices when exporting
# from blender to .dae!
node = self.add_node(joint_name, transform=joint_matrix)
gltf_joint = self.add_node(joint.name, transform=joint_matrix)
# Store the node along with any fixups we may need to apply to the bind matrices later
joints[joint_name] = JointContext(node, orig_matrix, fixup_matrix)
built_joints[joint.name] = JointContext(gltf_joint, orig_matrix, fixup_matrix)
# Add each joint to the child list of their respective parent
for joint_name, joint_ctx in joints.items():
for joint_name, joint_ctx in built_joints.items():
if parent := AVATAR_SKELETON[joint_name].parent:
joints[parent().name].node.children.append(self.model.nodes.index(joint_ctx.node))
return joints
built_joints[parent().name].node.children.append(self.model.nodes.index(joint_ctx.node))
return built_joints
def _fix_blender_joint(self, joint_matrix: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""

View File

@@ -147,7 +147,7 @@ def get_resource_filename(resource_filename: str):
return pkg_resources.resource_filename("hippolyzer", resource_filename)
def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[_T, None, None]:
def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[Sequence[_T], None, None]:
while chunkable:
yield chunkable[:chunk_size]
chunkable = chunkable[chunk_size:]

View File

@@ -178,7 +178,7 @@ class DomainDict(TypedDict):
Min: List[float]
class VertexWeight(recordclass.datatuple): # type: ignore
class VertexWeight(recordclass.RecordClass):
"""Vertex weight for a specific joint on a specific vertex"""
# index of the joint within the joint_names list in the skin segment
joint_idx: int

View File

@@ -1,3 +1,5 @@
from __future__ import annotations
import dataclasses
import weakref
from typing import *
@@ -10,18 +12,23 @@ from hippolyzer.lib.base.helpers import get_resource_filename
MAYBE_JOINT_REF = Optional[Callable[[], "JointNode"]]
SKELETON_REF = Optional[Callable[[], "Skeleton"]]
@dataclasses.dataclass(unsafe_hash=True)
@dataclasses.dataclass
class JointNode:
name: str
parent: MAYBE_JOINT_REF
skeleton: SKELETON_REF
translation: Vector3
pivot: Vector3 # pivot point for the joint, generally the same as translation
rotation: Vector3 # Euler rotation in degrees
scale: Vector3
type: str # bone or collision_volume
def __hash__(self):
return hash((self.name, self.type))
@property
def matrix(self):
return transformations.compose_matrix(
@@ -30,61 +37,85 @@ class JointNode:
translate=tuple(self.translation),
)
@property
def index(self) -> int:
bone_idx = 0
for node in self.skeleton().joint_dict.values():
if node.type != "bone":
continue
if self is node:
return bone_idx
bone_idx += 1
raise KeyError(f"{self.name!r} doesn't exist in skeleton")
@property
def ancestors(self) -> Sequence[JointNode]:
joint_node = self
ancestors = []
while joint_node.parent:
joint_node = joint_node.parent()
ancestors.append(joint_node)
return ancestors
@property
def children(self) -> Sequence[JointNode]:
children = []
for node in self.skeleton().joint_dict.values():
if node.parent and node.parent() == self:
children.append(node)
return children
@property
def descendents(self) -> Set[JointNode]:
descendents = set()
ancestors = {self}
last_ancestors = set()
while last_ancestors != ancestors:
last_ancestors = ancestors
for node in self.skeleton().joint_dict.values():
if node.parent and node.parent() in ancestors:
ancestors.add(node)
descendents.add(node)
return descendents
@dataclasses.dataclass
class Skeleton:
joint_dict: Dict[str, JointNode]
def __init__(self, root_node: etree.ElementBase):
self.joint_dict: Dict[str, JointNode] = {}
self._parse_node_children(root_node, None)
def __getitem__(self, item: str) -> JointNode:
return self.joint_dict[item]
@classmethod
def _parse_node_children(cls, joint_dict: Dict[str, JointNode], node: etree.ElementBase, parent: MAYBE_JOINT_REF):
def _parse_node_children(self, node: etree.ElementBase, parent: MAYBE_JOINT_REF):
name = node.get('name')
joint = JointNode(
name=name,
parent=parent,
skeleton=weakref.ref(self),
translation=_get_vec_attr(node, "pos", Vector3()),
pivot=_get_vec_attr(node, "pivot", Vector3()),
rotation=_get_vec_attr(node, "rot", Vector3()),
scale=_get_vec_attr(node, "scale", Vector3(1, 1, 1)),
type=node.tag,
)
joint_dict[name] = joint
self.joint_dict[name] = joint
for child in node.iterchildren():
cls._parse_node_children(joint_dict, child, weakref.ref(joint))
@classmethod
def from_xml(cls, node: etree.ElementBase):
joint_dict = {}
cls._parse_node_children(joint_dict, node, None)
return cls(joint_dict)
def get_required_joints(self, joint_names: Collection[str]) -> Set[str]:
"""Get all joints required to have a chain from all joints up to the root joint"""
required = set(joint_names)
for joint_name in joint_names:
joint_node = self.joint_dict.get(joint_name)
while joint_node:
required.add(joint_node.name)
if not joint_node.parent:
break
joint_node = joint_node.parent()
return required
self._parse_node_children(child, weakref.ref(joint))
def load_avatar_skeleton() -> Skeleton:
skel_path = get_resource_filename("lib/base/data/avatar_skeleton.xml")
with open(skel_path, 'r') as f:
skel_root = etree.fromstring(f.read())
return Skeleton.from_xml(skel_root.getchildren()[0])
def _get_vec_attr(node, attr_name, default) -> Vector3:
def _get_vec_attr(node, attr_name: str, default: Vector3) -> Vector3:
attr_val = node.get(attr_name, None)
if not attr_val:
return default
return Vector3(*(float(x) for x in attr_val.split(" ") if x))
def load_avatar_skeleton() -> Skeleton:
skel_path = get_resource_filename("lib/base/data/avatar_skeleton.xml")
with open(skel_path, 'r') as f:
skel_root = etree.fromstring(f.read())
return Skeleton(skel_root.getchildren()[0])
AVATAR_SKELETON = load_avatar_skeleton()

View File

@@ -78,7 +78,7 @@ class TemplateDataPacker:
MsgType.MVT_S8: _make_struct_spec('b'),
MsgType.MVT_U8: _make_struct_spec('B'),
MsgType.MVT_BOOL: _make_struct_spec('B'),
MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: x.bytes),
MsgType.MVT_LLUUID: (lambda x: UUID(bytes=bytes(x)), lambda x: UUID(x).bytes),
MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
MsgType.MVT_IP_PORT: _make_struct_spec('!H'),
MsgType.MVT_U16: _make_struct_spec('<H'),

View File

@@ -222,7 +222,7 @@ class Message:
def add_blocks(self, block_list):
# can have a list of blocks if it is multiple or variable
for block in block_list:
if type(block) == list:
if type(block) is list:
for bl in block:
self.add_block(bl)
else:

View File

@@ -126,7 +126,7 @@ class UDPMessageDeserializer:
frequency, num = _parse_msg_num(reader)
current_template = self.template_dict.get_template_by_pair(frequency, num)
if current_template is None:
raise exc.MessageTemplateNotFound("deserializing data")
raise exc.MessageTemplateNotFound("deserializing data", f"{frequency}:{num}")
msg.name = current_template.name
# extra field, see note regarding msg.offset

View File

@@ -46,6 +46,9 @@ class UDPPacket:
return self.dst_addr
return self.src_addr
def __repr__(self):
return f"<{self.__class__.__name__} src_addr={self.src_addr!r} dst_addr={self.dst_addr!r} data={self.data!r}>"
class AbstractUDPTransport(abc.ABC):
__slots__ = ()

View File

@@ -35,12 +35,7 @@ import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as tmpls
class Object(recordclass.datatuple): # type: ignore
__options__ = {
"use_weakref": True,
}
__weakref__: Any
class Object(recordclass.RecordClass, use_weakref=True): # type: ignore
LocalID: Optional[int] = None
State: Optional[int] = None
FullID: Optional[UUID] = None
@@ -199,6 +194,28 @@ class Object(recordclass.datatuple): # type: ignore
del val["Parent"]
return val
@property
def Ancestors(self) -> List[Object]:
obj = self
ancestors = []
while obj.Parent:
obj = obj.Parent
ancestors.append(obj)
return ancestors
@property
def Descendents(self) -> List[Object]:
new_children = [self]
descendents = []
while new_children:
to_check = new_children[:]
new_children.clear()
for obj in to_check:
for child in obj.Children:
new_children.append(child)
descendents.append(child)
return descendents
def handle_to_gridxy(handle: int) -> Tuple[int, int]:
return (handle >> 32) // 256, (handle & 0xFFffFFff) // 256

View File

@@ -1917,7 +1917,7 @@ class IntEnumSubfieldSerializer(AdapterInstanceSubfieldSerializer):
val = super().deserialize(ctx_obj, val, pod=pod)
# Don't pretend we were able to deserialize this if we
# had to fall through to the `int` case.
if pod and type(val) == int:
if pod and type(val) is int:
return UNSERIALIZABLE
return val

View File

@@ -4,14 +4,17 @@ Serialization templates for structures used in LLUDP and HTTP bodies.
import abc
import collections
import copy
import dataclasses
import datetime
import enum
import math
import zlib
from typing import *
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.namevalue import NameValuesSerializer
@@ -1249,7 +1252,7 @@ class TextureEntryCollection:
vals = getattr(self, key)
# Fill give all faces the default value for this key
for te in as_dicts:
te[key] = vals[None]
te[key] = copy.copy(vals[None])
# Walk over the exception cases and replace the default value
for face_nums, val in vals.items():
# Default case already handled
@@ -1258,7 +1261,7 @@ class TextureEntryCollection:
for face_num in face_nums:
if face_num >= num_faces:
raise ValueError(f"Bad value for num_faces? {face_num} >= {num_faces}")
as_dicts[face_num][key] = val
as_dicts[face_num][key] = copy.copy(val)
return [TextureEntry(**x) for x in as_dicts]
@classmethod
@@ -1458,6 +1461,8 @@ class ExtraParamType(IntEnum):
RESERVED = 0x50
MESH = 0x60
EXTENDED_MESH = 0x70
RENDER_MATERIAL = 0x80
REFLECTION_PROBE = 0x90
class ExtendedMeshFlags(IntFlag):
@@ -1480,6 +1485,13 @@ class SculptTypeData:
Mirror: bool = se.bitfield_field(bits=1, adapter=se.BoolAdapter())
class ReflectionProbeFlags(IntFlag):
# use a box influence volume
BOX_VOLUME = 0x1
# render dynamic objects (avatars) into this Reflection Probe
DYNAMIC = 0x2
EXTRA_PARAM_TEMPLATES = {
ExtraParamType.FLEXIBLE: se.Template({
"Tension": se.BitField(se.U8, {"Tension": 6, "Softness1": 2}),
@@ -1511,6 +1523,15 @@ EXTRA_PARAM_TEMPLATES = {
ExtraParamType.EXTENDED_MESH: se.Template({
"Flags": se.IntFlag(ExtendedMeshFlags, se.U32),
}),
ExtraParamType.RENDER_MATERIAL: se.Collection(se.U8, se.Template({
"TEIdx": se.U8,
"TEID": se.UUID,
})),
ExtraParamType.REFLECTION_PROBE: se.Template({
"Ambiance": se.F32,
"ClipDistance": se.F32,
"Flags": se.IntFlag(ReflectionProbeFlags, se.U8),
}),
}
@@ -1856,6 +1877,8 @@ class AvatarPropertiesFlags(IntFlag):
@se.flag_field_serializer("AvatarGroupsReply", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarDataUpdate", "AgentDataData", "GroupPowers")
@se.flag_field_serializer("GroupProfileReply", "GroupData", "PowersMask")
@se.flag_field_serializer("GroupRoleDataReply", "RoleData", "Powers")
class GroupPowerFlags(IntFlag):
MEMBER_INVITE = 1 << 1 # Invite member
MEMBER_EJECT = 1 << 2 # Eject member from group
@@ -1945,6 +1968,15 @@ class GroupPowerFlags(IntFlag):
GROUP_BAN_ACCESS = 1 << 51 # Allows access to ban / un-ban agents from a group.
@se.flag_field_serializer("GrantUserRights", "Rights", "RelatedRights")
@se.flag_field_serializer("ChangeUserRights", "Rights", "RelatedRights")
class UserRelatedRights(IntFlag):
    """See lluserrelations.h for definitions"""
    ONLINE_STATUS = 1  # presumably: other party may see our online status — see lluserrelations.h
    MAP_LOCATION = 1 << 1  # presumably: other party may locate us on the map
    MODIFY_OBJECTS = 1 << 2  # presumably: other party may modify our objects
@se.flag_field_serializer("RequestObjectPropertiesFamily", "ObjectData", "RequestFlags")
@se.flag_field_serializer("ObjectPropertiesFamily", "ObjectData", "RequestFlags")
class ObjectPropertiesFamilyRequestFlags(IntFlag):
@@ -2025,6 +2057,50 @@ class ScriptPermissions(IntFlag):
CHANGE_ENVIRONMENT = 1 << 18
@se.enum_field_serializer("UpdateMuteListEntry", "MuteData", "MuteType")
class MuteType(IntEnum):
    """What kind of entity a mute list entry targets."""
    BY_NAME = 0
    AGENT = 1
    OBJECT = 2
    GROUP = 3
    # Voice, presumably.
    EXTERNAL = 4
@se.flag_field_serializer("UpdateMuteListEntry", "MuteData", "MuteFlags")
class MuteFlags(IntFlag):
    """Flags narrowing which kinds of communication a mute entry silences."""
    # For backwards compatibility (since any mute list entries that were created before the flags existed
    # will have a flags field of 0), some flags are "inverted".
    # Note that it's possible, through flags, to completely disable an entry in the mute list.
    # The code should detect this case and remove the mute list entry instead.
    TEXT_CHAT = 1 << 0
    VOICE_CHAT = 1 << 1
    PARTICLES = 1 << 2
    OBJECT_SOUNDS = 1 << 3

    # NOTE(review): these are `@property`s, so they are only reachable through a
    # member (e.g. `MuteFlags.TEXT_CHAT.ALL`); class-level `MuteFlags.ALL` yields
    # the property descriptor itself. Confirm this is intended.
    @property
    def DEFAULT(self):
        return 0x0

    @property
    def ALL(self):
        return 0xF
class CreationDateAdapter(se.Adapter):
    """Converts between microsecond UNIX timestamps and ISO 8601 datetime strings.

    NOTE(review): both directions use the machine's local timezone for naive
    datetimes — confirm that matches the wire format's expectations.
    """
    def decode(self, val: Any, ctx: Optional[se.ParseContext], pod: bool = False) -> Any:
        # Wire value is microseconds since the epoch.
        seconds = val / 1_000_000
        return datetime.datetime.fromtimestamp(seconds).isoformat()

    def encode(self, val: Any, ctx: Optional[se.ParseContext]) -> Any:
        # Parse the ISO string back out and re-quantize to integer microseconds.
        parsed = datetime.datetime.fromisoformat(val)
        return int(parsed.timestamp() * 1_000_000)
@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
class CreationDateSerializer(se.AdapterSubfieldSerializer):
    """Presents ObjectProperties' CreationDate field as an ISO 8601 string."""
    ADAPTER = CreationDateAdapter(None)
    ORIG_INLINE = True
@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
@classmethod
@@ -2055,3 +2131,69 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
# 15 bit window size, gzip wrapped
deser["navmesh_data"] = zlib.decompress(deser["navmesh_data"], wbits=15 | 32)
return deser
# Beta puppetry stuff, subject to change!
class PuppetryEventMask(enum.IntFlag):
    """Which optional fields are present in a puppetry joint event.

    Beta puppetry stuff, subject to change! Note the gap: bits 5 and 6 are
    currently unused on the wire.
    """
    POSITION = 0x01
    POSITION_IN_PARENT_FRAME = 0x02
    ROTATION = 0x04
    ROTATION_IN_PARENT_FRAME = 0x08
    SCALE = 0x10
    DISABLE_CONSTRAINT = 0x80
class PuppetryOption(se.OptionalFlagged):
    """A field only present on the wire when `flag_val` is set in the "mask" field."""
    def __init__(self, flag_val, spec):
        super().__init__("mask", se.IntFlag(PuppetryEventMask, se.U8), flag_val, spec)
# (min, max) range to use for puppetry's quantized floats when converting to<->from U16.
# Name suggests it mirrors a pelvis-offset constant in the viewer — TODO confirm.
LL_PELVIS_OFFSET_RANGE = (-5.0, 5.0)
@dataclasses.dataclass
class PuppetryJointData:
    """A single joint's update within a puppetry event; optional fields gated by `mask`."""
    # Where does this number come from? `avatar_skeleton.xml`?
    joint_id: int = se.dataclass_field(se.S16)
    # Determines which fields will follow
    mask: PuppetryEventMask = se.dataclass_field(se.IntFlag(PuppetryEventMask, se.U8))
    # Each of the following is only present when its bit is set in `mask`.
    rotation: Optional[Quaternion] = se.dataclass_field(
        # These are very odd scales for a quantized quaternion, but that's what they are.
        PuppetryOption(PuppetryEventMask.ROTATION, se.PackedQuat(se.Vector3U16(*LL_PELVIS_OFFSET_RANGE))),
    )
    position: Optional[Vector3] = se.dataclass_field(
        PuppetryOption(PuppetryEventMask.POSITION, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
    )
    scale: Optional[Vector3] = se.dataclass_field(
        PuppetryOption(PuppetryEventMask.SCALE, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
    )
@dataclasses.dataclass
class PuppetryEventData:
    """A timestamped batch of puppetry joint updates."""
    time: int = se.dataclass_field(se.S32)
    # Must be set manually due to below issue
    num_joints: int = se.dataclass_field(se.U16)
    # This field is packed in the least helpful way possible. The length field
    # is in between the collection count and the collection data, but the length
    # field essentially only tells you how many bytes until the end of the buffer
    # proper, which you already know from msgsystem. Why is this here?
    joints: List[PuppetryJointData] = se.dataclass_field(se.TypedByteArray(
        se.U32,
        # Just treat contents as a greedy collection, tries to keep reading until EOF
        se.Collection(None, se.Dataclass(PuppetryJointData)),
    ))
@se.subfield_serializer("AgentAnimation", "PhysicalAvatarEventList", "TypeData")
@se.subfield_serializer("AvatarAnimation", "PhysicalAvatarEventList", "TypeData")
class PuppetryEventDataSerializer(se.SimpleSubfieldSerializer):
    """Decodes beta puppetry TypeData blobs in (Agent|Avatar)Animation messages."""
    # You can have multiple joint events packed in right after one another, implicitly.
    # They may _or may not_ be split into separate PhysicalAvatarEventList blocks?
    # This doesn't seem to be handled specifically in the decoder, is this a
    # serialization bug in the viewer?
    TEMPLATE = se.Collection(None, se.Dataclass(PuppetryEventData))
    EMPTY_IS_NONE = True

View File

@@ -297,7 +297,8 @@ class ClientWorldObjectManager:
self._rebuild_avatar_objects()
self._region_managers.clear()
def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType):
def _update_existing_object(self, obj: Object, new_properties: dict, update_type: ObjectUpdateType,
msg: Optional[Message]):
old_parent_id = obj.ParentID
new_parent_id = new_properties.get("ParentID", obj.ParentID)
old_local_id = obj.LocalID
@@ -340,23 +341,23 @@ class ClientWorldObjectManager:
LOG.warning(f"Tried to move object {obj!r} to unknown region {new_region_handle}")
if obj.PCode == PCode.AVATAR:
# `Avatar` instances are handled separately. Update all Avatar objects so
# we can deal with the RegionHandle change.
# `Avatar` instances are handled separately. Update all Avatar objects,
# so we can deal with the RegionHandle change.
self._rebuild_avatar_objects()
elif new_parent_id != old_parent_id:
# Parent ID changed, but we're in the same region
new_region_state.handle_object_reparented(obj, old_parent_id=old_parent_id)
if actually_updated_props and new_region_state is not None:
self._run_object_update_hooks(obj, actually_updated_props, update_type)
self._run_object_update_hooks(obj, actually_updated_props, update_type, msg)
def _track_new_object(self, region: RegionObjectsState, obj: Object):
def _track_new_object(self, region: RegionObjectsState, obj: Object, msg: Message):
region.track_object(obj)
self._fullid_lookup[obj.FullID] = obj
if obj.PCode == PCode.AVATAR:
self._avatar_objects[obj.FullID] = obj
self._rebuild_avatar_objects()
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE)
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE, msg)
def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
obj = region_state.lookup_localid(local_id)
@@ -408,11 +409,11 @@ class ClientWorldObjectManager:
# our view of the world then we want to move it to this region.
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state is None:
continue
self._track_new_object(region_state, Object(**object_data))
self._track_new_object(region_state, Object(**object_data), msg)
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
def _handle_terse_object_update(self, msg: Message):
@@ -432,7 +433,7 @@ class ClientWorldObjectManager:
# Need the Object as context because decoding state requires PCode.
state_deserializer = ObjectStateSerializer.deserialize
object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state:
region_state.missing_locals.add(object_data["LocalID"])
@@ -460,7 +461,7 @@ class ClientWorldObjectManager:
self._update_existing_object(obj, {
"UpdateFlags": update_flags,
"RegionHandle": handle,
}, ObjectUpdateType.OBJECT_UPDATE)
}, ObjectUpdateType.OBJECT_UPDATE, msg)
continue
cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
@@ -468,7 +469,7 @@ class ClientWorldObjectManager:
cached_obj = normalize_object_update_compressed_data(cached_obj_data)
cached_obj["UpdateFlags"] = update_flags
cached_obj["RegionHandle"] = handle
self._track_new_object(region_state, Object(**cached_obj))
self._track_new_object(region_state, Object(**cached_obj), msg)
continue
# Don't know about it and wasn't cached.
@@ -499,11 +500,11 @@ class ClientWorldObjectManager:
LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE)
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
else:
if region_state is None:
continue
self._track_new_object(region_state, Object(**object_data))
self._track_new_object(region_state, Object(**object_data), msg)
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
def _handle_object_properties_generic(self, packet: Message):
@@ -516,7 +517,7 @@ class ClientWorldObjectManager:
obj = self.lookup_fullid(block["ObjectID"])
if obj:
seen_locals.append(obj.LocalID)
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES)
self._update_existing_object(obj, object_properties, ObjectUpdateType.PROPERTIES, packet)
else:
LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}")
packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
@@ -563,11 +564,16 @@ class ClientWorldObjectManager:
LOG.debug(f"Received ObjectCost for unknown {object_id}")
continue
obj.ObjectCosts.update(object_costs)
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS)
self._run_object_update_hooks(obj, {"ObjectCosts"}, ObjectUpdateType.COSTS, None)
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
msg: Optional[Message]):
region_state = self._get_region_state(obj.RegionHandle)
region_state.resolve_futures(obj, update_type)
if region_state:
region_state.resolve_futures(obj, update_type)
else:
LOG.warning(f"{obj} not tied to a region state")
if obj.PCode == PCode.AVATAR and "NameValue" in updated_props:
if obj.NameValue:
self.name_cache.update(obj.FullID, obj.NameValue.to_dict())
@@ -756,7 +762,8 @@ class RegionObjectsState:
def handle_object_reparented(self, obj: Object, old_parent_id: int):
"""Recreate any links to ancestor Objects for obj due to parent changes"""
self._unparent_object(obj, old_parent_id)
self._parent_object(obj, insert_at_head=True)
# Avatars get sent to the _end_ of the child list when reparented
self._parent_object(obj, insert_at_head=obj.PCode != PCode.AVATAR)
def collect_orphans(self, parent_localid: int) -> Sequence[int]:
"""Take ownership of any orphan IDs belonging to parent_localid"""

View File

@@ -1,7 +1,6 @@
from __future__ import annotations
from typing import *
import abc
import copy
import dataclasses
@@ -9,6 +8,8 @@ import multiprocessing
import pickle
import warnings
import outleap
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.objects import Object
@@ -116,16 +117,14 @@ class MetaBaseAddon(abc.ABCMeta):
Won't work as you expect!
"""
def __setattr__(self, key: str, value):
# TODO: Keep track of AddonProperties in __new__ or something?
try:
existing = object.__getattribute__(self, key)
if existing and isinstance(existing, BaseAddonProperty):
existing.__set__(self, value)
return
except AttributeError:
# If the attribute doesn't exist then it's fine to use the base setattr.
super().__setattr__(key, value)
return
if existing and isinstance(existing, BaseAddonProperty):
existing.__set__(self, value)
return
pass
super().__setattr__(key, value)
@@ -173,7 +172,7 @@ class BaseAddon(metaclass=MetaBaseAddon):
pass
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
pass
def handle_object_killed(self, session: Session, region: ProxiedRegion, obj: Object):
@@ -196,6 +195,9 @@ class BaseAddon(metaclass=MetaBaseAddon):
session: Optional[Session], region: Optional[ProxiedRegion]):
pass
async def handle_leap_client_added(self, session_manager: SessionManager, leap_client: outleap.LEAPClient):
pass
_T = TypeVar("_T")
_U = TypeVar("_U", "Session", "SessionManager")
@@ -209,13 +211,17 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
session_manager.addon_ctx dict, without any namespacing. Can be accessed either
through `AddonClass.property_name` or `addon_instance.property_name`.
"""
__slots__ = ("name", "default")
__slots__ = ("name", "default", "_owner")
def __init__(self, default=dataclasses.MISSING):
self.default = default
self._owner = None
def __set_name__(self, owner, name: str):
self.name = name
# Keep track of which addon "owns" this property so that we can shove
# the data in a bucket specific to that addon name.
self._owner = owner
def _make_default(self) -> _T:
if self.default is not dataclasses.MISSING:
@@ -233,18 +239,20 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
if ctx_obj is None:
raise AttributeError(
f"{self.__class__} {self.name} accessed outside proper context")
addon_state = ctx_obj.addon_ctx[self._owner.__name__]
# Set a default if we have one, otherwise let the keyerror happen.
# Maybe we should do this at addon initialization instead of on get.
if self.name not in ctx_obj.addon_ctx:
if self.name not in addon_state:
default = self._make_default()
if default is not dataclasses.MISSING:
ctx_obj.addon_ctx[self.name] = default
addon_state[self.name] = default
else:
raise AttributeError(f"{self.name} is not set")
return ctx_obj.addon_ctx[self.name]
return addon_state[self.name]
def __set__(self, _obj, value: _T) -> None:
self._get_context_obj().addon_ctx[self.name] = value
addon_state = self._get_context_obj().addon_ctx[self._owner.__name__]
addon_state[self.name] = value
class SessionProperty(BaseAddonProperty[_T, "Session"]):

View File

@@ -15,6 +15,8 @@ import time
from types import ModuleType
from typing import *
import outleap
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.base.message.message import Message
@@ -172,7 +174,10 @@ class AddonManager:
def load_addon_from_path(cls, path, reload=False, raise_exceptions=True):
path = pathlib.Path(path).absolute()
mod_name = "hippolyzer.user_addon_%s" % path.stem
cls.BASE_ADDON_SPECS.append(importlib.util.spec_from_file_location(mod_name, path))
spec = importlib.util.spec_from_file_location(mod_name, path)
if not spec:
raise ValueError(f"Unable to load {path}")
cls.BASE_ADDON_SPECS.append(spec)
addon_dir = os.path.realpath(pathlib.Path(path).parent.absolute())
if addon_dir not in sys.path:
@@ -343,11 +348,11 @@ class AddonManager:
cls.SCHEDULER.kill_matching_tasks(lifetime_mask=TaskLifeScope.ADDON, creator=addon)
@classmethod
def _call_all_addon_hooks(cls, hook_name, *args, **kwargs):
def _call_all_addon_hooks(cls, hook_name, *args, call_async=False, **kwargs):
for module in cls.FRESH_ADDON_MODULES.values():
if not module:
continue
ret = cls._call_module_hooks(module, hook_name, *args, **kwargs)
ret = cls._call_module_hooks(module, hook_name, *args, call_async=call_async, **kwargs)
if ret:
return ret
@@ -378,15 +383,15 @@ class AddonManager:
return commands
@classmethod
def _call_module_hooks(cls, module, hook_name, *args, **kwargs):
def _call_module_hooks(cls, module, hook_name, *args, call_async=False, **kwargs):
for addon in cls._get_module_addons(module):
ret = cls._try_call_hook(addon, hook_name, *args, **kwargs)
ret = cls._try_call_hook(addon, hook_name, *args, call_async=call_async, **kwargs)
if ret:
return ret
return cls._try_call_hook(module, hook_name, *args, **kwargs)
return cls._try_call_hook(module, hook_name, *args, call_async=call_async, **kwargs)
@classmethod
def _try_call_hook(cls, addon, hook_name, *args, **kwargs):
def _try_call_hook(cls, addon, hook_name, *args, call_async=False, **kwargs):
if cls._SUBPROCESS:
return
@@ -396,6 +401,20 @@ class AddonManager:
if not hook_func:
return
try:
if call_async:
old_hook_func = hook_func
# Wrapper so we can invoke an async hook synchronously.
def _wrapper(*w_args, **w_kwargs):
cls.SCHEDULER.schedule_task(
old_hook_func(*w_args, **w_kwargs),
scope=TaskLifeScope.ADDON,
creator=addon,
)
# Fall through to any other handlers as well,
# async handlers don't chain.
return None
hook_func = _wrapper
return hook_func(*args, **kwargs)
except:
logging.exception("Exploded in %r's %s hook" % (addon, hook_name))
@@ -545,9 +564,9 @@ class AddonManager:
@classmethod
def handle_object_updated(cls, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
with addon_ctx.push(session, region):
return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props)
return cls._call_all_addon_hooks("handle_object_updated", session, region, obj, updated_props, msg)
@classmethod
def handle_object_killed(cls, session: Session, region: ProxiedRegion, obj: Object):
@@ -577,3 +596,7 @@ class AddonManager:
with addon_ctx.push(session, region):
return cls._call_all_addon_hooks("handle_proxied_packet", session_manager,
packet, session, region)
@classmethod
def handle_leap_client_added(cls, session_manager: SessionManager, leap_client: outleap.LEAPClient):
return cls._call_all_addon_hooks("handle_leap_client_added", session_manager, leap_client, call_async=True)

View File

@@ -224,6 +224,11 @@ class MITMProxyEventManager:
status = flow.response.status_code
cap_data: Optional[CapData] = flow.metadata["cap_data"]
if not cap_data:
# Make sure there's always cap data attached to the flow, even if it's
# empty. Some consumers expect it to always be there, when it might not
# be if the proxy barfed while handling the request.
cap_data = flow.metadata["cap_data"] = CapData()
if status == 200 and cap_data and cap_data.cap_name == "FirestormBridge":
# Fake FirestormBridge cap based on a bridge-like response coming from

View File

@@ -236,7 +236,7 @@ class SLMITMMaster(mitmproxy.master.Master):
)
def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: no cover
def create_http_proxy(host, port, flow_context: HTTPFlowContext, ssl_insecure=False): # pragma: no cover
opts = mitmproxy.options.Options()
master = SLMITMMaster(flow_context, opts)
@@ -251,10 +251,6 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: n
ssl_verify_upstream_trusted_ca=ca_bundle,
listen_host=host,
listen_port=port,
ssl_insecure=ssl_insecure,
)
return master
def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext): # pragma: no cover
master = create_proxy_master(bind_host, port, flow_context)
return master

View File

@@ -1,4 +1,5 @@
import datetime as dt
import logging
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.client.inventory_manager import InventoryManager
@@ -25,4 +26,7 @@ class ProxyInventoryManager(InventoryManager):
newest_cache = inv_cache_path
if newest_cache:
self.load_cache(newest_cache)
try:
self.load_cache(newest_cache)
except:
logging.exception("Failed to load invcache")

View File

@@ -133,8 +133,9 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
region_mgr.queued_cache_misses |= missing_locals
region_mgr.request_missed_cached_objects_soon()
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType):
super()._run_object_update_hooks(obj, updated_props, update_type)
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: ObjectUpdateType,
msg: Optional[Message]):
super()._run_object_update_hooks(obj, updated_props, update_type, msg)
region = self._session.region_by_handle(obj.RegionHandle)
if self._settings.ALLOW_AUTO_REQUEST_OBJECTS:
if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:
@@ -145,7 +146,7 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
# have no way to get a sitting agent's true region location, even if it's ourselves.
region.objects.queued_cache_misses.add(obj.ParentID)
region.objects.request_missed_cached_objects_soon()
AddonManager.handle_object_updated(self._session, region, obj, updated_props)
AddonManager.handle_object_updated(self._session, region, obj, updated_props, msg)
def _run_kill_object_hooks(self, obj: Object):
super()._run_kill_object_hooks(obj)

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
import collections
import dataclasses
import datetime
import functools
@@ -43,7 +44,8 @@ class Session(BaseClientSession):
self.circuit_code = circuit_code
self.global_caps = {}
# Bag of arbitrary data addons can use to persist data across addon reloads
self.addon_ctx = {}
# Each addon name gets its own separate dict within this dict
self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
self.session_manager: SessionManager = session_manager or None
self.selected: SelectionModel = SelectionModel()
self.regions: List[ProxiedRegion] = []
@@ -188,7 +190,7 @@ class SessionManager:
self.flow_context = HTTPFlowContext()
self.asset_repo = HTTPAssetRepo()
self.message_logger: Optional[BaseMessageLogger] = None
self.addon_ctx: Dict[str, Any] = {}
self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
self.name_cache = ProxyNameCache()
self.pending_leap_clients: List[LEAPClient] = []
@@ -199,6 +201,15 @@ class SessionManager:
session.http_message_handler,
)
self.sessions.append(session)
# TODO: less crap way of tying a LEAP client to a session
while self.pending_leap_clients:
leap_client = self.pending_leap_clients.pop(-1)
# Client may have gone bad since it connected
if not leap_client.connected:
continue
logging.info("Assigned LEAP client to session")
session.leap_client = leap_client
break
logging.info("Created %r" % session)
return session
@@ -207,15 +218,6 @@ class SessionManager:
if session.pending and session.id == session_id:
logging.info("Claimed %r" % session)
session.pending = False
# TODO: less crap way of tying a LEAP client to a session
while self.pending_leap_clients:
leap_client = self.pending_leap_clients.pop(-1)
# Client may have gone bad since it connected
if not leap_client.connected:
continue
logging.info("Assigned LEAP client to session")
session.leap_client = leap_client
break
return session
return None
@@ -235,6 +237,7 @@ class SessionManager:
async def leap_client_connected(self, leap_client: LEAPClient):
self.pending_leap_clients.append(leap_client)
AddonManager.handle_leap_client_added(self, leap_client)
@dataclasses.dataclass

View File

@@ -35,3 +35,4 @@ class ProxySettings(Settings):
AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
FILTERS: Dict[str, str] = SettingDescriptor(dict)
SSL_INSECURE: bool = SettingDescriptor(False)

View File

@@ -83,7 +83,7 @@ class SOCKS5Server:
try:
# UDP Associate
if cmd == 3:
loop = asyncio.get_running_loop()
loop = asyncio.get_event_loop_policy().get_event_loop()
transport, protocol = await loop.create_datagram_endpoint(
self._udp_protocol_creator(writer.get_extra_info("peername")),
local_addr=('0.0.0.0', 0))

View File

@@ -65,7 +65,7 @@ class TaskScheduler:
task.cancel()
try:
event_loop = asyncio.get_running_loop()
event_loop = asyncio.get_event_loop_policy().get_event_loop()
await_all = asyncio.gather(*(task for task_data, task in self.tasks))
event_loop.run_until_complete(await_all)
except RuntimeError:

View File

@@ -139,7 +139,7 @@ class ViewerObjectCache:
return RegionViewerObjectCache.from_file(objects_file)
class ViewerObjectCacheEntry(recordclass.datatuple): # type: ignore
class ViewerObjectCacheEntry(recordclass.dataobject): # type: ignore
local_id: int
crc: int
data: bytes

View File

@@ -1,4 +1,4 @@
aiohttp==3.8.1
aiohttp==3.8.3
aiosignal==1.2.0
appdirs==1.4.4
Arpeggio==1.10.2
@@ -7,14 +7,14 @@ async-timeout==4.0.1
attrs==21.2.0
blinker==1.4
Brotli==1.0.9
certifi==2021.10.8
certifi==2022.12.7
cffi==1.15.0
charset-normalizer==2.0.9
click==8.0.3
cryptography==36.0.2
defusedxml==0.7.1
Flask==2.0.2
frozenlist==1.2.0
frozenlist==1.3.3
gltflib==1.0.13
Glymur==0.9.6
h11==0.12.0
@@ -29,12 +29,12 @@ kaitaistruct==0.9
lazy-object-proxy==1.6.0
ldap3==2.9.1
llsd~=1.0.0
lxml==4.6.4
lxml==4.9.2
MarkupSafe==2.0.1
mitmproxy==8.0.0
msgpack==1.0.3
multidict==5.2.0
numpy==1.21.4
numpy==1.24.2
outleap~=0.4.1
parso==0.8.3
passlib==1.7.4
@@ -49,13 +49,13 @@ Pygments==2.10.0
pyOpenSSL==22.0.0
pyparsing==2.4.7
pyperclip==1.8.2
PySide6==6.2.2
PySide6-Essentials==6.4.2
qasync==0.22.0
recordclass==0.14.3
recordclass==0.18.2
requests==2.26.0
ruamel.yaml==0.17.16
ruamel.yaml.clib==0.2.6
shiboken6==6.2.2
ruamel.yaml==0.17.21
ruamel.yaml.clib==0.2.7
shiboken6==6.4.2
six==1.16.0
sortedcontainers==2.4.0
tornado==6.1
@@ -66,5 +66,5 @@ urwid==2.1.2
wcwidth==0.2.5
Werkzeug==2.0.2
wsproto==1.0.0
yarl==1.7.2
zstandard==0.15.2
yarl==1.8.2
zstandard<0.18.0

View File

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
version = '0.12.2'
version = '0.13.4'
with open(path.join(here, 'README.md')) as readme_fh:
readme = readme_fh.read()
@@ -45,6 +45,7 @@ setup(
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Software Development :: Libraries :: Python Modules",
@@ -85,20 +86,22 @@ setup(
'outleap<1.0',
'defusedxml',
'aiohttp<4.0.0',
'recordclass<0.15',
# Newer recordclasses break!
'recordclass>0.15,<0.18.3',
'lazy-object-proxy',
'arpeggio',
# requests breaks with newer idna
'idna<3,>=2.5',
# 7.x will be a major change.
'mitmproxy>=8.0.0,<8.1',
'Werkzeug<3.0',
# For REPLs
'ptpython<4.0',
# JP2 codec
'Glymur<0.9.7',
'numpy<2.0',
# These could be in extras_require if you don't want a GUI.
'pyside6',
'pyside6-essentials',
'qasync',
# Needed for mesh format conversion tooling
'pycollada',

View File

@@ -3,6 +3,7 @@ import setuptools # noqa
import os
import shutil
from distutils.core import Command
from importlib.metadata import version
from pathlib import Path
from cx_Freeze import setup, Executable
@@ -113,7 +114,7 @@ executables = [
setup(
name="hippolyzer_gui",
version="0.12.2",
version=version("hippolyzer"),
description="Hippolyzer GUI",
options=options,
executables=executables,

View File

@@ -89,7 +89,7 @@ class _MutableMultiDictTests:
d = create_instance()
s = pickle.dumps(d, protocol)
ud = pickle.loads(s)
assert type(ud) == type(d)
assert type(ud) is type(d)
assert ud == d
alternative = pickle.dumps(create_instance("werkzeug"), protocol)
assert pickle.loads(alternative) == d

View File

@@ -0,0 +1,32 @@
import unittest
import numpy as np
from hippolyzer.lib.base.mesh_skeleton import load_avatar_skeleton
class TestSkeleton(unittest.TestCase):
    """Smoke tests for the avatar skeleton loaded by `load_avatar_skeleton()`."""
    @classmethod
    def setUpClass(cls) -> None:
        # The skeleton is treated as read-only here, so load it once for all tests.
        cls.skeleton = load_avatar_skeleton()

    def test_get_joint(self):
        # Joints are addressable by name, and link back to their owning skeleton.
        node = self.skeleton["mNeck"]
        self.assertEqual("mNeck", node.name)
        self.assertEqual(self.skeleton, node.skeleton())

    def test_get_joint_index(self):
        # Expected indices presumably follow avatar_skeleton.xml ordering — TODO confirm.
        self.assertEqual(7, self.skeleton["mNeck"].index)
        self.assertEqual(113, self.skeleton["mKneeLeft"].index)

    def test_get_joint_parent(self):
        self.assertEqual("mChest", self.skeleton["mNeck"].parent().name)

    def test_get_joint_matrix(self):
        # 4x4 affine transform with the translation in the last column.
        expected_mat = np.array([
            [1., 0., 0., -0.01],
            [0., 1., 0., 0.],
            [0., 0., 1., 0.251],
            [0., 0., 0., 1.]
        ])
        np.testing.assert_equal(expected_mat, self.skeleton["mNeck"].matrix)

View File

@@ -88,12 +88,12 @@ class AddonIntegrationTests(BaseProxyTest):
self._setup_default_circuit()
self._fake_command("foobar baz")
await self._wait_drained()
self.assertEqual(self.session.addon_ctx["bazquux"], "baz")
self.assertEqual(self.session.addon_ctx["MockAddon"]["bazquux"], "baz")
# In session context these should be equivalent
with addon_ctx.push(new_session=self.session):
self.assertEqual(self.session.addon_ctx["bazquux"], self.addon.bazquux)
self.assertEqual(self.session.addon_ctx["another"], "baz")
self.assertEqual(self.session.addon_ctx["MockAddon"]["bazquux"], self.addon.bazquux)
self.assertEqual(self.session.addon_ctx["MockAddon"]["another"], "baz")
# Outside session context it should raise
with self.assertRaises(AttributeError):
@@ -104,7 +104,7 @@ class AddonIntegrationTests(BaseProxyTest):
self.session.addon_ctx.clear()
with addon_ctx.push(new_session=self.session):
# This has no default so should fail
# This has no default so it should fail
with self.assertRaises(AttributeError):
_something = self.addon.bazquux
# This has a default
@@ -144,9 +144,9 @@ class AddonIntegrationTests(BaseProxyTest):
AddonManager.load_addon_from_path(str(self.parent_path), reload=True)
# Wait for the init hooks to run
await asyncio.sleep(0.001)
self.assertFalse("quux" in self.session_manager.addon_ctx)
self.assertFalse("quux" in self.session_manager.addon_ctx["ParentAddon"])
parent_addon_mod = AddonManager.FRESH_ADDON_MODULES['hippolyzer.user_addon_parent_addon']
self.assertEqual(0, parent_addon_mod.ParentAddon.quux)
self.assertEqual(0, self.session_manager.addon_ctx["quux"])
self.assertEqual(0, self.session_manager.addon_ctx["ParentAddon"]["quux"])
parent_addon_mod.ParentAddon.quux = 1
self.assertEqual(1, self.session_manager.addon_ctx["quux"])
self.assertEqual(1, self.session_manager.addon_ctx["ParentAddon"]["quux"])

View File

@@ -36,7 +36,7 @@ class MockAddon(BaseAddon):
return True
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str]):
obj: Object, updated_props: Set[str], msg: Optional[Message]):
self.events.append(("object_update", session.id, region.circuit_addr, obj.LocalID, updated_props))

View File

@@ -48,7 +48,7 @@ class ObjectTrackingAddon(BaseAddon):
super().__init__()
self.events = []
def handle_object_updated(self, session, region, obj: Object, updated_props: Set[str]):
def handle_object_updated(self, session, region, obj: Object, updated_props: Set[str], msg: Optional[Message]):
self.events.append(("update", obj, updated_props))
def handle_object_killed(self, session, region, obj: Object):