Compare commits
31 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b2f0de2db5 | |
| | 0b0e031091 | |
| | 4eeac738dc | |
| | d9416363b3 | |
| | 5906140921 | |
| | 58932e585e | |
| | b9f8ce0da2 | |
| | 67aa5e6bcd | |
| | 2a05529ceb | |
| | a97aa88cc9 | |
| | febc0793f2 | |
| | 141eb3afcd | |
| | 517888b1fa | |
| | 376b100ed9 | |
| | 07fbec47e1 | |
| | 7836527305 | |
| | 21b18b7a52 | |
| | 28b09144f2 | |
| | 1e13fede82 | |
| | 1bfb719f08 | |
| | e5b63f7550 | |
| | 91328ac448 | |
| | 46dbacd475 | |
| | 187742c20a | |
| | 5eae956750 | |
| | 37e8f8a20e | |
| | b3125f3231 | |
| | 46fed98d6a | |
| | 3b5938cf5c | |
| | c7aeb03ea4 | |
| | ab1bd16b5c | |
.github/workflows/pytest.yml (vendored, 7 changes)
```diff
@@ -1,6 +1,6 @@
 name: Run Python Tests

-on: [push]
+on: [push, pull_request]

 jobs:
   build:
@@ -23,6 +23,9 @@ jobs:
         python -m pip install --upgrade pip
         pip install -r requirements.txt
         pip install -r requirements-test.txt
+    - name: Run Flake8
+      run: |
+        flake8 .
     - name: Test with pytest
       # Tests are intentionally covered to detect broken tests.
       run: |
@@ -39,7 +42,5 @@ jobs:
         env_vars: OS,PYTHON
         name: codecov-umbrella
         fail_ci_if_error: false
-      # We don't care if coverage drops
-      continue-on-error: true
         path_to_write_report: ./coverage/codecov_report.txt
         verbose: false
```
README.md (21 changes)
```diff
@@ -327,6 +327,19 @@ This package [includes portions of the Second Life(TM) Viewer Artwork](https://g
 Copyright (C) 2008 Linden Research, Inc. The viewer artwork is licensed under the Creative Commons
 Attribution-Share Alike 3.0 License.

+## Contributing
+
+Ensure that any patches are clean with no unnecessary whitespace or formatting changes, and that you
+add new tests for any added functionality.
+
+## Philosophy
+
+With a few notable exceptions, Hippolyzer focuses mainly on decomposition of data, and doesn't
+provide many high-level abstractions for interpreting or manipulating that data. It's careful
+to only do lossless transforms on data that are just prettier representations of the data sent
+over the wire. Hippolyzer's goal is to help people understand how Second Life actually works,
+automatically employing abstractions that hide how SL works is counter to that goal.
+
 ## For Client Developers

 This section is mostly useful if you're developing a new SL-compatible client from scratch. Clients based
```
```diff
@@ -340,18 +353,20 @@ UDP proxy and an HTTP proxy.
 To have your client's traffic proxied through Hippolyzer the general flow is:

 * Open a TCP connection to Hippolyzer's SOCKS 5 proxy port
-* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS
+* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS TCP
    connections to SL sessions
 * Send a UDP associate command without authentication
 * The proxy will respond with a host / port pair that UDP messages may be sent through
-* At this point you will no longer need to use the TCP connection, but it must be kept
+* At this point you will no longer need to use the TCP connection, but it must be kept
  alive until you want to break the UDP association
 * Whenever you send a UDP packet to a remote host, you'll need to instead send it to the host / port
  from the UDP associate response. A SOCKS 5 header must be prepended to the data indicating the ultimate destination
  of the packet
 * Any received UDP packets will also have a SOCKS 5 header indicating the real source IP and address
 * * When in doubt, check `socks_proxy.py`, `packets.py` and the SOCKS 5 RFC for more info on how to deal with SOCKS.
-* All HTTP requests must be sent through the Hippolyzer's HTTP proxy port.
+* * <https://github.com/SaladDais/WinHippoAutoProxy/blob/master/winhippoautoproxy/socks5udphooker.cpp> is a simple
+   example that wraps around `recvfrom()` and `sendto()` and could be used as a starting point.
+* All HTTP requests must be sent through the Hippolyzer's HTTP proxy port.
 * * You may not need to do any extra plumbing to get this to work if your chosen HTTP client
    respects the `HTTP_PROXY` environment variable.
 * All HTTPS connections will be encrypted with the proxy's TLS key. You'll need to either add it to whatever
```
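The flow described above is plain RFC 1928 SOCKS 5, not a Hippolyzer-specific API. The following is a minimal Python sketch of it, assuming an IPv4 proxy and placeholder host/port/destination values; a real client would reuse one control connection per session and keep it open for the lifetime of the UDP association:

```python
import socket
import struct

PROXY_HOST, PROXY_PORT = "127.0.0.1", 9061  # placeholder: wherever Hippolyzer's SOCKS 5 port is bound

# One TCP control connection per logical user session; keep it open to keep the UDP association alive.
ctrl = socket.create_connection((PROXY_HOST, PROXY_PORT))
ctrl.sendall(b"\x05\x01\x00")            # VER=5, one auth method offered: 0x00 (no authentication)
assert ctrl.recv(2) == b"\x05\x00"       # proxy accepted "no authentication"

# UDP ASSOCIATE (CMD=3); the client address/port may be zeroed if not known yet.
ctrl.sendall(b"\x05\x03\x00\x01" + socket.inet_aton("0.0.0.0") + struct.pack("!H", 0))
reply = ctrl.recv(10)                    # VER, REP, RSV, ATYP=1, BND.ADDR(4), BND.PORT(2)
assert reply[1] == 0                     # REP=0 means success
relay_addr = (socket.inet_ntoa(reply[4:8]), struct.unpack("!H", reply[8:10])[0])

def socks5_udp_wrap(dest_host: str, dest_port: int, payload: bytes) -> bytes:
    """Prepend the SOCKS 5 UDP request header: RSV(2), FRAG(1), ATYP(1), DST.ADDR, DST.PORT."""
    return (b"\x00\x00\x00\x01" + socket.inet_aton(dest_host)
            + struct.pack("!H", dest_port) + payload)

udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Send an SL UDP packet to its real destination *via* the relay address returned by the proxy.
udp.sendto(socks5_udp_wrap("203.0.113.5", 13001, b"...lludp packet bytes..."), relay_addr)

# Replies arrive with the same style of header prepended; strip it to recover the
# real source address and the raw packet.
data, _ = udp.recvfrom(65535)
src = (socket.inet_ntoa(data[4:8]), struct.unpack("!H", data[8:10])[0])
packet = data[10:]
```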
```diff
@@ -9,23 +9,22 @@ class GreetingAddon(BaseAddon):
     @handle_command()
     async def greetings(self, session: Session, region: ProxiedRegion):
         """Greet everyone around you"""
-        agent_obj = region.objects.lookup_fullid(session.agent_id)
-        if not agent_obj:
+        our_avatar = region.objects.lookup_avatar(session.agent_id)
+        if not our_avatar:
             show_message("Don't have an agent object?")

         # Note that this will only have avatars closeish to your camera. The sim sends
         # KillObjects for avatars that get too far away.
-        other_agents = [o for o in region.objects.all_avatars if o.FullID != agent_obj.FullID]
+        other_avatars = [o for o in region.objects.all_avatars if o.FullID != our_avatar.FullID]

-        if not other_agents:
-            show_message("No other agents?")
+        if not other_avatars:
+            show_message("No other avatars?")

-        for other_agent in other_agents:
-            dist = Vector3.dist(agent_obj.Position, other_agent.Position)
+        for other_avatar in other_avatars:
+            dist = Vector3.dist(our_avatar.RegionPosition, other_avatar.RegionPosition)
             if dist >= 19.0:
                 continue
-            nv = other_agent.NameValue.to_dict()
-            send_chat(f"Greetings, {nv['FirstName']} {nv['LastName']}!")
+            if other_avatar.Name is None:
+                continue
+            send_chat(f"Greetings, {other_avatar.Name}!")


 addons = [GreetingAddon()]
```
@@ -23,8 +23,7 @@ import ctypes
|
||||
import secrets
|
||||
from typing import *
|
||||
|
||||
import mitmproxy
|
||||
from mitmproxy.http import HTTPFlow
|
||||
import mitmproxy.http
|
||||
|
||||
from hippolyzer.lib.base import llsd
|
||||
from hippolyzer.lib.base.datatypes import *
|
||||
|
||||
@@ -37,6 +37,22 @@ from hippolyzer.lib.proxy.templates import TextureEntry
|
||||
|
||||
glymur.set_option('lib.num_threads', 4)
|
||||
|
||||
# These should never be replaced, they're only used as aliases to tell the viewer
|
||||
# it should fetch the relevant texture from the appearance service
|
||||
BAKES_ON_MESH_TEXTURE_IDS = {UUID(x) for x in (
|
||||
"5a9f4a74-30f2-821c-b88d-70499d3e7183",
|
||||
"ae2de45c-d252-50b8-5c6e-19f39ce79317",
|
||||
"24daea5f-0539-cfcf-047f-fbc40b2786ba",
|
||||
"52cc6bb6-2ee5-e632-d3ad-50197b1dcb8a",
|
||||
"43529ce8-7faa-ad92-165a-bc4078371687",
|
||||
"09aac1fb-6bce-0bee-7d44-caac6dbb6c63",
|
||||
"ff62763f-d60a-9855-890b-0c96f8f8cd98",
|
||||
"8e915e25-31d1-cc95-ae08-d58a47488251",
|
||||
"9742065b-19b5-297c-858a-29711d539043",
|
||||
"03642e83-2bd1-4eb9-34b4-4c47ed586d2d",
|
||||
"edd51b77-fc10-ce7a-4b3d-011dfc349e4f",
|
||||
)}
|
||||
|
||||
|
||||
def _modify_crc(crc_tweak: int, crc_val: int):
|
||||
return ctypes.c_uint32(crc_val ^ crc_tweak).value
|
||||
@@ -137,6 +153,8 @@ class MonochromeAddon(BaseAddon):
|
||||
# and we don't want to change the canonical view.
|
||||
parsed_te = copy.deepcopy(parsed_te)
|
||||
for k, v in parsed_te.Textures.items():
|
||||
if v in BAKES_ON_MESH_TEXTURE_IDS:
|
||||
continue
|
||||
# Replace textures with their alias to bust the viewer cache
|
||||
parsed_te.Textures[k] = tracker.get_alias_uuid(v)
|
||||
for k, v in parsed_te.Color.items():
|
||||
@@ -166,6 +184,8 @@ class MonochromeAddon(BaseAddon):
|
||||
orig_texture_id = self.mono_tracker.get_orig_uuid(UUID(texture_id))
|
||||
if not orig_texture_id:
|
||||
return
|
||||
if orig_texture_id in BAKES_ON_MESH_TEXTURE_IDS:
|
||||
return
|
||||
|
||||
# The request was for a fake texture ID we created, rewrite the request to
|
||||
# request the real asset and mark the flow for modification once we receive
|
||||
|
||||
@@ -4,10 +4,9 @@ from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.proxy.packets import Direction
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session
|
||||
from hippolyzer.lib.proxy.templates import IMDialogType
|
||||
from hippolyzer.lib.proxy.templates import IMDialogType, XferFilePath
|
||||
|
||||
SUSPICIOUS_PACKETS = {"RequestXfer", "TransferRequest", "UUIDNameRequest",
|
||||
"UUIDGroupNameRequest", "OpenCircuit"}
|
||||
SUSPICIOUS_PACKETS = {"TransferRequest", "UUIDNameRequest", "UUIDGroupNameRequest", "OpenCircuit"}
|
||||
REGULAR_IM_DIALOGS = (IMDialogType.TYPING_STOP, IMDialogType.TYPING_STOP, IMDialogType.NOTHING_SPECIAL)
|
||||
|
||||
|
||||
@@ -29,6 +28,13 @@ class ShieldAddon(BaseAddon):
|
||||
else:
|
||||
expected_id = from_agent ^ session.agent_id
|
||||
msg_block["ID"] = expected_id
|
||||
if message.name == "RequestXfer":
|
||||
xfer_block = message["XferID"][0]
|
||||
# Don't allow Xfers for files, only assets
|
||||
if xfer_block["FilePath"] != XferFilePath.NONE or xfer_block["Filename"].strip(b"\x00"):
|
||||
show_message(f"Blocked suspicious {message.name} packet")
|
||||
region.circuit.drop_message(message)
|
||||
return True
|
||||
|
||||
|
||||
addons = [ShieldAddon()]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""
|
||||
Example of how to request an Xfer
|
||||
"""
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.legacy_inv import InventoryModel
|
||||
from hippolyzer.lib.base.message.message import Block
|
||||
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
|
||||
@@ -8,7 +9,7 @@ from hippolyzer.lib.proxy.commands import handle_command
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session
|
||||
from hippolyzer.lib.proxy.templates import XferFilePath
|
||||
from hippolyzer.lib.proxy.templates import XferFilePath, AssetType, InventoryType, WearableType
|
||||
|
||||
|
||||
class XferExampleAddon(BaseAddon):
|
||||
@@ -60,5 +61,61 @@ class XferExampleAddon(BaseAddon):
|
||||
item_names = [item.name for item in inv_model.items.values()]
|
||||
show_message(item_names)
|
||||
|
||||
@handle_command()
|
||||
async def eyes_for_you(self, session: Session, region: ProxiedRegion):
|
||||
"""Upload an eye bodypart and create an item for it"""
|
||||
asset_data = f"""LLWearable version 22
|
||||
New Eyes
|
||||
|
||||
\tpermissions 0
|
||||
\t{{
|
||||
\t\tbase_mask\t7fffffff
|
||||
\t\towner_mask\t7fffffff
|
||||
\t\tgroup_mask\t00000000
|
||||
\t\teveryone_mask\t00000000
|
||||
\t\tnext_owner_mask\t00082000
|
||||
\t\tcreator_id\t{session.agent_id}
|
||||
\t\towner_id\t{session.agent_id}
|
||||
\t\tlast_owner_id\t00000000-0000-0000-0000-000000000000
|
||||
\t\tgroup_id\t00000000-0000-0000-0000-000000000000
|
||||
\t}}
|
||||
\tsale_info\t0
|
||||
\t{{
|
||||
\t\tsale_type\tnot
|
||||
\t\tsale_price\t10
|
||||
\t}}
|
||||
type 3
|
||||
parameters 2
|
||||
98 0
|
||||
99 0
|
||||
textures 1
|
||||
3 89556747-24cb-43ed-920b-47caed15465f
|
||||
"""
|
||||
# If we want to create an item containing the asset we need to know the transaction id
|
||||
# used to create the asset.
|
||||
transaction_id = UUID.random()
|
||||
await region.xfer_manager.upload_asset(
|
||||
AssetType.BODYPART,
|
||||
data=asset_data,
|
||||
transaction_id=transaction_id
|
||||
)
|
||||
region.circuit.send_message(ProxiedMessage(
|
||||
'CreateInventoryItem',
|
||||
Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
|
||||
Block(
|
||||
'InventoryBlock',
|
||||
CallbackID=0,
|
||||
# Null folder ID will put it in the default folder for the type
|
||||
FolderID=UUID(),
|
||||
TransactionID=transaction_id,
|
||||
NextOwnerMask=0x7fFFffFF,
|
||||
Type=AssetType.BODYPART,
|
||||
InvType=InventoryType.WEARABLE,
|
||||
WearableType=WearableType.EYES,
|
||||
Name='Eyes For You',
|
||||
Description=b''
|
||||
),
|
||||
))
|
||||
|
||||
|
||||
addons = [XferExampleAddon()]
|
||||
|
||||
@@ -136,7 +136,7 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional
|
||||
async_server = loop.run_until_complete(coro)
|
||||
|
||||
event_manager = MITMProxyEventManager(session_manager, flow_context)
|
||||
loop.create_task(event_manager.pump_proxy_events())
|
||||
loop.create_task(event_manager.run())
|
||||
|
||||
addon_paths = sys.argv[1:]
|
||||
addon_paths.extend(extra_addon_paths)
|
||||
@@ -179,7 +179,7 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional
|
||||
|
||||
def _windows_timeout_killer(pid: int):
|
||||
time.sleep(2.0)
|
||||
print(f"Killing hanging event loop")
|
||||
print("Killing hanging event loop")
|
||||
os.kill(pid, 9)
|
||||
|
||||
|
||||
|
||||
```diff
@@ -299,6 +299,32 @@ class StringEnum(str, enum.Enum):
         return self.value


+class IntEnum(enum.IntEnum):
+    # Give a special repr() that'll eval in a REPL.
+    def __repr__(self):
+        return f"{self.__class__.__name__}.{self.name}"
+
+
+class IntFlag(enum.IntFlag):
+    def __repr__(self):
+        # Make an ORed together version of the flags based on the POD version
+        flags = flags_to_pod(type(self), self)
+        flags = " | ".join(
+            (f"{self.__class__.__name__}.{v}" if isinstance(v, str) else str(v))
+            for v in flags
+        )
+        return f"({flags})"
+
+
+def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int], ...]:
+    # Shove any bits not represented in the IntFlag into an int
+    left_over = val
+    for flag in iter(flag_cls):
+        left_over &= ~flag.value
+    extra = (int(left_over),) if left_over else ()
+    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
+
+
 class TaggedUnion(recordclass.datatuple):  # type: ignore
     tag: Any
     value: Any
@@ -306,5 +332,6 @@ class TaggedUnion(recordclass.datatuple):  # type: ignore

 __all__ = [
     "Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
-    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion"
+    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
+    "IntEnum", "IntFlag", "flags_to_pod"
 ]
```
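A standalone sketch of what `flags_to_pod()` and the custom `repr()`s buy: unknown bits are preserved as a trailing int, and flag values round-trip as paste-able expressions. The helper is re-declared here so the snippet runs on its own; `SoundFlags` is trimmed to two members for brevity:

```python
import enum
from typing import Tuple, Type, Union

def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int], ...]:
    # Shove any bits not represented in the IntFlag into an int
    left_over = val
    for flag in iter(flag_cls):
        left_over &= ~flag.value
    extra = (int(left_over),) if left_over else ()
    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra

class SoundFlags(enum.IntFlag):
    LOOP = 1 << 0
    SYNC_MASTER = 1 << 1

print(flags_to_pod(SoundFlags, 0b111))
# ('LOOP', 'SYNC_MASTER', 4)  <- the unknown 1 << 2 bit survives as a plain int
# The IntFlag.__repr__ above joins these with " | ", e.g. "(SoundFlags.LOOP | SoundFlags.SYNC_MASTER | 4)".
```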
@@ -347,7 +347,7 @@ class RegionCapNotAvailable(RegionDomainError):
|
||||
|
||||
class RegionMessageError(RegionDomainError):
|
||||
""" an error raised when a region does not have a connection
|
||||
over which it can send UDP messages
|
||||
over which it can send UDP messages
|
||||
|
||||
accepts a region object as an attribute
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
import functools
|
||||
import pkg_resources
|
||||
import re
|
||||
|
||||
@@ -19,5 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
|
||||
along with this program; if not, write to the Free Software Foundation,
|
||||
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -20,8 +20,6 @@ along with this program; if not, write to the Free Software Foundation,
|
||||
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from hippolyzer.lib.base.helpers import get_resource_filename
|
||||
|
||||
msg_tmpl = open(get_resource_filename("lib/base/message/data/message_template.msg"))
|
||||
|
||||
@@ -34,13 +34,13 @@ VAR_TYPE = Union[TupleCoord, bytes, str, float, int, Tuple, UUID]
|
||||
|
||||
|
||||
class Block:
|
||||
"""
|
||||
"""
|
||||
base representation of a block
|
||||
Block expects a name, and kwargs for variables (var_name = value)
|
||||
"""
|
||||
__slots__ = ('name', 'size', 'vars', 'message_name', '_ser_cache', 'fill_missing',)
|
||||
|
||||
def __init__(self, name, /, fill_missing=False, **kwargs):
|
||||
def __init__(self, name, /, *, fill_missing=False, **kwargs):
|
||||
self.name = name
|
||||
self.size = 0
|
||||
self.message_name: Optional[str] = None
|
||||
@@ -129,24 +129,7 @@ class Block:
|
||||
continue
|
||||
# We have a serializer, include the pretty output in the repr,
|
||||
# using the _ suffix so the builder knows it needs to be serialized.
|
||||
deserialized = self.deserialize_var(key)
|
||||
type_name = type(deserialized).__name__
|
||||
# TODO: replace __repr__ for these in a context manager so nested
|
||||
# Enums / Flags get handled correctly as well. The point of the
|
||||
# pretty repr() is to make messages directly paste-able into code.
|
||||
if isinstance(deserialized, enum.IntEnum):
|
||||
deserialized = f"{type_name}.{deserialized.name}"
|
||||
elif isinstance(deserialized, enum.IntFlag):
|
||||
# Make an ORed together version of the flags based on the POD version
|
||||
flags = se.flags_to_pod(type(deserialized), deserialized)
|
||||
flags = " | ".join(
|
||||
(f"{type_name}.{v}" if isinstance(v, str) else str(v))
|
||||
for v in flags
|
||||
)
|
||||
deserialized = f"({flags})"
|
||||
else:
|
||||
deserialized = repr(deserialized)
|
||||
block_vars[f"{key}_"] = deserialized
|
||||
block_vars[f"{key}_"] = repr(self.deserialize_var(key))
|
||||
else:
|
||||
block_vars = self.vars
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ class MessageTemplateBlock:
|
||||
self.variables: typing.List[MessageTemplateVariable] = []
|
||||
self.variable_map: typing.Dict[str, MessageTemplateVariable] = {}
|
||||
self.name = name
|
||||
self.block_type = 0
|
||||
self.block_type: MsgBlockType = MsgBlockType.MBT_SINGLE
|
||||
self.number = 0
|
||||
|
||||
def add_variable(self, var):
|
||||
|
||||
@@ -19,6 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
|
||||
along with this program; if not, write to the Free Software Foundation,
|
||||
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
from typing import *
|
||||
|
||||
import lazy_object_proxy
|
||||
@@ -253,12 +254,17 @@ class Object(recordclass.datatuple): # type: ignore
|
||||
updated_properties = set()
|
||||
for key, val in properties.items():
|
||||
if hasattr(self, key):
|
||||
old_val = getattr(self, key, val)
|
||||
old_val = getattr(self, key, dataclasses.MISSING)
|
||||
# Don't check equality if we're using a lazy proxy,
|
||||
# parsing is deferred until we actually use it.
|
||||
is_proxy = isinstance(val, lazy_object_proxy.Proxy)
|
||||
if is_proxy or old_val != val:
|
||||
updated_properties.add(key)
|
||||
if isinstance(val, lazy_object_proxy.Proxy):
|
||||
# TODO: be smarter about this. Can we store the raw bytes and
|
||||
# compare those if it's an unparsed object?
|
||||
if old_val is not val:
|
||||
updated_properties.add(key)
|
||||
else:
|
||||
if old_val != val:
|
||||
updated_properties.add(key)
|
||||
setattr(self, key, val)
|
||||
return updated_properties
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import enum
|
||||
import math
|
||||
import struct
|
||||
import types
|
||||
import typing
|
||||
import weakref
|
||||
from io import SEEK_CUR, SEEK_SET, SEEK_END, RawIOBase, BufferedIOBase
|
||||
from typing import *
|
||||
@@ -1092,15 +1091,6 @@ class IntEnum(Adapter):
|
||||
return lambda: self.enum_cls(0)
|
||||
|
||||
|
||||
def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> typing.Tuple[Union[str, int], ...]:
|
||||
# Shove any bits not represented in the IntFlag into an int
|
||||
left_over = val
|
||||
for flag in iter(flag_cls):
|
||||
left_over &= ~flag.value
|
||||
extra = (int(left_over),) if left_over else ()
|
||||
return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
|
||||
|
||||
|
||||
class IntFlag(Adapter):
|
||||
def __init__(self, flag_cls: Type[enum.IntFlag],
|
||||
flag_spec: Optional[SerializablePrimitive] = None):
|
||||
@@ -1121,7 +1111,7 @@ class IntFlag(Adapter):
|
||||
|
||||
def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
|
||||
if pod:
|
||||
return flags_to_pod(self.flag_cls, val)
|
||||
return dtypes.flags_to_pod(self.flag_cls, val)
|
||||
return self.flag_cls(val)
|
||||
|
||||
def default_value(self) -> Any:
|
||||
@@ -1613,7 +1603,7 @@ class BufferedLLSDBinaryParser(llsd.HippoLLSDBinaryParser):
|
||||
byte = self._getc()[0]
|
||||
except IndexError:
|
||||
byte = None
|
||||
raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index+offset, byte))
|
||||
raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index + offset, byte))
|
||||
|
||||
def _getc(self, num=1):
|
||||
return self._buffer.read_bytes(num)
|
||||
@@ -1641,8 +1631,14 @@ def subfield_serializer(msg_name, block_name, var_name):
|
||||
return f
|
||||
|
||||
|
||||
_ENUM_TYPE = TypeVar("_ENUM_TYPE", bound=Type[dtypes.IntEnum])
|
||||
_FLAG_TYPE = TypeVar("_FLAG_TYPE", bound=Type[dtypes.IntFlag])
|
||||
|
||||
|
||||
def enum_field_serializer(msg_name, block_name, var_name):
|
||||
def f(orig_cls):
|
||||
def f(orig_cls: _ENUM_TYPE) -> _ENUM_TYPE:
|
||||
if not issubclass(orig_cls, dtypes.IntEnum):
|
||||
raise ValueError(f"{orig_cls} must be a subclass of Hippolyzer's IntEnum class")
|
||||
wrapper = subfield_serializer(msg_name, block_name, var_name)
|
||||
wrapper(IntEnumSubfieldSerializer(orig_cls))
|
||||
return orig_cls
|
||||
@@ -1650,7 +1646,9 @@ def enum_field_serializer(msg_name, block_name, var_name):
|
||||
|
||||
|
||||
def flag_field_serializer(msg_name, block_name, var_name):
|
||||
def f(orig_cls):
|
||||
def f(orig_cls: _FLAG_TYPE) -> _FLAG_TYPE:
|
||||
if not issubclass(orig_cls, dtypes.IntFlag):
|
||||
raise ValueError(f"{orig_cls!r} must be a subclass of Hippolyzer's IntFlag class")
|
||||
wrapper = subfield_serializer(msg_name, block_name, var_name)
|
||||
wrapper(IntFlagSubfieldSerializer(orig_cls))
|
||||
return orig_cls
|
||||
|
||||
@@ -22,11 +22,11 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
class Settings:
|
||||
def __init__(self, quiet_logging=False, spammy_logging=False, log_tests=True):
|
||||
""" some lovely configurable settings
|
||||
""" some lovely configurable settings
|
||||
|
||||
These are applied application wide, and can be
|
||||
overridden at any time in a specific instance
|
||||
|
||||
|
||||
quiet_logging overrides spammy_logging
|
||||
"""
|
||||
|
||||
|
||||
@@ -33,7 +33,6 @@ def setup_ca_everywhere(mitmproxy_master):
|
||||
pass
|
||||
except PermissionError:
|
||||
pass
|
||||
|
||||
return valid_paths
|
||||
|
||||
|
||||
|
||||
@@ -51,36 +51,39 @@ class MITMProxyEventManager:
|
||||
self.llsd_message_serializer = LLSDMessageSerializer()
|
||||
self._asset_server_proxied = False
|
||||
|
||||
async def pump_proxy_events(self):
|
||||
async def run(self):
|
||||
while not self.shutdown_signal.is_set():
|
||||
try:
|
||||
try:
|
||||
event_type, flow_state = self.from_proxy_queue.get(False)
|
||||
except queue.Empty:
|
||||
await asyncio.sleep(0.001)
|
||||
continue
|
||||
|
||||
flow = HippoHTTPFlow.from_state(flow_state, self.session_manager)
|
||||
try:
|
||||
if event_type == "request":
|
||||
self._handle_request(flow)
|
||||
# A response was injected early in the cycle, we won't get a response
|
||||
# callback from mitmproxy so just log it now.
|
||||
message_logger = self.session_manager.message_logger
|
||||
if message_logger and flow.response_injected:
|
||||
message_logger.log_http_response(flow)
|
||||
elif event_type == "response":
|
||||
self._handle_response(flow)
|
||||
else:
|
||||
raise Exception(f"Unknown mitmproxy event type {event_type}")
|
||||
finally:
|
||||
# If someone has taken this request out of the regular callback flow,
|
||||
# they'll manually send a callback at some later time.
|
||||
if not flow.taken:
|
||||
self.to_proxy_queue.put(("callback", flow.id, flow.get_state()))
|
||||
await self.pump_proxy_event()
|
||||
except:
|
||||
logging.exception("Exploded when handling parsed packets")
|
||||
|
||||
async def pump_proxy_event(self):
|
||||
try:
|
||||
event_type, flow_state = self.from_proxy_queue.get(False)
|
||||
except queue.Empty:
|
||||
await asyncio.sleep(0.001)
|
||||
return
|
||||
|
||||
flow = HippoHTTPFlow.from_state(flow_state, self.session_manager)
|
||||
try:
|
||||
if event_type == "request":
|
||||
self._handle_request(flow)
|
||||
# A response was injected early in the cycle, we won't get a response
|
||||
# callback from mitmproxy so just log it now.
|
||||
message_logger = self.session_manager.message_logger
|
||||
if message_logger and flow.response_injected:
|
||||
message_logger.log_http_response(flow)
|
||||
elif event_type == "response":
|
||||
self._handle_response(flow)
|
||||
else:
|
||||
raise Exception(f"Unknown mitmproxy event type {event_type}")
|
||||
finally:
|
||||
# If someone has taken this request out of the regular callback flow,
|
||||
# they'll manually send a callback at some later time.
|
||||
if not flow.taken:
|
||||
self.to_proxy_queue.put(("callback", flow.id, flow.get_state()))
|
||||
|
||||
def _handle_request(self, flow: HippoHTTPFlow):
|
||||
url = flow.request.url
|
||||
cap_data = self.session_manager.resolve_cap(url)
|
||||
@@ -118,11 +121,14 @@ class MITMProxyEventManager:
|
||||
else:
|
||||
flow.response = mitmproxy.http.HTTPResponse.make(
|
||||
307,
|
||||
b"Redirecting...",
|
||||
# Can't provide explanation in the body because this results in failing Range requests under
|
||||
# mitmproxy that return garbage data. Chances are there's weird interactions
|
||||
# between HTTP/1.x pipelining and range requests under mitmproxy that no other
|
||||
# applications have hit. If that's a concern then Connection: close should be used.
|
||||
b"",
|
||||
{
|
||||
"Content-Type": "text/plain",
|
||||
"Connection": "keep-alive",
|
||||
"Location": redir_url,
|
||||
"Connection": "close",
|
||||
}
|
||||
)
|
||||
elif cap_data and cap_data.asset_server_cap:
|
||||
|
||||
@@ -249,9 +249,9 @@ def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext): # pragma
|
||||
|
||||
def is_asset_server_cap_name(cap_name):
|
||||
return cap_name and (
|
||||
cap_name.startswith("GetMesh") or
|
||||
cap_name.startswith("GetTexture") or
|
||||
cap_name.startswith("ViewerAsset")
|
||||
cap_name.startswith("GetMesh")
|
||||
or cap_name.startswith("GetTexture")
|
||||
or cap_name.startswith("ViewerAsset")
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import logging
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
import typing
|
||||
import uuid
|
||||
from typing import *
|
||||
|
||||
|
||||
@@ -367,8 +367,8 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
|
||||
cap_name = cap_data and cap_data.cap_name
|
||||
base_url = cap_name and cap_data.base_url
|
||||
temporary_cap = cap_data and cap_data.type == CapType.TEMPORARY
|
||||
beautify_url = (beautify and base_url and cap_name and
|
||||
not temporary_cap and self.session and want_request)
|
||||
beautify_url = (beautify and base_url and cap_name
|
||||
and not temporary_cap and self.session and want_request)
|
||||
if want_request:
|
||||
buf.write(message.method)
|
||||
buf.write(" ")
|
||||
|
||||
hippolyzer/lib/proxy/namecache.py (new file, 41 lines)
```diff
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+import dataclasses
+from typing import *
+
+from hippolyzer.lib.base.datatypes import UUID
+
+if TYPE_CHECKING:
+    from hippolyzer.lib.proxy.message import ProxiedMessage
+
+
+@dataclasses.dataclass
+class NameCacheEntry:
+    FirstName: Optional[str] = None
+    LastName: Optional[str] = None
+    DisplayName: Optional[str] = None
+
+
+class NameCache:
+    # TODO: persist this somewhere across runs
+    def __init__(self):
+        self._cache: Dict[UUID, NameCacheEntry] = {}
+
+    def lookup(self, uuid: UUID) -> Optional[NameCacheEntry]:
+        return self._cache.get(uuid)
+
+    def update(self, uuid: UUID, vals: dict):
+        # upsert the cache entry
+        entry = self._cache.get(uuid) or NameCacheEntry()
+        entry.LastName = vals.get("LastName") or entry.LastName
+        entry.FirstName = vals.get("FirstName") or entry.FirstName
+        entry.DisplayName = vals.get("DisplayName") or entry.DisplayName
+        self._cache[uuid] = entry
+
+    def handle_uuid_name_reply(self, msg: ProxiedMessage):
+        """UUID lookup reply handler to be registered by regions"""
+        for block in msg.blocks["UUIDNameBlock"]:
+            self.update(block["ID"], {
+                "FirstName": block["FirstName"],
+                "LastName": block["LastName"],
+            })
```
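A rough usage sketch for the new cache (the agent ID and names below are made up; in the proxy the cache is fed by the per-region `UUIDNameReply` handler and read back when building `Avatar` wrappers):

```python
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.namecache import NameCache

cache = NameCache()
agent_id = UUID("a2e76fcd-9360-4f6d-a924-000000000003")  # illustrative agent ID

# Partial updates are fine; fields merge into any existing entry ("upsert").
cache.update(agent_id, {"FirstName": "Test", "LastName": "Resident"})
cache.update(agent_id, {"DisplayName": "Testy"})

entry = cache.lookup(agent_id)
if entry is not None:
    print(f"{entry.FirstName} {entry.LastName} ({entry.DisplayName})")
# -> Test Resident (Testy)
```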
@@ -2,13 +2,15 @@ from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import enum
|
||||
import logging
|
||||
import math
|
||||
import typing
|
||||
import weakref
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base import llsd
|
||||
from hippolyzer.lib.base.datatypes import UUID, TaggedUnion
|
||||
from hippolyzer.lib.base.datatypes import UUID, TaggedUnion, Vector3
|
||||
from hippolyzer.lib.base.helpers import proxify
|
||||
from hippolyzer.lib.base.message.message import Block
|
||||
from hippolyzer.lib.base.namevalue import NameValueCollection
|
||||
@@ -16,6 +18,7 @@ from hippolyzer.lib.base.objects import Object
|
||||
from hippolyzer.lib.proxy.addons import AddonManager
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.proxy.namecache import NameCache
|
||||
from hippolyzer.lib.proxy.templates import PCode, ObjectStateSerializer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -59,6 +62,47 @@ class OrphanManager:
|
||||
OBJECT_OR_LOCAL = typing.Union[Object, int]
|
||||
|
||||
|
||||
class LocationType(enum.IntEnum):
|
||||
COARSE = enum.auto()
|
||||
EXACT = enum.auto()
|
||||
|
||||
|
||||
class Avatar:
|
||||
"""Wrapper for an avatar known through ObjectUpdate or CoarseLocationUpdate"""
|
||||
def __init__(
|
||||
self,
|
||||
full_id: UUID,
|
||||
obj: Optional["Object"] = None,
|
||||
coarse_location: Optional[Vector3] = None,
|
||||
resolved_name: Optional[str] = None,
|
||||
):
|
||||
self.FullID: UUID = full_id
|
||||
self.Object: Optional["Object"] = obj
|
||||
self._coarse_location = coarse_location
|
||||
self._resolved_name = resolved_name
|
||||
|
||||
@property
|
||||
def LocationType(self) -> "LocationType":
|
||||
if self.Object:
|
||||
return LocationType.EXACT
|
||||
return LocationType.COARSE
|
||||
|
||||
@property
|
||||
def RegionPosition(self) -> Vector3:
|
||||
if self.Object:
|
||||
return self.Object.RegionPosition
|
||||
if self._coarse_location is not None:
|
||||
return self._coarse_location
|
||||
raise ValueError(f"Avatar {self.FullID} has no known position")
|
||||
|
||||
@property
|
||||
def Name(self) -> Optional[str]:
|
||||
if self.Object:
|
||||
nv: Dict[str, str] = self.Object.NameValue.to_dict()
|
||||
return f"{nv['FirstName']} {nv['LastName']}"
|
||||
return self._resolved_name
|
||||
|
||||
|
||||
class ObjectManager:
|
||||
"""
|
||||
Object manager for a specific region
|
||||
@@ -77,15 +121,24 @@ class ObjectManager:
|
||||
def __init__(self, region: ProxiedRegion):
|
||||
self._localid_lookup: typing.Dict[int, Object] = {}
|
||||
self._fullid_lookup: typing.Dict[UUID, int] = {}
|
||||
self._coarse_locations: typing.Dict[UUID, Vector3] = {}
|
||||
# Objects that we've seen references to but don't have data for
|
||||
self.missing_locals = set()
|
||||
self._region: ProxiedRegion = proxify(region)
|
||||
self._orphan_manager = OrphanManager()
|
||||
name_cache = None
|
||||
session = self._region.session()
|
||||
if session:
|
||||
name_cache = session.session_manager.name_cache
|
||||
# Use a local namecache if we don't have a session manager
|
||||
self.name_cache: Optional[NameCache] = name_cache or NameCache()
|
||||
|
||||
message_handler = region.message_handler
|
||||
message_handler.subscribe("ObjectUpdate", self._handle_object_update)
|
||||
message_handler.subscribe("ImprovedTerseObjectUpdate",
|
||||
self._handle_terse_object_update)
|
||||
message_handler.subscribe("CoarseLocationUpdate",
|
||||
self._handle_coarse_location_update)
|
||||
message_handler.subscribe("ObjectUpdateCompressed",
|
||||
self._handle_object_update_compressed)
|
||||
message_handler.subscribe("ObjectUpdateCached",
|
||||
@@ -107,12 +160,27 @@ class ObjectManager:
|
||||
return self._localid_lookup.values()
|
||||
|
||||
@property
|
||||
def all_avatars(self) -> typing.Iterable[Object]:
|
||||
# This is only avatars within draw distance. Might be useful to have another
|
||||
# accessor for UUID + pos that's based on CoarseLocationUpdate.
|
||||
return (o for o in self.all_objects if o.PCode == PCode.AVATAR)
|
||||
def all_avatars(self) -> typing.Iterable[Avatar]:
|
||||
av_objects = {o.FullID: o for o in self.all_objects if o.PCode == PCode.AVATAR}
|
||||
all_ids = set(av_objects.keys()) | self._coarse_locations.keys()
|
||||
|
||||
def lookup_localid(self, localid) -> typing.Optional[Object]:
|
||||
avatars: List[Avatar] = []
|
||||
for av_id in all_ids:
|
||||
av_obj = av_objects.get(av_id)
|
||||
coarse_location = self._coarse_locations.get(av_id)
|
||||
|
||||
resolved_name = None
|
||||
if namecache_entry := self.name_cache.lookup(av_id):
|
||||
resolved_name = f"{namecache_entry.FirstName} {namecache_entry.LastName}"
|
||||
avatars.append(Avatar(
|
||||
full_id=av_id,
|
||||
coarse_location=coarse_location,
|
||||
obj=av_obj,
|
||||
resolved_name=resolved_name,
|
||||
))
|
||||
return avatars
|
||||
|
||||
def lookup_localid(self, localid: int) -> typing.Optional[Object]:
|
||||
return self._localid_lookup.get(localid, None)
|
||||
|
||||
def lookup_fullid(self, fullid: UUID) -> typing.Optional[Object]:
|
||||
@@ -121,6 +189,12 @@ class ObjectManager:
|
||||
return None
|
||||
return self.lookup_localid(local_id)
|
||||
|
||||
def lookup_avatar(self, fullid: UUID) -> typing.Optional[Avatar]:
|
||||
for avatar in self.all_avatars:
|
||||
if avatar.FullID == fullid:
|
||||
return avatar
|
||||
return None
|
||||
|
||||
def _track_object(self, obj: Object, notify: bool = True):
|
||||
self._localid_lookup[obj.LocalID] = obj
|
||||
self._fullid_lookup[obj.FullID] = obj.LocalID
|
||||
@@ -137,7 +211,7 @@ class ObjectManager:
|
||||
self._parent_object(child_obj)
|
||||
|
||||
if notify:
|
||||
self._notify_object_updated(obj, set(obj.to_dict().keys()))
|
||||
self._run_object_update_hooks(obj, set(obj.to_dict().keys()))
|
||||
|
||||
def _untrack_object(self, obj: Object):
|
||||
former_child_ids = obj.ChildIDs[:]
|
||||
@@ -228,7 +302,7 @@ class ObjectManager:
|
||||
# Common case where this may be falsy is if we get an ObjectUpdateCached
|
||||
# that didn't have a changed UpdateFlags field.
|
||||
if actually_updated_props:
|
||||
self._notify_object_updated(obj, actually_updated_props)
|
||||
self._run_object_update_hooks(obj, actually_updated_props)
|
||||
|
||||
def _normalize_object_update(self, block: Block):
|
||||
object_data = {
|
||||
@@ -308,6 +382,24 @@ class ObjectManager:
|
||||
|
||||
packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_coarse_location_update(self, packet: ProxiedMessage):
|
||||
self._coarse_locations.clear()
|
||||
|
||||
coarse_locations: typing.Dict[UUID, Vector3] = {}
|
||||
for agent_block, location_block in zip(packet["AgentData"], packet["Location"]):
|
||||
x, y, z = location_block["X"], location_block["Y"], location_block["Z"]
|
||||
coarse_locations[agent_block["AgentID"]] = Vector3(
|
||||
X=x,
|
||||
Y=y,
|
||||
# The z-axis is multiplied by 4 to obtain true Z location
|
||||
# The z-axis is also limited to 1020m in height
|
||||
# If z == 255 then the true Z is unknown.
|
||||
# http://wiki.secondlife.com/wiki/CoarseLocationUpdate
|
||||
Z=z * 4 if z != 255 else math.inf,
|
||||
)
|
||||
|
||||
self._coarse_locations.update(coarse_locations)
|
||||
|
||||
def _handle_object_update_cached(self, packet: ProxiedMessage):
|
||||
seen_locals = []
|
||||
for block in packet['ObjectData']:
|
||||
@@ -430,14 +522,18 @@ class ObjectManager:
|
||||
LOG.debug(f"Received ObjectCost for unknown {object_id}")
|
||||
continue
|
||||
obj.ObjectCosts.update(object_costs)
|
||||
self._notify_object_updated(obj, {"ObjectCosts"})
|
||||
self._run_object_update_hooks(obj, {"ObjectCosts"})
|
||||
|
||||
def _notify_object_updated(self, obj: Object, updated_props: Set[str]):
|
||||
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str]):
|
||||
if obj.PCode == PCode.AVATAR and "NameValue" in updated_props:
|
||||
if obj.NameValue:
|
||||
self.name_cache.update(obj.FullID, obj.NameValue.to_dict())
|
||||
AddonManager.handle_object_updated(self._region.session(), self._region, obj, updated_props)
|
||||
|
||||
def clear(self):
|
||||
self._localid_lookup.clear()
|
||||
self._fullid_lookup.clear()
|
||||
self._coarse_locations.clear()
|
||||
self._orphan_manager.clear()
|
||||
self.missing_locals.clear()
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ from hippolyzer.lib.base.datatypes import Vector3
|
||||
from hippolyzer.lib.base.message.message_handler import MessageHandler
|
||||
from hippolyzer.lib.proxy.caps_client import CapsClient
|
||||
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
|
||||
from hippolyzer.lib.proxy.namecache import NameCache
|
||||
from hippolyzer.lib.proxy.objects import ObjectManager
|
||||
from hippolyzer.lib.proxy.transfer_manager import TransferManager
|
||||
from hippolyzer.lib.proxy.xfer_manager import XferManager
|
||||
@@ -60,6 +61,9 @@ class ProxiedRegion:
|
||||
self.transfer_manager = TransferManager(self)
|
||||
self.caps_client = CapsClient(self)
|
||||
self.objects = ObjectManager(self)
|
||||
if session:
|
||||
name_cache: NameCache = session.session_manager.name_cache
|
||||
self.message_handler.subscribe("UUIDNameReply", name_cache.handle_uuid_name_reply)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -103,6 +107,9 @@ class ProxiedRegion:
|
||||
seed_id = self._caps["Seed"][1].split("/")[-1].encode("utf8")
|
||||
# Give it a unique domain tied to the current Seed URI
|
||||
parsed[1] = f"{name}-{hashlib.sha256(seed_id).hexdigest()[:16]}.hippo-proxy.localhost"
|
||||
# Force the URL to HTTP, we're going to handle the request ourselves so it doesn't need
|
||||
# to be secure. This should save on expensive TLS context setup for each req.
|
||||
parsed[0] = "http"
|
||||
wrapper_url = urllib.parse.urlunsplit(parsed)
|
||||
self._caps.add(name + "ProxyWrapper", (CapType.WRAPPER, wrapper_url))
|
||||
return wrapper_url
|
||||
|
||||
@@ -12,6 +12,7 @@ from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
|
||||
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
|
||||
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, is_asset_server_cap_name, SerializedCapData
|
||||
from hippolyzer.lib.proxy.namecache import NameCache
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -151,6 +152,7 @@ class SessionManager:
|
||||
self.asset_repo = HTTPAssetRepo()
|
||||
self.message_logger: Optional[BaseMessageLogger] = None
|
||||
self.addon_ctx: Dict[str, Any] = {}
|
||||
self.name_cache = NameCache()
|
||||
|
||||
def create_session(self, login_data) -> Session:
|
||||
session = Session.from_login_data(login_data, self)
|
||||
|
||||
@@ -162,8 +162,8 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
|
||||
data = data[4:]
|
||||
elif address_type == 3: # Domain name
|
||||
domain_length = data[0]
|
||||
address = data[1:1+domain_length]
|
||||
data = data[1+domain_length:]
|
||||
address = data[1:1 + domain_length]
|
||||
data = data[1 + domain_length:]
|
||||
else:
|
||||
logging.error("Don't understand addr type %d" % address_type)
|
||||
return None
|
||||
|
||||
@@ -8,7 +8,7 @@ from typing import *
|
||||
|
||||
import hippolyzer.lib.base.serialization as se
|
||||
from hippolyzer.lib.base import llsd
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag
|
||||
from hippolyzer.lib.base.namevalue import NameValuesSerializer
|
||||
|
||||
try:
|
||||
@@ -25,7 +25,7 @@ except:
|
||||
@se.enum_field_serializer("RezObject", "InventoryData", "Type")
|
||||
@se.enum_field_serializer("RezScript", "InventoryBlock", "Type")
|
||||
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "Type")
|
||||
class AssetType(enum.IntEnum):
|
||||
class AssetType(IntEnum):
|
||||
TEXTURE = 0
|
||||
SOUND = 1
|
||||
CALLINGCARD = 2
|
||||
@@ -103,7 +103,7 @@ class AssetType(enum.IntEnum):
|
||||
@se.enum_field_serializer("RezObject", "InventoryData", "InvType")
|
||||
@se.enum_field_serializer("RezScript", "InventoryBlock", "InvType")
|
||||
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "InvType")
|
||||
class InventoryType(enum.IntEnum):
|
||||
class InventoryType(IntEnum):
|
||||
TEXTURE = 0
|
||||
SOUND = 1
|
||||
CALLINGCARD = 2
|
||||
@@ -143,7 +143,7 @@ class InventoryType(enum.IntEnum):
|
||||
@se.enum_field_serializer("AgentIsNowWearing", "WearableData", "WearableType")
|
||||
@se.enum_field_serializer("AgentWearablesUpdate", "WearableData", "WearableType")
|
||||
@se.enum_field_serializer("CreateInventoryItem", "InventoryBlock", "WearableType")
|
||||
class WearableType(enum.IntEnum):
|
||||
class WearableType(IntEnum):
|
||||
SHAPE = 0
|
||||
SKIN = 1
|
||||
HAIR = 2
|
||||
@@ -180,7 +180,7 @@ def _register_permissions_flags(message_name, block_name):
|
||||
@_register_permissions_flags("RezObject", "InventoryData")
|
||||
@_register_permissions_flags("RezScript", "InventoryBlock")
|
||||
@_register_permissions_flags("RezMultipleAttachmentsFromInv", "ObjectData")
|
||||
class Permissions(enum.IntFlag):
|
||||
class Permissions(IntFlag):
|
||||
TRANSFER = (1 << 13)
|
||||
MODIFY = (1 << 14)
|
||||
COPY = (1 << 15)
|
||||
@@ -202,7 +202,7 @@ class Permissions(enum.IntFlag):
|
||||
@se.flag_field_serializer("RezObject", "InventoryData", "Flags")
|
||||
@se.flag_field_serializer("UpdateCreateInventoryItem", "InventoryData", "Flags")
|
||||
@se.flag_field_serializer("UpdateTaskInventory", "InventoryData", "Flags")
|
||||
class InventoryItemFlags(enum.IntFlag):
|
||||
class InventoryItemFlags(IntFlag):
|
||||
# The asset has only one reference in the system. If the
|
||||
# inventory item is deleted, or the assetid updated, then we
|
||||
# can remove the old reference.
|
||||
@@ -233,7 +233,7 @@ class InventoryItemFlags(enum.IntFlag):
|
||||
|
||||
|
||||
@se.enum_field_serializer("ObjectPermissions", "ObjectData", "Field")
|
||||
class PermissionType(enum.IntEnum):
|
||||
class PermissionType(IntEnum):
|
||||
BASE = 0x01
|
||||
OWNER = 0x02
|
||||
GROUP = 0x04
|
||||
@@ -242,7 +242,7 @@ class PermissionType(enum.IntEnum):
|
||||
|
||||
|
||||
@se.enum_field_serializer("TransferRequest", "TransferInfo", "SourceType")
|
||||
class TransferSourceType(enum.IntEnum):
|
||||
class TransferSourceType(IntEnum):
|
||||
UNKNOWN = 0
|
||||
FILE = enum.auto()
|
||||
ASSET = enum.auto()
|
||||
@@ -250,7 +250,7 @@ class TransferSourceType(enum.IntEnum):
|
||||
SIM_ESTATE = enum.auto()
|
||||
|
||||
|
||||
class EstateAssetType(enum.IntEnum):
|
||||
class EstateAssetType(IntEnum):
|
||||
NONE = -1
|
||||
COVENANT = 0
|
||||
|
||||
@@ -308,14 +308,14 @@ class TransferParamsSerializer(se.EnumSwitchedSubfieldSerializer):
|
||||
@se.enum_field_serializer("TransferPacket", "TransferData", "ChannelType")
|
||||
@se.enum_field_serializer("TransferRequest", "TransferInfo", "ChannelType")
|
||||
@se.enum_field_serializer("TransferInfo", "TransferInfo", "ChannelType")
|
||||
class TransferChannelType(enum.IntEnum):
|
||||
class TransferChannelType(IntEnum):
|
||||
UNKNOWN = 0
|
||||
MISC = enum.auto()
|
||||
ASSET = enum.auto()
|
||||
|
||||
|
||||
@se.enum_field_serializer("TransferInfo", "TransferInfo", "TargetType")
|
||||
class TransferTargetType(enum.IntEnum):
|
||||
class TransferTargetType(IntEnum):
|
||||
UNKNOWN = 0
|
||||
FILE = enum.auto()
|
||||
VFILE = enum.auto()
|
||||
@@ -323,7 +323,7 @@ class TransferTargetType(enum.IntEnum):
|
||||
|
||||
@se.enum_field_serializer("TransferInfo", "TransferInfo", "Status")
|
||||
@se.enum_field_serializer("TransferPacket", "TransferData", "Status")
|
||||
class TransferStatus(enum.IntEnum):
|
||||
class TransferStatus(IntEnum):
|
||||
OK = 0
|
||||
DONE = 1
|
||||
SKIP = 2
|
||||
@@ -380,7 +380,7 @@ class TransferInfoSerializer(se.BaseSubfieldSerializer):
|
||||
|
||||
|
||||
@se.enum_field_serializer("RequestXfer", "XferID", "FilePath")
|
||||
class XferFilePath(enum.IntEnum):
|
||||
class XferFilePath(IntEnum):
|
||||
NONE = 0
|
||||
USER_SETTINGS = 1
|
||||
APP_SETTINGS = 2
|
||||
@@ -403,7 +403,7 @@ class XferFilePath(enum.IntEnum):
|
||||
|
||||
|
||||
@se.enum_field_serializer("AbortXfer", "XferID", "Result")
|
||||
class XferError(enum.IntEnum):
|
||||
class XferError(IntEnum):
|
||||
FILE_EMPTY = -44
|
||||
FILE_NOT_FOUND = -43
|
||||
CANNOT_OPEN_FILE = -42
|
||||
@@ -423,7 +423,7 @@ class SendXferPacketIDSerializer(se.AdapterSubfieldSerializer):
|
||||
|
||||
|
||||
@se.enum_field_serializer("ViewerEffect", "Effect", "Type")
|
||||
class ViewerEffectType(enum.IntEnum):
|
||||
class ViewerEffectType(IntEnum):
|
||||
TEXT = 0
|
||||
ICON = enum.auto()
|
||||
CONNECTOR = enum.auto()
|
||||
@@ -445,7 +445,7 @@ class ViewerEffectType(enum.IntEnum):
|
||||
EFFECT_BLOB = enum.auto()
|
||||
|
||||
|
||||
class LookAtTarget(enum.IntEnum):
|
||||
class LookAtTarget(IntEnum):
|
||||
NONE = 0
|
||||
IDLE = enum.auto()
|
||||
AUTO_LISTEN = enum.auto()
|
||||
@@ -459,7 +459,7 @@ class LookAtTarget(enum.IntEnum):
|
||||
CLEAR = enum.auto()
|
||||
|
||||
|
||||
class PointAtTarget(enum.IntEnum):
|
||||
class PointAtTarget(IntEnum):
|
||||
NONE = 0
|
||||
SELECT = enum.auto()
|
||||
GRAB = enum.auto()
|
||||
@@ -499,7 +499,7 @@ class ViewerEffectDataSerializer(se.EnumSwitchedSubfieldSerializer):
|
||||
|
||||
@se.enum_field_serializer("MoneyTransferRequest", "MoneyData", "TransactionType")
|
||||
@se.enum_field_serializer("MoneyBalanceReply", "TransactionInfo", "TransactionType")
|
||||
class MoneyTransactionType(enum.IntEnum):
|
||||
class MoneyTransactionType(IntEnum):
|
||||
# _many_ of these codes haven't been used in decades.
|
||||
# Money transaction failure codes
|
||||
NULL = 0
|
||||
@@ -561,7 +561,7 @@ class MoneyTransactionType(enum.IntEnum):
|
||||
|
||||
|
||||
@se.flag_field_serializer("MoneyTransferRequest", "MoneyData", "Flags")
|
||||
class MoneyTransactionFlags(enum.IntFlag):
|
||||
class MoneyTransactionFlags(IntFlag):
|
||||
SOURCE_GROUP = 1
|
||||
DEST_GROUP = 1 << 1
|
||||
OWNER_GROUP = 1 << 2
|
||||
@@ -570,7 +570,7 @@ class MoneyTransactionFlags(enum.IntFlag):
|
||||
|
||||
|
||||
@se.enum_field_serializer("ImprovedInstantMessage", "MessageBlock", "Dialog")
|
||||
class IMDialogType(enum.IntEnum):
|
||||
class IMDialogType(IntEnum):
|
||||
NOTHING_SPECIAL = 0
|
||||
MESSAGEBOX = 1
|
||||
GROUP_INVITATION = 3
|
||||
@@ -728,7 +728,7 @@ class ObjectUpdateDataSerializer(se.SimpleSubfieldSerializer):
|
||||
|
||||
@se.enum_field_serializer("ObjectUpdate", "ObjectData", "PCode")
|
||||
@se.enum_field_serializer("ObjectAdd", "ObjectData", "PCode")
|
||||
class PCode(enum.IntEnum):
|
||||
class PCode(IntEnum):
|
||||
# Should actually be a bitmask, these are just some common ones.
|
||||
PRIMITIVE = 9
|
||||
AVATAR = 47
|
||||
@@ -742,7 +742,7 @@ class PCode(enum.IntEnum):
|
||||
@se.flag_field_serializer("ObjectUpdateCompressed", "ObjectData", "UpdateFlags")
|
||||
@se.flag_field_serializer("ObjectUpdateCached", "ObjectData", "UpdateFlags")
|
||||
@se.flag_field_serializer("ObjectAdd", "ObjectData", "AddFlags")
|
||||
class ObjectUpdateFlags(enum.IntFlag):
|
||||
class ObjectUpdateFlags(IntFlag):
|
||||
USE_PHYSICS = 1 << 0
|
||||
CREATE_SELECTED = 1 << 1
|
||||
OBJECT_MODIFY = 1 << 2
|
||||
@@ -796,7 +796,7 @@ class AttachmentStateAdapter(se.Adapter):
|
||||
|
||||
|
||||
@se.flag_field_serializer("AgentUpdate", "AgentData", "State")
|
||||
class AgentState(enum.IntFlag):
|
||||
class AgentState(IntFlag):
|
||||
TYPING = 1 << 3
|
||||
EDITING = 1 << 4
|
||||
|
||||
@@ -836,7 +836,7 @@ class ImprovedTerseObjectUpdateDataSerializer(se.SimpleSubfieldSerializer):
|
||||
})
|
||||
|
||||
|
||||
class ShineLevel(enum.IntEnum):
|
||||
class ShineLevel(IntEnum):
|
||||
OFF = 0
|
||||
LOW = 1
|
||||
MEDIUM = 2
|
||||
@@ -854,7 +854,7 @@ class BasicMaterials:
|
||||
BUMP_SHINY_FULLBRIGHT = se.BitfieldDataclass(BasicMaterials, se.U8)
|
||||
|
||||
|
||||
class TexGen(enum.IntEnum):
|
||||
class TexGen(IntEnum):
|
||||
DEFAULT = 0
|
||||
PLANAR = 0x2
|
||||
# These are unused / not supported
|
||||
@@ -1056,7 +1056,7 @@ class DPTextureEntrySubfieldSerializer(se.SimpleSubfieldSerializer):
|
||||
TEMPLATE = DATA_PACKER_TE_TEMPLATE
|
||||
|
||||
|
||||
class TextureAnimMode(enum.IntFlag):
|
||||
class TextureAnimMode(IntFlag):
|
||||
ON = 0x01
|
||||
LOOP = 0x02
|
||||
REVERSE = 0x04
|
||||
@@ -1092,7 +1092,7 @@ class TextureIDListSerializer(se.SimpleSubfieldSerializer):
|
||||
TEMPLATE = se.Collection(None, se.UUID)
|
||||
|
||||
|
||||
class ParticleDataFlags(enum.IntFlag):
|
||||
class ParticleDataFlags(IntFlag):
|
||||
INTERP_COLOR = 0x001
|
||||
INTERP_SCALE = 0x002
|
||||
BOUNCE = 0x004
|
||||
@@ -1108,12 +1108,12 @@ class ParticleDataFlags(enum.IntFlag):
|
||||
DATA_BLEND = 0x20000
|
||||
|
||||
|
||||
class ParticleFlags(enum.IntFlag):
|
||||
class ParticleFlags(IntFlag):
|
||||
OBJECT_RELATIVE = 0x1
|
||||
USE_NEW_ANGLE = 0x2
|
||||
|
||||
|
||||
class ParticleBlendFunc(enum.IntEnum):
|
||||
class ParticleBlendFunc(IntEnum):
|
||||
ONE = 0
|
||||
ZERO = 1
|
||||
DEST_COLOR = 2
|
||||
@@ -1150,7 +1150,7 @@ PDATA_BLOCK_TEMPLATE = se.Template({
|
||||
})
|
||||
|
||||
|
||||
class PartPattern(enum.IntFlag):
|
||||
class PartPattern(IntFlag):
|
||||
NONE = 0
|
||||
DROP = 0x1
|
||||
EXPLODE = 0x2
|
||||
@@ -1199,7 +1199,7 @@ class PSBlockSerializer(se.SimpleSubfieldSerializer):
|
||||
|
||||
|
||||
@se.enum_field_serializer("ObjectExtraParams", "ObjectData", "ParamType")
|
||||
class ExtraParamType(enum.IntEnum):
|
||||
class ExtraParamType(IntEnum):
|
||||
FLEXIBLE = 0x10
|
||||
LIGHT = 0x20
|
||||
SCULPT = 0x30
|
||||
@@ -1209,11 +1209,11 @@ class ExtraParamType(enum.IntEnum):
|
||||
EXTENDED_MESH = 0x70
|
||||
|
||||
|
||||
class ExtendedMeshFlags(enum.IntFlag):
|
||||
class ExtendedMeshFlags(IntFlag):
|
||||
ANIMATED_MESH = 0x1
|
||||
|
||||
|
||||
class SculptType(enum.IntEnum):
|
||||
class SculptType(IntEnum):
|
||||
NONE = 0
|
||||
SPHERE = 1
|
||||
TORUS = 2
|
||||
@@ -1238,10 +1238,10 @@ EXTRA_PARAM_TEMPLATES = {
|
||||
"UserForce": se.IfPresent(se.Vector3),
|
||||
}),
|
||||
ExtraParamType.LIGHT: se.Template({
|
||||
"Color": Color4(),
|
||||
"Radius": se.F32,
|
||||
"Cutoff": se.F32,
|
||||
"Falloff": se.F32,
|
||||
"Color": Color4(),
|
||||
"Radius": se.F32,
|
||||
"Cutoff": se.F32,
|
||||
"Falloff": se.F32,
|
||||
}),
|
||||
ExtraParamType.SCULPT: se.Template({
|
||||
"Texture": se.UUID,
|
||||
@@ -1284,7 +1284,7 @@ class ObjectUpdateExtraParamsSerializer(se.SimpleSubfieldSerializer):
|
||||
|
||||
|
||||
@se.flag_field_serializer("ObjectUpdate", "ObjectData", "Flags")
|
||||
class SoundFlags(enum.IntFlag):
|
||||
class SoundFlags(IntFlag):
|
||||
LOOP = 1 << 0
|
||||
SYNC_MASTER = 1 << 1
|
||||
SYNC_SLAVE = 1 << 2
|
||||
@@ -1293,7 +1293,7 @@ class SoundFlags(enum.IntFlag):
|
||||
STOP = 1 << 5
|
||||
|
||||
|
||||
class CompressedFlags(enum.IntFlag):
|
||||
class CompressedFlags(IntFlag):
|
||||
SCRATCHPAD = 1
|
||||
TREE = 1 << 1
|
||||
TEXT = 1 << 2
|
||||
@@ -1381,7 +1381,7 @@ class ObjectUpdateCompressedDataSerializer(se.SimpleSubfieldSerializer):
|
||||
|
||||
|
||||
@se.flag_field_serializer("MultipleObjectUpdate", "ObjectData", "Type")
|
||||
class MultipleObjectUpdateFlags(enum.IntFlag):
|
||||
class MultipleObjectUpdateFlags(IntFlag):
|
||||
POSITION = 0x01
|
||||
ROTATION = 0x02
|
||||
SCALE = 0x04
|
||||
@@ -1401,7 +1401,7 @@ class MultipleObjectUpdateDataSerializer(se.FlagSwitchedSubfieldSerializer):
|
||||
|
||||
@se.flag_field_serializer("AgentUpdate", "AgentData", "ControlFlags")
|
||||
@se.flag_field_serializer("ScriptControlChange", "Data", "Controls")
|
||||
class AgentControlFlags(enum.IntFlag):
|
||||
class AgentControlFlags(IntFlag):
|
||||
AT_POS = 1
|
||||
AT_NEG = 1 << 1
|
||||
LEFT_POS = 1 << 2
|
||||
@@ -1437,14 +1437,14 @@ class AgentControlFlags(enum.IntFlag):
|
||||
|
||||
|
||||
@se.flag_field_serializer("AgentUpdate", "AgentData", "Flags")
|
||||
class AgentUpdateFlags(enum.IntFlag):
|
||||
class AgentUpdateFlags(IntFlag):
|
||||
HIDE_TITLE = 1
|
||||
CLIENT_AUTOPILOT = 1 << 1
|
||||
|
||||
|
||||
@se.enum_field_serializer("ChatFromViewer", "ChatData", "Type")
|
||||
@se.enum_field_serializer("ChatFromSimulator", "ChatData", "ChatType")
|
||||
class ChatType(enum.IntEnum):
|
||||
class ChatType(IntEnum):
|
||||
WHISPER = 0
|
||||
NORMAL = 1
|
||||
SHOUT = 2
|
||||
@@ -1461,7 +1461,7 @@ class ChatType(enum.IntEnum):
|
||||
|
||||
|
||||
@se.enum_field_serializer("ChatFromSimulator", "ChatData", "SourceType")
|
||||
class ChatSourceType(enum.IntEnum):
|
||||
class ChatSourceType(IntEnum):
|
||||
SYSTEM = 0
|
||||
AGENT = 1
|
||||
OBJECT = 2
|
||||
@@ -1479,7 +1479,7 @@ class NameValueSerializer(se.SimpleSubfieldSerializer):
|
||||
|
||||
|
||||
@se.enum_field_serializer("SetFollowCamProperties", "CameraProperty", "Type")
|
||||
class CameraPropertyType(enum.IntEnum):
|
||||
class CameraPropertyType(IntEnum):
|
||||
PITCH = 0
|
||||
FOCUS_OFFSET = enum.auto()
|
||||
FOCUS_OFFSET_X = enum.auto()
|
||||
@@ -1506,7 +1506,7 @@ class CameraPropertyType(enum.IntEnum):
|
||||
|
||||
|
||||
@se.enum_field_serializer("DeRezObject", "AgentBlock", "Destination")
|
||||
class DeRezObjectDestination(enum.IntEnum):
|
||||
class DeRezObjectDestination(IntEnum):
|
||||
SAVE_INTO_AGENT_INVENTORY = 0 # deprecated, disabled
|
||||
ACQUIRE_TO_AGENT_INVENTORY = 1 # try to leave copy in world
|
||||
SAVE_INTO_TASK_INVENTORY = 2
|
||||
@@ -1526,7 +1526,7 @@ class DeRezObjectDestination(enum.IntEnum):
|
||||
@se.flag_field_serializer("SimStats", "RegionInfo", "RegionFlagsExtended")
|
||||
@se.flag_field_serializer("RegionInfo", "RegionInfo", "RegionFlags")
|
||||
@se.flag_field_serializer("RegionInfo", "RegionInfo3", "RegionFlagsExtended")
|
||||
class RegionFlags(enum.IntFlag):
|
||||
class RegionFlags(IntFlag):
|
||||
ALLOW_DAMAGE = 1 << 0
|
||||
ALLOW_LANDMARK = 1 << 1
|
||||
ALLOW_SET_HOME = 1 << 2
|
||||
@@ -1562,12 +1562,35 @@ class RegionFlags(enum.IntFlag):
|
||||
|
||||
|
||||
@se.flag_field_serializer("RegionHandshakeReply", "RegionInfo", "Flags")
|
||||
class RegionHandshakeReplyFlags(enum.IntFlag):
|
||||
class RegionHandshakeReplyFlags(IntFlag):
|
||||
VOCACHE_CULLING_ENABLED = 0x1 # ask sim to send all cacheable objects.
|
||||
VOCACHE_IS_EMPTY = 0x2 # the cache file is empty, no need to send cache probes.
|
||||
SUPPORTS_SELF_APPEARANCE = 0x4 # inbound AvatarAppearance for self is ok
|
||||
|
||||
|
||||
@se.flag_field_serializer("TeleportStart", "Info", "TeleportFlags")
|
||||
@se.flag_field_serializer("TeleportProgress", "Info", "TeleportFlags")
|
||||
@se.flag_field_serializer("TeleportFinish", "Info", "TeleportFlags")
|
||||
@se.flag_field_serializer("TeleportLureRequest", "Info", "TeleportFlags")
|
||||
class TeleportFlags(IntFlag):
|
||||
SET_HOME_TO_TARGET = 1 << 0 # newbie leaving prelude (starter area)
|
||||
SET_LAST_TO_TARGET = 1 << 1
|
||||
VIA_LURE = 1 << 2
|
||||
VIA_LANDMARK = 1 << 3
|
||||
VIA_LOCATION = 1 << 4
|
||||
VIA_HOME = 1 << 5
|
||||
VIA_TELEHUB = 1 << 6
|
||||
VIA_LOGIN = 1 << 7
|
||||
VIA_GODLIKE_LURE = 1 << 8
|
||||
GODLIKE = 1 << 9
|
||||
NINE_ONE_ONE = 1 << 10 # What is this?
|
||||
DISABLE_CANCEL = 1 << 11 # Used for llTeleportAgentHome()
|
||||
VIA_REGION_ID = 1 << 12
|
||||
IS_FLYING = 1 << 13
|
||||
SHOW_RESET_HOME = 1 << 14
|
||||
FORCE_REDIRECT = 1 << 15
|
||||
|
||||
|
||||
@se.http_serializer("RenderMaterials")
|
||||
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
|
||||
@classmethod
|
||||
|
||||
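The IntFlag conversions above (AgentUpdateFlags, RegionFlags, TeleportFlags, and so on) are what let the proxy render a packed integer field as named bits while still converting back to the identical wire value. A minimal standalone sketch of that round trip, reusing a few of the TeleportFlags values defined in this file (plain stdlib `enum`, not hippolyzer's serializer machinery):

```python
import enum


class TeleportFlags(enum.IntFlag):
    # A couple of the bit values from the class above, for illustration only
    VIA_LURE = 1 << 2
    VIA_LOCATION = 1 << 4
    IS_FLYING = 1 << 13


raw = (1 << 2) | (1 << 13)   # value as it would appear on the wire
flags = TeleportFlags(raw)
print(flags)                 # named flags, e.g. VIA_LURE|IS_FLYING
assert int(flags) == raw     # converts back to the identical integer
```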
@@ -128,7 +128,7 @@ class TransferManager:
elif msg.name == "TransferAbort":
transfer.error_code = msg["TransferID"][0].deserialize_var("Result")
transfer.set_exception(
ConnectionAbortedError(f"Unknown failure")
ConnectionAbortedError("Unknown failure")
)

def _handle_transfer_packet(self, msg: ProxiedMessage, transfer: Transfer):
@@ -136,7 +136,7 @@ class TransferManager:
packet_id: int = transfer_block["Packet"]
packet_data = transfer_block["Data"]
transfer.chunks[packet_id] = packet_data
if transfer_block["Status"] == TransferStatus.DONE:
if transfer_block["Status"] == TransferStatus.DONE and not transfer.done():
transfer.mark_done()

def _handle_transfer_info(self, msg: ProxiedMessage, transfer: Transfer):
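The `and not transfer.done()` guard added above (and the matching `not xfer.done()` check in the Xfer handler below) presumably protects against a retransmitted DONE packet completing the underlying future twice, which asyncio treats as an error. A minimal illustration of that failure mode, independent of hippolyzer:

```python
import asyncio


async def main() -> None:
    fut: asyncio.Future = asyncio.get_running_loop().create_future()
    fut.set_result(b"first DONE packet")
    try:
        # Without a done() check, a duplicate DONE packet would do this:
        fut.set_result(b"duplicate DONE packet")
    except asyncio.InvalidStateError:
        print("future was already completed")


asyncio.run(main())
```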
@@ -1,15 +1,14 @@
"""
Outbound Xfer only.

sim->viewer Xfer is only legitimately used for terrain so not worth implementing.
Managers for inbound and outbound xfer as well as the AssetUploadRequest flow
"""
from __future__ import annotations

import asyncio
import enum
import random
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.datatypes import UUID, RawBytes
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.data_packer import TemplateDataPacker
from hippolyzer.lib.base.message.message import Block
@@ -24,7 +23,7 @@ _XFER_MESSAGES = {"AbortXfer", "ConfirmXferPacket", "RequestXfer", "SendXferPack


class Xfer:
def __init__(self, xfer_id: int):
def __init__(self, xfer_id: Optional[int] = None):
super().__init__()
self.xfer_id: Optional[int] = xfer_id
self.chunks: Dict[int, bytes] = {}
@@ -65,6 +64,11 @@ class Xfer:
return self._future.__await__()


class UploadStrategy(enum.IntEnum):
XFER = enum.auto()
ASSET_UPLOAD_REQUEST = enum.auto()


class XferManager:
def __init__(self, region: ProxiedRegion):
self._region: ProxiedRegion = proxify(region)
@@ -141,5 +145,96 @@ class XferManager:
))

xfer.chunks[packet_id.PacketID] = packet_data
if packet_id.IsEOF:
if packet_id.IsEOF and not xfer.done():
xfer.mark_done()
def upload_asset(
self,
asset_type: AssetType,
data: bytes,
store_local: bool = False,
temp_file: bool = False,
transaction_id: Optional[UUID] = None,
upload_strategy: Optional[UploadStrategy] = None,
) -> asyncio.Future[UUID]:
"""Upload an asset through the Xfer upload path"""
if not transaction_id:
transaction_id = UUID.random()

# Small amounts of data can be sent inline, decide based on size
if upload_strategy is None:
if len(data) >= 1150:
upload_strategy = UploadStrategy.XFER
else:
upload_strategy = UploadStrategy.ASSET_UPLOAD_REQUEST

xfer = None
inline_data = b''
if upload_strategy == UploadStrategy.XFER:
# Prepend the expected length field to the first chunk
if not isinstance(data, RawBytes):
data = TemplateDataPacker.pack(len(data), MsgType.MVT_S32) + data
xfer = Xfer()
chunk_num = 0
while data:
xfer.chunks[chunk_num] = data[:1150]
data = data[1150:]
else:
inline_data = data

self._region.circuit.send_message(ProxiedMessage(
"AssetUploadRequest",
Block(
"AssetBlock",
TransactionID=transaction_id,
Type=asset_type,
Tempfile=temp_file,
StoreLocal=store_local,
AssetData=inline_data,
)
))
fut = asyncio.Future()
asyncio.create_task(self._pump_asset_upload(xfer, transaction_id, fut))
return fut
async def _pump_asset_upload(self, xfer: Optional[Xfer], transaction_id: UUID, fut: asyncio.Future):
message_handler = self._region.message_handler
# We'll receive an Xfer request for the asset we're uploading.
# asset ID is determined by hashing secure session ID with chosen transaction ID.
asset_id: UUID = self._region.session().tid_to_assetid(transaction_id)
try:
# Only need to do this if we're using the xfer upload strategy, otherwise all the
# data was already sent in the AssetUploadRequest and we don't expect a RequestXfer.
if xfer is not None:
def request_predicate(request_msg: ProxiedMessage):
return request_msg["XferID"]["VFileID"] == asset_id
msg = await message_handler.wait_for(
'RequestXfer', predicate=request_predicate, timeout=5000)
xfer.xfer_id = msg["XferID"]["ID"]

packet_id = 0
# TODO: No resend yet. If it's lost, it's lost.
while xfer.chunks:
chunk = xfer.chunks.pop(packet_id)
# EOF if there are no chunks left
packet_val = XferPacket(PacketID=packet_id, IsEOF=not bool(xfer.chunks))
self._region.circuit.send_message(ProxiedMessage(
"SendXferPacket",
Block("XferID", ID=xfer.xfer_id, Packet_=packet_val),
Block("DataPacket", Data=chunk),
))
# Don't care about the value, just want to know it was confirmed.
await message_handler.wait_for(
"ConfirmXferPacket", predicate=xfer.is_our_message, timeout=5000)
packet_id += 1

def complete_predicate(complete_msg: ProxiedMessage):
return complete_msg["AssetBlock"]["UUID"] == asset_id
msg = await message_handler.wait_for('AssetUploadComplete', predicate=complete_predicate)
if msg["AssetBlock"]["Success"] == 1:
fut.set_result(asset_id)
else:
fut.set_exception(RuntimeError(f"Xfer for transaction {transaction_id} failed"))

except asyncio.TimeoutError as e:
fut.set_exception(e)
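The `tid_to_assetid()` call is only described by its comment; in the Second Life viewer the upload asset ID is conventionally derived by MD5-combining the transaction ID with the secure session ID (LLUUID::combine). A sketch of that derivation using stdlib `uuid`/`hashlib`, offered as an assumption about what the session helper does rather than a statement of hippolyzer's exact implementation:

```python
import hashlib
import uuid


def tid_to_asset_id(transaction_id: uuid.UUID, secure_session_id: uuid.UUID) -> uuid.UUID:
    # Assumed equivalent of the viewer-side derivation: MD5 over the
    # concatenated 16-byte forms of the transaction ID and the secure
    # session ID, with the digest reused directly as the asset UUID.
    digest = hashlib.md5(transaction_id.bytes + secure_session_id.bytes).digest()
    return uuid.UUID(bytes=digest)
```

Callers of `upload_asset()` simply await the returned future and receive this derived asset ID back on success.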
@@ -5,3 +5,8 @@ license_files =

[bdist_wheel]
universal = 1

[flake8]
max-line-length = 160
exclude = build/*, .eggs/*
ignore = F405, F403, E501, F841, E722, W503, E741
4
setup.py
4
setup.py
@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
|
||||
|
||||
here = path.abspath(path.dirname(__file__))
|
||||
|
||||
version = '0.4.1'
|
||||
version = '0.5.0'
|
||||
|
||||
with open(path.join(here, 'README.md')) as readme_fh:
|
||||
readme = readme_fh.read()
|
||||
@@ -50,7 +50,7 @@ setup(
|
||||
"Topic :: Software Development :: Testing",
|
||||
],
|
||||
author='Salad Dais',
|
||||
author_email='SaladDais@users.noreply.github.com',
|
||||
author_email='83434023+SaladDais@users.noreply.github.com',
|
||||
url='https://github.com/SaladDais/Hippolyzer/',
|
||||
license='LGPLv3',
|
||||
packages=find_packages(include=["hippolyzer", "hippolyzer.*"]),
|
||||
|
||||
@@ -111,7 +111,7 @@ executables = [
|
||||
|
||||
setup(
|
||||
name="hippolyzer_gui",
|
||||
version="0.4.0",
|
||||
version="0.5.0",
|
||||
description="Hippolyzer GUI",
|
||||
options=options,
|
||||
executables=executables,
|
||||
|
||||
@@ -664,14 +664,13 @@ class NameValueSerializationTests(BaseSerializationTest):
|
||||
self.assertEqual(test.decode("utf8"), str(reader.read(NameValueSerializer())))
|
||||
|
||||
def test_namevalues_stringify(self):
|
||||
test_list = \
|
||||
b"Alpha STRING R S 'Twas brillig and the slighy toves/Did gyre and gimble in the wabe\n" + \
|
||||
b"Beta F32 R S 3.14159\n" + \
|
||||
b"Gamma S32 R S -12345\n" + \
|
||||
b"Delta VEC3 R S <1.2, -3.4, 5.6>\n" + \
|
||||
b"Epsilon U32 R S 12345\n" + \
|
||||
b"Zeta ASSET R S 041a8591-6f30-42f8-b9f7-7f281351f375\n" + \
|
||||
b"Eta U64 R S 9223372036854775807"
|
||||
test_list = b"Alpha STRING R S 'Twas brillig and the slighy toves/Did gyre and gimble in the wabe\n" + \
|
||||
b"Beta F32 R S 3.14159\n" + \
|
||||
b"Gamma S32 R S -12345\n" + \
|
||||
b"Delta VEC3 R S <1.2, -3.4, 5.6>\n" + \
|
||||
b"Epsilon U32 R S 12345\n" + \
|
||||
b"Zeta ASSET R S 041a8591-6f30-42f8-b9f7-7f281351f375\n" + \
|
||||
b"Eta U64 R S 9223372036854775807"
|
||||
|
||||
self.writer.clear()
|
||||
self.writer.write_bytes(test_list)
|
||||
|
||||
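The NameValue test data above follows the usual `name type class sendto value` layout, where only the value itself may contain spaces. A tiny sketch of splitting one such line, inferred from the sample entries rather than taken from `NameValueSerializer`:

```python
def parse_namevalue_line(line: str):
    # "Beta F32 R S 3.14159" -> name, type, class, sendto, value;
    # limit the split so a value with spaces stays intact.
    name, nv_type, nv_class, sendto, value = line.split(" ", 4)
    return name, nv_type, nv_class, sendto, value


print(parse_namevalue_line("Beta F32 R S 3.14159"))
print(parse_namevalue_line("Delta VEC3 R S <1.2, -3.4, 5.6>"))
```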
@@ -39,11 +39,7 @@ class TestDictionary(unittest.TestCase):
|
||||
self.template_list = parser.message_templates
|
||||
|
||||
def test_create_dictionary(self):
|
||||
try:
|
||||
_msg_dict = TemplateDictionary(None)
|
||||
assert False, "Template dictionary fail case list==None not caught"
|
||||
except:
|
||||
assert True
|
||||
TemplateDictionary(None)
|
||||
|
||||
def test_get_packet(self):
|
||||
msg_dict = TemplateDictionary(self.template_list)
|
||||
@@ -55,7 +51,7 @@ class TestDictionary(unittest.TestCase):
|
||||
def test_get_packet_pair(self):
|
||||
msg_dict = TemplateDictionary(self.template_list)
|
||||
packet = msg_dict.get_template_by_pair('Medium', 8)
|
||||
assert packet.name == 'ConfirmEnableSimulator', "Frequency-Number pair resulting in incorrect packet"
|
||||
assert packet.name == 'ConfirmEnableSimulator', "Frequency-Number pair resulting in incorrect packet"
|
||||
|
||||
|
||||
class TestTemplates(unittest.TestCase):
|
||||
@@ -69,11 +65,8 @@ class TestTemplates(unittest.TestCase):
|
||||
assert parser.message_templates is not None, "Parsing template file failed"
|
||||
|
||||
def test_parser_fail(self):
|
||||
try:
|
||||
with self.assertRaises(Exception):
|
||||
_parser = MessageTemplateParser(None)
|
||||
assert False, "Fail case TEMPLATE_FILE == NONE not caught"
|
||||
except:
|
||||
assert True
|
||||
|
||||
def test_parser_version(self):
|
||||
version = self.parser.version
|
||||
@@ -111,15 +104,15 @@ class TestTemplates(unittest.TestCase):
|
||||
block = self.msg_dict['OpenCircuit'].get_block('CircuitInfo')
|
||||
tp = block.block_type
|
||||
num = block.number
|
||||
assert tp == MsgBlockType.MBT_SINGLE, "Expected: Single Returned: " + tp
|
||||
assert num == 0, "Expected: 0 Returned: " + str(num)
|
||||
assert tp == MsgBlockType.MBT_SINGLE, "Expected: Single Returned: " + tp
|
||||
assert num == 0, "Expected: 0 Returned: " + str(num)
|
||||
|
||||
def test_block_multiple(self):
|
||||
block = self.msg_dict['NeighborList'].get_block('NeighborBlock')
|
||||
tp = block.block_type
|
||||
num = block.number
|
||||
assert tp == MsgBlockType.MBT_MULTIPLE, "Expected: Multiple Returned: " + tp
|
||||
assert num == 4, "Expected: 4 Returned: " + str(num)
|
||||
assert num == 4, "Expected: 4 Returned: " + str(num)
|
||||
|
||||
def test_variable(self):
|
||||
variable = self.msg_dict['StartPingCheck'].get_block('PingID').get_variable('PingID')
|
||||
@@ -153,7 +146,7 @@ class TestTemplates(unittest.TestCase):
|
||||
medium_count = 0
|
||||
high_count = 0
|
||||
fixed_count = 0
|
||||
while True:
|
||||
while True:
|
||||
try:
|
||||
line = next(lines)
|
||||
except StopIteration:
|
||||
|
||||
@@ -86,6 +86,6 @@ class TestDeserializer(unittest.TestCase):
|
||||
# test the 72 byte ObjectUpdate.ObjectData.ObjectData case
|
||||
hex_string = '00000000000000000000803f6666da41660000432fffff422233e34100000000000000000000000000000000000000' \
|
||||
'000000000000000000000000000e33de3c000000000000000000000000'
|
||||
position = TemplateDataPacker.unpack(unhexlify(hex_string)[16:16+12], MsgType.MVT_LLVector3)
|
||||
position = TemplateDataPacker.unpack(unhexlify(hex_string)[16:16 + 12], MsgType.MVT_LLVector3)
|
||||
self.assertEqual(position, (128.00155639648438, 127.99840545654297, 28.399967193603516))
|
||||
self.assertIsInstance(position, Vector3)
|
||||
|
||||
72
tests/proxy/integration/test_http.py
Normal file
72
tests/proxy/integration/test_http.py
Normal file
@@ -0,0 +1,72 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from mitmproxy.test import tflow, tutils
|
||||
from mitmproxy.http import HTTPFlow
|
||||
|
||||
from hippolyzer.lib.proxy.addon_utils import BaseAddon
|
||||
from hippolyzer.lib.proxy.addons import AddonManager
|
||||
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, SerializedCapData
|
||||
from hippolyzer.lib.proxy.message_logger import FilteringMessageLogger
|
||||
from hippolyzer.lib.proxy.sessions import SessionManager
|
||||
|
||||
from . import BaseIntegrationTest
|
||||
|
||||
|
||||
class MockAddon(BaseAddon):
|
||||
def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
|
||||
flow.metadata["touched_addon"] = True
|
||||
|
||||
def handle_http_response(self, session_manager: SessionManager, flow: HippoHTTPFlow):
|
||||
flow.metadata["touched_addon"] = True
|
||||
|
||||
|
||||
class SimpleMessageLogger(FilteringMessageLogger):
|
||||
@property
|
||||
def entries(self):
|
||||
return self._filtered_entries
|
||||
|
||||
|
||||
class LLUDPIntegrationTests(BaseIntegrationTest):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.addon = MockAddon()
|
||||
AddonManager.init([], self.session_manager, [self.addon])
|
||||
self.flow_context = HTTPFlowContext()
|
||||
self.http_event_manager = MITMProxyEventManager(self.session_manager, self.flow_context)
|
||||
self._setup_circuit()
|
||||
|
||||
async def _pump_one_event(self):
|
||||
# If we don't yield then the new entry won't end up in the queue
|
||||
await asyncio.sleep(0.001)
|
||||
await self.http_event_manager.pump_proxy_event()
|
||||
await asyncio.sleep(0.001)
|
||||
|
||||
async def test_http_flow_request(self):
|
||||
# mimic a request coming in from mitmproxy over the queue
|
||||
fake_flow = tflow.tflow(req=tutils.treq(host="example.com"))
|
||||
fake_flow.metadata["cap_data_ser"] = SerializedCapData()
|
||||
self.flow_context.from_proxy_queue.put(("request", fake_flow.get_state()), True)
|
||||
await self._pump_one_event()
|
||||
self.assertTrue(self.flow_context.from_proxy_queue.empty())
|
||||
self.assertFalse(self.flow_context.to_proxy_queue.empty())
|
||||
flow_state = self.flow_context.to_proxy_queue.get(True)[2]
|
||||
mitm_flow: HTTPFlow = HTTPFlow.from_state(flow_state)
|
||||
# The response sent back to mitmproxy should have been our modified version
|
||||
self.assertEqual(True, mitm_flow.metadata["touched_addon"])
|
||||
|
||||
async def test_http_flow_response(self):
|
||||
# mimic a request coming in from mitmproxy over the queue
|
||||
fake_flow = tflow.tflow(req=tutils.treq(host="example.com"), resp=tutils.tresp())
|
||||
fake_flow.metadata["cap_data_ser"] = SerializedCapData()
|
||||
self.flow_context.from_proxy_queue.put(("response", fake_flow.get_state()), True)
|
||||
await self._pump_one_event()
|
||||
self.assertTrue(self.flow_context.from_proxy_queue.empty())
|
||||
self.assertFalse(self.flow_context.to_proxy_queue.empty())
|
||||
flow_state = self.flow_context.to_proxy_queue.get(True)[2]
|
||||
mitm_flow: HTTPFlow = HTTPFlow.from_state(flow_state)
|
||||
# The response sent back to mitmproxy should have been our modified version
|
||||
self.assertEqual(True, mitm_flow.metadata["touched_addon"])
|
||||
@@ -33,7 +33,7 @@ class MockAddon(BaseAddon):
|
||||
|
||||
def handle_object_updated(self, session: Session, region: ProxiedRegion,
|
||||
obj: Object, updated_props: Set[str]):
|
||||
self.events.append(("object_update", session.id, region.circuit_addr, obj.LocalID))
|
||||
self.events.append(("object_update", session.id, region.circuit_addr, obj.LocalID, updated_props))
|
||||
|
||||
|
||||
class SimpleMessageLogger(FilteringMessageLogger):
|
||||
@@ -53,31 +53,31 @@ class LLUDPIntegrationTests(BaseIntegrationTest):
|
||||
localid = random.getrandbits(32)
|
||||
|
||||
return b'\x00\x00\x00\x0c\xba\x00\r\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x03\xd0\x04\x00\x10' \
|
||||
b'\xe6\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_' + struct.pack("<I", localid) + \
|
||||
b'\xe6\x00\x12\x12\x10\xbf\x16XB~\x8f\xb4\xfb\x00\x1a\xcd\x9b\xe5' + struct.pack("<I", localid) + \
|
||||
b'\t\x00\xcdG\x00\x00\x03\x00\x00\x00\x1cB\x00\x00\x1cB\xcd\xcc\xcc=\xedG,' \
|
||||
b'B\x9e\xb1\x9eBff\xa0A\x00\x00\x00\x00\x00\x00\x00\x00[' \
|
||||
b'\x8b\xf8\xbe\xc0\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x00\x00' \
|
||||
b'\x8b\xf8\xbe\xc0\x00\x00\x00k\x9b\xc4\xfe3\nOa\xbb\xe2\xe4\xb2C\xac7\xbd\x00\x00\x00\x00\x00' \
|
||||
b'\x00\x00\x00\x00\x00\xa2=\x010\x00\x11\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed' \
|
||||
b'\x15F_@ \x00\x00\x00\x00d\x96\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
|
||||
b'\x00?\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'\x00?\x00\x00\x00\x1c\x9fJoI\x8dH\xa0\x9d\xc4&\'\'\x19=g\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'\x00ff\x86?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x89UgG$\xcbC' \
|
||||
b'\xed\x92\x0bG\xca\xed\x15F_\x10\x00\x00\x003\x00\x01\x01\x00\x00\x00\x00\xdb\x0f\xc9@\xa6' \
|
||||
b'\x9b\xc4=\xd0\x04\x00\x10\xe6\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\\\x04\x00\x00\t' \
|
||||
b'\x00\xd3G\x00\x00\x03\x00\x00\x00\x1cB\x00\x00\x1cB\xcd\xcc\xcc=\t\x08\x9cA\xf2\x03' \
|
||||
b'\xa5Bff\xa0A\x00\x00\x00\x00\x00\x00\x00\x00[' \
|
||||
b'\x8b\xf8\xbe\xc0\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x00\x00' \
|
||||
b'\x9b\xc4=\xd0\x04\x00\x10\xe6\x00\xc2\xa62\xe2\x9b\xd7L\xc4\xbb\xd6\x1fKC\xa6\xdf\x8d\\\x04\x00' \
|
||||
b'\x00\t\x00\xd3G\x00\x00\x03\x00\x00\x00\x1cB\x00\x00\x1cB\xcd\xcc\xcc=\t\x08\x9cA\xf2\x03' \
|
||||
b'\xa5Bff\xa0A\x00\x00\x00\x00\x00\x00\x00\x00[\x8b\xf8' \
|
||||
b'\xbe\xc0\x00\x00\x00\x0b\x1b\xa0\xd1\x97=C\xcd\xae\x19\xfd\xc9\xbb\x88\x05\xc3\x00\x00\x00\x00\x00' \
|
||||
b'\x00\x00\x00\x00\x00\xa2=\x010\x00\x11\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed' \
|
||||
b'\x15F_@ \x00\x00\x00\x00d\x96\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
|
||||
b'\x00?\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'\x00?\x00\x00\x00\xbd\x8b\xd7h{\xdbM\xbc\x8c3X\xa6\xa6\x0c\x94\xd7\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'\x00ff\x86?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x89UgG$\xcbC' \
|
||||
b'\xed\x92\x0bG\xca\xed\x15F_\x10\x00\x00\x003\x00\x01\x01\x00\x00\x00\x00\xdb\x0f\xc9@\xa6' \
|
||||
b'\x9b\xc4=\xd0\x04\x00\x10\xe6\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\xe2\x05\x00\x00' \
|
||||
b'\x9b\xc4=\xd0\x04\x00\x10\xe6\x00\xd1e\xac\xff,NBK\x91d\xbb\x15\\\x0b\xc3\x9c\xe2\x05\x00\x00' \
|
||||
b'\t\x00\xbbG\x00\x00\x03\x00\x00\x00\x1cB\x00\x00\x1cB\xcd\xcc\xcc=\x0f5\x97AY\x98ZBff' \
|
||||
b'\xa0A\x00\x00\x00\x00\x00\x00\x00\x00\xe6Y0\xbf\xc0\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG' \
|
||||
b'\xca\xed\x15F_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa2=\x010\x00\x11\x00\x00\x00' \
|
||||
b'\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_@ ' \
|
||||
b'#\xce\xf8\xf4\x0cJD.\xb7"\x96\x1cK\xd9\x01\x1b@ ' \
|
||||
b'\x00\x00\x00\x00d\x96\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
|
||||
b'?\x00\x00\x00\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'?\x00\x00\x003\xe1\xa1\xcf<\xbdD\xc4\xa0\xe6b\xe9\xbf=\xa2@\x00\x00\x00\x003\x00ff\x86\xbf' \
|
||||
b'\x00ff\x86?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x89UgG$\xcbC' \
|
||||
b'\xed\x92\x0bG\xca\xed\x15F_\x10\x00\x00\x003\x00\x01\x01\x00\x00\x00\x00\xdb\x0f\xc9@\xa6' \
|
||||
b'\x9b\xc4='
|
||||
@@ -175,7 +175,23 @@ class LLUDPIntegrationTests(BaseIntegrationTest):
|
||||
await self._wait_drained()
|
||||
obj = self.session.regions[0].objects.lookup_localid(1234)
|
||||
self.assertIsInstance(obj.TextureEntry, lazy_object_proxy.Proxy)
|
||||
self.assertEqual(obj.TextureEntry.Textures[None], UUID("89556747-24cb-43ed-920b-47caed15465f"))
|
||||
self.assertEqual(obj.TextureEntry.Textures[None], UUID("1c9f4a6f-498d-48a0-9dc4-262727193d67"))
|
||||
self.assertEqual(len(self.session.regions[0].objects), 3)
|
||||
|
||||
async def test_object_updated_changed_property_list(self):
|
||||
self._setup_circuit()
|
||||
# One creating update and one no-op update
|
||||
obj_update = self._make_objectupdate_compressed(1234)
|
||||
self.protocol.datagram_received(obj_update, self.region_addr)
|
||||
obj_update = self._make_objectupdate_compressed(1234)
|
||||
self.protocol.datagram_received(obj_update, self.region_addr)
|
||||
await self._wait_drained()
|
||||
self.assertEqual(len(self.session.regions[0].objects), 3)
|
||||
object_events = [e for e in self.addon.events if e[0] == "object_update"]
|
||||
# 3 objects in example packet and we sent it twice
|
||||
self.assertEqual(len(object_events), 6)
|
||||
# Only TextureEntry should be marked updated since it's a proxy object
|
||||
self.assertEqual(object_events[-1][-1], {"TextureEntry"})
|
||||
|
||||
async def test_message_logger(self):
|
||||
message_logger = SimpleMessageLogger()
|
||||
|
||||
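The assertions above depend on `TextureEntry` being a `lazy_object_proxy.Proxy`: the TE blob is only decoded when first touched, and, per the test comment, that is presumably why it is the one property flagged as changed even on an otherwise identical update. A small standalone illustration of the deferred evaluation, unrelated to hippolyzer's own code:

```python
import lazy_object_proxy


def parse_texture_entry() -> dict:
    print("parsing TextureEntry blob")   # only runs on first access
    return {"Textures": {None: "89556747-24cb-43ed-920b-47caed15465f"}}


te = lazy_object_proxy.Proxy(parse_texture_entry)
print("proxy created, nothing parsed yet")
print(te["Textures"][None])              # first access triggers the parse
```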
@@ -12,7 +12,7 @@ from hippolyzer.lib.proxy.sessions import SessionManager
|
||||
|
||||
class TestCapsClient(unittest.IsolatedAsyncioTestCase):
|
||||
def setUp(self) -> None:
|
||||
self.session = self.session = SessionManager().create_session({
|
||||
self.session = SessionManager().create_session({
|
||||
"session_id": UUID.random(),
|
||||
"secure_session_id": UUID.random(),
|
||||
"agent_id": UUID.random(),
|
||||
|
||||
@@ -4,7 +4,6 @@ from mitmproxy.test import tflow, tutils
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.http_proxy import SerializedCapData
|
||||
from hippolyzer.lib.proxy.message_logger import HTTPMessageLogEntry
|
||||
from hippolyzer.lib.proxy.sessions import SessionManager
|
||||
|
||||
@@ -21,17 +20,22 @@ class TestHTTPFlows(unittest.TestCase):
|
||||
"sim_port": "1",
|
||||
"seed_capability": "https://test.localhost:4/foo",
|
||||
})
|
||||
self.region = self.session.register_region(
|
||||
("127.0.0.1", 2),
|
||||
"https://test.localhost:4/foo",
|
||||
handle=90,
|
||||
)
|
||||
self.region.update_caps({
|
||||
"FakeCap": "http://example.com",
|
||||
"ViewerAsset": "http://assets.example.com",
|
||||
})
|
||||
|
||||
def test_request_formatting(self):
|
||||
req = tutils.treq(host="example.com", port=80)
|
||||
resp = tutils.tresp()
|
||||
fake_flow = tflow.tflow(req=req, resp=resp)
|
||||
fake_flow.metadata["cap_data_ser"] = SerializedCapData(
|
||||
cap_name="FakeCap",
|
||||
session_id=str(self.session.id),
|
||||
base_url="http://example.com",
|
||||
)
|
||||
fake_flow = tflow.tflow(req=req, resp=tutils.tresp())
|
||||
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
|
||||
# Make sure cap resolution works correctly
|
||||
flow.cap_data = self.session_manager.resolve_cap(flow.request.url)
|
||||
entry = HTTPMessageLogEntry(flow)
|
||||
self.assertEqual(entry.request(beautify=True), """GET [[FakeCap]]/path HTTP/1.1\r
|
||||
# http://example.com/path\r
|
||||
@@ -39,3 +43,67 @@ header: qvalue\r
|
||||
content-length: 7\r
|
||||
\r
|
||||
content""")
|
||||
|
||||
def test_binary_request_formatting(self):
|
||||
req = tutils.treq(host="example.com", port=80)
|
||||
fake_flow = tflow.tflow(req=req, resp=tutils.tresp())
|
||||
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
|
||||
# This should trigger the escaped body path without changing content-length
|
||||
flow.request.content = b"c\x00ntent"
|
||||
entry = HTTPMessageLogEntry(flow)
|
||||
self.assertEqual(entry.request(beautify=True), """GET http://example.com/path HTTP/1.1\r
|
||||
header: qvalue\r
|
||||
content-length: 7\r
|
||||
X-Hippo-Escaped-Body: 1\r
|
||||
\r
|
||||
c\\x00ntent""")
|
||||
|
||||
def test_llsd_response_formatting(self):
|
||||
fake_flow = tflow.tflow(req=tutils.treq(), resp=tutils.tresp())
|
||||
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
|
||||
# Half the time LLSD is sent with a random Content-Type and no PI indicating
|
||||
# what flavor of LLSD it is. Make sure the sniffing works correctly.
|
||||
flow.response.content = b"<llsd><integer>1</integer></llsd>"
|
||||
entry = HTTPMessageLogEntry(flow)
|
||||
self.assertEqual(entry.response(beautify=True), """HTTP/1.1 200 OK\r
|
||||
header-response: svalue\r
|
||||
content-length: 33\r
|
||||
\r
|
||||
<?xml version="1.0" ?>
|
||||
<llsd>
|
||||
<integer>1</integer>
|
||||
</llsd>
|
||||
""")
|
||||
|
||||
def test_flow_state_serde(self):
|
||||
fake_flow = tflow.tflow(req=tutils.treq(host="example.com"), resp=tutils.tresp())
|
||||
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
|
||||
# Make sure cap resolution works correctly
|
||||
flow.cap_data = self.session_manager.resolve_cap(flow.request.url)
|
||||
flow_state = flow.get_state()
|
||||
new_flow = HippoHTTPFlow.from_state(flow_state, self.session_manager)
|
||||
self.assertIs(self.session, new_flow.cap_data.session())
|
||||
|
||||
def test_http_asset_repo(self):
|
||||
asset_repo = self.session_manager.asset_repo
|
||||
asset_id = asset_repo.create_asset(b"foobar", one_shot=True)
|
||||
req = tutils.treq(host="assets.example.com", path=f"/?animatn_id={asset_id}")
|
||||
fake_flow = tflow.tflow(req=req)
|
||||
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
|
||||
# Have to resolve cap data so the asset repo knows this is an asset server cap
|
||||
flow.cap_data = self.session_manager.resolve_cap(flow.request.url)
|
||||
self.assertTrue(asset_repo.try_serve_asset(flow))
|
||||
self.assertEqual(b"foobar", flow.response.content)
|
||||
|
||||
def test_temporary_cap_resolution(self):
|
||||
self.region.register_temporary_cap("TempExample", "http://not.example.com")
|
||||
self.region.register_temporary_cap("TempExample", "http://not2.example.com")
|
||||
# Resolving the cap should consume it
|
||||
cap_data = self.session_manager.resolve_cap("http://not.example.com")
|
||||
self.assertEqual(cap_data.cap_name, "TempExample")
|
||||
# A CapData object should always be returned, but the cap_name field will be None
|
||||
new_cap_data = self.session_manager.resolve_cap("http://not.example.com")
|
||||
self.assertIsNone(new_cap_data.cap_name)
|
||||
# The second temp cap with the same name should still be in there
|
||||
cap_data = self.session_manager.resolve_cap("http://not2.example.com")
|
||||
self.assertEqual(cap_data.cap_name, "TempExample")
|
||||
|
||||
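As the comment in `test_llsd_response_formatting` notes, LLSD often arrives with an unhelpful Content-Type and no processing instruction, so the logger has to sniff the body. A rough standalone sketch of that kind of sniffing (illustrative only, not the logger's actual implementation):

```python
def sniff_llsd_flavor(body: bytes) -> str:
    # Guess the LLSD flavor from the payload when the Content-Type is useless.
    stripped = body.lstrip()
    if stripped.startswith(b"<?llsd/binary?>"):
        return "binary"
    if stripped.startswith(b"<?llsd/notation?>"):
        return "notation"
    if stripped.startswith(b"<?xml") or stripped.startswith(b"<llsd"):
        return "xml"
    return "unknown"


print(sniff_llsd_flavor(b"<llsd><integer>1</integer></llsd>"))  # xml
```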
@@ -294,10 +294,12 @@ class HumanReadableMessageTests(unittest.TestCase):
|
||||
|
||||
class TestMessageSubfieldSerializers(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.chat_msg = ProxiedMessage('ChatFromViewer',
|
||||
Block('AgentData', AgentID=UUID('550e8400-e29b-41d4-a716-446655440000'),
|
||||
SessionID=UUID('550e8400-e29b-41d4-a716-446655440000')),
|
||||
Block('ChatData', Message="Chatting\n", Type=1, Channel=0))
|
||||
self.chat_msg = ProxiedMessage(
|
||||
'ChatFromViewer',
|
||||
Block('AgentData',
|
||||
AgentID=UUID('550e8400-e29b-41d4-a716-446655440000'),
|
||||
SessionID=UUID('550e8400-e29b-41d4-a716-446655440000')),
|
||||
Block('ChatData', Message="Chatting\n", Type=1, Channel=0))
|
||||
|
||||
def test_pretty_repr(self):
|
||||
expected_repr = r"""ProxiedMessage('ChatFromViewer',
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import math
|
||||
import random
|
||||
import unittest
|
||||
from typing import *
|
||||
@@ -46,10 +47,10 @@ class ObjectManagerTests(unittest.TestCase):
|
||||
AddonManager.init([], None, [self.object_addon])
|
||||
|
||||
def _create_object_update(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None,
|
||||
pcode=None) -> Message:
|
||||
pcode=None, namevalue=None) -> Message:
|
||||
pos = pos if pos is not None else (1.0, 2.0, 3.0)
|
||||
rot = rot if rot is not None else (0.0, 0.0, 0.0, 1.0)
|
||||
pcode = pcode if pcode is not None else 9
|
||||
pcode = pcode if pcode is not None else PCode.PRIMITIVE
|
||||
msg = Message(
|
||||
"ObjectUpdate",
|
||||
Block("RegionData", RegionHandle=123, TimeDilation=123),
|
||||
@@ -65,6 +66,7 @@ class ObjectManagerTests(unittest.TestCase):
|
||||
ProfileCurve=1,
|
||||
PathScaleX=100,
|
||||
PathScaleY=100,
|
||||
NameValue=namevalue,
|
||||
TextureEntry=b'\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x00\x00\x00\x00\x00\x00\x00\x00\x80?\x00\x00'
|
||||
b'\x00\x80?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
|
||||
@@ -89,9 +91,11 @@ class ObjectManagerTests(unittest.TestCase):
|
||||
# Run through (de)serializer to fill in any missing vars
|
||||
return self.deserializer.deserialize(self.serializer.serialize(msg))
|
||||
|
||||
def _create_object(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None, pcode=None) -> Object:
|
||||
def _create_object(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None,
|
||||
pcode=None, namevalue=None) -> Object:
|
||||
msg = self._create_object_update(
|
||||
local_id=local_id, full_id=full_id, parent_id=parent_id, pos=pos, rot=rot, pcode=pcode)
|
||||
local_id=local_id, full_id=full_id, parent_id=parent_id, pos=pos, rot=rot,
|
||||
pcode=pcode, namevalue=namevalue)
|
||||
self.message_handler.handle(msg)
|
||||
return self.object_manager.lookup_fullid(msg["ObjectData"]["FullID"])
|
||||
|
||||
@@ -107,6 +111,9 @@ class ObjectManagerTests(unittest.TestCase):
|
||||
def _kill_object(self, obj: Object):
|
||||
self.message_handler.handle(self._create_kill_object(obj.LocalID))
|
||||
|
||||
def _get_avatar_positions(self) -> Dict[UUID, Vector3]:
|
||||
return {av.FullID: av.RegionPosition for av in self.object_manager.all_avatars}
|
||||
|
||||
def test_basic_tracking(self):
|
||||
"""Does creating an object result in it being tracked?"""
|
||||
msg = self._create_object_update()
|
||||
@@ -244,6 +251,65 @@ class ObjectManagerTests(unittest.TestCase):
|
||||
self.assertEqual(parent.RegionPosition, (0.0, 0.0, 0.0))
|
||||
self.assertEqual(child.RegionPosition, (1.0, 2.0, 0.0))
|
||||
|
||||
def test_avatar_locations(self):
|
||||
agent1_id = UUID.random()
|
||||
agent2_id = UUID.random()
|
||||
self.message_handler.handle(Message(
|
||||
"CoarseLocationUpdate",
|
||||
Block("AgentData", AgentID=agent1_id),
|
||||
Block("AgentData", AgentID=agent2_id),
|
||||
Block("Location", X=1, Y=2, Z=3),
|
||||
Block("Location", X=2, Y=3, Z=4),
|
||||
))
|
||||
self.assertDictEqual(self._get_avatar_positions(), {
|
||||
# CoarseLocation's Z axis is multiplied by 4
|
||||
agent1_id: Vector3(1, 2, 12),
|
||||
agent2_id: Vector3(2, 3, 16),
|
||||
})
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
# Simulate an avatar sitting on an object
|
||||
seat_object = self._create_object(pos=(0, 0, 3))
|
||||
# If we have a real object pos it should override coarse pos
|
||||
avatar_obj = self._create_object(full_id=agent1_id, pcode=PCode.AVATAR,
|
||||
parent_id=seat_object.LocalID, pos=Vector3(0, 0, 2))
|
||||
self.assertDictEqual(self._get_avatar_positions(), {
|
||||
# Agent is seated, make sure this is region and not local pos
|
||||
agent1_id: Vector3(0, 0, 5),
|
||||
agent2_id: Vector3(2, 3, 16),
|
||||
})
|
||||
|
||||
# If the object is killed and no coarse pos, it shouldn't be in the dict
|
||||
# CoarseLocationUpdates are expected to be complete, so any agents missing
|
||||
# are no longer in the sim.
|
||||
self._kill_object(avatar_obj)
|
||||
self.message_handler.handle(Message(
|
||||
"CoarseLocationUpdate",
|
||||
Block("AgentData", AgentID=agent2_id),
|
||||
Block("Location", X=2, Y=3, Z=4),
|
||||
))
|
||||
self.assertDictEqual(self._get_avatar_positions(), {
|
||||
agent2_id: Vector3(2, 3, 16),
|
||||
})
|
||||
|
||||
# 255 on Z axis means we can't guess the real Z
|
||||
self.message_handler.handle(Message(
|
||||
"CoarseLocationUpdate",
|
||||
Block("AgentData", AgentID=agent2_id),
|
||||
Block("Location", X=2, Y=3, Z=math.inf),
|
||||
))
|
||||
self.assertDictEqual(self._get_avatar_positions(), {
|
||||
agent2_id: Vector3(2, 3, math.inf),
|
||||
})
|
||||
|
||||
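The expected positions above encode how CoarseLocationUpdate is interpreted: X and Y are region-local meters, Z is carried divided by 4 in a single byte, and 255 is a sentinel for a height too large to represent, surfaced as infinity. A small sketch of that decoding, inferred from the assertions rather than taken from the object manager:

```python
import math
from typing import Tuple


def decode_coarse_location(x: int, y: int, z: int) -> Tuple[float, float, float]:
    # Z is quantized to 4-meter steps; 255 means the height is unknowable.
    real_z = math.inf if z == 255 else z * 4.0
    return float(x), float(y), real_z


print(decode_coarse_location(1, 2, 3))    # (1.0, 2.0, 12.0)
print(decode_coarse_location(2, 3, 255))  # (2.0, 3.0, inf)
```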
def test_name_cache(self):
# Receiving an update with a NameValue for an avatar should update NameCache
obj = self._create_object(
pcode=PCode.AVATAR,
namevalue=b'DisplayName STRING RW DS unicodename\n'
b'FirstName STRING RW DS firstname\n'
b'LastName STRING RW DS Resident\n'
b'Title STRING RW DS foo',
)
self.assertEqual(self.object_manager.name_cache.lookup(obj.FullID).FirstName, "firstname")
av = self.object_manager.lookup_avatar(obj.FullID)
self.assertEqual(av.Name, "firstname Resident")