34 Commits

Author SHA1 Message Date
Salad Dais
0c0de2bcbc v0.7.1 2021-09-04 07:27:20 +00:00
Salad Dais
9f2d2f2194 Pin recordclass version, use requirements.txt for windows build
recordclass had some breaking changes in 0.15
2021-09-04 07:12:45 +00:00
Salad Dais
c6e0a400a9 v0.7.0 2021-08-10 01:16:20 +00:00
Salad Dais
d01122d542 Call correct method to raise new message log window 2021-08-10 01:11:21 +00:00
Salad Dais
690d6b51b8 Upgrade to mitmproxy 7.0.2
Our fix for `Flow.set_state()` has been upstreamed
2021-08-09 22:16:23 +00:00
Salad Dais
2437a8b14f Add a framework for simple local anim creation, tail animator 2021-08-05 21:08:18 +00:00
Salad Dais
afa601fffe Support session-specific viewer cache directories 2021-08-02 18:23:13 +00:00
Salad Dais
874feff471 Fix incorrect reference to mitmproxy class 2021-08-01 12:16:10 +00:00
Salad Dais
05c53bba9f Add CapsClient to BaseClientSession 2021-08-01 06:39:04 +00:00
Salad Dais
578f1d8c4e Add setting to disable all proxy object autorequests
Will help with #18 by not changing object request behaviour when
running through the proxy.
2021-08-01 06:37:33 +00:00
Salad Dais
7d8e18440a Add local anim mangler support with example
Analogous to local mesh mangler support.
2021-07-31 11:56:17 +00:00
Salad Dais
66e112dd52 Add basic message log import / export feature
Closes #20
2021-07-30 03:13:33 +00:00
Salad Dais
02ac022ab3 Add export formats for message log entries 2021-07-30 01:06:29 +00:00
Salad Dais
33ce74754e Fix mirror_target_agent check in http hooks 2021-07-30 01:06:29 +00:00
Salad Dais
74dd6b977c Add extended to_dict() format for Message class
This will allow proper import / export of message logs.
2021-07-29 10:26:42 +00:00
Salad Dais
387652731a Add Message Mirror example addon 2021-07-29 09:43:20 +00:00
Salad Dais
e4601fd879 Support multiple Message Log windows
Closes #19
2021-07-29 01:00:57 +00:00
Salad Dais
6eb25f96d9 Support logging to a hierarchy of message loggers
Necessary to eventually support multiple message log windows
2021-07-27 02:35:03 +00:00
Salad Dais
22b9eeb5cb Better handling of optional command parameters 2021-07-22 23:59:55 +00:00
Salad Dais
0dbedcb2f5 Improve coverage 2021-07-22 23:58:17 +00:00
Salad Dais
7d9712c16e Fix message dropping and queueing corner cases 2021-07-22 05:08:47 +00:00
Salad Dais
82663c0fc2 Add parse_bool helper function for command parameters 2021-07-21 06:39:29 +00:00
Salad Dais
9fb4884470 Extend TlsLayer.tls_start_server instead of monkeypatching OpenSSL funcs
We have a more elegant way of unsetting `X509_CHECK_FLAG_NEVER_CHECK_SUBJECT`
now that mitmproxy 7.0 is out.

See https://github.com/mitmproxy/mitmproxy/pull/4688
2021-07-19 20:17:31 +00:00
Salad Dais
cf69c42f67 Rework HTTP proxying code to work with mitmproxy 7.0.0 2021-07-18 07:02:45 +00:00
Salad Dais
be658b9026 v0.6.3
Cutting a release before working on mitmproxy upgrade
2021-07-18 06:57:40 +00:00
Salad Dais
c505941595 Improve test for TE serialization 2021-07-18 06:33:55 +00:00
Salad Dais
96f471d6b7 Add initial support for Message-specific Block subclasses 2021-07-07 12:49:32 +00:00
Salad Dais
4238016767 Change readme wording
:)
2021-07-07 12:49:32 +00:00
Salad Dais
a35a67718d Add default_value to MessageTemplateVariable 2021-07-01 21:25:51 +00:00
Salad Dais
c2981b107a Remove CodeQL scanning
Maybe later, doesn't seem to do anything useful out of the box.
2021-06-28 06:00:42 -03:00
Salad Dais
851375499a Add CodeQL scanning 2021-06-28 05:44:02 -03:00
Salad Dais
d064ecd466 Don't raise when reading a new avatar_name_cache.xml 2021-06-25 18:45:42 +00:00
Salad Dais
fda37656c9 Reduce boilerplate for mesh mangling addons
Makes it less annoying to compose separate addons with different manglers
2021-06-24 05:29:23 +00:00
Salad Dais
49a9c6f28f Workaround for failed teleports due to EventQueue timeouts
Closes #16
2021-06-23 16:43:09 +00:00
48 changed files with 1324 additions and 404 deletions

View File

@@ -8,3 +8,5 @@ exclude_lines =
if typing.TYPE_CHECKING:
def __repr__
raise AssertionError
assert False
pass

View File

@@ -29,6 +29,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -e .
pip install cx_freeze

View File

@@ -2,7 +2,7 @@
![Python Test Status](https://github.com/SaladDais/Hippolyzer/workflows/Run%20Python%20Tests/badge.svg) [![codecov](https://codecov.io/gh/SaladDais/Hippolyzer/branch/master/graph/badge.svg?token=HCTFA4RAXX)](https://codecov.io/gh/SaladDais/Hippolyzer)
[Hippolyzer](http://wiki.secondlife.com/wiki/Hippo) is a fork of Linden Lab's abandoned
[Hippolyzer](http://wiki.secondlife.com/wiki/Hippo) is a revival of Linden Lab's
[PyOGP library](http://wiki.secondlife.com/wiki/PyOGP)
targeting modern Python 3, with a focus on debugging issues in Second Life-compatible
servers and clients. There is a secondary focus on mocking up new features without requiring a

View File

@@ -0,0 +1,32 @@
"""
Example anim mangler addon, to be used with local anim addon.
You can edit this live to apply various transforms to local anims,
as well as any uploaded anims. Any changes will be reflected in currently
playing local anims.
This example modifies any position keys of an animation's mHipRight joint.
"""
from hippolyzer.lib.base.llanim import Animation
from hippolyzer.lib.proxy.addons import AddonManager
import local_anim
AddonManager.hot_reload(local_anim, require_addons_loaded=True)
def offset_right_hip(anim: Animation):
hip_joint = anim.joints.get("mHipRight")
if hip_joint:
for pos_frame in hip_joint.pos_keyframes:
pos_frame.pos.Z *= 2.5
pos_frame.pos.X *= 5.0
return anim
class ExampleAnimManglerAddon(local_anim.BaseAnimManglerAddon):
ANIM_MANGLERS = [
offset_right_hip,
]
addons = [ExampleAnimManglerAddon()]

View File

@@ -105,7 +105,7 @@ class HorrorAnimatorAddon(BaseAddon):
# send the response back immediately
block = STATIC_VFS[orig_anim_id]
anim_data = STATIC_VFS.read_block(block)
flow.response = mitmproxy.http.HTTPResponse.make(
flow.response = mitmproxy.http.Response.make(
200,
_mutate_anim_bytes(anim_data),
{

View File

@@ -5,24 +5,38 @@ Local animations
assuming you loaded something.anim
/524 start_local_anim something
/524 stop_local_anim something
/524 save_local_anim something
If you want to trigger the animation from an object to simulate llStartAnimation():
llOwnerSay("@start_local_anim:something=force");
Also includes a concept of "anim manglers" similar to the "mesh manglers" of the
local mesh addon. This is useful if you want to test making procedural changes
to animations before uploading them. The manglers will be applied to any uploaded
animations as well.
May also be useful if you need to make ad-hoc changes to a bunch of animations on
bulk upload, like changing priority or removing a joint.
"""
import asyncio
import os
import pathlib
from abc import abstractmethod
from typing import *
from hippolyzer.lib.base import serialization as se
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.llanim import Animation
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.proxy import addon_ctx
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, GlobalProperty, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.sessions import Session, SessionManager
def _get_mtime(path: str):
@@ -35,12 +49,19 @@ def _get_mtime(path: str):
class LocalAnimAddon(BaseAddon):
# name -> path, only for anims actually from files
local_anim_paths: Dict[str, str] = SessionProperty(dict)
# name -> anim bytes
local_anim_bytes: Dict[str, bytes] = SessionProperty(dict)
# name -> mtime or None. Only for anims from files.
local_anim_mtimes: Dict[str, Optional[float]] = SessionProperty(dict)
# name -> current asset ID (changes each play)
local_anim_playing_ids: Dict[str, UUID] = SessionProperty(dict)
anim_manglers: List[Callable[[Animation], Animation]] = GlobalProperty(list)
def handle_init(self, session_manager: SessionManager):
self.remangle_local_anims(session_manager)
def handle_session_init(self, session: Session):
# Reload anims and reload any manglers if we have any
self._schedule_task(self._try_reload_anims(session))
@handle_command()
@@ -66,11 +87,23 @@ class LocalAnimAddon(BaseAddon):
"""Stop a named local animation"""
self.apply_local_anim(session, region, anim_name, new_data=None)
@handle_command(anim_name=str)
async def save_local_anim(self, _session: Session, _region: ProxiedRegion, anim_name: str):
"""Save a named local anim to disk"""
anim_bytes = self.local_anim_bytes.get(anim_name)
if not anim_bytes:
return
filename = await AddonManager.UI.save_file(filter_str="SL Anim (*.anim)", default_suffix="anim")
if not filename:
return
with open(filename, "wb") as f:
f.write(anim_bytes)
async def _try_reload_anims(self, session: Session):
while True:
region = session.main_region
if not region:
await asyncio.sleep(2.0)
await asyncio.sleep(1.0)
continue
# Loop over local anims we loaded
@@ -80,7 +113,7 @@ class LocalAnimAddon(BaseAddon):
continue
# is playing right now, check if there's a newer version
self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
await asyncio.sleep(2.0)
await asyncio.sleep(1.0)
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
cmd: str, options: List[str], param: str):
@@ -127,9 +160,11 @@ class LocalAnimAddon(BaseAddon):
StartAnim=True,
))
cls.local_anim_playing_ids[anim_name] = next_id
cls.local_anim_bytes[anim_name] = new_data
else:
# No data means just stop the anim
cls.local_anim_playing_ids.pop(anim_name, None)
cls.local_anim_bytes.pop(anim_name, None)
region.circuit.send_message(new_msg)
print(f"Changing {anim_name} to {next_id}")
@@ -156,9 +191,94 @@ class LocalAnimAddon(BaseAddon):
with open(anim_path, "rb") as f:
anim_data = f.read()
anim_data = cls._mangle_anim(anim_data)
else:
print(f"Unknown anim {anim_name!r}")
cls.apply_local_anim(session, region, anim_name, new_data=anim_data)
@classmethod
def _mangle_anim(cls, anim_data: bytes) -> bytes:
if not cls.anim_manglers:
return anim_data
reader = se.BufferReader("<", anim_data)
spec = se.Dataclass(Animation)
anim = reader.read(spec)
for mangler in cls.anim_manglers:
anim = mangler(anim)
writer = se.BufferWriter("<")
writer.write(spec, anim)
return writer.copy_buffer()
@classmethod
def remangle_local_anims(cls, session_manager: SessionManager):
# Anim manglers are global, so we need to re-mangle anims for all sessions
for session in session_manager.sessions:
# Push the context of this session onto the stack so we can access
# session-scoped properties
with addon_ctx.push(new_session=session, new_region=session.main_region):
cls.local_anim_mtimes.clear()
def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
if flow.name == "NewFileAgentInventoryUploader":
# Don't bother looking at this if we have no manglers
if not self.anim_manglers:
return
# This is kind of a crappy match but these magic bytes shouldn't match anything that SL
# allows as an upload type but animations.
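# (.anim payloads begin with two little-endian U16s, version and sub-version, currently 1 and 0, hence the b"\x01\x00\x00\x00" prefix.)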
if not flow.request.content or not flow.request.content.startswith(b"\x01\x00\x00\x00"):
return
# Replace the uploaded anim with the mangled version
flow.request.content = self._mangle_anim(flow.request.content)
show_message("Mangled upload request")
class BaseAnimManglerAddon(BaseAddon):
"""Base class for addons that mangle uploaded or file-based local animations"""
ANIM_MANGLERS: List[Callable[[Animation], Animation]]
def handle_init(self, session_manager: SessionManager):
# Add our manglers into the list
LocalAnimAddon.anim_manglers.extend(self.ANIM_MANGLERS)
LocalAnimAddon.remangle_local_anims(session_manager)
def handle_unload(self, session_manager: SessionManager):
# Clean up our manglers before we go away
mangler_list = LocalAnimAddon.anim_manglers
for mangler in self.ANIM_MANGLERS:
if mangler in mangler_list:
mangler_list.remove(mangler)
LocalAnimAddon.remangle_local_anims(session_manager)
class BaseAnimHelperAddon(BaseAddon):
"""
Base class for local creation of procedural animations
Animation generated by build_anim() gets applied to all active sessions
"""
ANIM_NAME: str
def handle_session_init(self, session: Session):
self._reapply_anim(session, session.main_region)
def handle_session_closed(self, session: Session):
LocalAnimAddon.apply_local_anim(session, session.main_region, self.ANIM_NAME, None)
def handle_unload(self, session_manager: SessionManager):
for session in session_manager.sessions:
# TODO: Nasty. Since we need to access session-local attrs we need to set the
# context even though we also explicitly pass session and region.
# Need to rethink the LocalAnimAddon API.
with addon_ctx.push(session, session.main_region):
LocalAnimAddon.apply_local_anim(session, session.main_region, self.ANIM_NAME, None)
@abstractmethod
def build_anim(self) -> Animation:
pass
def _reapply_anim(self, session: Session, region: ProxiedRegion):
LocalAnimAddon.apply_local_anim(session, region, self.ANIM_NAME, self.build_anim().to_bytes())
addons = [LocalAnimAddon()]

View File

@@ -201,7 +201,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
self.local_mesh_mapping = {x["mesh_name"]: x["mesh"] for x in instances}
# Fake a response, we don't want to actually send off the request.
flow.response = mitmproxy.http.HTTPResponse.make(
flow.response = mitmproxy.http.Response.make(
200,
b"",
{
@@ -280,4 +280,23 @@ class MeshUploadInterceptingAddon(BaseAddon):
cls._replace_local_mesh(session.main_region, asset_repo, mesh_list)
class BaseMeshManglerAddon(BaseAddon):
"""Base class for addons that mangle uploaded or local mesh"""
MESH_MANGLERS: List[Callable[[MeshAsset], MeshAsset]]
def handle_init(self, session_manager: SessionManager):
# Add our manglers into the list
MeshUploadInterceptingAddon.mesh_manglers.extend(self.MESH_MANGLERS)
# Tell the local mesh plugin that the mangler list changed, and to re-apply
MeshUploadInterceptingAddon.remangle_local_mesh(session_manager)
def handle_unload(self, session_manager: SessionManager):
# Clean up our manglers before we go away
mangler_list = MeshUploadInterceptingAddon.mesh_manglers
for mangler in self.MESH_MANGLERS:
if mangler in mangler_list:
mangler_list.remove(mangler)
MeshUploadInterceptingAddon.remangle_local_mesh(session_manager)
addons = [MeshUploadInterceptingAddon()]

View File

@@ -11,8 +11,6 @@ to add to give a mesh an arbitrary center of rotation / scaling.
from hippolyzer.lib.base.mesh import MeshAsset
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.sessions import SessionManager
import local_mesh
AddonManager.hot_reload(local_mesh, require_addons_loaded=True)
@@ -37,6 +35,9 @@ def reorient_mesh(orientation):
# X=1, Y=2, Z=3
def _reorienter(mesh: MeshAsset):
for material in mesh.iter_lod_materials():
if "Position" not in material:
# Must be a NoGeometry LOD
continue
# We don't need to use positions_(to/from)_domain here since we're just naively
# flipping the axes around.
material["Position"] = _reorient_coord_list(material["Position"], orientation)
@@ -46,28 +47,11 @@ def reorient_mesh(orientation):
return _reorienter
OUR_MANGLERS = [
# Negate the X and Y axes on any mesh we upload or create temp
reorient_mesh((-1, -2, 3)),
]
class ExampleMeshManglerAddon(local_mesh.BaseMeshManglerAddon):
MESH_MANGLERS = [
# Negate the X and Y axes on any mesh we upload or create temp
reorient_mesh((-1, -2, 3)),
]
class MeshManglerExampleAddon(BaseAddon):
def handle_init(self, session_manager: SessionManager):
# Add our manglers into the list
local_mesh_addon = local_mesh.MeshUploadInterceptingAddon
local_mesh_addon.mesh_manglers.extend(OUR_MANGLERS)
# Tell the local mesh plugin that the mangler list changed, and to re-apply
local_mesh_addon.remangle_local_mesh(session_manager)
def handle_unload(self, session_manager: SessionManager):
# Clean up our manglers before we go away
local_mesh_addon = local_mesh.MeshUploadInterceptingAddon
mangler_list = local_mesh_addon.mesh_manglers
for mangler in OUR_MANGLERS:
if mangler in mangler_list:
mangler_list.remove(mangler)
local_mesh_addon.remangle_local_mesh(session_manager)
addons = [MeshManglerExampleAddon()]
addons = [ExampleMeshManglerAddon()]

View File

@@ -0,0 +1,244 @@
"""
Message Mirror
Re-routes messages through the circuit of another agent running through this proxy,
rewriting the messages to use the credentials tied to that circuit.
Useful if you need to quickly QA authorization checks on a message handler or script.
Or if you want to chat as two people at once. Whatever.
Also shows some advanced ways of managing / rerouting Messages and HTTP flows.
Fiddle with the values of `SEND_NORMALLY` and `MIRROR` to change how and which
messages get moved to other circuits.
Usage: /524 mirror_to <mirror_agent_uuid>
To Disable: /524 mirror_to
"""
import weakref
from typing import Optional
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, show_message
from hippolyzer.lib.proxy.commands import handle_command, Parameter, parse_bool
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.caps import CapData, CapType
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
# Things that make no sense to mirror, or will make everything explode if mirrored.
SEND_NORMALLY = {
'StartPingCheck', 'CompletePingCheck', 'PacketAck', 'SimulatorViewerTimeMessage', 'SimStats',
'SoundTrigger', 'EventQueueGet', 'GetMesh', 'GetMesh2', 'ParcelDwellRequest', 'ViewerEffect', 'ViewerStats',
'ParcelAccessListRequest', 'FirestormBridge', 'AvatarRenderInfo', 'ParcelPropertiesRequest', 'GetObjectCost',
'RequestMultipleObjects', 'GetObjectPhysicsData', 'GetExperienceInfo', 'RequestTaskInventory', 'AgentRequestSit',
'MuteListRequest', 'UpdateMuteListEntry', 'RemoveMuteListEntry', 'RequestImage',
'AgentThrottle', 'UseCircuitCode', 'AgentWearablesRequest', 'AvatarPickerRequest', 'CloseCircuit',
'CompleteAgentMovement', 'RegionHandshakeReply', 'LogoutRequest', 'ParcelPropertiesRequest',
'ParcelPropertiesRequestByID', 'MapBlockRequest', 'MapLayerRequest', 'MapItemRequest', 'MapNameRequest',
'ParcelAccessListRequest', 'AvatarPropertiesRequest', 'DirFindQuery',
'SetAlwaysRun', 'GetDisplayNames', 'ViewerMetrics', 'AgentResume', 'AgentPause',
'ViewerAsset', 'GetTexture', 'UUIDNameRequest', 'AgentUpdate', 'AgentAnimation',
# Would just be confusing for everyone
'ImprovedInstantMessage',
# Xfer system isn't authed to begin with, and duping Xfers can lead to premature file deletion. Skip.
'RequestXfer', 'ConfirmXferPacket', 'AbortXfer', 'SendXferPacket',
}
# Messages that _must_ be sent normally, but are worth mirroring onto the target session to see how
# they would respond
MIRROR = {
'RequestObjectPropertiesFamily', 'ObjectSelect', 'RequestObjectProperties', 'TransferRequest',
'RequestMultipleObjects', 'RequestTaskInventory', 'FetchInventory2', 'ScriptDialogReply',
'ObjectDeselect', 'GenericMessage', 'ChatFromViewer'
}
for msg_name in DEFAULT_TEMPLATE_DICT.message_templates.keys():
# There are a lot of these.
if msg_name.startswith("Group") and msg_name.endswith("Request"):
MIRROR.add(msg_name)
class MessageMirrorAddon(BaseAddon):
mirror_target_agent: Optional[UUID] = SessionProperty(None)
mirror_use_target_session: bool = SessionProperty(True)
mirror_use_target_agent: bool = SessionProperty(True)
@handle_command(target_agent=Parameter(UUID, optional=True))
async def mirror_to(self, session: Session, _region, target_agent: Optional[UUID] = None):
"""
Send this session's outbound messages over another proxied agent's circuit
"""
if target_agent:
if target_agent == session.agent_id:
show_message("Can't mirror our own session")
target_agent = None
elif not any(s.agent_id == target_agent for s in session.session_manager.sessions):
show_message(f"No active proxied session for agent {target_agent}")
target_agent = None
self.mirror_target_agent = target_agent
if target_agent:
show_message(f"Mirroring to {target_agent}")
else:
show_message("Message mirroring disabled")
@handle_command(enabled=parse_bool)
async def set_mirror_use_target_session(self, _session, _region, enabled):
"""Replace the original session ID with the target session's ID when mirroring"""
self.mirror_use_target_session = enabled
@handle_command(enabled=parse_bool)
async def set_mirror_use_target_agent(self, _session, _region, enabled):
"""Replace the original agent ID with the target agent's ID when mirroring"""
self.mirror_use_target_agent = enabled
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
if message.direction != Direction.OUT:
return
if not self.mirror_target_agent:
return
if message.name in SEND_NORMALLY:
return
target_session = None
for poss_session in session.session_manager.sessions:
if poss_session.agent_id == self.mirror_target_agent:
target_session = poss_session
if not target_session:
print("Couldn't find target session?")
return
target_region = None
for poss_region in target_session.regions:
if poss_region.circuit_addr == region.circuit_addr:
target_region = poss_region
if not target_region:
print("Couldn't find equivalent target region?")
return
# Send the message normally first if we're mirroring
if message.name in MIRROR:
region.circuit.send_message(message)
# We're going to send the message on a new circuit, so we need to take
# it so we get a new packet ID and clean ACKs
message = message.take()
self._lludp_fixups(target_session, message)
target_region.circuit.send_message(message)
return True
def _lludp_fixups(self, target_session: Session, message: Message):
if "AgentData" in message:
agent_block = message["AgentData"][0]
if "AgentID" in agent_block and self.mirror_use_target_agent:
agent_block["AgentID"] = target_session.agent_id
if "SessionID" in agent_block and self.mirror_use_target_session:
agent_block["SessionID"] = target_session.id
if message.name == "TransferRequest":
transfer_block = message["TransferInfo"][0]
# This is a duplicated message so we need to give it a new ID
transfer_block["TransferID"] = UUID.random()
params = transfer_block.deserialize_var("Params")
# This kind of Transfer might not even use agent credentials
if self.mirror_use_target_agent and hasattr(params, 'AgentID'):
params.AgentID = target_session.agent_id
if self.mirror_use_target_session and hasattr(params, 'SessionID'):
params.SessionID = target_session.id
transfer_block.serialize_var("Params", params)
def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
# Already mirrored, ignore.
if flow.is_replay:
return
cap_data = flow.cap_data
if not cap_data:
return
if cap_data.cap_name in SEND_NORMALLY:
return
if cap_data.asset_server_cap:
return
# Likely doesn't have an exact equivalent in the target session; this is a temporary
# cap like an uploader URL or a stats URL.
if cap_data.type == CapType.TEMPORARY:
return
session: Optional[Session] = cap_data.session and cap_data.session()
if not session:
return
region: Optional[ProxiedRegion] = cap_data.region and cap_data.region()
if not region:
return
# Session-scoped, so we need to know if we have a session before checking
if not self.mirror_target_agent:
return
target_session: Optional[Session] = None
for poss_session in session.session_manager.sessions:
if poss_session.agent_id == self.mirror_target_agent:
target_session = poss_session
if not target_session:
return
caps_source = target_session
target_region: Optional[ProxiedRegion] = None
if region:
target_region = None
for poss_region in target_session.regions:
if poss_region.circuit_addr == region.circuit_addr:
target_region = poss_region
if not target_region:
print("No region in cap?")
return
caps_source = target_region
new_base_url = caps_source.caps.get(cap_data.cap_name)
if not new_base_url:
print("No equiv cap?")
return
if cap_data.cap_name in MIRROR:
flow = flow.copy()
# Have the cap data reflect the new URL we're pointing at
flow.metadata["cap_data"] = CapData(
cap_name=cap_data.cap_name,
region=weakref.ref(target_region) if target_region else None,
session=weakref.ref(target_session),
base_url=new_base_url,
)
# Tack any params onto the new base URL for the cap
new_url = new_base_url + flow.request.url[len(cap_data.base_url):]
flow.request.url = new_url
if cap_data.cap_name in MIRROR:
self._replay_flow(flow, session.session_manager)
def _replay_flow(self, flow: HippoHTTPFlow, session_manager: SessionManager):
# Work around mitmproxy bug, changing the URL updates the Host header, which may
# cause it to drop the port even when it shouldn't have. Fix the host header.
if flow.request.port not in (80, 443) and ":" not in flow.request.host_header:
flow.request.host_header = f"{flow.request.host}:{flow.request.port}"
# Should get repopulated when it goes back through the MITM addon
flow.metadata.pop("cap_data_ser", None)
flow.metadata.pop("cap_data", None)
proxy_queue = session_manager.flow_context.to_proxy_queue
proxy_queue.put_nowait(("replay", None, flow.get_state()))
addons = [MessageMirrorAddon()]

View File

@@ -0,0 +1,55 @@
"""
Tail animation generator
Demonstrates programmatic generation of local motions using BaseAnimHelperAddon
You can use this to create an animation with a script, fiddle with it until it
looks right, then finally save it with /524 save_local_anim <ANIM_NAME>.
The built animation is automatically applied to all active sessions when loaded,
and is re-generated whenever the script is edited. Unloading the script stops
the animations.
"""
from hippolyzer.lib.base.anim_utils import shift_keyframes, smooth_rot
from hippolyzer.lib.base.datatypes import Quaternion
from hippolyzer.lib.base.llanim import Animation, Joint
from hippolyzer.lib.proxy.addons import AddonManager
import local_anim
AddonManager.hot_reload(local_anim, require_addons_loaded=True)
class TailAnimator(local_anim.BaseAnimHelperAddon):
# Should be unique
ANIM_NAME = "tail_anim"
def build_anim(self) -> Animation:
anim = Animation(
base_priority=5,
duration=5.0,
loop_out_point=5.0,
loop=True,
)
# Iterate along tail joints 1 through 6
for joint_num in range(1, 7):
# Give joints further along the tail a wider range of motion
start_rot = Quaternion.from_euler(0.2, -0.3, 0.15 * joint_num)
end_rot = Quaternion.from_euler(-0.2, -0.3, -0.15 * joint_num)
rot_keyframes = [
# Tween between start_rot and end_rot, using smooth interpolation.
# SL's keyframes only allow linear interpolation which doesn't look great
# for natural motions. `smooth_rot()` gets around that by generating
# smooth inter frames for SL to linearly interpolate between.
*smooth_rot(start_rot, end_rot, inter_frames=10, time=0.0, duration=2.5),
*smooth_rot(end_rot, start_rot, inter_frames=10, time=2.5, duration=2.5),
]
anim.joints[f"mTail{joint_num}"] = Joint(
priority=5,
# Each joint's frames should be ahead of the previous joint's by 2 frames
rot_keyframes=shift_keyframes(rot_keyframes, joint_num * 2),
)
return anim
addons = [TailAnimator()]

View File

@@ -19,9 +19,9 @@ class MessageLogHeader(enum.IntEnum):
class MessageLogModel(QtCore.QAbstractTableModel, FilteringMessageLogger):
def __init__(self, parent=None):
def __init__(self, parent=None, maxlen=2000):
QtCore.QAbstractTableModel.__init__(self, parent)
FilteringMessageLogger.__init__(self)
FilteringMessageLogger.__init__(self, maxlen=maxlen)
def _begin_insert(self, insert_idx: int):
self.beginInsertRows(QtCore.QModelIndex(), insert_idx, insert_idx)

View File

@@ -43,7 +43,7 @@ class SelectionManagerAddon(BaseAddon):
LOG.debug(f"Don't know about selected {local_id}, requesting object")
needed_objects.add(local_id)
if needed_objects:
if needed_objects and session.session_manager.settings.ALLOW_AUTO_REQUEST_OBJECTS:
region.objects.request_objects(needed_objects)
# ParcelDwellRequests are sent whenever "about land" is opened. This gives us a
# decent mechanism for selecting parcels.
@@ -89,7 +89,6 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
mitmproxy_master.start_server()
gc.freeze()
flow_context.mitmproxy_ready.set()
mitm_loop.run_forever()
@@ -120,7 +119,7 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
if sys.argv[1] == "--setup-ca":
try:
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
except mitmproxy.exceptions.ServerException:
except mitmproxy.exceptions.MitmproxyException:
# Proxy already running, create the master so we don't try to bind to a port
mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
setup_ca(sys.argv[2], mitmproxy_master)

View File

@@ -17,7 +17,7 @@ import urllib.parse
from typing import *
import multidict
from qasync import QEventLoop
from qasync import QEventLoop, asyncSlot
from PySide2 import QtCore, QtWidgets, QtGui
from hippolyzer.apps.model import MessageLogModel, MessageLogHeader, RegionListModel
@@ -42,7 +42,8 @@ from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
import_log_entries, export_log_entries
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
from hippolyzer.lib.proxy.settings import ProxySettings
@@ -68,11 +69,11 @@ class GUISessionManager(SessionManager, QtCore.QObject):
regionAdded = QtCore.Signal(ProxiedRegion)
regionRemoved = QtCore.Signal(ProxiedRegion)
def __init__(self, settings, model):
def __init__(self, settings):
SessionManager.__init__(self, settings)
QtCore.QObject.__init__(self)
self.all_regions = []
self.message_logger = model
self.message_logger = WrappingMessageLogger()
def checkRegions(self):
new_regions = itertools.chain(*[s.regions for s in self.sessions])
@@ -101,12 +102,16 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
dialog.open()
return future
async def _file_dialog(self, caption: str, directory: str, filter_str: str, mode: QtWidgets.QFileDialog.FileMode) \
-> Tuple[bool, QtWidgets.QFileDialog]:
async def _file_dialog(
self, caption: str, directory: str, filter_str: str, mode: QtWidgets.QFileDialog.FileMode,
default_suffix: str = '',
) -> Tuple[bool, QtWidgets.QFileDialog]:
dialog = QtWidgets.QFileDialog(self.parent(), caption=caption, directory=directory, filter=filter_str)
dialog.setFileMode(mode)
if mode == QtWidgets.QFileDialog.FileMode.AnyFile:
dialog.setAcceptMode(QtWidgets.QFileDialog.AcceptMode.AcceptSave)
if default_suffix:
dialog.setDefaultSuffix(default_suffix)
res = await self._dialog_async_exec(dialog)
return res, dialog
@@ -134,9 +139,10 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
return None
return dialog.selectedFiles()[0]
async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '') -> Optional[str]:
async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '',
default_suffix: str = '') -> Optional[str]:
res, dialog = await self._file_dialog(
caption, directory, filter_str, QtWidgets.QFileDialog.FileMode.AnyFile
caption, directory, filter_str, QtWidgets.QFileDialog.FileMode.AnyFile, default_suffix,
)
if not res or not dialog.selectedFiles():
return None
@@ -156,6 +162,22 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
return (await fut) == QtWidgets.QMessageBox.Ok
class GUIProxySettings(ProxySettings):
"""Persistent settings backed by QSettings"""
def __init__(self, settings: QtCore.QSettings):
super().__init__()
self._settings_obj = settings
def get_setting(self, name: str) -> Any:
val: Any = self._settings_obj.value(name, defaultValue=dataclasses.MISSING)
if val is dataclasses.MISSING:
return val
return json.loads(val)
def set_setting(self, name: str, val: Any):
self._settings_obj.setValue(name, json.dumps(val))
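# A hypothetical round-trip sketch for the settings class above; the setting name is
# only illustrative. Values are persisted as JSON strings inside QSettings, so a bool
# comes back as a real bool rather than the string "false".
def _settings_round_trip_example() -> None:
    qt_settings = QtCore.QSettings("SaladDais", "hippolyzer")
    settings = GUIProxySettings(qt_settings)
    settings.set_setting("ALLOW_AUTO_REQUEST_OBJECTS", False)  # stored as the JSON string "false"
    assert settings.get_setting("ALLOW_AUTO_REQUEST_OBJECTS") is False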
def nonFatalExceptions(f):
@functools.wraps(f)
def _wrapper(self, *args, **kwargs):
@@ -169,7 +191,35 @@ def nonFatalExceptions(f):
return _wrapper
class ProxyGUI(QtWidgets.QMainWindow):
def buildReplacements(session: Session, region: ProxiedRegion):
if not session or not region:
return {}
selected = session.selected
agent_object = region.objects.lookup_fullid(session.agent_id)
selected_local = selected.object_local
selected_object = None
if selected_local:
# We may or may not have an object for this
selected_object = region.objects.lookup_localid(selected_local)
return {
"SELECTED_LOCAL": selected_local,
"SELECTED_FULL": selected_object.FullID if selected_object else None,
"SELECTED_PARCEL_LOCAL": selected.parcel_local,
"SELECTED_PARCEL_FULL": selected.parcel_full,
"SELECTED_SCRIPT_ITEM": selected.script_item,
"SELECTED_TASK_ITEM": selected.task_item,
"AGENT_ID": session.agent_id,
"AGENT_LOCAL": agent_object.LocalID if agent_object else None,
"SESSION_ID": session.id,
"AGENT_POS": agent_object.Position if agent_object else None,
"NULL_KEY": UUID(),
"RANDOM_KEY": UUID.random,
"CIRCUIT_CODE": session.circuit_code,
"REGION_HANDLE": region.handle,
}
class MessageLogWindow(QtWidgets.QMainWindow):
DEFAULT_IGNORE = "StartPingCheck CompletePingCheck PacketAck SimulatorViewerTimeMessage SimStats " \
"AgentUpdate AgentAnimation AvatarAnimation ViewerEffect CoarseLocationUpdate LayerData " \
"CameraConstraint ObjectUpdateCached RequestMultipleObjects ObjectUpdate ObjectUpdateCompressed " \
@@ -183,23 +233,36 @@ class ProxyGUI(QtWidgets.QMainWindow):
textRequest: QtWidgets.QTextEdit
def __init__(self):
super().__init__()
def __init__(
self, settings: GUIProxySettings, session_manager: GUISessionManager,
log_live_messages: bool, parent: Optional[QtWidgets.QWidget] = None,
):
super().__init__(parent=parent)
loadUi(MAIN_WINDOW_UI_PATH, self)
if parent:
self.setWindowTitle("Message Log")
self.menuBar.setEnabled(False) # type: ignore
self.menuBar.hide() # type: ignore
self._selectedEntry: Optional[AbstractMessageLogEntry] = None
self.settings = GUIProxySettings(QtCore.QSettings("SaladDais", "hippolyzer"))
self.model = MessageLogModel(parent=self.tableView)
self.settings = settings
self.sessionManager = session_manager
if log_live_messages:
self.model = MessageLogModel(parent=self.tableView)
session_manager.message_logger.loggers.append(self.model)
else:
self.model = MessageLogModel(parent=self.tableView, maxlen=None)
self.tableView.setModel(self.model)
self.model.rowsAboutToBeInserted.connect(self.beforeInsert)
self.model.rowsInserted.connect(self.afterInsert)
self.tableView.selectionModel().selectionChanged.connect(self._messageSelected)
self.checkBeautify.clicked.connect(self._showSelectedMessage)
self.checkPause.clicked.connect(self._setPaused)
self._setFilter(self.DEFAULT_FILTER)
self.setFilter(self.DEFAULT_FILTER)
self.btnClearLog.clicked.connect(self.model.clear)
self.lineEditFilter.editingFinished.connect(self._setFilter)
self.lineEditFilter.editingFinished.connect(self.setFilter)
self.btnMessageBuilder.clicked.connect(self._sendToMessageBuilder)
self.btnCopyRepr.clicked.connect(self._copyRepr)
self.actionInstallHTTPSCerts.triggered.connect(self._installHTTPSCerts)
@@ -213,15 +276,14 @@ class ProxyGUI(QtWidgets.QMainWindow):
self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)
self.actionImportLogEntries.triggered.connect(self._importLogEntries)
self.actionExportLogEntries.triggered.connect(self._exportLogEntries)
self._filterMenu = QtWidgets.QMenu()
self._populateFilterMenu()
self.toolButtonFilter.setMenu(self._filterMenu)
self.sessionManager = GUISessionManager(self.settings, self.model)
self.interactionManager = GUIInteractionManager(self)
AddonManager.UI = self.interactionManager
self._shouldScrollOnInsert = True
self.tableView.horizontalHeader().resizeSection(MessageLogHeader.Host, 80)
self.tableView.horizontalHeader().resizeSection(MessageLogHeader.Method, 60)
@@ -230,10 +292,16 @@ class ProxyGUI(QtWidgets.QMainWindow):
self.textResponse.hide()
def closeEvent(self, event) -> None:
loggers = self.sessionManager.message_logger.loggers
if self.model in loggers:
loggers.remove(self.model)
super().closeEvent(event)
def _populateFilterMenu(self):
def _addFilterAction(text, filter_str):
filter_action = QtWidgets.QAction(text, self)
filter_action.triggered.connect(lambda: self._setFilter(filter_str))
filter_action.triggered.connect(lambda: self.setFilter(filter_str))
self._filterMenu.addAction(filter_action)
self._filterMenu.clear()
@@ -252,7 +320,7 @@ class ProxyGUI(QtWidgets.QMainWindow):
dialog.exec_()
@nonFatalExceptions
def _setFilter(self, filter_str=None):
def setFilter(self, filter_str=None):
if filter_str is None:
filter_str = self.lineEditFilter.text()
else:
@@ -284,7 +352,7 @@ class ProxyGUI(QtWidgets.QMainWindow):
return
req = entry.request(
beautify=self.checkBeautify.isChecked(),
replacements=self.buildReplacements(entry.session, entry.region),
replacements=buildReplacements(entry.session, entry.region),
)
highlight_range = None
if isinstance(req, SpannedString):
@@ -324,7 +392,7 @@ class ProxyGUI(QtWidgets.QMainWindow):
win.show()
msg = self._selectedEntry
beautify = self.checkBeautify.isChecked()
replacements = self.buildReplacements(msg.session, msg.region)
replacements = buildReplacements(msg.session, msg.region)
win.setMessageText(msg.request(beautify=beautify, replacements=replacements))
@nonFatalExceptions
@@ -340,32 +408,38 @@ class ProxyGUI(QtWidgets.QMainWindow):
win = MessageBuilderWindow(self, self.sessionManager)
win.show()
def buildReplacements(self, session: Session, region: ProxiedRegion):
if not session or not region:
return {}
selected = session.selected
agent_object = region.objects.lookup_fullid(session.agent_id)
selected_local = selected.object_local
selected_object = None
if selected_local:
# We may or may not have an object for this
selected_object = region.objects.lookup_localid(selected_local)
return {
"SELECTED_LOCAL": selected_local,
"SELECTED_FULL": selected_object.FullID if selected_object else None,
"SELECTED_PARCEL_LOCAL": selected.parcel_local,
"SELECTED_PARCEL_FULL": selected.parcel_full,
"SELECTED_SCRIPT_ITEM": selected.script_item,
"SELECTED_TASK_ITEM": selected.task_item,
"AGENT_ID": session.agent_id,
"AGENT_LOCAL": agent_object.LocalID if agent_object else None,
"SESSION_ID": session.id,
"AGENT_POS": agent_object.Position if agent_object else None,
"NULL_KEY": UUID(),
"RANDOM_KEY": UUID.random,
"CIRCUIT_CODE": session.circuit_code,
"REGION_HANDLE": region.handle,
}
def _openNewMessageLogWindow(self):
win: QtWidgets.QMainWindow = MessageLogWindow(
self.settings, self.sessionManager, log_live_messages=True, parent=self)
win.setFilter(self.lineEditFilter.text())
win.show()
win.activateWindow()
@asyncSlot()
async def _importLogEntries(self):
log_file = await AddonManager.UI.open_file(
caption="Import Log Entries", filter_str="Hippolyzer Logs (*.hippolog)"
)
if not log_file:
return
win = MessageLogWindow(self.settings, self.sessionManager, log_live_messages=False, parent=self)
win.setFilter(self.lineEditFilter.text())
with open(log_file, "rb") as f:
entries = import_log_entries(f.read())
for entry in entries:
win.model.add_log_entry(entry)
win.show()
win.activateWindow()
@asyncSlot()
async def _exportLogEntries(self):
log_file = await AddonManager.UI.save_file(
caption="Export Log Entries", filter_str="Hippolyzer Logs (*.hippolog)", default_suffix="hippolog",
)
if not log_file:
return
with open(log_file, "wb") as f:
f.write(export_log_entries(self.model))
def _installHTTPSCerts(self):
msg = QtWidgets.QMessageBox()
@@ -575,24 +649,9 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
if var.name in ("TaskID", "ObjectID"):
return VerbatimHumanVal("[[SELECTED_FULL]]")
if var.type.is_int:
return 0
elif var.type.is_float:
return 0.0
elif var.type == MsgType.MVT_LLUUID:
return UUID()
elif var.type == MsgType.MVT_BOOL:
return False
elif var.type == MsgType.MVT_VARIABLE:
return ""
elif var.type in (MsgType.MVT_LLVector3, MsgType.MVT_LLVector3d, MsgType.MVT_LLQuaternion):
return VerbatimHumanVal("(0.0, 0.0, 0.0)")
elif var.type == MsgType.MVT_LLVector4:
return VerbatimHumanVal("(0.0, 0.0, 0.0, 0.0)")
elif var.type == MsgType.MVT_FIXED:
return b"\x00" * var.size
elif var.type == MsgType.MVT_IP_ADDR:
return "0.0.0.0"
default_val = var.default_value
if default_val is not None:
return default_val
return VerbatimHumanVal("")
@nonFatalExceptions
@@ -600,7 +659,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
session, region = self._getTarget()
msg_text = self.textRequest.toPlainText()
replacements = self.parent().buildReplacements(session, region)
replacements = buildReplacements(session, region)
if re.match(r"\A\s*(in|out)\s+", msg_text, re.I):
sender_func = self._sendLLUDPMessage
@@ -632,7 +691,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
msg = HumanMessageSerializer.from_human_string(msg_text, replacements, env, safe=False)
if self.checkLLUDPViaCaps.isChecked():
if msg.direction == Direction.IN:
region.eq_manager.queue_event(
region.eq_manager.inject_event(
self.llsdSerializer.serialize(msg, as_dict=True)
)
else:
@@ -656,7 +715,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
raise RuntimeError("Need a valid session and region to send EQ event")
message_line, _, body = (x.strip() for x in msg_text.partition("\n"))
message_name = message_line.rsplit(" ", 1)[-1]
region.eq_manager.queue_event({
region.eq_manager.inject_event({
"message": message_name,
"body": llsd.parse_xml(body.encode("utf8")),
})
@@ -749,7 +808,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
class AddonDialog(QtWidgets.QDialog):
listAddons: QtWidgets.QListWidget
def __init__(self, parent: ProxyGUI):
def __init__(self, parent: MessageLogWindow):
super().__init__()
loadUi(ADDON_DIALOG_UI_PATH, self)
@@ -800,7 +859,7 @@ class AddonDialog(QtWidgets.QDialog):
class FilterDialog(QtWidgets.QDialog):
listFilters: QtWidgets.QListWidget
def __init__(self, parent: ProxyGUI):
def __init__(self, parent: MessageLogWindow):
super().__init__()
loadUi(FILTER_DIALOG_UI_PATH, self)
@@ -838,29 +897,16 @@ class FilterDialog(QtWidgets.QDialog):
self.listFilters.takeItem(idx)
class GUIProxySettings(ProxySettings):
"""Persistent settings backed by QSettings"""
def __init__(self, settings: QtCore.QSettings):
super().__init__()
self._settings_obj = settings
def get_setting(self, name: str) -> Any:
val: Any = self._settings_obj.value(name, defaultValue=dataclasses.MISSING)
if val is dataclasses.MISSING:
return val
return json.loads(val)
def set_setting(self, name: str, val: Any):
self._settings_obj.setValue(name, json.dumps(val))
def gui_main():
multiprocessing.set_start_method('spawn')
QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_ShareOpenGLContexts)
app = QtWidgets.QApplication(sys.argv)
loop = QEventLoop(app)
asyncio.set_event_loop(loop)
window = ProxyGUI()
settings = GUIProxySettings(QtCore.QSettings("SaladDais", "hippolyzer"))
session_manager = GUISessionManager(settings)
window = MessageLogWindow(settings, session_manager, log_live_messages=True)
AddonManager.UI = GUIInteractionManager(window)
timer = QtCore.QTimer(app)
timer.timeout.connect(window.sessionManager.checkRegions)
timer.start(100)

View File

@@ -256,6 +256,10 @@
<bool>true</bool>
</property>
<addaction name="actionOpenMessageBuilder"/>
<addaction name="actionOpenNewMessageLogWindow"/>
<addaction name="separator"/>
<addaction name="actionImportLogEntries"/>
<addaction name="actionExportLogEntries"/>
<addaction name="separator"/>
<addaction name="actionInstallHTTPSCerts"/>
<addaction name="actionManageAddons"/>
@@ -323,6 +327,21 @@
<string>Force the proxy to request objects that it doesn't know about due to cache misses</string>
</property>
</action>
<action name="actionOpenNewMessageLogWindow">
<property name="text">
<string>Open New Message Log Window</string>
</property>
</action>
<action name="actionImportLogEntries">
<property name="text">
<string>Import Log Entries</string>
</property>
</action>
<action name="actionExportLogEntries">
<property name="text">
<string>Export Log Entries</string>
</property>
</action>
</widget>
<resources/>
<connections/>

View File

@@ -0,0 +1,91 @@
"""
Assorted utilities to make creating animations from scratch easier
"""
import copy
from typing import List, Union
from hippolyzer.lib.base.datatypes import Vector3, Quaternion
from hippolyzer.lib.base.llanim import PosKeyframe, RotKeyframe
def smooth_step(t: float):
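# Classic smoothstep easing: clamp t to [0, 1], then return 3*t**2 - 2*t**3 (e.g. smooth_step(0.25) == 0.15625)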
t = max(0.0, min(1.0, t))
return t * t * (3 - 2 * t)
def rot_interp(r0: Quaternion, r1: Quaternion, t: float):
"""
Bad quaternion interpolation
TODO: This is definitely not correct, yet seems to work ok? Implement slerp.
"""
# Ignore W
r0 = r0.data(3)
r1 = r1.data(3)
return Quaternion(*map(lambda pair: ((pair[0] * (1.0 - t)) + (pair[1] * t)), zip(r0, r1)))
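# A standalone slerp sketch for the TODO above. It operates on plain (x, y, z, w)
# tuples rather than this module's Quaternion class, whose full constructor
# signature isn't shown in this diff; sketch only, not wired into rot_interp().
import math


def slerp_tuple(q0, q1, t: float):
    dot = sum(a * b for a, b in zip(q0, q1))
    if dot < 0.0:
        # Flip one input so we interpolate along the shorter arc
        q1 = tuple(-c for c in q1)
        dot = -dot
    if dot > 0.9995:
        # Nearly parallel: fall back to a normalized lerp to avoid dividing by ~0
        mixed = tuple(a + t * (b - a) for a, b in zip(q0, q1))
        norm = math.sqrt(sum(c * c for c in mixed))
        return tuple(c / norm for c in mixed)
    theta_0 = math.acos(dot)
    sin_theta_0 = math.sin(theta_0)
    theta = theta_0 * t
    s0 = math.cos(theta) - dot * math.sin(theta) / sin_theta_0
    s1 = math.sin(theta) / sin_theta_0
    return tuple(s0 * a + s1 * b for a, b in zip(q0, q1))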
def unique_frames(frames: List[Union[PosKeyframe, RotKeyframe]]):
"""Drop frames where time and coordinate are exact duplicates of another frame"""
new_frames = []
for frame in frames:
# TODO: fudge factor for float comparison instead
if frame not in new_frames:
new_frames.append(frame)
return new_frames
def shift_keyframes(frames: List[Union[PosKeyframe, RotKeyframe]], num: int):
"""
Shift keyframes around by `num` frames
Assumes keyframes occur at a set cadence, and that the first and last keyframes are at the same coordinate.
"""
# Get rid of duplicate frames
frames = unique_frames(frames)
pop_idx = -1
insert_idx = 0
if num < 0:
insert_idx = len(frames) - 1
pop_idx = 0
num = -num
old_times = [f.time for f in frames]
new_frames = frames.copy()
# Drop last, duped frame. We'll copy the first frame to replace it later
new_frames.pop(-1)
for _ in range(num):
new_frames.insert(insert_idx, new_frames.pop(pop_idx))
# Put first frame back on the end
new_frames.append(copy.copy(new_frames[0]))
assert len(old_times) == len(new_frames)
assert new_frames[0] == new_frames[-1]
# Make the times of the shifted keyframes match up with the previous timeline
for old_time, new_frame in zip(old_times, new_frames):
new_frame.time = old_time
return new_frames
def smooth_pos(start: Vector3, end: Vector3, inter_frames: int, time: float, duration: float) -> List[PosKeyframe]:
"""Generate keyframes to smoothly interpolate between two positions"""
frames = [PosKeyframe(time=time, pos=start)]
for i in range(0, inter_frames):
t = (i + 1) / (inter_frames + 1)
smooth_t = smooth_step(t)
pos = Vector3(smooth_t, smooth_t, smooth_t).interpolate(start, end)
frames.append(PosKeyframe(time=time + (t * duration), pos=pos))
return frames + [PosKeyframe(time=time + duration, pos=end)]
def smooth_rot(start: Quaternion, end: Quaternion, inter_frames: int, time: float, duration: float)\
-> List[RotKeyframe]:
"""Generate keyframes to smoothly interpolate between two rotations"""
frames = [RotKeyframe(time=time, rot=start)]
for i in range(0, inter_frames):
t = (i + 1) / (inter_frames + 1)
smooth_t = smooth_step(t)
frames.append(RotKeyframe(time=time + (t * duration), rot=rot_interp(start, end, smooth_t)))
return frames + [RotKeyframe(time=time + duration, rot=end)]

View File

@@ -294,6 +294,17 @@ class RawBytes(bytes):
pass
_T = TypeVar("_T")
class Pretty(Generic[_T]):
"""Wrapper for var values so Messages will know to serialize"""
__slots__ = ("value",)
def __init__(self, value: _T):
self.value: _T = value
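# e.g. block["Params"] = Pretty(params) makes Block.__setitem__() pass the unwrapped value to serialize_var()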
class StringEnum(str, enum.Enum):
def __str__(self):
return self.value
@@ -333,5 +344,5 @@ class TaggedUnion(recordclass.datatuple): # type: ignore
__all__ = [
"Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
"UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
"IntEnum", "IntFlag", "flags_to_pod"
"IntEnum", "IntFlag", "flags_to_pod", "Pretty"
]

View File

@@ -48,6 +48,8 @@ class Circuit:
self.packet_id_base += 1
if not message.acks:
message.send_flags &= ~PacketFlags.ACK
# If it was queued, it's not anymore
message.queued = False
message.finalized = True
def send_message(self, message: Message, transport=None):

View File

@@ -32,6 +32,7 @@ from typing import *
from hippolyzer.lib.base.datatypes import *
import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as templates
from hippolyzer.lib.base.datatypes import Pretty
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.network.transport import Direction, ADDR_TUPLE
@@ -62,11 +63,12 @@ class Block:
Block expects a name, and kwargs for variables (var_name = value)
"""
__slots__ = ('name', 'size', 'vars', 'message_name', '_ser_cache', 'fill_missing',)
PARENT_MESSAGE_NAME: ClassVar[Optional[str]] = None
def __init__(self, name, /, *, fill_missing=False, **kwargs):
self.name = name
self.size = 0
self.message_name: Optional[str] = None
self.message_name: Optional[str] = self.PARENT_MESSAGE_NAME
self.vars: Dict[str, VAR_TYPE] = {}
self._ser_cache: Dict[str, Any] = {}
self.fill_missing = fill_missing
@@ -83,6 +85,9 @@ class Block:
return self.vars[name]
def __setitem__(self, key, value):
if isinstance(value, Pretty):
return self.serialize_var(key, value.value)
# These don't pickle well since they're likely to get hot-reloaded
if isinstance(value, (enum.IntEnum, enum.IntFlag)):
value = int(value)
@@ -181,7 +186,7 @@ class MsgBlockList(List["Block"]):
class Message:
__slots__ = ("name", "send_flags", "_packet_id", "acks", "body_boundaries", "queued",
__slots__ = ("name", "send_flags", "packet_id", "acks", "body_boundaries", "queued",
"offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized",
"direction", "meta", "injected", "dropped", "sender")
@@ -191,7 +196,7 @@ class Message:
self.name = name
self.send_flags = flags
self._packet_id: Optional[int] = packet_id # aka, sequence number
self.packet_id: Optional[int] = packet_id # aka, sequence number
self.acks = acks if acks is not None else tuple()
self.body_boundaries = (-1, -1)
@@ -214,16 +219,6 @@ class Message:
self.add_blocks(args)
@property
def packet_id(self) -> Optional[int]:
return self._packet_id
@packet_id.setter
def packet_id(self, val: Optional[int]):
self._packet_id = val
# Changing packet ID clears the finalized flag
self.finalized = False
def add_blocks(self, block_list):
# can have a list of blocks if it is multiple or variable
for block in block_list:
@@ -296,7 +291,7 @@ class Message:
if self.raw_body and self.deserializer():
self.deserializer().parse_message_body(self)
def to_dict(self):
def to_dict(self, extended=False):
""" A dict representation of a message.
This is the form used for templated messages sent via EQ.
@@ -312,6 +307,18 @@ class Message:
new_vars[var_name] = val
dict_blocks.append(new_vars)
if extended:
base_repr.update({
"packet_id": self.packet_id,
"meta": self.meta.copy(),
"dropped": self.dropped,
"injected": self.injected,
"direction": self.direction.name,
"send_flags": int(self.send_flags),
"extra": self.extra,
"acks": self.acks,
})
return base_repr
@classmethod
@@ -321,6 +328,17 @@ class Message:
msg.create_block_list(block_type)
for block in blocks:
msg.add_block(Block(block_type, **block))
if 'packet_id' in dict_val:
# extended format
msg.packet_id = dict_val['packet_id']
msg.meta = dict_val['meta']
msg.dropped = dict_val['dropped']
msg.injected = dict_val['injected']
msg.direction = Direction[dict_val['direction']]
msg.send_flags = dict_val['send_flags']
msg.extra = dict_val['extra']
msg.acks = dict_val['acks']
return msg
def invalidate_caches(self):
@@ -359,12 +377,15 @@ class Message:
message_copy = copy.deepcopy(self)
# Set the queued flag so the original will be dropped and acks will be sent
self.queued = True
if not self.finalized:
self.queued = True
# Original was dropped so let's make sure we have clean acks and packet id
message_copy.acks = tuple()
message_copy.send_flags &= ~PacketFlags.ACK
message_copy.packet_id = None
message_copy.dropped = False
message_copy.finalized = False
return message_copy
def to_summary(self):

View File

@@ -22,6 +22,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import typing
from .msgtypes import MsgType, MsgBlockType
from ..datatypes import UUID
class MessageTemplateVariable:
@@ -61,6 +62,32 @@ class MessageTemplateVariable:
self._probably_text = self._probably_text and self.name != "NameValue"
return self._probably_text
@property
def default_value(self):
if self.type.is_int:
return 0
elif self.type.is_float:
return 0.0
elif self.type == MsgType.MVT_LLUUID:
return UUID()
elif self.type == MsgType.MVT_BOOL:
return False
elif self.type == MsgType.MVT_VARIABLE:
if self.probably_binary:
return b""
if self.probably_text:
return ""
return b""
elif self.type in (MsgType.MVT_LLVector3, MsgType.MVT_LLVector3d, MsgType.MVT_LLQuaternion):
return 0.0, 0.0, 0.0
elif self.type == MsgType.MVT_LLVector4:
return 0.0, 0.0, 0.0, 0.0
elif self.type == MsgType.MVT_FIXED:
return b"\x00" * self.size
elif self.type == MsgType.MVT_IP_ADDR:
return "0.0.0.0"
return None
class MessageTemplateBlock:
def __init__(self, name):

View File

@@ -10,6 +10,7 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
if TYPE_CHECKING:
@@ -18,10 +19,11 @@ if TYPE_CHECKING:
class BaseClientRegion(ConnectionHolder, abc.ABC):
"""Represents a client's view of a remote region"""
# Actually a weakref
handle: Optional[int]
# Actually a weakref
session: Callable[[], BaseClientSession]
objects: ClientObjectManager
caps_client: CapsClient
class BaseClientSession(abc.ABC):

View File

@@ -52,7 +52,8 @@ class BaseInteractionManager:
pass
@abc.abstractmethod
async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '') -> Optional[str]:
async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '',
default_suffix: str = '') -> Optional[str]:
pass
@abc.abstractmethod

View File

@@ -0,0 +1,89 @@
from __future__ import annotations
import enum
import typing
from weakref import ref
from typing import *
if TYPE_CHECKING:
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
def is_asset_server_cap_name(cap_name):
return cap_name and (
cap_name.startswith("GetMesh")
or cap_name.startswith("GetTexture")
or cap_name.startswith("ViewerAsset")
)
class CapType(enum.Enum):
NORMAL = enum.auto()
TEMPORARY = enum.auto()
WRAPPER = enum.auto()
PROXY_ONLY = enum.auto()
class SerializedCapData(typing.NamedTuple):
cap_name: typing.Optional[str] = None
region_addr: typing.Optional[str] = None
session_id: typing.Optional[str] = None
base_url: typing.Optional[str] = None
type: str = "NORMAL"
def __bool__(self):
return bool(self.cap_name or self.session_id)
@property
def asset_server_cap(self):
return is_asset_server_cap_name(self.cap_name)
class CapData(NamedTuple):
cap_name: Optional[str] = None
# Actually they're weakrefs but the type sigs suck.
region: Optional[Callable[[], Optional[ProxiedRegion]]] = None
session: Optional[Callable[[], Optional[Session]]] = None
base_url: Optional[str] = None
type: CapType = CapType.NORMAL
def __bool__(self):
return bool(self.cap_name or self.session)
def serialize(self) -> "SerializedCapData":
return SerializedCapData(
cap_name=self.cap_name,
region_addr=str(self.region().circuit_addr) if self.region and self.region() else None,
session_id=str(self.session().id) if self.session and self.session() else None,
base_url=self.base_url,
type=self.type.name,
)
@classmethod
def deserialize(
cls,
ser_cap_data: "SerializedCapData",
session_mgr: Optional[SessionManager],
) -> "CapData":
cap_session = None
cap_region = None
if session_mgr and ser_cap_data.session_id:
for session in session_mgr.sessions:
if ser_cap_data.session_id == str(session.id):
cap_session = session
if cap_session and ser_cap_data.region_addr:
for region in cap_session.regions:
if ser_cap_data.region_addr == str(region.circuit_addr):
cap_region = region
return cls(
cap_name=ser_cap_data.cap_name,
region=ref(cap_region) if cap_region else None,
session=ref(cap_session) if cap_session else None,
base_url=ser_cap_data.base_url,
type=CapType[ser_cap_data.type],
)
@property
def asset_server_cap(self) -> bool:
return is_asset_server_cap_name(self.cap_name)
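
For reference, a minimal round-trip of the new CapData/SerializedCapData pair (assuming hippolyzer is importable; with no live session or region attached, only the name, base URL and type survive serialization):

from hippolyzer.lib.proxy.caps import CapData, CapType, SerializedCapData

# Build a CapData with no region/session weakrefs attached.
cap = CapData(cap_name="GetTexture", base_url="https://example.invalid/cap", type=CapType.NORMAL)
# serialize() yields a picklable NamedTuple of plain strings.
ser: SerializedCapData = cap.serialize()
# With no SessionManager available, region and session stay None after deserialization.
restored = CapData.deserialize(ser, None)
assert restored.cap_name == "GetTexture"
assert restored.asset_server_cap  # GetTexture* counts as an asset server cap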

View File

@@ -37,6 +37,9 @@ class ProxiedCircuit(Circuit):
def prepare_message(self, message: Message, direction=None):
if message.finalized:
raise RuntimeError(f"Trying to re-send finalized {message!r}")
if message.queued:
# This is due to be dropped; nothing should be sending the original
raise RuntimeError(f"Trying to send original of queued {message!r}")
direction = direction or getattr(message, 'direction')
fwd_injections, reverse_injections = self._get_injections(direction)
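
These checks imply a workflow for addons: once a message has been queued or dropped, re-send a detached copy via take() rather than the original. A rough sketch, where region and message stand in for a live ProxiedRegion and a proxied Message:

region.circuit.drop_message(message)   # original is now finalized / due to be dropped
resend = message.take()                # fresh copy without the finalized flag
region.circuit.send_message(resend)    # OK; send_message(message) would raise RuntimeError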

View File

@@ -26,6 +26,10 @@ class CommandDetails(NamedTuple):
lifetime: Optional[TaskLifeScope] = None
def parse_bool(val: str) -> bool:
return val.lower() in ('on', 'true', '1', '1.0', 'yes')
def handle_command(command_name: Optional[str] = None, /, *, lifetime: Optional[TaskLifeScope] = None,
single_instance: bool = False, **params: Union[Parameter, callable]):
"""
@@ -61,13 +65,13 @@ def handle_command(command_name: Optional[str] = None, /, *, lifetime: Optional[
# Greedy, takes the rest of the message
if param.sep is None:
param_val = message
message = None
message = ""
else:
message = message.lstrip(param.sep)
if not message:
if param.optional:
break
raise KeyError(f"Missing parameter {param_name}")
if not param.optional:
raise KeyError(f"Missing parameter {param_name}")
continue
param_val, _, message = message.partition(param.sep) # type: ignore
param_vals[param_name] = param.parser(param_val)
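
As a usage sketch (the import path and addon class below are assumptions, not taken from this diff), parse_bool plugs into command parameters like any other parser:

from hippolyzer.lib.proxy.commands import handle_command, Parameter, parse_bool

class VerbosityCommands:
    @handle_command(verbose=Parameter(parse_bool, optional=True))
    async def set_verbose(self, _session, _region, verbose: bool = False):
        # "on", "true", "1", "1.0" and "yes" (any case) parse as True.
        self.verbose = verbose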

View File

@@ -58,7 +58,7 @@ class HTTPAssetRepo(collections.UserDict):
return False
asset = self[asset_id]
flow.response = http.HTTPResponse.make(
flow.response = http.Response.make(
content=asset.data,
headers={
"Content-Type": "application/octet-stream",

View File

@@ -18,8 +18,9 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
from hippolyzer.lib.proxy.sessions import SessionManager, CapData, Session
from hippolyzer.lib.proxy.caps import CapData, CapType
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager, Session
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext
@@ -120,7 +121,7 @@ class MITMProxyEventManager:
if not flow.can_stream or self._asset_server_proxied:
flow.request.url = redir_url
else:
flow.response = mitmproxy.http.HTTPResponse.make(
flow.response = mitmproxy.http.Response.make(
307,
# Can't provide explanation in the body because this results in failing Range requests under
# mitmproxy that return garbage data. Chances are there's weird interactions
@@ -137,6 +138,27 @@ class MITMProxyEventManager:
# the proxy
self._asset_server_proxied = True
logging.warning("noproxy not used, switching to URI rewrite strategy")
elif cap_data and cap_data.cap_name == "EventQueueGet":
# HACK: The sim's EQ acking mechanism doesn't seem to actually work.
# If the client drops the connection due to timeout before we can
# proxy back the response, then it will be lost forever. Keep around
# the last EQ response we got so we can re-send it if the client repeats
# its previous request.
req_ack_id = llsd.parse_xml(flow.request.content)["ack"]
eq_manager = cap_data.region().eq_manager
cached_resp = eq_manager.get_cached_poll_response(req_ack_id)
if cached_resp:
logging.warning("Had to serve a cached EventQueueGet due to client desync")
flow.response = mitmproxy.http.Response.make(
200,
llsd.format_xml(cached_resp),
{
"Content-Type": "application/llsd+xml",
# So we can differentiate these in the log
"X-Hippo-Fake-EQ": "1",
"Connection": "close",
},
)
elif not cap_data:
if self._is_login_request(flow):
# Not strictly a Cap, but makes it easier to filter on.
@@ -145,7 +167,7 @@ class MITMProxyEventManager:
if cap_data and cap_data.type == CapType.PROXY_ONLY:
# A proxy addon was supposed to respond itself, but it didn't.
if not flow.taken and not flow.response_injected:
flow.response = mitmproxy.http.HTTPResponse.make(
flow.response = mitmproxy.http.Response.make(
500,
b"Proxy didn't handle proxy-only Cap correctly",
{
@@ -251,11 +273,14 @@ class MITMProxyEventManager:
new_events.append(event)
# Add on any fake events that've been queued by addons
eq_manager = cap_data.region().eq_manager
new_events.extend(eq_manager.take_events())
new_events.extend(eq_manager.take_injected_events())
parsed_eq_resp["events"] = new_events
# Empty event list is an error, need to return undef instead.
if old_events and not new_events:
# Need at least one event or the viewer will refuse to ack!
new_events.append({"message": "NOP", "body": {}})
parsed_eq_resp = None
# HACK: see note in above request handler for EventQueueGet
req_ack_id = llsd.parse_xml(flow.request.content)["ack"]
eq_manager.cache_last_poll_response(req_ack_id, parsed_eq_resp)
flow.response.content = llsd.format_pretty_xml(parsed_eq_resp)
elif cap_data.cap_name in self.UPLOAD_CREATING_CAPS:
if not region:

View File

@@ -2,12 +2,15 @@ from __future__ import annotations
import copy
from typing import *
from typing import Optional
import mitmproxy.http
from mitmproxy.http import HTTPFlow
from hippolyzer.lib.proxy.caps import CapData
if TYPE_CHECKING:
from hippolyzer.lib.proxy.sessions import CapData, SessionManager
from hippolyzer.lib.proxy.sessions import SessionManager
class HippoHTTPFlow:
@@ -30,11 +33,11 @@ class HippoHTTPFlow:
meta.setdefault("from_browser", False)
@property
def request(self) -> mitmproxy.http.HTTPRequest:
def request(self) -> mitmproxy.http.Request:
return self.flow.request
@property
def response(self) -> Optional[mitmproxy.http.HTTPResponse]:
def response(self) -> Optional[mitmproxy.http.Response]:
return self.flow.response
@property
@@ -42,7 +45,7 @@ class HippoHTTPFlow:
return self.flow.id
@response.setter
def response(self, val: Optional[mitmproxy.http.HTTPResponse]):
def response(self, val: Optional[mitmproxy.http.Response]):
self.flow.metadata["response_injected"] = True
self.flow.response = val
@@ -113,12 +116,12 @@ class HippoHTTPFlow:
return state
@classmethod
def from_state(cls, flow_state: Dict, session_manager: SessionManager) -> HippoHTTPFlow:
def from_state(cls, flow_state: Dict, session_manager: Optional[SessionManager]) -> HippoHTTPFlow:
flow: Optional[HTTPFlow] = HTTPFlow.from_state(flow_state)
assert flow is not None
cap_data_ser = flow.metadata.get("cap_data_ser")
if cap_data_ser is not None:
flow.metadata["cap_data"] = session_manager.deserialize_cap_data(cap_data_ser)
flow.metadata["cap_data"] = CapData.deserialize(cap_data_ser, session_manager)
else:
flow.metadata["cap_data"] = None
return cls(flow)
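
Making session_manager Optional matters mainly for the message log import path, which rebuilds flows outside a live proxy; e.g. (mirroring the new test added further below):

# `flow_state` is assumed to be a dict from mitmproxy's HTTPFlow.get_state();
# without a SessionManager, the embedded cap data deserializes with no region/session refs.
hippo_flow = HippoHTTPFlow.from_state(flow_state, None)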

View File

@@ -1,5 +1,4 @@
import asyncio
import functools
import logging
import multiprocessing
import os
@@ -15,42 +14,30 @@ import mitmproxy.log
import mitmproxy.master
import mitmproxy.options
import mitmproxy.proxy
from mitmproxy.addons import core, clientplayback
from mitmproxy.addons import core, clientplayback, proxyserver, next_layer, disable_h2c
from mitmproxy.http import HTTPFlow
from mitmproxy.proxy.layers import tls
import OpenSSL
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher
orig_sethostflags = OpenSSL.SSL._lib.X509_VERIFY_PARAM_set_hostflags # noqa
@functools.wraps(orig_sethostflags)
def _sethostflags_wrapper(param, flags):
# Since 2000 the recommendation per RFCs has been to only check SANs and not the CN field.
# Most browsers do this, as does mitmproxy. The viewer does not, and the sim certs have no SAN
# field. Just monkeypatch out this flag since mitmproxy's internals are in flux and there's
# no good way to stop setting this flag currently.
return orig_sethostflags(
param,
flags & (~OpenSSL.SSL._lib.X509_CHECK_FLAG_NEVER_CHECK_SUBJECT) # noqa
)
OpenSSL.SSL._lib.X509_VERIFY_PARAM_set_hostflags = _sethostflags_wrapper # noqa
from hippolyzer.lib.proxy.caps import SerializedCapData
class SLCertStore(mitmproxy.certs.CertStore):
def get_cert(self, commonname: typing.Optional[bytes], sans: typing.List[bytes], *args):
cert, privkey, chain = super().get_cert(commonname, sans, *args)
x509: OpenSSL.crypto.X509 = cert.x509
def get_cert(self, commonname: typing.Optional[str], sans: typing.List[str], *args, **kwargs):
entry = super().get_cert(commonname, sans, *args, **kwargs)
cert, privkey, chain = entry.cert, entry.privatekey, entry.chain_file
x509 = cert.to_pyopenssl()
# The cert must have a subject key ID or the viewer will reject it.
for i in range(0, x509.get_extension_count()):
ext = x509.get_extension(i)
# This cert already has a subject key id, pass through.
if ext.get_short_name() == b"subjectKeyIdentifier":
return cert, privkey, chain
return entry
# Need to add a subject key ID onto this cert or the viewer will reject it.
# The viewer doesn't actually use the subject key ID for its intended purpose,
# so a random, unique value is fine.
x509.add_extensions([
OpenSSL.crypto.X509Extension(
b"subjectKeyIdentifier",
@@ -58,17 +45,24 @@ class SLCertStore(mitmproxy.certs.CertStore):
uuid.uuid4().hex.encode("utf8"),
),
])
x509.sign(privkey, "sha256") # type: ignore
return cert, privkey, chain
x509.sign(OpenSSL.crypto.PKey.from_cryptography_key(privkey), "sha256") # type: ignore
new_entry = mitmproxy.certs.CertStoreEntry(
mitmproxy.certs.Cert.from_pyopenssl(x509), privkey, chain
)
# Replace the cert that was created in the base `get_cert()` with our modified cert
self.certs[(commonname, tuple(sans))] = new_entry
self.expire_queue.pop(-1)
self.expire(new_entry)
return new_entry
class SLProxyConfig(mitmproxy.proxy.ProxyConfig):
def configure(self, options, updated) -> None:
super().configure(options, updated)
class SLTlsConfig(mitmproxy.addons.tlsconfig.TlsConfig):
def running(self):
super().running()
old_cert_store = self.certstore
# Replace the cert store with one that knows how to add
# a subject key ID extension.
self.certstore = SLCertStore( # noqa
self.certstore = SLCertStore(
default_privatekey=old_cert_store.default_privatekey,
default_ca=old_cert_store.default_ca,
default_chain_file=old_cert_store.default_chain_file,
@@ -76,6 +70,18 @@ class SLProxyConfig(mitmproxy.proxy.ProxyConfig):
)
self.certstore.certs = old_cert_store.certs
def tls_start_server(self, tls_start: tls.TlsStartData):
super().tls_start_server(tls_start)
# Since 2000 the recommendation per RFCs has been to only check SANs and not the CN field.
# Most browsers do this, as does mitmproxy. The viewer does not, and the sim certs have no SAN
# field. Set the host verification flags to remove the flag that disallows falling back to
# checking the CN (X509_CHECK_FLAG_NEVER_CHECK_SUBJECT).
param = OpenSSL.SSL._lib.SSL_get0_param(tls_start.ssl_conn._ssl) # noqa
# get_hostflags() doesn't seem to be exposed, so just set the usual flags without
# the problematic `X509_CHECK_FLAG_NEVER_CHECK_SUBJECT` flag.
flags = OpenSSL.SSL._lib.X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS # noqa
OpenSSL.SSL._lib.X509_VERIFY_PARAM_set_hostflags(param, flags) # noqa
class HTTPFlowContext:
def __init__(self):
@@ -92,12 +98,13 @@ class IPCInterceptionAddon:
flow which is merged in and resumed.
"""
def __init__(self, flow_context: HTTPFlowContext):
self.mitmproxy_ready = flow_context.mitmproxy_ready
self.intercepted_flows: typing.Dict[str, HTTPFlow] = {}
self.from_proxy_queue: multiprocessing.Queue = flow_context.from_proxy_queue
self.to_proxy_queue: multiprocessing.Queue = flow_context.to_proxy_queue
self.shutdown_signal: multiprocessing.Event = flow_context.shutdown_signal
def log(self, entry: mitmproxy.log.LogEntry):
def add_log(self, entry: mitmproxy.log.LogEntry):
if entry.level == "debug":
logging.debug(entry.msg)
elif entry.level in ("alert", "info"):
@@ -112,6 +119,8 @@ class IPCInterceptionAddon:
def running(self):
# Register a task to pump queued callbacks now that the event loop is running
asyncio.create_task(self._pump_callbacks())
# Tell the main process mitmproxy is ready to handle requests
self.mitmproxy_ready.set()
async def _pump_callbacks(self):
watcher = ParentProcessWatcher(self.shutdown_signal)
@@ -213,7 +222,11 @@ class SLMITMMaster(mitmproxy.master.Master):
self.addons.add(
core.Core(),
clientplayback.ClientPlayback(),
SLMITMAddon(flow_context)
disable_h2c.DisableH2C(),
proxyserver.Proxyserver(),
next_layer.NextLayer(),
SLTlsConfig(),
SLMITMAddon(flow_context),
)
def start_server(self):
@@ -242,30 +255,4 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: n
def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext): # pragma: no cover
master = create_proxy_master(bind_host, port, flow_context)
pconf = SLProxyConfig(master.options)
server = mitmproxy.proxy.server.ProxyServer(pconf)
master.server = server
return master
def is_asset_server_cap_name(cap_name):
return cap_name and (
cap_name.startswith("GetMesh")
or cap_name.startswith("GetTexture")
or cap_name.startswith("ViewerAsset")
)
class SerializedCapData(typing.NamedTuple):
cap_name: typing.Optional[str] = None
region_addr: typing.Optional[str] = None
session_id: typing.Optional[str] = None
base_url: typing.Optional[str] = None
type: str = "NORMAL"
def __bool__(self):
return bool(self.cap_name or self.session_id)
@property
def asset_server_cap(self):
return is_asset_server_cap_name(self.cap_name)
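
For orientation, a rough sketch of how these pieces get wired together; the port number is arbitrary and the run/serve calls are omitted since they aren't shown here:

from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, create_proxy_master

flow_context = HTTPFlowContext()
# create_proxy_master() is expected to install the addon chain shown above
# (DisableH2C, Proxyserver, NextLayer, SLTlsConfig, SLMITMAddon, ...).
master = create_proxy_master("127.0.0.1", 9062, flow_context)
# IPCInterceptionAddon.running() sets flow_context.mitmproxy_ready once the
# proxy can actually take requests, so callers can wait on it before logging in.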

View File

@@ -131,7 +131,7 @@ class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
# This message is owned by an async handler; drop it so it doesn't get
# sent with the normal flow.
if message.queued and not message.dropped:
if message.queued:
region.circuit.drop_message(message)
# Shouldn't mutate the message past this point, so log it now.
@@ -146,7 +146,8 @@ class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
elif message.name == "RegionHandshake":
region.name = str(message["RegionInfo"][0]["SimName"])
if not message.dropped:
# Send the message if it wasn't explicitly dropped or sent before
if not message.finalized:
region.circuit.send_message(message)
def close(self):

View File

@@ -12,7 +12,7 @@ def literal():
# https://stackoverflow.com/questions/14366401/#comment79795017_14366904
RegExMatch(r'''b?(\"\"\"|\'\'\'|\"|\')((?<!\\)(\\\\)*\\\1|.)*?\1'''),
# base16
RegExMatch(r'0x\d+'),
RegExMatch(r'0x[0-9a-fA-F]+'),
# base10 int or float.
RegExMatch(r'\d+(\.\d+)?'),
"None",

View File

@@ -1,8 +1,11 @@
from __future__ import annotations
import abc
import ast
import collections
import copy
import fnmatch
import gzip
import io
import logging
import pickle
@@ -13,16 +16,16 @@ import weakref
from defusedxml import minidom
from hippolyzer.lib.base import serialization as se, llsd
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
from hippolyzer.lib.base.helpers import bytes_escape
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
EnumFieldSpecifier
from hippolyzer.lib.proxy.region import CapType
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.caps import CapType, SerializedCapData
if typing.TYPE_CHECKING:
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
@@ -30,24 +33,42 @@ LOG = logging.getLogger(__name__)
class BaseMessageLogger:
paused: bool
def log_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
pass
if self.paused:
return False
return self.add_log_entry(LLUDPMessageLogEntry(message, region, session))
def log_http_response(self, flow: HippoHTTPFlow):
pass
if self.paused:
return False
# These are huge, let's not log them for now.
if flow.cap_data and flow.cap_data.asset_server_cap:
return False
return self.add_log_entry(HTTPMessageLogEntry(flow))
def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
if self.paused:
return False
return self.add_log_entry(EQMessageLogEntry(event, region, session))
@abc.abstractmethod
def add_log_entry(self, entry: AbstractMessageLogEntry):
pass
class FilteringMessageLogger(BaseMessageLogger):
def __init__(self):
def __init__(self, maxlen=2000):
BaseMessageLogger.__init__(self)
self._raw_entries = collections.deque(maxlen=2000)
self._raw_entries = collections.deque(maxlen=maxlen)
self._filtered_entries: typing.List[AbstractMessageLogEntry] = []
self._paused = False
self.paused = False
self.filter: BaseFilterNode = compile_filter("")
def __iter__(self) -> typing.Iterator[AbstractMessageLogEntry]:
return iter(self._filtered_entries)
def set_filter(self, filter_str: str):
self.filter = compile_filter(filter_str)
self._begin_reset()
@@ -61,25 +82,7 @@ class FilteringMessageLogger(BaseMessageLogger):
self._end_reset()
def set_paused(self, paused: bool):
self._paused = paused
def log_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
if self._paused:
return
self._add_log_entry(LLUDPMessageLogEntry(message, region, session))
def log_http_response(self, flow: HippoHTTPFlow):
if self._paused:
return
# These are huge, let's not log them for now.
if flow.cap_data and flow.cap_data.asset_server_cap:
return
self._add_log_entry(HTTPMessageLogEntry(flow))
def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
if self._paused:
return
self._add_log_entry(EQMessageLogEntry(event, region, session))
self.paused = paused
# Hooks that Qt models will want to implement
def _begin_insert(self, insert_idx: int):
@@ -94,25 +97,21 @@ class FilteringMessageLogger(BaseMessageLogger):
def _end_reset(self):
pass
def _add_log_entry(self, entry: AbstractMessageLogEntry):
def add_log_entry(self, entry: AbstractMessageLogEntry):
try:
# Paused, throw it away.
if self._paused:
return
if self.paused:
return False
self._raw_entries.append(entry)
if self.filter.match(entry):
next_idx = len(self._filtered_entries)
self._begin_insert(next_idx)
self._filtered_entries.append(entry)
self._end_insert()
entry.cache_summary()
# In the common case we don't need to keep around the serialization
# caches anymore. If the filter changes, the caches will be repopulated
# as necessary.
entry.freeze()
return True
except Exception:
LOG.exception("Failed to filter queued message")
return False
def clear(self):
self._begin_reset()
@@ -121,7 +120,27 @@ class FilteringMessageLogger(BaseMessageLogger):
self._end_reset()
class AbstractMessageLogEntry:
class WrappingMessageLogger(BaseMessageLogger):
def __init__(self):
self.loggers: typing.List[BaseMessageLogger] = []
@property
def paused(self):
return all(x.paused for x in self.loggers)
def add_log_entry(self, entry: AbstractMessageLogEntry):
logged = False
for logger in self.loggers:
if logger.add_log_entry(entry):
logged = True
# At least one logger ended up keeping the message around, so let's
# cache the summary before we freeze the message.
if logged:
entry.cache_summary()
entry.freeze()
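# For context, a minimal wiring sketch (illustrative, not part of this diff):
# the wrapper fans one stream of entries out to several log windows.
example_root = WrappingMessageLogger()
example_root.loggers.extend([FilteringMessageLogger(), FilteringMessageLogger(maxlen=500)])
# example_root.paused is only True once every attached logger is paused, and
# cache_summary()/freeze() run only when at least one logger kept the entry.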
class AbstractMessageLogEntry(abc.ABC):
region: typing.Optional[ProxiedRegion]
session: typing.Optional[Session]
name: str
@@ -129,7 +148,7 @@ class AbstractMessageLogEntry:
__slots__ = ["_region", "_session", "_region_name", "_agent_id", "_summary", "meta"]
def __init__(self, region, session):
def __init__(self, region: typing.Optional[ProxiedRegion], session: typing.Optional[Session]):
if region and not isinstance(region, weakref.ReferenceType):
region = weakref.ref(region)
if session and not isinstance(session, weakref.ReferenceType):
@@ -159,6 +178,45 @@ class AbstractMessageLogEntry:
"SelectedFull": self._current_selected_full(),
}
def to_dict(self) -> dict:
meta = self.meta.copy()
def _dehydrate_meta_uuid(key: str):
if meta[key]:
meta[key] = str(meta[key])
_dehydrate_meta_uuid("AgentID")
_dehydrate_meta_uuid("SelectedFull")
_dehydrate_meta_uuid("SessionID")
return {
"type": self.type,
"region_name": self.region_name,
"agent_id": str(self.agent_id) if self.agent_id is not None else None,
"summary": self.summary,
"meta": meta,
}
@classmethod
@abc.abstractmethod
def from_dict(cls, val: dict):
pass
def apply_dict(self, val: dict) -> None:
self._region_name = val['region_name']
self._agent_id = UUID(val['agent_id']) if val['agent_id'] else None
self._summary = val['summary']
meta = val['meta'].copy()
def _hydrate_meta_uuid(key: str):
if meta[key]:
meta[key] = UUID(meta[key])
_hydrate_meta_uuid("AgentID")
_hydrate_meta_uuid("SelectedFull")
_hydrate_meta_uuid("SessionID")
self.meta.update(meta)
def freeze(self):
pass
@@ -483,6 +541,26 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
return "application/xml"
return content_type
def to_dict(self):
val = super().to_dict()
val['flow'] = self.flow.get_state()
cap_data = val['flow'].get('metadata', {}).get('cap_data_ser')
if cap_data is not None:
# Have to convert this from a namedtuple to a plain dict so the exported
# repr() remains literal_eval()-able on import
cap_dict = cap_data._asdict() # noqa
val['flow']['metadata']['cap_data_ser'] = cap_dict
return val
@classmethod
def from_dict(cls, val: dict):
cap_data = val['flow'].get('metadata', {}).get('cap_data_ser')
if cap_data:
val['flow']['metadata']['cap_data_ser'] = SerializedCapData(**cap_data)
ev = cls(HippoHTTPFlow.from_state(val['flow'], None))
ev.apply_dict(val)
return ev
class EQMessageLogEntry(AbstractMessageLogEntry):
__slots__ = ["event"]
@@ -510,6 +588,17 @@ class EQMessageLogEntry(AbstractMessageLogEntry):
self._summary = llsd.format_notation(self.event["body"]).decode("utf8")[:500]
return self._summary
def to_dict(self) -> dict:
val = super().to_dict()
val['event'] = llsd.format_notation(self.event)
return val
@classmethod
def from_dict(cls, val: dict):
ev = cls(llsd.parse_notation(val['event']), None, None)
ev.apply_dict(val)
return ev
class LLUDPMessageLogEntry(AbstractMessageLogEntry):
__slots__ = ["_message", "_name", "_direction", "_frozen_message", "_seq", "_deserializer"]
@@ -642,3 +731,30 @@ class LLUDPMessageLogEntry(AbstractMessageLogEntry):
if self._message:
self._seq = self._message.packet_id
return self._seq
def to_dict(self):
val = super().to_dict()
val['message'] = llsd.format_notation(self.message.to_dict(extended=True))
return val
@classmethod
def from_dict(cls, val: dict):
ev = cls(Message.from_dict(llsd.parse_notation(val['message'])), None, None)
ev.apply_dict(val)
return ev
def export_log_entries(entries: typing.Iterable[AbstractMessageLogEntry]) -> bytes:
return gzip.compress(repr([e.to_dict() for e in entries]).encode("utf8"))
_TYPE_CLASSES = {
"HTTP": HTTPMessageLogEntry,
"LLUDP": LLUDPMessageLogEntry,
"EQ": EQMessageLogEntry,
}
def import_log_entries(data: bytes) -> typing.List[AbstractMessageLogEntry]:
entries = ast.literal_eval(gzip.decompress(data).decode("utf8"))
return [_TYPE_CLASSES[e['type']].from_dict(e) for e in entries]
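
The export format is a gzip-compressed repr() of each entry's to_dict(), read back with ast.literal_eval(); a minimal round-trip mirroring the new tests below:

from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.proxy.message_logger import (
    LLUDPMessageLogEntry, export_log_entries, import_log_entries,
)

entry = LLUDPMessageLogEntry(Message("Foo", Block("Bar", Baz=1)), None, None)
entry.freeze()
blob = export_log_entries([entry])        # bytes, safe to write straight to disk
restored = import_log_entries(blob)[0]    # back to an LLUDPMessageLogEntry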

View File

@@ -32,6 +32,9 @@ class ProxyNameCache(NameCache):
with open(namecache_file, "rb") as f:
namecache_bytes = f.read()
agents = llsd.parse_xml(namecache_bytes)["agents"]
# Can be `None` if the file was just created
if not agents:
continue
for agent_id, agent_data in agents.items():
# Don't set display name if they just have the default
display_name = None

View File

@@ -57,7 +57,11 @@ class ProxyObjectManager(ClientObjectManager):
LOG.warning(f"Tried to load cache for {self._region} without a handle")
return
self.cache_loaded = True
self.object_cache = RegionViewerObjectCacheChain.for_region(handle, self._region.cache_id)
self.object_cache = RegionViewerObjectCacheChain.for_region(
handle=handle,
cache_id=self._region.cache_id,
cache_dir=self._region.session().cache_dir,
)
def request_missed_cached_objects_soon(self):
if self._cache_miss_timer:
@@ -106,6 +110,8 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
)
def _handle_object_update_cached_misses(self, region_handle: int, missing_locals: Set[int]):
if not self._settings.ALLOW_AUTO_REQUEST_OBJECTS:
return
if self._settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS:
# Schedule these local IDs to be requested soon if the viewer doesn't request
# them itself. Ideally we could just mutate the CRC of the ObjectUpdateCached
@@ -120,14 +126,15 @@ class ProxyWorldObjectManager(ClientWorldObjectManager):
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: UpdateType):
super()._run_object_update_hooks(obj, updated_props, update_type)
region = self._session.region_by_handle(obj.RegionHandle)
if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:
if obj.ParentID and not region.objects.lookup_localid(obj.ParentID):
# If an avatar just sat on an object we don't know about, add it to the queued
# cache misses and request it if the viewer doesn't. This should happen
# regardless of the auto-request object setting because otherwise we have no way
# to get a sitting agent's true region location, even if it's ourself.
region.objects.queued_cache_misses.add(obj.ParentID)
region.objects.request_missed_cached_objects_soon()
if self._settings.ALLOW_AUTO_REQUEST_OBJECTS:
if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:
if obj.ParentID and not region.objects.lookup_localid(obj.ParentID):
# If an avatar just sat on an object we don't know about, add it to the queued
# cache misses and request it if the viewer doesn't. This should happen
# regardless of the auto-request object setting because otherwise we have no way
# to get a sitting agent's true region location, even if it's ourself.
region.objects.queued_cache_misses.add(obj.ParentID)
region.objects.request_missed_cached_objects_soon()
AddonManager.handle_object_updated(self._session, region, obj, updated_props)
def _run_kill_object_hooks(self, obj: Object):

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import enum
import logging
import hashlib
import uuid
@@ -18,6 +17,7 @@ from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.client.state import BaseClientRegion
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.caps import CapType
from hippolyzer.lib.proxy.object_manager import ProxyObjectManager
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
@@ -27,13 +27,6 @@ if TYPE_CHECKING:
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
class CapType(enum.Enum):
NORMAL = enum.auto()
TEMPORARY = enum.auto()
WRAPPER = enum.auto()
PROXY_ONLY = enum.auto()
class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):
# TODO: Make a view object for this that's just name -> URL
# deriving from MultiMapping[_T] so we don't have to do
@@ -162,6 +155,7 @@ class ProxiedRegion(BaseClientRegion):
if self.circuit:
self.circuit.is_alive = False
self.objects.clear()
self.eq_manager.clear()
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
@@ -172,11 +166,27 @@ class EventQueueManager:
# TODO: Per-EQ InjectionTracker so we can inject fake responses on 499
self._queued_events = []
self._region = weakref.proxy(region)
self._last_ack: Optional[int] = None
self._last_payload: Optional[Any] = None
def queue_event(self, event: dict):
def inject_event(self, event: dict):
self._queued_events.append(event)
def take_events(self):
def take_injected_events(self):
events = self._queued_events
self._queued_events = []
return events
def cache_last_poll_response(self, req_ack: int, payload: Any):
self._last_ack = req_ack
self._last_payload = payload
def get_cached_poll_response(self, req_ack: Optional[int]) -> Optional[Any]:
if self._last_ack == req_ack:
return self._last_payload
return None
def clear(self):
self._queued_events.clear()
self._last_ack = None
self._last_payload = None
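
Taken together, the renamed injection helpers and the new ack cache give the manager two jobs: holding addon-injected events until the next poll, and replaying the last poll response if the client repeats an ack. A small sketch, assuming region is a live ProxiedRegion:

eq = region.eq_manager
eq.inject_event({"message": "FakeEvent", "body": {}})
events = eq.take_injected_events()        # drains the injected-event queue

# Cache what we served for ack 7; a repeated poll with ack 7 gets it back verbatim.
eq.cache_last_poll_response(7, {"events": events, "id": 7})
assert eq.get_cached_poll_response(7) is not None
assert eq.get_cached_poll_response(8) is None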

View File

@@ -16,10 +16,11 @@ from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, is_asset_server_cap_name, SerializedCapData
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext
from hippolyzer.lib.proxy.caps import is_asset_server_cap_name, CapData, CapType
from hippolyzer.lib.proxy.namecache import ProxyNameCache
from hippolyzer.lib.proxy.object_manager import ProxyWorldObjectManager
from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.settings import ProxySettings
if TYPE_CHECKING:
@@ -46,6 +47,8 @@ class Session(BaseClientSession):
self.message_handler: MessageHandler[Message, str] = MessageHandler()
self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
# Base path of a newview-style cache directory for this session
self.cache_dir: Optional[str] = None
self._main_region = None
@property
@@ -211,50 +214,6 @@ class SessionManager:
return cap_data
return CapData()
def deserialize_cap_data(self, ser_cap_data: "SerializedCapData") -> "CapData":
cap_session = None
cap_region = None
if ser_cap_data.session_id:
for session in self.sessions:
if ser_cap_data.session_id == str(session.id):
cap_session = session
if cap_session and ser_cap_data.region_addr:
for region in cap_session.regions:
if ser_cap_data.region_addr == str(region.circuit_addr):
cap_region = region
return CapData(
cap_name=ser_cap_data.cap_name,
region=ref(cap_region) if cap_region else None,
session=ref(cap_session) if cap_session else None,
base_url=ser_cap_data.base_url,
type=CapType[ser_cap_data.type],
)
class CapData(NamedTuple):
cap_name: Optional[str] = None
# Actually they're weakrefs but the type sigs suck.
region: Optional[Callable[[], Optional[ProxiedRegion]]] = None
session: Optional[Callable[[], Optional[Session]]] = None
base_url: Optional[str] = None
type: CapType = CapType.NORMAL
def __bool__(self):
return bool(self.cap_name or self.session)
def serialize(self) -> "SerializedCapData":
return SerializedCapData(
cap_name=self.cap_name,
region_addr=str(self.region().circuit_addr) if self.region and self.region() else None,
session_id=str(self.session().id) if self.session and self.session() else None,
base_url=self.base_url,
type=self.type.name,
)
@property
def asset_server_cap(self) -> bool:
return is_asset_server_cap_name(self.cap_name)
@dataclasses.dataclass
class SelectionModel:

View File

@@ -28,6 +28,9 @@ class ProxySettings(Settings):
PROXY_BIND_ADDR: str = EnvSettingDescriptor("127.0.0.1", "HIPPO_BIND_HOST", str)
REMOTELY_ACCESSIBLE: bool = SettingDescriptor(False)
USE_VIEWER_OBJECT_CACHE: bool = SettingDescriptor(False)
# Whether the proxy is allowed to make automatic internal object requests at all
ALLOW_AUTO_REQUEST_OBJECTS: bool = SettingDescriptor(True)
# Whether the proxy should request any directly referenced objects the viewer didn't know about.
AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
FILTERS: Dict[str, str] = SettingDescriptor(dict)
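
The two flags work together: ALLOW_AUTO_REQUEST_OBJECTS is the master switch for proxy-initiated object requests, and AUTOMATICALLY_REQUEST_MISSING_OBJECTS additionally opts in to fetching referenced objects the viewer never requested itself. A quick check of the defaults:

from hippolyzer.lib.proxy.settings import ProxySettings

settings = ProxySettings()
# With the defaults above, internal object requests are allowed in general,
# but missing objects aren't fetched unless the second flag is also enabled.
print(settings.ALLOW_AUTO_REQUEST_OBJECTS)              # True
print(settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)   # False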

View File

@@ -58,6 +58,7 @@ from __future__ import annotations
import io
import logging
import pathlib
from pathlib import Path
from typing import *
@@ -82,6 +83,7 @@ class ViewerObjectCache:
@classmethod
def from_path(cls, base_path: Union[str, Path]):
base_path = pathlib.Path(base_path)
cache = cls(base_path)
with open(cache.base_path / "object.cache", "rb") as fh:
reader = se.BufferReader("<", fh.read())
@@ -143,6 +145,10 @@ class ViewerObjectCacheEntry(recordclass.datatuple): # type: ignore
data: bytes
def is_valid_vocache_dir(cache_dir):
return (pathlib.Path(cache_dir) / "objectcache" / "object.cache").exists()
class RegionViewerObjectCache:
"""Parser and container for .slc files"""
def __init__(self, cache_id: UUID, entries: List[ViewerObjectCacheEntry]):
@@ -201,7 +207,7 @@ class RegionViewerObjectCacheChain:
return None
@classmethod
def for_region(cls, handle: int, cache_id: UUID):
def for_region(cls, handle: int, cache_id: UUID, cache_dir: Optional[str] = None):
"""
Get a cache chain for a specific region, called on region connection
@@ -209,8 +215,13 @@ class RegionViewerObjectCacheChain:
so we have to try every region object cache file for every viewer installed.
"""
caches = []
for cache_dir in iter_viewer_cache_dirs():
if not (cache_dir / "objectcache" / "object.cache").exists():
if cache_dir is None:
cache_dirs = iter_viewer_cache_dirs()
else:
cache_dirs = [pathlib.Path(cache_dir)]
for cache_dir in cache_dirs:
if not is_valid_vocache_dir(cache_dir):
continue
cache = ViewerObjectCache.from_path(cache_dir / "objectcache")
if cache:

View File

@@ -1,69 +1,65 @@
aiohttp==3.7.4.post0
appdirs==1.4.4
Arpeggio==1.10.2
asgiref==3.3.4
asgiref==3.4.1
async-timeout==3.0.1
attrs==20.3.0
black==21.4b2
attrs==21.2.0
blinker==1.4
Brotli==1.0.9
certifi==2020.12.5
cffi==1.14.5
certifi==2021.5.30
cffi==1.14.6
chardet==4.0.0
click==7.1.2
cryptography==3.3.2
charset-normalizer==2.0.3
click==8.0.1
cryptography==3.4.7
defusedxml==0.7.1
Flask==1.1.2
Flask==2.0.1
Glymur==0.9.3
h11==0.12.0
h2==4.0.0
hpack==4.0.0
hyperframe==6.0.1
idna==2.10
itsdangerous==1.1.0
itsdangerous==2.0.1
jedi==0.18.0
Jinja2==2.11.3
Jinja2==3.0.1
kaitaistruct==0.9
lazy-object-proxy==1.6.0
ldap3==2.8.1
llbase==1.2.10
ldap3==2.9
llbase==1.2.11
lxml==4.6.3
MarkupSafe==1.1.1
mitmproxy==6.0.2
MarkupSafe==2.0.1
mitmproxy==7.0.2
msgpack==1.0.2
multidict==5.1.0
mypy-extensions==0.4.3
numpy==1.20.2
numpy==1.21.0
parso==0.8.2
passlib==1.7.4
pathspec==0.8.1
prompt-toolkit==3.0.18
protobuf==3.14.0
ptpython==3.0.17
prompt-toolkit==3.0.19
protobuf==3.17.3
ptpython==3.0.19
publicsuffix2==2.20191221
pyasn1==0.4.8
pycparser==2.20
Pygments==2.8.1
Pygments==2.9.0
pyOpenSSL==20.0.1
pyparsing==2.4.7
pyperclip==1.8.2
PySide2==5.15.2
qasync==0.15.0
qasync==0.17.0
recordclass==0.14.3
regex==2021.4.4
requests==2.25.1
ruamel.yaml==0.16.13
ruamel.yaml.clib==0.2.2
requests==2.26.0
ruamel.yaml==0.17.10
ruamel.yaml.clib==0.2.6
shiboken2==5.15.2
six==1.15.0
sortedcontainers==2.3.0
toml==0.10.2
six==1.16.0
sortedcontainers==2.4.0
tornado==6.1
typing-extensions==3.7.4.3
urllib3==1.26.5
typing-extensions==3.10.0.0
urllib3==1.26.6
urwid==2.1.2
wcwidth==0.2.5
Werkzeug==1.0.1
Werkzeug==2.0.1
wsproto==1.0.0
yarl==1.6.3
zstandard==0.14.1
zstandard==0.15.2

View File

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
version = '0.6.2'
version = '0.7.1'
with open(path.join(here, 'README.md')) as readme_fh:
readme = readme_fh.read()
@@ -82,13 +82,13 @@ setup(
'llbase>=1.2.5',
'defusedxml',
'aiohttp<4.0.0',
'recordclass',
'recordclass<0.15',
'lazy-object-proxy',
'arpeggio',
# requests breaks with newer idna
'idna<3,>=2.5',
# 7.x will be a major change.
'mitmproxy<7.0.0',
'mitmproxy>=7.0.2,<8.0',
# For REPLs
'ptpython<4.0',
# JP2 codec

View File

@@ -112,7 +112,7 @@ executables = [
setup(
name="hippolyzer_gui",
version="0.6.2",
version="0.7.1",
description="Hippolyzer GUI",
options=options,
executables=executables,

View File

@@ -50,4 +50,4 @@ class TestCapsClient(unittest.IsolatedAsyncioTestCase):
with self.assertRaises(KeyError):
with self.caps_client.get("BadCap"):
pass
assert False

View File

@@ -146,6 +146,12 @@ class TestMessage(unittest.TestCase):
new_msg = Message.from_dict(self.chat_msg.to_dict())
self.assertEqual(pickle.dumps(self.chat_msg), pickle.dumps(new_msg))
def test_todict_extended(self):
self.chat_msg.packet_id = 5
new_msg = Message.from_dict(self.chat_msg.to_dict(extended=True))
self.assertEqual(5, new_msg.packet_id)
self.assertEqual(pickle.dumps(self.chat_msg), pickle.dumps(new_msg))
def test_todict_multiple_blocks(self):
chat_msg = self.chat_msg
# If we dupe the ChatData block it should survive to_dict()

View File

@@ -791,7 +791,3 @@ class SubfieldSerializationTests(BaseSerializationTest):
self.assertEqual(ser.serialize(None, FooFlags.FOO), 1)
self.assertEqual(ser.serialize(None, 3), 3)
self.assertEqual(ser.serialize(None, 7), 7)
if __name__ == "__main__":
unittest.main()

View File

@@ -6,9 +6,8 @@ import multiprocessing
from urllib.parse import urlparse
import aioresponses
from mitmproxy.net import http
from mitmproxy.test import tflow, tutils
from mitmproxy.http import HTTPFlow
from mitmproxy.http import HTTPFlow, Headers
from yarl import URL
from hippolyzer.apps.proxy import run_http_proxy_process
@@ -17,8 +16,7 @@ from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.http_proxy import SerializedCapData
from hippolyzer.lib.proxy.message_logger import FilteringMessageLogger
from hippolyzer.lib.proxy.caps import SerializedCapData
from hippolyzer.lib.proxy.sessions import SessionManager
from hippolyzer.lib.proxy.test_utils import BaseProxyTest
@@ -31,12 +29,6 @@ class MockAddon(BaseAddon):
flow.metadata["touched_addon"] = True
class SimpleMessageLogger(FilteringMessageLogger):
@property
def entries(self):
return self._filtered_entries
class HTTPIntegrationTests(BaseProxyTest):
def setUp(self) -> None:
super().setUp()
@@ -88,7 +80,7 @@ class HTTPIntegrationTests(BaseProxyTest):
fake_flow = tflow.tflow(
req=tutils.treq(host="example.com", content=b'<llsd><string>getZOffsets|'),
resp=tutils.tresp(
headers=http.Headers((
headers=Headers((
(b"X-SecondLife-Object-Name", b"#Firestorm LSL Bridge v99999"),
(b"X-SecondLife-Owner-Key", str(self.session.agent_id).encode("utf8")),
)),

View File

@@ -26,7 +26,13 @@ class ExampleCommandHandler:
y=str,
)
async def own_name(self, _session, _region, y):
self.bar = y
pass
@handle_command(
x=Parameter(str, optional=True),
)
async def optional(self, _session, _region, x=42):
self.bar = x
class TestCommandHandlers(unittest.IsolatedAsyncioTestCase):
@@ -47,9 +53,20 @@ class TestCommandHandlers(unittest.IsolatedAsyncioTestCase):
async def test_own_name(self):
self.assertEqual(self.handler.own_name.command.name, "own_name")
async def test_missing_param(self):
with self.assertRaises(KeyError):
await self.handler.foo(None, None, "")
async def test_optional_param(self):
await self.handler.optional(None, None, "foo") # type: ignore
self.assertEqual(self.handler.bar, "foo")
await self.handler.optional(None, None, "") # type: ignore
# Should have picked up the default value
self.assertEqual(self.handler.bar, 42)
async def test_bad_command(self):
with self.assertRaises(ValueError):
class _BadCommandHandler:
@handle_command("foobaz")
def bad_command(self, session, region):
pass
assert False

View File

@@ -2,13 +2,14 @@ import unittest
from mitmproxy.test import tflow, tutils
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.datatypes import Vector3, UUID
from hippolyzer.lib.base.message.message import Block, Message as Message
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.http_proxy import SerializedCapData
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, HTTPMessageLogEntry
from hippolyzer.lib.proxy.caps import SerializedCapData
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, HTTPMessageLogEntry, export_log_entries, \
import_log_entries
from hippolyzer.lib.proxy.message_filter import compile_filter
from hippolyzer.lib.proxy.sessions import SessionManager
from hippolyzer.lib.proxy.settings import ProxySettings
@@ -24,7 +25,7 @@ OBJECT_UPDATE = b'\xc0\x00\x00\x00Q\x00\x0c\x00\x01\xea\x03\x00\x02\xe6\x03\x00\
b'\x88\x00"'
class MessageFilterTests(unittest.TestCase):
class MessageFilterTests(unittest.IsolatedAsyncioTestCase):
def _filter_matches(self, filter_str, message):
compiled = compile_filter(filter_str)
return compiled.match(message)
@@ -118,6 +119,17 @@ class MessageFilterTests(unittest.TestCase):
self.assertTrue(self._filter_matches("ObjectUpdate.ObjectData.ObjectData.Position > (88, 41, 25)", entry))
self.assertTrue(self._filter_matches("ObjectUpdate.ObjectData.ObjectData.Position < (90, 43, 27)", entry))
def test_import_export_message(self):
msg = LLUDPMessageLogEntry(Message(
"Foo",
Block("Bar", Baz=1, Quux=UUID.random(), Foo=0xFFffFFffFF)
), None, None)
msg.freeze()
msg = import_log_entries(export_log_entries([msg]))[0]
self.assertTrue(self._filter_matches("Foo.Bar.Baz == 1", msg))
# Make sure numbers outside the 32-bit range come through
self.assertTrue(self._filter_matches("Foo.Bar.Foo == 0xFFffFFffFF", msg))
def test_http_flow(self):
session_manager = SessionManager(ProxySettings())
fake_flow = tflow.tflow(req=tutils.treq(), resp=tutils.tresp())
@@ -129,6 +141,11 @@ class MessageFilterTests(unittest.TestCase):
self.assertTrue(self._filter_matches("FakeCap", entry))
self.assertFalse(self._filter_matches("NotFakeCap", entry))
if __name__ == "__main__":
unittest.main()
def test_export_import_http_flow(self):
fake_flow = tflow.tflow(req=tutils.treq(), resp=tutils.tresp())
fake_flow.metadata["cap_data_ser"] = SerializedCapData(
cap_name="FakeCap",
)
flow = HippoHTTPFlow.from_state(fake_flow.get_state(), None)
new_entry = import_log_entries(export_log_entries([HTTPMessageLogEntry(flow)]))[0]
self.assertEqual("FakeCap", new_entry.name)

View File

@@ -236,11 +236,11 @@ class PacketIDTests(unittest.TestCase):
with self.assertRaises(RuntimeError):
# Re-dropping the same message should raise
self.circuit.drop_message(to_drop)
# Clears finalized flag
to_drop.packet_id = None
self.circuit.send_message(to_drop)
# Returns a new message without finalized flag
new_msg = to_drop.take()
self.circuit.send_message(new_msg)
with self.assertRaises(RuntimeError):
self.circuit.send_message(to_drop)
self.circuit.send_message(new_msg)
self.assertSequenceEqual(self.circuit.sent_simple, [
(1, "ChatFromViewer", Direction.OUT, False, ()),
(1, "PacketAck", Direction.IN, True, ()),

View File

@@ -5,10 +5,10 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.base.templates import TextureEntrySubfieldSerializer, TEFaceBitfield, TextureEntry
EXAMPLE_TE = b"\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x08\xe7\xb2\x98\x04\xca\x10;\x85\x94\x05Lj\x8d\xd4" \
b"\x0b\x1f\x01B\xcb\xe6|\x1d,\xa7sc\xa6\x1a\xa2L\xb1u\x01\x00\x00\x00\x00\x00\x00\x00\x00\x80?" \
b"\x00\x00\x00\x80?\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
EXAMPLE_TE = b'\x89UgG$\xcbC\xed\x92\x0bG\xca\xed\x15F_\x08\xca*\x98:\x18\x02,\r\xf4\x1e\xc6\xf5\x91\x01]\x83\x014' \
b'\x00\x90i+\x10\x80\xa1\xaa\xa2g\x11o\xa8]\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x80?\x00\x00\x00\x80?' \
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
b'\x00\x00\x00\x00\x00\x00\x00'
class TemplateTests(unittest.TestCase):
@@ -58,13 +58,12 @@ class TemplateTests(unittest.TestCase):
str_msg = HumanMessageSerializer.to_human_string(msg, beautify=True)
msg = HumanMessageSerializer.from_human_string(str_msg)
spec = msg["ObjectData"][0].get_serializer("TextureEntry")
deser = spec.deserialize(None, msg["ObjectData"]["TextureEntry"], pod=True)
data_field = msg["ObjectData"]["TextureEntry"]
# Serialization order and format should match indra's exactly
self.assertEqual(EXAMPLE_TE, data_field)
deser = spec.deserialize(None, data_field, pod=True)
self.assertEqual(deser, pod_te)
def test_textureentry_defaults(self):
te = TextureEntry()
self.assertEqual(UUID('89556747-24cb-43ed-920b-47caed15465f'), te.Textures[None])
if __name__ == "__main__":
unittest.main()