Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 0d9593e14c | |
| | 28dfe2f1b2 | |
| | c8f7231eae | |
| | 00e9ecb765 | |
| | 2892bbeb98 | |
| | 28f57a8836 | |
| | 943b8b11d5 | |
| | 88915dd8d7 | |
| | 60b39e27f8 | |
| | 8af87befbd | |
| | 95e34bb07a | |
| | 106eb5c063 | |
| | e7f88eeed9 | |
| | d07f100452 | |
| | 02c212e4a6 | |
.github/workflows/pytest.yml (vendored, 5 changes)

@@ -21,9 +21,10 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install flake8 pytest pytest-cov
-          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+          pip install -r requirements.txt
+          pip install -r requirements-test.txt
       - name: Test with pytest
+        # Tests are intentionally covered to detect broken tests.
         run: |
           pytest --cov=./hippolyzer --cov=./tests --cov-report=xml
@@ -95,6 +95,9 @@ agent's session, you can do `(Meta.AgentID == None || Meta.AgentID == "d929385f-
 Vectors can also be compared. This will get any ObjectUpdate variant that occurs within a certain range:
 `(*ObjectUpdate*.ObjectData.*Data.Position > (110, 50, 100) && *ObjectUpdate*.ObjectData.*Data.Position < (115, 55, 105))`
 
+If you want to compare against an enum or a flag class defined in `templates.py`, you can just specify its name:
+`ViewerEffect.Effect.Type == ViewerEffectType.EFFECT_BEAM`
+
 ### Logging
 
 Decoded messages are displayed in the log pane, clicking one will show the request and
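For context, a minimal sketch of how such a filter string can be evaluated programmatically, assuming `compile_filter` returns the root filter node as the class names elsewhere in this diff suggest (`entry` is a hypothetical message log entry):

```python
from hippolyzer.lib.proxy.message_filter import compile_filter

# compile_filter() parses the expression into a filter node tree whose
# match() is invoked once per log entry.
beam_filter = compile_filter("ViewerEffect.Effect.Type == ViewerEffectType.EFFECT_BEAM")
if beam_filter.match(entry):  # truthy on a match
    print("ViewerEffect beam matched")
```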
@@ -301,8 +304,6 @@ If you are a viewer developer, please put them in a viewer.
 
 * AISv3 wrapper?
 * Higher level wrappers for common things? I don't really need these, so only if people want to write them.
-* Highlight matched portion of message in log view, if applicable
-* * Remember deep filters and return a map of them, have message formatter return text ranges?
 * Move things out of `templates.py`, right now most binary serialization stuff lives there
   because it's more convenient for me to hot-reload.
 * Ability to add menus?
@@ -5,7 +5,10 @@ coverage:
   status:
     project:
       default:
-        # Do not fail PRs if the code coverage drops.
+        # Do not fail commits if the code coverage drops.
         target: 0%
         threshold: 100%
         base: auto
+    patch:
+      default:
+        only_pulls: true
@@ -35,7 +35,7 @@ from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
 from hippolyzer.lib.proxy.caps_client import CapsClient
 from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
 from hippolyzer.lib.proxy.packets import Direction
-from hippolyzer.lib.proxy.message import ProxiedMessage, VerbatimHumanVal, proxy_eval
+from hippolyzer.lib.proxy.message import ProxiedMessage, VerbatimHumanVal, proxy_eval, SpannedString
 from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
@@ -161,6 +161,8 @@ class ProxyGUI(QtWidgets.QMainWindow):
         "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
     DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"
 
+    textRequest: QtWidgets.QTextEdit
+
     def __init__(self):
         super().__init__()
         loadUi(MAIN_WINDOW_UI_PATH, self)
@@ -263,8 +265,23 @@ class ProxyGUI(QtWidgets.QMainWindow):
             beautify=self.checkBeautify.isChecked(),
             replacements=self.buildReplacements(entry.session, entry.region),
         )
-        resp = entry.response(beautify=self.checkBeautify.isChecked())
+        highlight_range = None
+        if isinstance(req, SpannedString):
+            match_result = self.model.filter.match(entry)
+            # Match result was a tuple indicating what matched
+            if isinstance(match_result, tuple):
+                highlight_range = req.spans.get(match_result)
+
         self.textRequest.setPlainText(req)
+        if highlight_range:
+            cursor = self.textRequest.textCursor()
+            cursor.setPosition(highlight_range[0], QtGui.QTextCursor.MoveAnchor)
+            cursor.setPosition(highlight_range[1], QtGui.QTextCursor.KeepAnchor)
+            highlight_format = QtGui.QTextBlockFormat()
+            highlight_format.setBackground(QtCore.Qt.yellow)
+            cursor.setBlockFormat(highlight_format)
+
+        resp = entry.response(beautify=self.checkBeautify.isChecked())
         if resp:
            self.textResponse.show()
            self.textResponse.setPlainText(resp)
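One Qt detail worth noting about the hunk above: `QTextBlockFormat` applies to whole text blocks (lines), so `setBlockFormat` highlights every full line the selection touches. A character-exact variant, sketched here with the standard `QTextCharFormat` API rather than this project's code, would look like:

```python
# Hypothetical character-level alternative to the block highlight above.
char_format = QtGui.QTextCharFormat()
char_format.setBackground(QtCore.Qt.yellow)
cursor.mergeCharFormat(char_format)  # colors only the selected characters
```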
@@ -39,6 +39,7 @@ class MeshAsset:
 # These TypedDicts describe the expected shape of the LLSD in the mesh
 # header and various segments. They're mainly for type hinting.
 class MeshHeaderDict(TypedDict, total=False):
+    """Header of the mesh file, includes offsets & sizes for segments' LLSD"""
     version: int
     creator: UUID
     date: dt.datetime
@@ -54,6 +55,7 @@ class MeshHeaderDict(TypedDict, total=False):
 
 
 class SegmentHeaderDict(TypedDict):
+    """Standard shape for segment references within the header"""
     offset: int
     size: int
 
@@ -73,6 +75,7 @@ class PhysicsHavokSegmentHeaderDict(PhysicsSegmentHeaderDict, total=False):
 
 
 class PhysicsCostDataHeaderDict(TypedDict, total=False):
+    """Cost of physical representation, populated by server"""
     decomposition: float
     decomposition_discounted_vertices: int
     decomposition_hulls: int
@@ -85,6 +88,7 @@ class PhysicsCostDataHeaderDict(TypedDict, total=False):
 
 
 class MeshSegmentDict(TypedDict, total=False):
+    """Dict of segments unpacked using the MeshHeaderDict"""
     high_lod: List[LODSegmentDict]
     medium_lod: List[LODSegmentDict]
     low_lod: List[LODSegmentDict]
@@ -96,6 +100,7 @@ class MeshSegmentDict(TypedDict, total=False):
 
 
 class LODSegmentDict(TypedDict, total=False):
+    """Represents a single entry within the material list of a LOD segment"""
     # Only present if True and no geometry
     NoGeometry: bool
     # -1.0 - 1.0
@@ -113,17 +118,22 @@ class LODSegmentDict(TypedDict, total=False):
 
 
 class DomainDict(TypedDict):
+    """Description of the real range for quantized coordinates"""
     # number of elems depends on what the domain is for, Vec2 or Vec3
     Max: List[float]
     Min: List[float]
 
 
 class VertexWeight(recordclass.datatuple):  # type: ignore
+    """Vertex weight for a specific joint on a specific vertex"""
     # index of the joint within the joint_names list in the skin segment
     joint_idx: int
     # 0.0 - 1.0
     weight: float
 
 
 class SkinSegmentDict(TypedDict, total=False):
+    """Rigging information"""
     joint_names: List[str]
     # model -> world transform matrix for model
     bind_shape_matrix: List[float]
@@ -137,14 +147,17 @@ class SkinSegmentDict(TypedDict, total=False):
 
 
 class PhysicsConvexSegmentDict(DomainDict, total=False):
+    """Data for convex hull collisions, populated by the client"""
     # Min / Max domain vals are inline, unlike for LODs
     HullList: List[int]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
     Positions: List[Vector3]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
     BoundingVerts: List[Vector3]
 
 
 class PhysicsHavokSegmentDict(TypedDict, total=False):
+    """Cached data for Havok collisions, populated by sim and not used by client."""
     HullMassProps: MassPropsDict
     MOPP: MOPPDict
     MeshDecompMassProps: MassPropsDict
@@ -169,8 +182,11 @@ class MOPPDict(TypedDict, total=False):
 def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):
-    # Used for turning positions into their actual positions within the mesh / domain
-    # for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
+    Used for turning positions into their actual positions within the mesh / domain
+
+    for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
     lower = domain['Min']
     upper = domain['Max']
     return [
@@ -179,7 +195,7 @@ def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):
 def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
-    # Used for turning positions into their actual positions within the mesh / domain
+    """Used for turning positions back into their quantized form within the mesh / domain"""
     lower = domain['Min']
     upper = domain['Max']
     return [
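To make the domain mapping concrete, here is a minimal sketch of the per-component arithmetic these two helpers perform, assuming inputs already normalized to the 0.0 - 1.0 range (the real serializers handle the U16 unpacking separately):

```python
def _from_domain(component: float, lo: float, hi: float) -> float:
    # e.g. 0.5 in a (0.0, 10.0) domain becomes the real coordinate 5.0
    return lo + component * (hi - lo)

def _to_domain(component: float, lo: float, hi: float) -> float:
    # the inverse: map a real coordinate back to its normalized form
    return (component - lo) / (hi - lo)

assert _from_domain(0.5, 0.0, 10.0) == 5.0
assert _to_domain(5.0, 0.0, 10.0) == 0.5
```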
@@ -187,7 +203,36 @@ def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
     ]
 
 
+class VertexWeights(se.SerializableBase):
+    """Serializer for a list of joint weights on a single vertex"""
+    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
+    INFLUENCE_LIMIT = 4
+    INFLUENCE_TERM = 0xFF
+
+    @classmethod
+    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
+        if len(vals) > cls.INFLUENCE_LIMIT:
+            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
+        for val in vals:
+            joint_idx, influence = val
+            writer.write(se.U8, joint_idx)
+            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
+        if len(vals) != cls.INFLUENCE_LIMIT:
+            writer.write(se.U8, cls.INFLUENCE_TERM)
+
+    @classmethod
+    def deserialize(cls, reader: se.Reader, ctx=None):
+        influence_list = []
+        for _ in range(cls.INFLUENCE_LIMIT):
+            joint_idx = reader.read(se.U8)
+            if joint_idx == cls.INFLUENCE_TERM:
+                break
+            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
+        return influence_list
+
+
 class SegmentSerializer:
+    """Serializer for binary fields within an LLSD object"""
     def __init__(self, templates):
         self._templates: Dict[str, se.SerializableBase] = templates
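As a plain-struct illustration of the influence wire format this serializer implies (a sketch under assumptions, not the project's own API): each vertex carries up to 4 pairs of a U8 joint index and a U16 weight quantized over 0.0 - 1.0, with a 0xFF terminator when fewer than 4 pairs are present.

```python
import struct

def read_vertex_weights(buf: bytes, offset: int = 0):
    """Sketch: decode one vertex's (joint_idx, weight) influence list."""
    weights = []
    for _ in range(4):  # INFLUENCE_LIMIT
        joint_idx = buf[offset]
        offset += 1
        if joint_idx == 0xFF:  # INFLUENCE_TERM
            break
        (quantized,) = struct.unpack_from("<H", buf, offset)  # assumed little-endian
        offset += 2
        weights.append((joint_idx, quantized / 0xFFFF))  # assumed linear dequantization
    return weights, offset

# Two influences then the terminator: joint 0 at full weight, joint 1 at ~1/3
data = bytes([0]) + struct.pack("<H", 0xFFFF) + bytes([1]) + struct.pack("<H", 0x5555) + bytes([0xFF])
print(read_vertex_weights(data))  # ([(0, 1.0), (1, 0.333...)], 7)
```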
@@ -217,33 +262,6 @@ class SegmentSerializer:
         return new_segment
 
 
-class VertexWeights(se.SerializableBase):
-    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
-    INFLUENCE_LIMIT = 4
-    INFLUENCE_TERM = 0xFF
-
-    @classmethod
-    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
-        if len(vals) > cls.INFLUENCE_LIMIT:
-            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
-        for val in vals:
-            joint_idx, influence = val
-            writer.write(se.U8, joint_idx)
-            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
-        if len(vals) != cls.INFLUENCE_LIMIT:
-            writer.write(se.U8, cls.INFLUENCE_TERM)
-
-    @classmethod
-    def deserialize(cls, reader: se.Reader, ctx=None):
-        influence_list = []
-        for _ in range(cls.INFLUENCE_LIMIT):
-            joint_idx = reader.read(se.U8)
-            if joint_idx == cls.INFLUENCE_TERM:
-                break
-            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
-        return influence_list
-
-
 LOD_SEGMENT_SERIALIZER = SegmentSerializer({
     # 16-bit indices to the verts making up the tri. Imposes a 16-bit
     # upper limit on verts in any given material in the mesh.
@@ -265,6 +283,7 @@ class LLMeshSerializer(se.SerializableBase):
     KNOWN_SEGMENTS = ("lowest_lod", "low_lod", "medium_lod", "high_lod",
                       "physics_mesh", "physics_convex", "skin", "physics_havok")
 
+    # Define unpackers for specific binary fields within the parsed LLSD segments
     SEGMENT_TEMPLATES: Dict[str, SegmentSerializer] = {
         "lowest_lod": LOD_SEGMENT_SERIALIZER,
         "low_lod": LOD_SEGMENT_SERIALIZER,
@@ -193,12 +193,21 @@ class Message:
         # should be set once a packet is sent / dropped to prevent accidental
         # re-sending or re-dropping
         self.finalized = False
-        # Whether message is owned by the queue or should be sent immediately
+        # Whether message is owned by a queue or should be sent immediately
         self.queued: bool = False
         self._blocks: BLOCK_DICT = {}
 
         self.add_blocks(args)
 
+    def __reduce_ex__(self, protocol):
+        reduced: Tuple[Any] = super().__reduce_ex__(protocol)
+        # https://docs.python.org/3/library/pickle.html#object.__reduce__
+        # We need to make some changes to the object state to make it serializable
+        state_dict: Dict = reduced[2][1]
+        # Have to remove the deserializer weak ref so we can pickle
+        state_dict['deserializer'] = None
+        return reduced
+
     @property
     def packet_id(self) -> Optional[int]:
         return self._packet_id
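A small self-contained illustration of the `__reduce_ex__` trick, sketched on a plain class (so the state lands at `reduced[2]`; `Message`'s slotted layout is why the real code reaches into `reduced[2][1]`):

```python
import pickle
import weakref


class HasWeakref:
    """Sketch of the state-scrubbing trick: drop an unpicklable weakref."""

    def __init__(self):
        # Stand-in for Message's deserializer handle; weakrefs can't be pickled.
        self.deserializer = weakref.ref(HasWeakref)

    def __reduce_ex__(self, protocol):
        reduced = super().__reduce_ex__(protocol)
        # Copy the state dict so the live object's attribute isn't clobbered
        state = dict(reduced[2])
        state['deserializer'] = None
        return reduced[0], reduced[1], state


clone = pickle.loads(pickle.dumps(HasWeakref(), pickle.HIGHEST_PROTOCOL))
assert clone.deserializer is None
```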
@@ -5,6 +5,7 @@ import logging
 import math
 import os
 import re
+import typing
 import uuid
 from typing import *
@@ -71,6 +72,14 @@ def proxy_eval(eval_str: str, globals_=None, locals_=None):
     )
 
 
+TextSpan = Tuple[int, int]
+SpanDict = Dict[Tuple[Union[str, int], ...], TextSpan]
+
+
+class SpannedString(str):
+    spans: SpanDict = {}
+
+
 class ProxiedMessage(Message):
     __slots__ = ("meta", "injected", "dropped", "direction")
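The `str` subclass is the whole trick here: a `SpannedString` behaves exactly like the plain string the GUI already expects, while carrying the span lookup table along with it. A tiny illustration with made-up offsets:

```python
s = SpannedString("[ChatData]\n  Message = 'hi'\n")
s.spans = {("ChatFromViewer", "ChatData", 0, "Message"): (11, 27)}
assert s.startswith("[ChatData]")      # still an ordinary str
assert s[11:27] == "  Message = 'hi'"  # the span recovers the var's text
```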
@@ -83,9 +92,10 @@ class ProxiedMessage(Message):
         _maybe_reload_templates()
 
     def to_human_string(self, replacements=None, beautify=False,
-                        template: Optional[MessageTemplate] = None):
+                        template: Optional[MessageTemplate] = None) -> SpannedString:
         replacements = replacements or {}
         _maybe_reload_templates()
+        spans: SpanDict = {}
         string = ""
         if self.direction is not None:
             string += f'{self.direction.name} '
@@ -101,11 +111,18 @@ class ProxiedMessage(Message):
             block_suffix = ""
             if template and template.get_block(block_name).block_type == MsgBlockType.MBT_VARIABLE:
                 block_suffix = ' # Variable'
-            for block in block_list:
+            for block_num, block in enumerate(block_list):
                 string += f"[{block_name}]{block_suffix}\n"
                 for var_name, val in block.items():
+                    start_len = len(string)
                     string += self._format_var(block, var_name, val, replacements, beautify)
-        return string
+                    end_len = len(string)
+                    # Store the spans for each var so we can highlight specific matches
+                    spans[(self.name, block_name, block_num, var_name)] = (start_len, end_len)
+                    string += "\n"
+        spanned = SpannedString(string)
+        spanned.spans = spans
+        return spanned
 
     def _format_var(self, block, var_name, var_val, replacements, beautify=False):
         string = ""
@@ -129,7 +146,7 @@ class ProxiedMessage(Message):
             if serializer.AS_HEX and isinstance(var_val, int):
                 var_data = hex(var_val)
             if serializer.ORIG_INLINE:
-                string += f" #{var_data}\n"
+                string += f" #{var_data}"
                 return string
             else:
                 string += "\n"
@@ -146,7 +163,7 @@ class ProxiedMessage(Message):
         if "CircuitCode" in var_name or ("Code" in var_name and "Circuit" in block.name):
             if var_val == replacements.get("CIRCUIT_CODE"):
                 var_data = "[[CIRCUIT_CODE]]"
-        string += f" {field_prefix}{var_name} = {var_data}\n"
+        string += f" {field_prefix}{var_name} = {var_data}"
         return string
 
     @staticmethod
@@ -11,6 +11,9 @@ def literal():
         # Nightmare. str or bytes literal.
         # https://stackoverflow.com/questions/14366401/#comment79795017_14366904
         RegExMatch(r'''b?(\"\"\"|\'\'\'|\"|\')((?<!\\)(\\\\)*\\\1|.)*?\1'''),
+        # base16
+        RegExMatch(r'0x\d+'),
+        # base10 int or float.
         RegExMatch(r'\d+(\.\d+)?'),
         "None",
         "True",
@@ -23,7 +26,7 @@ def literal():
 
 
 def identifier():
-    return RegExMatch(r'[a-zA-Z*]([a-zA-Z0-9*]+)?')
+    return RegExMatch(r'[a-zA-Z*]([a-zA-Z0-9_*]+)?')
 
 
 def field_specifier():
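The added `_` is what lets enum member names parse at all; a quick check of the two patterns (anchored with `$` here purely for the demo):

```python
import re

OLD = r'[a-zA-Z*]([a-zA-Z0-9*]+)?$'
NEW = r'[a-zA-Z*]([a-zA-Z0-9_*]+)?$'
assert re.match(OLD, "EFFECT_BEAM") is None       # underscore rejected
assert re.match(NEW, "EFFECT_BEAM") is not None   # now a valid identifier
```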
@@ -42,12 +45,16 @@ def meta_field_specifier():
     return "Meta", ".", identifier
 
 
+def enum_field_specifier():
+    return identifier, ".", identifier
+
+
 def compare_val():
-    return [literal, meta_field_specifier]
+    return [literal, meta_field_specifier, enum_field_specifier]
 
 
 def binary_expression():
-    return field_specifier, ["==", "!=", "^=", "$=", "~=", ">", ">=", "<", "<="], compare_val
+    return field_specifier, ["==", "!=", "^=", "$=", "~=", ">", ">=", "<", "<=", "&"], compare_val
 
 
 def term():
@@ -62,9 +69,12 @@ def message_filter():
     return expression, EOF
 
 
+MATCH_RESULT = typing.Union[bool, typing.Tuple]
+
+
 class BaseFilterNode(abc.ABC):
     @abc.abstractmethod
-    def match(self, msg) -> bool:
+    def match(self, msg) -> MATCH_RESULT:
         raise NotImplementedError()
 
     @property
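Widening the return type stays backwards compatible with the boolean combinators below because a non-empty tuple is truthy; `and`, `or` and `not` keep working whether `match()` returns `True` or a span key:

```python
span_key = ("ObjectUpdate", "ObjectData", 0, "Position")
assert bool(span_key)                 # truthy, so OrFilterNode still short-circuits
assert (span_key or False) == span_key
assert not (False and span_key)
```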
@@ -94,17 +104,17 @@ class BinaryFilterNode(BaseFilterNode, abc.ABC):
 
 
 class UnaryNotFilterNode(UnaryFilterNode):
-    def match(self, msg) -> bool:
+    def match(self, msg) -> MATCH_RESULT:
         return not self.node.match(msg)
 
 
 class OrFilterNode(BinaryFilterNode):
-    def match(self, msg) -> bool:
+    def match(self, msg) -> MATCH_RESULT:
         return self.left_node.match(msg) or self.right_node.match(msg)
 
 
 class AndFilterNode(BinaryFilterNode):
-    def match(self, msg) -> bool:
+    def match(self, msg) -> MATCH_RESULT:
         return self.left_node.match(msg) and self.right_node.match(msg)
@@ -114,7 +124,7 @@ class MessageFilterNode(BaseFilterNode):
         self.operator = operator
         self.value = value
 
-    def match(self, msg) -> bool:
+    def match(self, msg) -> MATCH_RESULT:
         return msg.matches(self)
 
     @property
@@ -126,6 +136,11 @@ class MetaFieldSpecifier(str):
     pass
 
 
+class EnumFieldSpecifier(typing.NamedTuple):
+    enum_name: str
+    field_name: str
+
+
 class LiteralValue:
     """Only exists because we can't return `None` in a visitor, need to box it"""
     def __init__(self, value):
@@ -145,6 +160,9 @@ class MessageFilterVisitor(PTNodeVisitor):
     def visit_meta_field_specifier(self, _node, children):
         return MetaFieldSpecifier(children[0])
 
+    def visit_enum_field_specifier(self, _node, children):
+        return EnumFieldSpecifier(*children)
+
     def visit_unary_field_specifier(self, _node, children):
         # Looks like a bare field specifier with no operator
         return MessageFilterNode(tuple(children), None, None)
@@ -15,7 +15,8 @@ from defusedxml import minidom
 from hippolyzer.lib.base import serialization as se, llsd
 from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
 from hippolyzer.lib.base.helpers import bytes_escape
-from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode
+from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
+    EnumFieldSpecifier
 from hippolyzer.lib.proxy.region import CapType
 
 if typing.TYPE_CHECKING:
@@ -254,6 +255,11 @@ class AbstractMessageLogEntry:
                 expected = expected()
             else:
                 expected = str(expected)
+        elif isinstance(expected, EnumFieldSpecifier):
+            # Local import so we get a fresh copy of the templates module
+            from hippolyzer.lib.proxy import templates
+            enum_cls = getattr(templates, expected.enum_name)
+            expected = enum_cls[expected.field_name]
         elif expected is not None:
             # Unbox the expected value
             expected = expected.value
@@ -286,6 +292,8 @@ class AbstractMessageLogEntry:
             return val > expected
         elif operator == ">=":
             return val >= expected
+        elif operator == "&":
+            return val & expected
         else:
             raise ValueError(f"Unexpected operator {operator!r}")
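Like the comparison branches, the new `&` branch returns the raw result of the operation: for integer flag fields `val & expected` is non-zero (truthy) exactly when any of the expected bits are set.

```python
LOOP = 1 << 0
SYNC_MASTER = 1 << 1

val = LOOP | SYNC_MASTER       # e.g. an ObjectUpdate Flags field of 3
assert val & LOOP              # truthy: the LOOP bit is set
assert not (val & (1 << 2))    # falsy: the SYNC_SLAVE bit is clear
```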
@@ -546,7 +554,6 @@ class LLUDPMessageLogEntry(AbstractMessageLogEntry):
         # These are expensive to keep around. pickle them and un-pickle on
         # an as-needed basis.
         self._deserializer = self.message.deserializer
-        self.message.deserializer = None
         self._frozen_message = pickle.dumps(self._message, protocol=pickle.HIGHEST_PROTOCOL)
         self._message = None
@@ -586,15 +593,19 @@ class LLUDPMessageLogEntry(AbstractMessageLogEntry):
             for block_name in message.blocks:
                 if not fnmatch.fnmatchcase(block_name, matcher.selector[1]):
                     continue
-                for block in message[block_name]:
+                for block_num, block in enumerate(message[block_name]):
                     for var_name in block.vars.keys():
                         if not fnmatch.fnmatchcase(var_name, matcher.selector[2]):
                             continue
+                        # So we know where the match happened
+                        span_key = (message.name, block_name, block_num, var_name)
                         if selector_len == 3:
                             # We're just matching on the var existing, not having any particular value
                             if matcher.value is None:
-                                return True
+                                return span_key
                             if self._val_matches(matcher.operator, block[var_name], matcher.value):
-                                return True
+                                return span_key
                         # Need to invoke a special unpacker
                         elif selector_len == 4:
                             try:
                                 deserialized = block.deserialize_var(var_name)
@@ -608,9 +619,9 @@ class LLUDPMessageLogEntry(AbstractMessageLogEntry):
                             for key in deserialized.keys():
                                 if fnmatch.fnmatchcase(str(key), matcher.selector[3]):
                                     if matcher.value is None:
-                                        return True
+                                        return span_key
                                     if self._val_matches(matcher.operator, deserialized[key], matcher.value):
-                                        return True
+                                        return span_key
 
         return False
@@ -44,8 +44,8 @@ class OrphanManager:
         del self._orphans[parent_id]
         return removed
 
-    def collect_orphans(self, parent: Object) -> typing.Sequence[int]:
-        return self._orphans.pop(parent.LocalID, [])
+    def collect_orphans(self, parent_localid: int) -> typing.Sequence[int]:
+        return self._orphans.pop(parent_localid, [])
 
     def track_orphan(self, obj: Object):
         self.track_orphan_by_id(obj.LocalID, obj.ParentID)
@@ -60,7 +60,19 @@ OBJECT_OR_LOCAL = typing.Union[Object, int]
 
 
 class ObjectManager:
-    """Object manager for a specific region"""
+    """
+    Object manager for a specific region
+
+    TODO: This model does not make sense given how region->region object handoff works.
+      The ObjectManager has to notice when an ObjectUpdate for an object came from a
+      new region and update the associated region itself. It will not receive a KillObject
+      from the old region in the case of physical region crossings. Right now this means
+      physical objects or agents that physically cross a sim border get dangling object
+      references. This is not the case when they teleport, even across a small distance
+      to a neighbor, as that will send a KillObject in the old sim.
+      Needs to switch to one manager managing objects for a full session rather than one
+      manager per region.
+    """
 
     def __init__(self, region: ProxiedRegion):
         self._localid_lookup: typing.Dict[int, Object] = {}
@@ -87,6 +99,9 @@ class ObjectManager:
         message_handler.subscribe("KillObject",
                                   self._handle_kill_object)
 
+    def __len__(self):
+        return len(self._localid_lookup)
+
     @property
     def all_objects(self) -> typing.Iterable[Object]:
         return self._localid_lookup.values()
@@ -106,7 +121,7 @@ class ObjectManager:
             return None
         return self.lookup_localid(local_id)
 
-    def _track_object(self, obj: Object):
+    def _track_object(self, obj: Object, notify: bool = True):
         self._localid_lookup[obj.LocalID] = obj
         self._fullid_lookup[obj.FullID] = obj.LocalID
         # If it was missing, it's not missing anymore.
@@ -115,13 +130,34 @@ class ObjectManager:
             self._parent_object(obj)
 
         # Adopt any of our orphaned child objects.
-        for orphan_local in self._orphan_manager.collect_orphans(obj):
+        for orphan_local in self._orphan_manager.collect_orphans(obj.LocalID):
             child_obj = self.lookup_localid(orphan_local)
             # Shouldn't be any dead children in the orphanage
             assert child_obj is not None
             self._parent_object(child_obj)
 
-        self._notify_object_updated(obj, set(obj.to_dict().keys()))
+        if notify:
+            self._notify_object_updated(obj, set(obj.to_dict().keys()))
+
+    def _untrack_object(self, obj: Object):
+        former_child_ids = obj.ChildIDs[:]
+        for child_id in former_child_ids:
+            child_obj = self.lookup_localid(child_id)
+            assert child_obj is not None
+            self._unparent_object(child_obj, child_obj.ParentID)
+
+        # Place any remaining unkilled children in the orphanage
+        for child_id in former_child_ids:
+            self._orphan_manager.track_orphan_by_id(child_id, obj.LocalID)
+
+        assert not obj.ChildIDs
+
+        # Make sure the parent knows we went away
+        self._unparent_object(obj, obj.ParentID)
+
+        # Do this last in case we only have a weak reference
+        del self._fullid_lookup[obj.FullID]
+        del self._localid_lookup[obj.LocalID]
 
     def _parent_object(self, obj: Object, insert_at_head=False):
         if obj.ParentID:
@@ -163,9 +199,27 @@ class ObjectManager:
 
     def _update_existing_object(self, obj: Object, new_properties):
         new_parent_id = new_properties.get("ParentID", obj.ParentID)
 
+        actually_updated_props = set()
+
+        if obj.LocalID != new_properties.get("LocalID", obj.LocalID):
+            # Our LocalID changed, and we deal with linkages to other prims by
+            # LocalID association. Break any links since our LocalID is changing.
+            # Could happen if we didn't mark an attachment prim dead and the parent agent
+            # came back into the sim. Attachment FullIDs do not change across TPs,
+            # LocalIDs do. This at least lets us partially recover from the bad state.
+            # Currently known to happen due to physical region crossings, so only debug.
+            new_localid = new_properties["LocalID"]
+            LOG.debug(f"Got an update with new LocalID for {obj.FullID}, {obj.LocalID} != {new_localid}. "
+                      f"May have mishandled a KillObject for a prim that left and re-entered region.")
+            self._untrack_object(obj)
+            obj.LocalID = new_localid
+            self._track_object(obj, notify=False)
+            actually_updated_props |= {"LocalID"}
+
         old_parent_id = obj.ParentID
 
-        actually_updated_props = obj.update_properties(new_properties)
+        actually_updated_props |= obj.update_properties(new_properties)
 
         if new_parent_id != old_parent_id:
             self._unparent_object(obj, old_parent_id)
@@ -307,8 +361,8 @@ class ObjectManager:
         seen_locals = []
         for block in packet['ObjectData']:
             object_data = self._normalize_object_update_compressed(block)
-            obj = self.lookup_localid(object_data["LocalID"])
             seen_locals.append(object_data["LocalID"])
+            obj = self.lookup_localid(object_data["LocalID"])
             if obj:
                 self._update_existing_object(obj, object_data)
             else:
@@ -334,33 +388,38 @@ class ObjectManager:
     def _handle_kill_object(self, packet: ProxiedMessage):
         seen_locals = []
         for block in packet["ObjectData"]:
-            obj = self.lookup_localid(block["ID"])
+            self._kill_object_by_local_id(block["ID"])
             seen_locals.append(block["ID"])
-            self.missing_locals -= {block["ID"]}
-            if obj:
-                AddonManager.handle_object_killed(self._region.session(), self._region, obj)
-
-                former_child_ids = obj.ChildIDs[:]
-                for child_id in former_child_ids:
-                    child_obj = self.lookup_localid(child_id)
-                    assert child_obj is not None
-                    self._unparent_object(child_obj, child_obj.ParentID)
-
-                del self._localid_lookup[obj.LocalID]
-                del self._fullid_lookup[obj.FullID]
-
-                # Place any remaining unkilled children in the orphanage
-                for child_id in former_child_ids:
-                    self._orphan_manager.track_orphan_by_id(child_id, obj.LocalID)
-
-                assert not obj.ChildIDs
-
-                # Make sure the parent knows we went away
-                self._unparent_object(obj, obj.ParentID)
-            else:
-                logging.debug(f"Received {packet.name} for unknown {block['ID']}")
         packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
 
+    def _kill_object_by_local_id(self, local_id: int):
+        obj = self.lookup_localid(local_id)
+        self.missing_locals -= {local_id}
+        child_ids: Sequence[int]
+        if obj:
+            AddonManager.handle_object_killed(self._region.session(), self._region, obj)
+            child_ids = obj.ChildIDs
+        else:
+            LOG.debug(f"Tried to kill unknown object {local_id}")
+            # If it had any orphans, they need to die.
+            child_ids = self._orphan_manager.collect_orphans(local_id)
+
+        # KillObject implicitly kills descendents
+        # This may mutate child_ids, use the reversed iterator so we don't
+        # invalidate the iterator during removal.
+        for child_id in reversed(child_ids):
+            # indra special-cases avatar PCodes and doesn't mark them dead
+            # due to cascading kill. Is this correct? Do avatars require
+            # explicit kill?
+            child_obj = self.lookup_localid(child_id)
+            if child_obj and child_obj.PCode == PCode.AVATAR:
+                continue
+            self._kill_object_by_local_id(child_id)
+
+        # Have to do this last, since untracking will clear child IDs
+        if obj:
+            self._untrack_object(obj)
+
     def _handle_get_object_cost(self, flow: HippoHTTPFlow):
         parsed = llsd.parse_xml(flow.response.content)
         if "error" in parsed:
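The `reversed()` iteration deserves a second look: removing items from the tail of a list while walking it backwards keeps the iterator's index valid, which is why the recursion can safely shrink `child_ids` as it goes. A standalone sketch of the pattern, assuming the kill side effect removes the child from the list:

```python
children = [10, 11, 12]
for child in reversed(children):
    # stand-in for _kill_object_by_local_id() unparenting the child,
    # which removes it from the tail of the parent's ChildIDs list
    children.remove(child)
assert children == []
```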
@@ -1283,8 +1283,8 @@ class ObjectUpdateExtraParamsSerializer(se.SimpleSubfieldSerializer):
     EMPTY_IS_NONE = True
 
 
-@se.enum_field_serializer("ObjectUpdate", "ObjectData", "Flags")
-class SoundFlags(enum.IntEnum):
+@se.flag_field_serializer("ObjectUpdate", "ObjectData", "Flags")
+class SoundFlags(enum.IntFlag):
     LOOP = 1 << 0
     SYNC_MASTER = 1 << 1
     SYNC_SLAVE = 1 << 2
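The switch from `IntEnum` to `IntFlag` matters for decoding: `Flags` is a bitfield, and only `IntFlag` can represent combined values instead of raising on them.

```python
import enum

class SoundFlags(enum.IntFlag):
    LOOP = 1 << 0
    SYNC_MASTER = 1 << 1
    SYNC_SLAVE = 1 << 2

print(SoundFlags(3))  # renders as a LOOP|SYNC_MASTER combination
# an IntEnum with the same members would raise ValueError on 3
```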
requirements-test.txt (new file, 4 lines)

@@ -0,0 +1,4 @@
+aioresponses
+pytest
+pytest-cov
+flake8
setup.py (3 changes)

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
 
 here = path.abspath(path.dirname(__file__))
 
-version = '0.3.2'
+version = '0.4.0'
 
 with open(path.join(here, 'README.md')) as readme_fh:
     readme = readme_fh.read()
@@ -98,5 +98,6 @@ setup(
     ],
     tests_require=[
         "pytest",
+        "aioresponses",
     ],
 )
@@ -111,7 +111,7 @@ executables = [
 
 setup(
     name="hippolyzer_gui",
-    version="0.3.2",
+    version="0.4.0",
     description="Hippolyzer GUI",
     options=options,
     executables=executables,
@@ -126,8 +126,6 @@ class TestMessage(unittest.TestCase):
     def test_partial_decode_pickle(self):
         msg = self.deserial.deserialize(self.serial.serialize(self.chat_msg))
         self.assertEqual(msg.deserializer(), self.deserial)
-        # Have to remove the weak ref so we can pickle
-        msg.deserializer = None
         msg = pickle.loads(pickle.dumps(msg, protocol=pickle.HIGHEST_PROTOCOL))
 
         # We should still have the raw body at this point
@@ -52,6 +52,7 @@ class BaseIntegrationTest(unittest.IsolatedAsyncioTestCase):
         self.session.open_circuit(self.client_addr, self.region_addr,
                                   self.protocol.transport)
         self.session.main_region = self.session.regions[-1]
+        self.session.main_region.handle = 0
 
     def _msg_to_datagram(self, msg: ProxiedMessage, src, dst, direction, socks_header=True):
         serialized = self.serializer.serialize(msg)
tests/proxy/test_capsclient.py (new file, 65 lines)

@@ -0,0 +1,65 @@
+import unittest
+
+import aiohttp
+import aioresponses
+from yarl import URL
+
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.proxy.caps_client import CapsClient
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import SessionManager
+
+
+class TestCapsClient(unittest.IsolatedAsyncioTestCase):
+    def setUp(self) -> None:
+        self.session = SessionManager().create_session({
+            "session_id": UUID.random(),
+            "secure_session_id": UUID.random(),
+            "agent_id": UUID.random(),
+            "circuit_code": 0,
+            "sim_ip": "127.0.0.1",
+            "sim_port": "1",
+            "seed_capability": "https://test.localhost:4/foo",
+        })
+        self.region = ProxiedRegion(("127.0.0.1", 1), "", self.session)
+        self.caps_client = CapsClient(self.region)
+
+    async def test_bare_url_works(self):
+        with aioresponses.aioresponses() as m:
+            m.get("https://example.com/", body=b"foo")
+            async with self.caps_client.get("https://example.com/") as resp:
+                self.assertEqual(await resp.read(), b"foo")
+
+    async def test_own_session_works(self):
+        with aioresponses.aioresponses() as m:
+            async with aiohttp.ClientSession() as sess:
+                m.get("https://example.com/", body=b"foo")
+                async with self.caps_client.get("https://example.com/", session=sess) as resp:
+                    self.assertEqual(await resp.read(), b"foo")
+
+    async def test_read_llsd(self):
+        with aioresponses.aioresponses() as m:
+            m.get("https://example.com/", body=b"<llsd><integer>2</integer></llsd>")
+            async with self.caps_client.get("https://example.com/") as resp:
+                self.assertEqual(await resp.read_llsd(), 2)
+
+    async def test_caps(self):
+        self.region.update_caps({"Foobar": "https://example.com/"})
+        with aioresponses.aioresponses() as m:
+            m.post("https://example.com/baz", body=b"ok")
+            data = {"hi": "hello"}
+            headers = {"Foo": "bar"}
+            async with self.caps_client.post("Foobar", path="baz", llsd=data, headers=headers) as resp:
+                self.assertEqual(await resp.read(), b"ok")
+
+            # Our original dict should not have been touched
+            self.assertEqual(headers, {"Foo": "bar"})
+
+            req_key = ("POST", URL("https://example.com/baz"))
+            req_body = m.requests[req_key][0].kwargs['data']
+            self.assertEqual(req_body, b'<?xml version="1.0" ?><llsd><map><key>hi</key><string>hello'
+                                       b'</string></map></llsd>')
+
+        with self.assertRaises(KeyError):
+            with self.caps_client.get("BadCap"):
+                pass
tests/proxy/test_httpflows.py (new file, 41 lines)

@@ -0,0 +1,41 @@
+import unittest
+
+from mitmproxy.test import tflow, tutils
+
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
+from hippolyzer.lib.proxy.http_proxy import SerializedCapData
+from hippolyzer.lib.proxy.message_logger import HTTPMessageLogEntry
+from hippolyzer.lib.proxy.sessions import SessionManager
+
+
+class TestHTTPFlows(unittest.TestCase):
+    def setUp(self) -> None:
+        self.session_manager = SessionManager()
+        self.session = self.session_manager.create_session({
+            "session_id": UUID.random(),
+            "secure_session_id": UUID.random(),
+            "agent_id": UUID.random(),
+            "circuit_code": 0,
+            "sim_ip": "127.0.0.1",
+            "sim_port": "1",
+            "seed_capability": "https://test.localhost:4/foo",
+        })
+
+    def test_request_formatting(self):
+        req = tutils.treq(host="example.com", port=80)
+        resp = tutils.tresp()
+        fake_flow = tflow.tflow(req=req, resp=resp)
+        fake_flow.metadata["cap_data_ser"] = SerializedCapData(
+            cap_name="FakeCap",
+            session_id=str(self.session.id),
+            base_url="http://example.com",
+        )
+        flow = HippoHTTPFlow.from_state(fake_flow.get_state(), self.session_manager)
+        entry = HTTPMessageLogEntry(flow)
+        self.assertEqual(entry.request(beautify=True), """GET [[FakeCap]]/path HTTP/1.1\r
+# http://example.com/path\r
+header: qvalue\r
+content-length: 7\r
+\r
+content""")
@@ -1,13 +1,17 @@
 import unittest
 
+from mitmproxy.test import tflow, tutils
+
 from hippolyzer.lib.base.datatypes import Vector3
 from hippolyzer.lib.base.message.message import Block
 from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
 from hippolyzer.lib.base.settings import Settings
+from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
+from hippolyzer.lib.proxy.http_proxy import SerializedCapData
 from hippolyzer.lib.proxy.message import ProxiedMessage as Message
-from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry
+from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, HTTPMessageLogEntry
 from hippolyzer.lib.proxy.message_filter import compile_filter
+from hippolyzer.lib.proxy.sessions import SessionManager
 
 OBJECT_UPDATE = b'\xc0\x00\x00\x00Q\x00\x0c\x00\x01\xea\x03\x00\x02\xe6\x03\x00\x01\xbe\xff\x01\x06\xbc\x8e\x0b\x00' \
                 b'\x01i\x94\x8cjM"\x1bf\xec\xe4\xac1c\x93\xcbKW\x89\x98\x01\t\x03\x00\x01Q@\x88>Q@\x88>Q@\x88><\xa2D' \
@@ -46,8 +50,10 @@ class MessageFilterTests(unittest.TestCase):
     def test_equality(self):
         msg = LLUDPMessageLogEntry(Message("Foo", Block("Bar", Baz=1)), None, None)
         self.assertTrue(self._filter_matches("Foo.Bar.Baz == 1", msg))
+        self.assertTrue(self._filter_matches("Foo.Bar.Baz == 0x1", msg))
         msg.message["Bar"]["Baz"] = 2
         self.assertFalse(self._filter_matches("Foo.Bar.Baz == 1", msg))
+        self.assertFalse(self._filter_matches("Foo.Bar.Baz == 0x1", msg))
 
     def test_and(self):
         msg = LLUDPMessageLogEntry(Message("Foo", Block("Bar", Baz=1)), None, None)
@@ -95,6 +101,14 @@ class MessageFilterTests(unittest.TestCase):
         self.assertFalse(self._filter_matches("Foo.Bar.Baz < (0, 3, 0)", msg))
         self.assertTrue(self._filter_matches("Foo.Bar.Baz > (0, 0, 0)", msg))
 
+    def test_enum_specifier(self):
+        # 2 is the enum val for SculptType.TORUS
+        msg = LLUDPMessageLogEntry(Message("Foo", Block("Bar", Baz=2)), None, None)
+        self.assertTrue(self._filter_matches("Foo.Bar.Baz == SculptType.TORUS", msg))
+        # bitwise AND should work as well
+        self.assertTrue(self._filter_matches("Foo.Bar.Baz & SculptType.TORUS", msg))
+        self.assertFalse(self._filter_matches("Foo.Bar.Baz == SculptType.SPHERE", msg))
+
     def test_tagged_union_subfield(self):
         settings = Settings()
         settings.ENABLE_DEFERRED_PACKET_PARSING = False
@@ -105,6 +119,17 @@ class MessageFilterTests(unittest.TestCase):
         self.assertTrue(self._filter_matches("ObjectUpdate.ObjectData.ObjectData.Position > (88, 41, 25)", entry))
         self.assertTrue(self._filter_matches("ObjectUpdate.ObjectData.ObjectData.Position < (90, 43, 27)", entry))
 
+    def test_http_flow(self):
+        session_manager = SessionManager()
+        fake_flow = tflow.tflow(req=tutils.treq(), resp=tutils.tresp())
+        fake_flow.metadata["cap_data_ser"] = SerializedCapData(
+            cap_name="FakeCap",
+        )
+        flow = HippoHTTPFlow.from_state(fake_flow.get_state(), session_manager)
+        entry = HTTPMessageLogEntry(flow)
+        self.assertTrue(self._filter_matches("FakeCap", entry))
+        self.assertFalse(self._filter_matches("NotFakeCap", entry))
+
 
 if __name__ == "__main__":
     unittest.main()
@@ -12,11 +12,13 @@ from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
 from hippolyzer.lib.proxy.objects import ObjectManager
 from hippolyzer.lib.proxy.message import ProxiedMessage as Message
+from hippolyzer.lib.proxy.templates import PCode
 
 
 class MockRegion:
     def __init__(self, message_handler: MessageHandler):
         self.session = lambda: None
+        self.handle = 123
         self.message_handler = message_handler
         self.http_message_handler = MessageHandler()
@@ -43,9 +45,11 @@ class ObjectManagerTests(unittest.TestCase):
         self.object_addon = ObjectTrackingAddon()
         AddonManager.init([], None, [self.object_addon])
 
-    def _create_object_update(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None) -> Message:
+    def _create_object_update(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None,
+                              pcode=None) -> Message:
         pos = pos if pos is not None else (1.0, 2.0, 3.0)
         rot = rot if rot is not None else (0.0, 0.0, 0.0, 1.0)
+        pcode = pcode if pcode is not None else 9
         msg = Message(
             "ObjectUpdate",
             Block("RegionData", RegionHandle=123, TimeDilation=123),
@@ -53,7 +57,7 @@ class ObjectManagerTests(unittest.TestCase):
                 "ObjectData",
                 ID=local_id if local_id is not None else random.getrandbits(32),
                 FullID=full_id if full_id else UUID.random(),
-                PCode=9,
+                PCode=pcode,
                 Scale=Vector3(0.5, 0.5, 0.5),
                 UpdateFlags=268568894,
                 PathCurve=16,
@@ -85,8 +89,9 @@ class ObjectManagerTests(unittest.TestCase):
         # Run through (de)serializer to fill in any missing vars
         return self.deserializer.deserialize(self.serializer.serialize(msg))
 
-    def _create_object(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None) -> Object:
-        msg = self._create_object_update(local_id=local_id, full_id=full_id, parent_id=parent_id, pos=pos, rot=rot)
+    def _create_object(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None, pcode=None) -> Object:
+        msg = self._create_object_update(
+            local_id=local_id, full_id=full_id, parent_id=parent_id, pos=pos, rot=rot, pcode=pcode)
         self.message_handler.handle(msg)
         return self.object_manager.lookup_fullid(msg["ObjectData"]["FullID"])
@@ -122,14 +127,33 @@ class ObjectManagerTests(unittest.TestCase):
         self.assertEqual(set(), self.object_manager.missing_locals)
         self.assertSequenceEqual([child.LocalID], parent.ChildIDs)
 
-    def test_killing_parent_orphans_children(self):
-        child = self._create_object(local_id=2, parent_id=1)
+    def test_killing_parent_kills_children(self):
+        _child = self._create_object(local_id=2, parent_id=1)
         parent = self._create_object(local_id=1)
-        # This should orphan the child again
         self._kill_object(parent)
         parent = self._create_object(local_id=1)
-        # Did we pick the orphan back up?
-        self.assertSequenceEqual([child.LocalID], parent.ChildIDs)
+        # We should not have picked up any children
+        self.assertSequenceEqual([], parent.ChildIDs)
 
+    def test_hierarchy_killed(self):
+        _child = self._create_object(local_id=3, parent_id=2)
+        _other_child = self._create_object(local_id=4, parent_id=2)
+        _parent = self._create_object(local_id=2, parent_id=1)
+        grandparent = self._create_object(local_id=1)
+        # KillObject implicitly kills all known descendents at that point
+        self._kill_object(grandparent)
+        self.assertEqual(0, len(self.object_manager))
+
+    def test_hierarchy_avatar_not_killed(self):
+        _child = self._create_object(local_id=3, parent_id=2)
+        _parent = self._create_object(local_id=2, parent_id=1, pcode=PCode.AVATAR)
+        grandparent = self._create_object(local_id=1)
+        # KillObject should only "unsit" child avatars (does this require an ObjectUpdate
+        # or is ParentID=0 implied?)
+        self._kill_object(grandparent)
+        self.assertEqual(2, len(self.object_manager))
+        self.assertIsNotNone(self.object_manager.lookup_localid(2))
+
     def test_attachment_orphan_parent_tracking(self):
         """
@@ -142,15 +166,6 @@ class ObjectManagerTests(unittest.TestCase):
         parent = self._create_object(local_id=2, parent_id=1)
         self.assertSequenceEqual([child.LocalID], parent.ChildIDs)
 
-    def test_killing_attachment_parent_orphans_children(self):
-        child = self._create_object(local_id=3, parent_id=2)
-        parent = self._create_object(local_id=2, parent_id=1)
-        # This should orphan the child again
-        self._kill_object(parent)
-        parent = self._create_object(local_id=2, parent_id=1)
-        # Did we pick the orphan back up?
-        self.assertSequenceEqual([child.LocalID], parent.ChildIDs)
-
     def test_unparenting_succeeds(self):
         child = self._create_object(local_id=3, parent_id=2)
         parent = self._create_object(local_id=2)