diff --git a/hippolyzer/lib/base/objects.py b/hippolyzer/lib/base/objects.py index 2a7e67a..aca25b3 100644 --- a/hippolyzer/lib/base/objects.py +++ b/hippolyzer/lib/base/objects.py @@ -141,8 +141,9 @@ class Object(recordclass.datatuple): # type: ignore TouchName: Optional[str] = None SitName: Optional[str] = None TextureID: Optional[Any] = None + RegionHandle: Optional[int] = None - def __init__(self, **kwargs): + def __init__(self, **_kwargs): """ set up the object attributes """ self.ExtraParams = self.ExtraParams or {} # Variable 1 self.ObjectCosts = self.ObjectCosts or {} @@ -150,6 +151,10 @@ class Object(recordclass.datatuple): # type: ignore # Same as parent, contains weakref proxies. self.Children: List[Object] = [] + @property + def GlobalPosition(self) -> Vector3: + return handle_to_global_pos(self.RegionHandle) + self.RegionPosition + @property def RegionPosition(self) -> Vector3: if not self.ParentID: @@ -215,8 +220,13 @@ def gridxy_to_handle(x: int, y: int): return ((x * 256) << 32) | (y * 256) -def normalize_object_update(block: Block): +def handle_to_global_pos(handle: int) -> Vector3: + return Vector3(handle >> 32, handle & 0xFFffFFff) + + +def normalize_object_update(block: Block, handle: int): object_data = { + "RegionHandle": handle, "FootCollisionPlane": None, "SoundFlags": block["Flags"], "SoundGain": block["Gain"], @@ -247,11 +257,12 @@ def normalize_object_update(block: Block): return object_data -def normalize_terse_object_update(block: Block): +def normalize_terse_object_update(block: Block, handle: int): object_data = { **block.deserialize_var("Data", make_copy=False), **dict(block.items()), "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False), + "RegionHandle": handle, } object_data["LocalID"] = object_data.pop("ID") object_data.pop("Data") @@ -295,9 +306,10 @@ def normalize_object_update_compressed_data(data: bytes): return object_data -def normalize_object_update_compressed(block: Block): +def 
normalize_object_update_compressed(block: Block, handle: int): compressed = normalize_object_update_compressed_data(block["Data"]) compressed["UpdateFlags"] = block.deserialize_var("UpdateFlags", make_copy=False) + compressed["RegionHandle"] = handle return compressed diff --git a/hippolyzer/lib/proxy/http_event_manager.py b/hippolyzer/lib/proxy/http_event_manager.py index 7042c27..5a092de 100644 --- a/hippolyzer/lib/proxy/http_event_manager.py +++ b/hippolyzer/lib/proxy/http_event_manager.py @@ -195,6 +195,11 @@ class MITMProxyEventManager: self._handle_login_flow(flow) return try: + session = cap_data.session and cap_data.session() + if not session: + return + session.http_message_handler.handle(flow) + region = cap_data.region and cap_data.region() if not region: return diff --git a/hippolyzer/lib/proxy/lludp_proxy.py b/hippolyzer/lib/proxy/lludp_proxy.py index 0789946..60e9fb8 100644 --- a/hippolyzer/lib/proxy/lludp_proxy.py +++ b/hippolyzer/lib/proxy/lludp_proxy.py @@ -130,6 +130,10 @@ class InterceptingLLUDPProxyProtocol(BaseLLUDPProxyProtocol): except: LOG.exception("Failed to load region cache, skipping") + try: + self.session.message_handler.handle(message) + except: + LOG.exception("Failed in session message handler") try: region.message_handler.handle(message) except: diff --git a/hippolyzer/lib/proxy/objects.py b/hippolyzer/lib/proxy/objects.py index 226adcf..32b86dc 100644 --- a/hippolyzer/lib/proxy/objects.py +++ b/hippolyzer/lib/proxy/objects.py @@ -15,6 +15,7 @@ from hippolyzer.lib.base.datatypes import UUID, Vector3 from hippolyzer.lib.base.helpers import proxify from hippolyzer.lib.base.message.message import Block from hippolyzer.lib.base.objects import ( + handle_to_global_pos, normalize_object_update, normalize_terse_object_update, normalize_object_update_compressed_data, @@ -30,6 +31,7 @@ from hippolyzer.lib.proxy.vocache import RegionViewerObjectCacheChain if TYPE_CHECKING: from hippolyzer.lib.proxy.region import ProxiedRegion + from 
hippolyzer.lib.proxy.sessions import Session LOG = logging.getLogger(__name__) @@ -86,12 +88,14 @@ class Avatar: def __init__( self, full_id: UUID, + region_handle: int, obj: Optional["Object"] = None, coarse_location: Optional[Vector3] = None, resolved_name: Optional[str] = None, ): self.FullID: UUID = full_id self.Object: Optional["Object"] = obj + self.RegionHandle: int = region_handle self._coarse_location = coarse_location self._resolved_name = resolved_name @@ -109,6 +113,10 @@ class Avatar: return self._coarse_location raise ValueError(f"Avatar {self.FullID} has no known position") + @property + def GlobalPosition(self) -> Vector3: + return self.RegionPosition + handle_to_global_pos(self.RegionHandle) + @property def Name(self) -> Optional[str]: if self.Object: @@ -120,16 +128,6 @@ class Avatar: class ObjectManager: """ Object manager for a specific region - - TODO: This model does not make sense given how region->region object handoff works. - The ObjectManager has to notice when an ObjectUpdate for an object came from a - new region and update the associated region itself. It will not receive a KillObject - from the old region in the case of physical region crossings. Right now this means - physical objects or agents that physically cross a sim border get dangling object - references. This is not the case when they teleport, even across a small distance - to a neighbor, as that will send a KillObject in the old sim. - Needs to switch to one manager managing objects for a full session rather than one - manager per region. 
""" def __init__(self, region: ProxiedRegion, use_vo_cache: bool = False): @@ -140,46 +138,36 @@ class ObjectManager: self._localid_lookup: typing.Dict[int, Object] = {} self._fullid_lookup: typing.Dict[UUID, int] = {} self._coarse_locations: typing.Dict[UUID, Vector3] = {} - self._update_futures: typing.Dict[int, List[asyncio.Future]] = collections.defaultdict(list) - self._property_futures: typing.Dict[int, List[asyncio.Future]] = collections.defaultdict(list) + self._object_futures: typing.Dict[Tuple[int, int], List[asyncio.Future]] = {} # Objects that we've seen references to but don't have data for self.missing_locals = set() self._orphan_manager = OrphanManager() - name_cache = None - session = self._region.session() - if session and session.session_manager: - name_cache = session.session_manager.name_cache + self._world_objects: WorldObjectManager = region.session().objects + name_cache = region.session().session_manager.name_cache # Use a local namecache if we don't have a session manager self.name_cache: Optional[NameCache] = name_cache or NameCache() message_handler = region.message_handler - message_handler.subscribe("ObjectUpdate", self._handle_object_update) - message_handler.subscribe("ImprovedTerseObjectUpdate", - self._handle_terse_object_update) message_handler.subscribe("CoarseLocationUpdate", self._handle_coarse_location_update) - message_handler.subscribe("ObjectUpdateCompressed", - self._handle_object_update_compressed) - message_handler.subscribe("ObjectUpdateCached", - self._handle_object_update_cached) - message_handler.subscribe("ObjectProperties", - self._handle_object_properties_generic) - message_handler.subscribe("ObjectPropertiesFamily", - self._handle_object_properties_generic) region.http_message_handler.subscribe("GetObjectCost", self._handle_get_object_cost) message_handler.subscribe("KillObject", self._handle_kill_object) + message_handler.subscribe("ObjectProperties", + self._handle_object_properties_generic) + 
message_handler.subscribe("ObjectPropertiesFamily", + self._handle_object_properties_generic) def __len__(self): return len(self._localid_lookup) @property - def all_objects(self) -> typing.Iterable[Object]: + def all_objects(self) -> Iterable[Object]: return self._localid_lookup.values() @property - def all_avatars(self) -> typing.Iterable[Avatar]: + def all_avatars(self) -> Iterable[Avatar]: av_objects = {o.FullID: o for o in self.all_objects if o.PCode == PCode.AVATAR} all_ids = set(av_objects.keys()) | self._coarse_locations.keys() @@ -193,6 +181,7 @@ class ObjectManager: resolved_name = f"{namecache_entry.FirstName} {namecache_entry.LastName}" avatars.append(Avatar( full_id=av_id, + region_handle=self._region.handle, coarse_location=coarse_location, obj=av_obj, resolved_name=resolved_name, @@ -214,7 +203,53 @@ class ObjectManager: return avatar return None - def _track_object(self, obj: Object, notify: bool = True): + def _update_existing_object(self, obj: Object, new_properties: dict, update_type: UpdateType): + new_parent_id = new_properties.get("ParentID", obj.ParentID) + new_region_handle = new_properties.get("RegionHandle", obj.RegionHandle) + new_local_id = new_properties.get("LocalID", obj.LocalID) + old_parent_id = obj.ParentID + old_region_handle = obj.RegionHandle + old_region = self._region.session().region_by_handle(old_region_handle) + + actually_updated_props = set() + + # The object just changed regions, we have to remove it from the old one. + if old_region_handle != new_region_handle: + old_region.objects.untrack_object(obj) + elif obj.LocalID != new_local_id: + # Our LocalID changed, and we deal with linkages to other prims by + # LocalID association. Break any links since our LocalID is changing. + # Could happen if we didn't mark an attachment prim dead and the parent agent + # came back into the sim. Attachment FullIDs do not change across TPs, + # LocalIDs do. This at least lets us partially recover from the bad state. 
+ new_localid = new_properties["LocalID"] + LOG.warning(f"Got an update with new LocalID for {obj.FullID}, {obj.LocalID} != {new_localid}. " + f"May have mishandled a KillObject for a prim that left and re-entered region.") + old_region.objects.untrack_object(obj) + obj.LocalID = new_localid + old_region.objects.track_object(obj) + actually_updated_props |= {"LocalID"} + + actually_updated_props |= obj.update_properties(new_properties) + + if new_region_handle != old_region_handle: + # Region just changed to this region, we should have untracked it before + # so mark it tracked on this region. + self.track_object(obj) + elif new_parent_id != old_parent_id: + # ParentID just changed and we're in the same region + self._unparent_object(obj, old_parent_id) + self._parent_object(obj, insert_at_head=True) + + if actually_updated_props: + self.run_object_update_hooks(obj, actually_updated_props, update_type) + + def _track_new_object(self, obj: Object): + self.track_object(obj) + self._world_objects.handle_new_object(obj) + self.run_object_update_hooks(obj, set(obj.to_dict().keys()), UpdateType.OBJECT_UPDATE) + + def track_object(self, obj: Object): + self._localid_lookup[obj.LocalID] = obj + self._fullid_lookup[obj.FullID] = obj.LocalID + # If it was missing, it's not missing anymore. 
@@ -229,10 +264,7 @@ class ObjectManager: assert child_obj is not None self._parent_object(child_obj) - if notify: - self._run_object_update_hooks(obj, set(obj.to_dict().keys()), UpdateType.OBJECT_UPDATE) - - def _untrack_object(self, obj: Object): + def untrack_object(self, obj: Object): former_child_ids = obj.ChildIDs[:] for child_id in former_child_ids: child_obj = self.lookup_localid(child_id) @@ -248,6 +280,8 @@ class ObjectManager: # Make sure the parent knows we went away self._unparent_object(obj, obj.ParentID) + self._cancel_futures(obj.LocalID) + # Do this last in case we only have a weak reference del self._fullid_lookup[obj.FullID] del self._localid_lookup[obj.LocalID] @@ -290,138 +324,20 @@ class ObjectManager: else: LOG.debug(f"Changing parent of {obj.LocalID}, but couldn't find old parent") - def _update_existing_object(self, obj: Object, new_properties: dict, update_type: UpdateType): - new_parent_id = new_properties.get("ParentID", obj.ParentID) - - actually_updated_props = set() - - if obj.LocalID != new_properties.get("LocalID", obj.LocalID): - # Our LocalID changed, and we deal with linkages to other prims by - # LocalID association. Break any links since our LocalID is changing. - # Could happen if we didn't mark an attachment prim dead and the parent agent - # came back into the sim. Attachment FullIDs do not change across TPs, - # LocalIDs do. This at least lets us partially recover from the bad state. - # Currently known to happen due to physical region crossings, so only debug. - new_localid = new_properties["LocalID"] - LOG.debug(f"Got an update with new LocalID for {obj.FullID}, {obj.LocalID} != {new_localid}. 
" - f"May have mishandled a KillObject for a prim that left and re-entered region.") - self._untrack_object(obj) - obj.LocalID = new_localid - self._track_object(obj, notify=False) - actually_updated_props |= {"LocalID"} - - old_parent_id = obj.ParentID - - actually_updated_props |= obj.update_properties(new_properties) - - if new_parent_id != old_parent_id: - self._unparent_object(obj, old_parent_id) - self._parent_object(obj, insert_at_head=True) - - # Common case where this may be falsy is if we get an ObjectUpdateCached - # that didn't have a changed UpdateFlags field. - if actually_updated_props: - self._run_object_update_hooks(obj, actually_updated_props, update_type) - - def _handle_object_update(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet['ObjectData']: - object_data = normalize_object_update(block) - - seen_locals.append(object_data["LocalID"]) - obj = self.lookup_fullid(object_data["FullID"]) - if obj: - self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) - else: - obj = Object(**object_data) - self._track_object(obj) - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) - - def _handle_terse_object_update(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet['ObjectData']: - object_data = normalize_terse_object_update(block) - obj = self.lookup_localid(object_data["LocalID"]) - # Can only update existing object with this message - if obj: - # Need the Object as context because decoding state requires PCode. 
- state_deserializer = ObjectStateSerializer.deserialize - object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"]) - - seen_locals.append(object_data["LocalID"]) - if obj: - self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) - else: - self.missing_locals.add(object_data["LocalID"]) - LOG.debug(f"Received terse update for unknown object {object_data['LocalID']}") - - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) - - def _handle_object_update_cached(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet['ObjectData']: - seen_locals.append(block["ID"]) - update_flags = block.deserialize_var("UpdateFlags", make_copy=False) - - # Check if we already know about the object - obj = self.lookup_localid(block["ID"]) - if obj is not None: - self._update_existing_object(obj, { - "UpdateFlags": update_flags, - }, UpdateType.OBJECT_UPDATE) - continue - - # Check if the object is in a viewer's VOCache - cached_obj_data = self.object_cache.lookup_object_data(block["ID"], block["CRC"]) - if cached_obj_data is not None: - cached_obj = normalize_object_update_compressed_data(cached_obj_data) - cached_obj["UpdateFlags"] = update_flags - self._track_object(Object(**cached_obj)) - continue - - # Don't know about it and wasn't cached. 
- self.missing_locals.add(block["ID"]) - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) - - def _handle_object_update_compressed(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet['ObjectData']: - object_data = normalize_object_update_compressed(block) - seen_locals.append(object_data["LocalID"]) - obj = self.lookup_localid(object_data["LocalID"]) - if obj: - self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) - else: - obj = Object(**object_data) - self._track_object(obj) - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) - - def _handle_object_properties_generic(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet["ObjectData"]: - object_properties = dict(block.items()) - if packet.name == "ObjectProperties": - object_properties["TextureID"] = block.deserialize_var("TextureID") - - obj = self.lookup_fullid(block["ObjectID"]) - if obj: - seen_locals.append(obj.LocalID) - self._update_existing_object(obj, object_properties, UpdateType.PROPERTIES) - else: - LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}") - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) - - def _handle_kill_object(self, packet: ProxiedMessage): - seen_locals = [] - for block in packet["ObjectData"]: - self._kill_object_by_local_id(block["ID"]) - seen_locals.append(block["ID"]) - packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) + def _cancel_futures(self, local_id: int): + # Object was killed, so need to kill any pending futures. 
+ for fut_key, futs in self._object_futures.items(): + if fut_key[0] == local_id: + for fut in futs: + fut.cancel() + break def _kill_object_by_local_id(self, local_id: int): obj = self.lookup_localid(local_id) self.missing_locals -= {local_id} child_ids: Sequence[int] + + self._cancel_futures(local_id) if obj: AddonManager.handle_object_killed(self._region.session(), self._region, obj) child_ids = obj.ChildIDs @@ -444,7 +360,98 @@ class ObjectManager: # Have to do this last, since untracking will clear child IDs if obj: - self._untrack_object(obj) + self.untrack_object(obj) + self._world_objects.handle_object_gone(obj) + + def handle_object_update(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet['ObjectData']: + object_data = normalize_object_update(block, self._region.handle) + + seen_locals.append(object_data["LocalID"]) + obj = self._world_objects.lookup_fullid(object_data["FullID"]) + if obj: + self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) + else: + obj = Object(**object_data) + self._track_new_object(obj) + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) + + def handle_terse_object_update(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet['ObjectData']: + object_data = normalize_terse_object_update(block, self._region.handle) + obj = self.lookup_localid(object_data["LocalID"]) + # Can only update existing object with this message + if obj: + # Need the Object as context because decoding state requires PCode. 
+ state_deserializer = ObjectStateSerializer.deserialize + object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"]) + + seen_locals.append(object_data["LocalID"]) + if obj: + self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) + else: + self.missing_locals.add(object_data["LocalID"]) + LOG.debug(f"Received terse update for unknown object {object_data['LocalID']}") + + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) + + def handle_object_update_cached(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet['ObjectData']: + seen_locals.append(block["ID"]) + update_flags = block.deserialize_var("UpdateFlags", make_copy=False) + + # Check if we already know about the object + obj = self.lookup_localid(block["ID"]) + if obj is not None: + self._update_existing_object(obj, { + "UpdateFlags": update_flags, + "RegionHandle": self._region.handle, + }, UpdateType.OBJECT_UPDATE) + continue + + # Check if the object is in a viewer's VOCache + cached_obj_data = self.object_cache.lookup_object_data(block["ID"], block["CRC"]) + if cached_obj_data is not None: + cached_obj = normalize_object_update_compressed_data(cached_obj_data) + cached_obj["UpdateFlags"] = update_flags + cached_obj["RegionHandle"] = self._region.handle + self._track_new_object(Object(**cached_obj)) + continue + + # Don't know about it and wasn't cached. 
+ self.missing_locals.add(block["ID"]) + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) + + def handle_object_update_compressed(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet['ObjectData']: + object_data = normalize_object_update_compressed(block, self._region.handle) + seen_locals.append(object_data["LocalID"]) + obj = self._world_objects.lookup_fullid(object_data["FullID"]) + if obj: + self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE) + else: + obj = Object(**object_data) + self._track_new_object(obj) + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) + + def _handle_object_properties_generic(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet["ObjectData"]: + object_properties = dict(block.items()) + if packet.name == "ObjectProperties": + object_properties["TextureID"] = block.deserialize_var("TextureID") + + obj = self.lookup_fullid(block["ObjectID"]) + if obj: + seen_locals.append(obj.LocalID) + self._update_existing_object(obj, object_properties, UpdateType.PROPERTIES) + else: + LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}") + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) def _handle_get_object_cost(self, flow: HippoHTTPFlow): parsed = llsd.parse_xml(flow.response.content) @@ -456,9 +463,21 @@ class ObjectManager: LOG.debug(f"Received ObjectCost for unknown {object_id}") continue obj.ObjectCosts.update(object_costs) - self._run_object_update_hooks(obj, {"ObjectCosts"}, UpdateType.COSTS) + self.run_object_update_hooks(obj, {"ObjectCosts"}, UpdateType.COSTS) + + def _handle_kill_object(self, packet: ProxiedMessage): + seen_locals = [] + for block in packet["ObjectData"]: + self._kill_object_by_local_id(block["ID"]) + seen_locals.append(block["ID"]) + packet.meta["ObjectUpdateIDs"] = tuple(seen_locals) def _handle_coarse_location_update(self, packet: ProxiedMessage): + # TODO: This could lead to weird situations when an avatar crosses a + # region border. 
Might temporarily still have a CoarseLocationUpdate containing + # the avatar in the old region, making the avatar appear to be in both regions. + # figure out best way to deal with that. Store CoarseLocations by region handle + # and always use the newest one containing a particular avatar ID? self._coarse_locations.clear() coarse_locations: typing.Dict[UUID, Vector3] = {} @@ -476,18 +495,13 @@ class ObjectManager: self._coarse_locations.update(coarse_locations) - def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: UpdateType): + def run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: UpdateType): if obj.PCode == PCode.AVATAR and "NameValue" in updated_props: if obj.NameValue: self.name_cache.update(obj.FullID, obj.NameValue.to_dict()) - if update_type == UpdateType.OBJECT_UPDATE: - update_futures = self._update_futures[obj.LocalID] - for fut in update_futures[:]: - fut.set_result(obj) - elif update_type == UpdateType.PROPERTIES: - property_futures = self._property_futures[obj.LocalID] - for fut in property_futures[:]: - fut.set_result(obj) + futures = self._object_futures.get((obj.LocalID, update_type), []) + for fut in futures[:]: + fut.set_result(obj) AddonManager.handle_object_updated(self._region.session(), self._region, obj, updated_props) def load_cache(self): @@ -501,22 +515,19 @@ class ObjectManager: self.object_cache = RegionViewerObjectCacheChain.for_region(handle, self._region.cache_id) def clear(self): + for obj in self._localid_lookup.values(): + self._world_objects.handle_object_gone(obj) self._localid_lookup.clear() self._fullid_lookup.clear() self._coarse_locations.clear() self._orphan_manager.clear() self.missing_locals.clear() - self._clear_futures(self._update_futures) - self._clear_futures(self._property_futures) + for fut in tuple(itertools.chain(*self._object_futures.values())): + fut.cancel() + self._object_futures.clear() self.object_cache = RegionViewerObjectCacheChain([]) 
self.cache_loaded = False - @staticmethod - def _clear_futures(future_dict: dict): - for future in itertools.chain(*future_dict.values()): - future.cancel() - future_dict.clear() - def request_object_properties(self, objects: typing.Union[OBJECT_OR_LOCAL, typing.Sequence[OBJECT_OR_LOCAL]])\ -> List[asyncio.Future[Object]]: if isinstance(objects, (Object, int)): @@ -547,8 +558,10 @@ class ObjectManager: fut = asyncio.Future() if local_id in unselected_ids: # Need to wait until we get our reply - local_futs = self._property_futures[local_id] + fut_key = (local_id, UpdateType.PROPERTIES) + local_futs = self._object_futures.get(fut_key, []) local_futs.append(fut) + self._object_futures[fut_key] = local_futs fut.add_done_callback(local_futs.remove) else: # This was selected so we should already have up to date info @@ -556,10 +569,10 @@ class ObjectManager: futures.append(fut) return futures - def request_missing_objects(self) -> List[Awaitable[Object]]: + def request_missing_objects(self) -> List[asyncio.Future[Object]]: return self.request_objects(self.missing_locals) - def request_objects(self, local_ids) -> List[Awaitable[Object]]: + def request_objects(self, local_ids) -> List[asyncio.Future[Object]]: """ Request object local IDs, returning a list of awaitable handles for the objects @@ -583,8 +596,110 @@ class ObjectManager: futures = [] for local_id in local_ids: fut = asyncio.Future() - local_futs = self._update_futures[local_id] + fut_key = (local_id, UpdateType.OBJECT_UPDATE) + local_futs = self._object_futures.get(fut_key, []) local_futs.append(fut) + self._object_futures[fut_key] = local_futs fut.add_done_callback(local_futs.remove) futures.append(fut) return futures + + +class WorldObjectManager: + """Manages Objects for a session's whole world""" + def __init__(self, session: Session): + self._session: Session = proxify(session) + self._fullid_lookup: Dict[UUID, Object] = {} + message_handler = self._session.message_handler + 
message_handler.subscribe("ObjectUpdate", self._handle_object_update) + message_handler.subscribe("ImprovedTerseObjectUpdate", + self._handle_terse_object_update) + message_handler.subscribe("ObjectUpdateCompressed", + self._handle_object_update_compressed) + message_handler.subscribe("ObjectUpdateCached", + self._handle_object_update_cached) + + def _wrap_region_update_handler(self, handler: Callable, message: ProxiedMessage): + """ + Dispatch an ObjectUpdate to a region's handler based on RegionHandle + + Indra doesn't care what region actually sent the message, just what + region handle is in the message, so we need a global message handler + plus dispatch. + """ + region = self._session.region_by_handle(message["RegionData"]["RegionHandle"]) + if not region: + return + return handler(region.objects, message) + + def _handle_object_update(self, message: ProxiedMessage): + self._wrap_region_update_handler(ObjectManager.handle_object_update, message) + + def _handle_terse_object_update(self, message: ProxiedMessage): + self._wrap_region_update_handler(ObjectManager.handle_terse_object_update, message) + + def _handle_object_update_compressed(self, message: ProxiedMessage): + self._wrap_region_update_handler(ObjectManager.handle_object_update_compressed, message) + + def _handle_object_update_cached(self, message: ProxiedMessage): + self._wrap_region_update_handler(ObjectManager.handle_object_update_cached, message) + + def handle_new_object(self, obj: Object): + """Called by a region's ObjectManager when a new Object is tracked""" + self._fullid_lookup[obj.FullID] = obj + + def handle_object_gone(self, obj: Object): + """Called by a region's ObjectManager on KillObject or region going away""" + self._fullid_lookup.pop(obj.FullID, None) + + def lookup_fullid(self, full_id: UUID) -> Optional[Object]: + return self._fullid_lookup.get(full_id, None) + + def __len__(self): + return len(self._fullid_lookup) + + @property + def all_objects(self) -> Iterable[Object]: + 
return self._fullid_lookup.values() + + @property + def all_avatars(self) -> Iterable[Avatar]: + return itertools.chain(*(r.objects.all_avatars for r in self._session.regions)) + + def request_missing_objects(self) -> List[asyncio.Future[Object]]: + futs = [] + for region in self._session.regions: + futs.extend(region.objects.request_missing_objects()) + return futs + + def request_object_properties(self, objects: typing.Union[Object, typing.Sequence[Object]]) \ + -> List[asyncio.Future[Object]]: + # Doesn't accept local ID unlike ObjectManager because they're ambiguous here. + if isinstance(objects, Object): + objects = (objects,) + if not objects: + return [] + + # Has to be sent to the region they belong to, so split the objects out by region handle. + objs_by_region = collections.defaultdict(list) + for obj in objects: + objs_by_region[obj.RegionHandle].append(obj) + + futs = [] + for region_handle, region_objs in objs_by_region.items(): + region = self._session.region_by_handle(region_handle) + futs.extend(region.objects.request_object_properties(region_objs)) + return futs + + async def ensure_ancestors_loaded(self, obj: Object): + """ + Ensure that the entire chain of parents above this object is loaded + + Use this to make sure the object you're dealing with isn't orphaned and + its RegionPosition can be determined. 
+ """ + region = self._session.region_by_handle(obj.RegionHandle) + while obj.ParentID: + if obj.Parent is None: + await asyncio.wait_for(region.objects.request_objects(obj.ParentID)[0], 1.0) + obj = obj.Parent diff --git a/hippolyzer/lib/proxy/region.py b/hippolyzer/lib/proxy/region.py index 52efd14..4b184aa 100644 --- a/hippolyzer/lib/proxy/region.py +++ b/hippolyzer/lib/proxy/region.py @@ -12,6 +12,7 @@ import multidict from hippolyzer.lib.base.datatypes import Vector3, UUID from hippolyzer.lib.base.message.message_handler import MessageHandler +from hippolyzer.lib.base.objects import handle_to_global_pos from hippolyzer.lib.proxy.caps_client import CapsClient from hippolyzer.lib.proxy.circuit import ProxiedCircuit from hippolyzer.lib.proxy.namecache import NameCache @@ -84,10 +85,10 @@ class ProxiedRegion: return multidict.MultiDict((x, y[1]) for x, y in self._caps.items()) @property - def global_pos(self): + def global_pos(self) -> Vector3: if self.handle is None: raise ValueError("Can't determine global region position without handle") - return Vector3(self.handle >> 32, self.handle & 0xFFffFFff) + return handle_to_global_pos(self.handle) @property def is_alive(self): diff --git a/hippolyzer/lib/proxy/sessions.py b/hippolyzer/lib/proxy/sessions.py index 94c0b0c..baf4c87 100644 --- a/hippolyzer/lib/proxy/sessions.py +++ b/hippolyzer/lib/proxy/sessions.py @@ -9,14 +9,18 @@ from typing import * from weakref import ref from hippolyzer.lib.base.datatypes import UUID +from hippolyzer.lib.base.message.message_handler import MessageHandler from hippolyzer.lib.proxy.circuit import ProxiedCircuit from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, is_asset_server_cap_name, SerializedCapData from hippolyzer.lib.proxy.namecache import NameCache +from hippolyzer.lib.proxy.objects import WorldObjectManager from hippolyzer.lib.proxy.region import ProxiedRegion, CapType if TYPE_CHECKING: from 
hippolyzer.lib.proxy.message_logger import BaseMessageLogger + from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow + from hippolyzer.lib.proxy.message import ProxiedMessage class Session: @@ -35,6 +39,9 @@ class Session: self.selected: SelectionModel = SelectionModel() self.regions: List[ProxiedRegion] = [] self.started_at = datetime.datetime.now() + self.message_handler: MessageHandler[ProxiedMessage] = MessageHandler() + self.http_message_handler: MessageHandler[HippoHTTPFlow] = MessageHandler() + self.objects = WorldObjectManager(self) self._main_region = None @property @@ -107,6 +114,12 @@ class Session: return region return None + def region_by_handle(self, handle: int) -> Optional[ProxiedRegion]: + for region in self.regions: + if region.handle == handle: + return region + return None + def open_circuit(self, near_addr, circuit_addr, transport): for region in self.regions: if region.circuit_addr == circuit_addr: diff --git a/tests/proxy/__init__.py b/tests/proxy/__init__.py index e69de29..b3587f3 100644 --- a/tests/proxy/__init__.py +++ b/tests/proxy/__init__.py @@ -0,0 +1,66 @@ +import asyncio +from typing import * +import unittest + +from hippolyzer.lib.base.datatypes import UUID +from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer +from hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol +from hippolyzer.lib.proxy.message import ProxiedMessage +from hippolyzer.lib.proxy.packets import ProxiedUDPPacket +from hippolyzer.lib.proxy.region import ProxiedRegion +from hippolyzer.lib.proxy.sessions import SessionManager + + +class MockTransport(asyncio.DatagramTransport): + def __init__(self): + super().__init__() + self.packets: List[Tuple[bytes, Tuple[str, int]]] = [] + + def sendto(self, data: Any, addr=None) -> None: + self.packets.append((data, addr)) + + +class BaseProxyTest(unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.client_addr = ("127.0.0.1", 1) + self.region_addr = ("127.0.0.1", 3) + 
self.circuit_code = 1234 + self.session_manager = SessionManager() + self.session = self.session_manager.create_session({ + "session_id": UUID.random(), + "secure_session_id": UUID.random(), + "agent_id": UUID.random(), + "circuit_code": self.circuit_code, + "sim_ip": self.region_addr[0], + "sim_port": self.region_addr[1], + "region_x": 0, + "region_y": 123, + "seed_capability": "https://test.localhost:4/foo", + }) + self.transport = MockTransport() + self.protocol = InterceptingLLUDPProxyProtocol( + self.client_addr, self.session_manager) + self.protocol.transport = self.transport + self.serializer = UDPMessageSerializer() + + async def _wait_drained(self): + await asyncio.sleep(0.001) + + def _setup_default_circuit(self): + self._setup_region_circuit(self.session.regions[-1]) + self.session.main_region = self.session.regions[-1] + + def _setup_region_circuit(self, region: ProxiedRegion): + # Not going to send a UseCircuitCode, so have to pretend we already did the + # client -> region NAT hole-punching + self.protocol.session = self.session + self.protocol.far_to_near_map[region.circuit_addr] = self.client_addr + self.session_manager.claim_session(self.session.id) + self.session.open_circuit(self.client_addr, region.circuit_addr, + self.protocol.transport) + + def _msg_to_datagram(self, msg: ProxiedMessage, src, dst, direction, socks_header=True): + serialized = self.serializer.serialize(msg) + packet = ProxiedUDPPacket(src_addr=src, dst_addr=dst, data=serialized, + direction=direction) + return packet.serialize(socks_header=socks_header) diff --git a/tests/proxy/integration/__init__.py b/tests/proxy/integration/__init__.py index e4e1414..e69de29 100644 --- a/tests/proxy/integration/__init__.py +++ b/tests/proxy/integration/__init__.py @@ -1,63 +0,0 @@ -import asyncio -from typing import * -import unittest - -from hippolyzer.lib.base.datatypes import * -from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer -from 
hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol -from hippolyzer.lib.proxy.message import ProxiedMessage -from hippolyzer.lib.proxy.packets import ProxiedUDPPacket -from hippolyzer.lib.proxy.sessions import SessionManager - - -class MockTransport(asyncio.DatagramTransport): - def __init__(self): - super().__init__() - self.packets: List[Tuple[bytes, Tuple[str, int]]] = [] - - def sendto(self, data: Any, addr=None) -> None: - self.packets.append((data, addr)) - - -class BaseIntegrationTest(unittest.IsolatedAsyncioTestCase): - def setUp(self) -> None: - self.client_addr = ("127.0.0.1", 1) - self.region_addr = ("127.0.0.1", 3) - self.circuit_code = 1234 - self.session_manager = SessionManager() - self.session = self.session_manager.create_session({ - "session_id": UUID.random(), - "secure_session_id": UUID.random(), - "agent_id": UUID.random(), - "circuit_code": self.circuit_code, - "sim_ip": self.region_addr[0], - "sim_port": self.region_addr[1], - "region_x": 1, - "region_y": 2, - "seed_capability": "https://test.localhost:4/foo", - }) - self.transport = MockTransport() - self.protocol = InterceptingLLUDPProxyProtocol( - self.client_addr, self.session_manager) - self.protocol.transport = self.transport - self.serializer = UDPMessageSerializer() - - async def _wait_drained(self): - await asyncio.sleep(0.001) - - def _setup_circuit(self): - # Not going to send a UseCircuitCode, so have to pretend we already did the - # client -> region NAT hole-punching - self.protocol.session = self.session - self.protocol.far_to_near_map[self.region_addr] = self.client_addr - self.session_manager.claim_session(self.session.id) - self.session.open_circuit(self.client_addr, self.region_addr, - self.protocol.transport) - self.session.main_region = self.session.regions[-1] - self.session.main_region.handle = 0 - - def _msg_to_datagram(self, msg: ProxiedMessage, src, dst, direction, socks_header=True): - serialized = self.serializer.serialize(msg) - packet = 
ProxiedUDPPacket(src_addr=src, dst_addr=dst, data=serialized, - direction=direction) - return packet.serialize(socks_header=socks_header) diff --git a/tests/proxy/integration/test_addons.py b/tests/proxy/integration/test_addons.py index 7df815e..73cfc1b 100644 --- a/tests/proxy/integration/test_addons.py +++ b/tests/proxy/integration/test_addons.py @@ -15,7 +15,7 @@ from hippolyzer.lib.proxy.packets import Direction from hippolyzer.lib.proxy.region import ProxiedRegion from hippolyzer.lib.proxy.sessions import Session -from . import BaseIntegrationTest +from .. import BaseProxyTest class MockAddon(BaseAddon): @@ -30,7 +30,7 @@ class MockAddon(BaseAddon): show_message(bar) -class AddonIntegrationTests(BaseIntegrationTest): +class AddonIntegrationTests(BaseProxyTest): def setUp(self) -> None: super().setUp() self.addon = MockAddon() @@ -50,7 +50,7 @@ class AddonIntegrationTests(BaseIntegrationTest): self.protocol.datagram_received(packet, self.client_addr) async def test_simple_command_setting_params(self): - self._setup_circuit() + self._setup_default_circuit() self._fake_command("foobar baz") await self._wait_drained() self.assertEqual(self.session.addon_ctx["bazquux"], "baz") diff --git a/tests/proxy/integration/test_http.py b/tests/proxy/integration/test_http.py index c9b6f22..721055a 100644 --- a/tests/proxy/integration/test_http.py +++ b/tests/proxy/integration/test_http.py @@ -13,7 +13,7 @@ from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, SerializedCapData from hippolyzer.lib.proxy.message_logger import FilteringMessageLogger from hippolyzer.lib.proxy.sessions import SessionManager -from . import BaseIntegrationTest +from .. 
import BaseProxyTest class MockAddon(BaseAddon): @@ -30,14 +30,14 @@ class SimpleMessageLogger(FilteringMessageLogger): return self._filtered_entries -class LLUDPIntegrationTests(BaseIntegrationTest): +class LLUDPIntegrationTests(BaseProxyTest): def setUp(self) -> None: super().setUp() self.addon = MockAddon() AddonManager.init([], self.session_manager, [self.addon]) self.flow_context = HTTPFlowContext() self.http_event_manager = MITMProxyEventManager(self.session_manager, self.flow_context) - self._setup_circuit() + self._setup_default_circuit() async def _pump_one_event(self): # If we don't yield then the new entry won't end up in the queue diff --git a/tests/proxy/integration/test_lludp.py b/tests/proxy/integration/test_lludp.py index 118f0f9..9f038c4 100644 --- a/tests/proxy/integration/test_lludp.py +++ b/tests/proxy/integration/test_lludp.py @@ -18,7 +18,7 @@ from hippolyzer.lib.proxy.packets import ProxiedUDPPacket, Direction from hippolyzer.lib.proxy.region import ProxiedRegion from hippolyzer.lib.proxy.sessions import Session -from . import BaseIntegrationTest +from .. 
import BaseProxyTest class MockAddon(BaseAddon): @@ -46,17 +46,17 @@ class SimpleMessageLogger(FilteringMessageLogger): return self._filtered_entries -class LLUDPIntegrationTests(BaseIntegrationTest): +class LLUDPIntegrationTests(BaseProxyTest): def setUp(self) -> None: super().setUp() self.addon = MockAddon() AddonManager.init([], self.session_manager, [self.addon]) - def _make_objectupdate_compressed(self, localid: Optional[int] = None): + def _make_objectupdate_compressed(self, localid: Optional[int] = None, handle: Optional[int] = 123): if localid is None: localid = random.getrandbits(32) - return b'\x00\x00\x00\x0c\xba\x00\r\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x03\xd0\x04\x00\x10' \ + return b'\x00\x00\x00\x0c\xba\x00\r' + struct.pack(" None: - self.session = SessionManager().create_session({ - "session_id": UUID.random(), - "secure_session_id": UUID.random(), - "agent_id": UUID.random(), - "circuit_code": 0, - "sim_ip": "127.0.0.1", - "sim_port": "1", - "region_x": 1, - "region_y": 2, - "seed_capability": "https://test.localhost:4/foo", - }) + super().setUp() self.region = ProxiedRegion(("127.0.0.1", 1), "", self.session) self.caps_client = CapsClient(self.region) diff --git a/tests/proxy/test_httpflows.py b/tests/proxy/test_httpflows.py index 63b7579..197599a 100644 --- a/tests/proxy/test_httpflows.py +++ b/tests/proxy/test_httpflows.py @@ -1,27 +1,14 @@ -import unittest - from mitmproxy.test import tflow, tutils -from hippolyzer.lib.base.datatypes import UUID from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow from hippolyzer.lib.proxy.message_logger import HTTPMessageLogEntry -from hippolyzer.lib.proxy.sessions import SessionManager + +from . 
import BaseProxyTest -class TestHTTPFlows(unittest.TestCase): +class TestHTTPFlows(BaseProxyTest): def setUp(self) -> None: - self.session_manager = SessionManager() - self.session = self.session = self.session_manager.create_session({ - "session_id": UUID.random(), - "secure_session_id": UUID.random(), - "agent_id": UUID.random(), - "circuit_code": 0, - "sim_ip": "127.0.0.1", - "region_x": 1, - "region_y": 2, - "sim_port": "1", - "seed_capability": "https://test.localhost:4/foo", - }) + super().setUp() self.region = self.session.register_region( ("127.0.0.1", 2), "https://test.localhost:4/foo", diff --git a/tests/proxy/test_object_manager.py b/tests/proxy/test_object_manager.py index c9e8fb9..fb658b4 100644 --- a/tests/proxy/test_object_manager.py +++ b/tests/proxy/test_object_manager.py @@ -7,18 +7,19 @@ from unittest import mock from hippolyzer.lib.base.datatypes import * from hippolyzer.lib.base.message.message import Block -from hippolyzer.lib.base.message.message_handler import MessageHandler from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer from hippolyzer.lib.base.objects import Object, normalize_object_update_compressed_data -from hippolyzer.lib.base.templates import ExtraParamType, SculptTypeData, SculptType +from hippolyzer.lib.base.templates import ExtraParamType from hippolyzer.lib.proxy.addons import AddonManager from hippolyzer.lib.proxy.addon_utils import BaseAddon -from hippolyzer.lib.proxy.objects import ObjectManager from hippolyzer.lib.proxy.message import ProxiedMessage as Message +from hippolyzer.lib.proxy.region import ProxiedRegion from hippolyzer.lib.proxy.templates import PCode from hippolyzer.lib.proxy.vocache import RegionViewerObjectCacheChain, RegionViewerObjectCache, ViewerObjectCacheEntry +from . 
import BaseProxyTest + OBJECT_UPDATE_COMPRESSED_DATA = ( b"\x12\x12\x10\xbf\x16XB~\x8f\xb4\xfb\x00\x1a\xcd\x9b\xe5\xd2\x04\x00\x00\t\x00\xcdG\x00\x00" @@ -34,21 +35,14 @@ OBJECT_UPDATE_COMPRESSED_DATA = ( ) -class MockSession: - def __init__(self): - self.id = UUID.random() - self.agent_id = UUID.random() - self.session_manager = None +class WrappingMessageHandler: + """Calls both the session and region-local message handlers""" + def __init__(self, region: ProxiedRegion): + self.region = region - -class MockRegion: - def __init__(self, message_handler: MessageHandler): - self.session = lambda: MockSession() - self.handle = 123 - self.circuit = mock.MagicMock() - self.cache_id = UUID.random() - self.message_handler = message_handler - self.http_message_handler = MessageHandler() + def handle(self, message: Message): + self.region.session().message_handler.handle(message) + self.region.message_handler.handle(message) class ObjectTrackingAddon(BaseAddon): @@ -63,28 +57,32 @@ class ObjectTrackingAddon(BaseAddon): self.events.append(("kill", obj)) -class ObjectManagerTestMixin(unittest.TestCase): +class ObjectManagerTestMixin(BaseProxyTest): def setUp(self) -> None: - self.message_handler = MessageHandler() - self.region = MockRegion(self.message_handler) + super().setUp() + self._setup_default_circuit() + self.region = self.session.main_region + self.message_handler = WrappingMessageHandler(self.region) patched = mock.patch('hippolyzer.lib.proxy.vocache.RegionViewerObjectCacheChain.for_region') self.addCleanup(patched.stop) self.mock_get_region_object_cache_chain = patched.start() self.mock_get_region_object_cache_chain.return_value = RegionViewerObjectCacheChain([]) - self.object_manager = ObjectManager(self.region, use_vo_cache=True) # type: ignore + self.object_manager = self.region.objects self.serializer = UDPMessageSerializer() self.deserializer = UDPMessageDeserializer(message_cls=Message) self.object_addon = ObjectTrackingAddon() AddonManager.init([], None, 
[self.object_addon]) def _create_object_update(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None, - pcode=None, namevalue=None) -> Message: + pcode=None, namevalue=None, region_handle=None) -> Message: pos = pos if pos is not None else (1.0, 2.0, 3.0) rot = rot if rot is not None else (0.0, 0.0, 0.0, 1.0) pcode = pcode if pcode is not None else PCode.PRIMITIVE + if region_handle is None: + region_handle = 123 msg = Message( "ObjectUpdate", - Block("RegionData", RegionHandle=123, TimeDilation=123), + Block("RegionData", RegionHandle=region_handle, TimeDilation=123), Block( "ObjectData", ID=local_id if local_id is not None else random.getrandbits(32), @@ -123,10 +121,10 @@ class ObjectManagerTestMixin(unittest.TestCase): return self.deserializer.deserialize(self.serializer.serialize(msg)) def _create_object(self, local_id=None, full_id=None, parent_id=None, pos=None, rot=None, - pcode=None, namevalue=None) -> Object: + pcode=None, namevalue=None, region_handle=None) -> Object: msg = self._create_object_update( local_id=local_id, full_id=full_id, parent_id=parent_id, pos=pos, rot=rot, - pcode=pcode, namevalue=namevalue) + pcode=pcode, namevalue=namevalue, region_handle=region_handle) self.message_handler.handle(msg) return self.object_manager.lookup_fullid(msg["ObjectData"]["FullID"]) @@ -139,14 +137,14 @@ class ObjectManagerTestMixin(unittest.TestCase): ) ) - def _kill_object(self, obj: Object): - self.message_handler.handle(self._create_kill_object(obj.LocalID)) + def _kill_object(self, local_id: int): + self.message_handler.handle(self._create_kill_object(local_id)) def _get_avatar_positions(self) -> Dict[UUID, Vector3]: return {av.FullID: av.RegionPosition for av in self.object_manager.all_avatars} -class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): +class RegionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncioTestCase): def test_basic_tracking(self): """Does creating an object result in it being tracked?""" 
msg = self._create_object_update() @@ -171,7 +169,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): _child = self._create_object(local_id=2, parent_id=1) parent = self._create_object(local_id=1) # This should orphan the child again - self._kill_object(parent) + self._kill_object(parent.LocalID) parent = self._create_object(local_id=1) # We should not have picked up any children self.assertSequenceEqual([], parent.ChildIDs) @@ -182,7 +180,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): _parent = self._create_object(local_id=2, parent_id=1) grandparent = self._create_object(local_id=1) # KillObject implicitly kills all known descendents at that point - self._kill_object(grandparent) + self._kill_object(grandparent.LocalID) self.assertEqual(0, len(self.object_manager)) def test_hierarchy_avatar_not_killed(self): @@ -191,7 +189,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): grandparent = self._create_object(local_id=1) # KillObject should only "unsit" child avatars (does this require an ObjectUpdate # or is ParentID=0 implied?) 
- self._kill_object(grandparent) + self._kill_object(grandparent.LocalID) self.assertEqual(2, len(self.object_manager)) self.assertIsNotNone(self.object_manager.lookup_localid(2)) @@ -284,6 +282,10 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): self.assertEqual(parent.RegionPosition, (0.0, 0.0, 0.0)) self.assertEqual(child.RegionPosition, (1.0, 2.0, 0.0)) + def test_global_position(self): + obj = self._create_object(pos=(0.0, 0.0, 0.0)) + self.assertEqual(obj.GlobalPosition, (0.0, 123.0, 0.0)) + def test_avatar_locations(self): agent1_id = UUID.random() agent2_id = UUID.random() @@ -312,7 +314,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): }) # Simulate missing parent for agent - self._kill_object(seat_object) + self._kill_object(seat_object.LocalID) self.assertDictEqual(self._get_avatar_positions(), { # Agent is seated, but we don't know its parent. We have # to use the coarse location. @@ -323,7 +325,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): # If the object is killed and no coarse pos, it shouldn't be in the dict # CoarseLocationUpdates are expected to be complete, so any agents missing # are no longer in the sim. 
- self._kill_object(avatar_obj) + self._kill_object(avatar_obj.LocalID) self.message_handler.handle(Message( "CoarseLocationUpdate", Block("AgentData", AgentID=agent2_id), @@ -342,6 +344,8 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): self.assertDictEqual(self._get_avatar_positions(), { agent2_id: Vector3(2, 3, math.inf), }) + agent2_avatar = self.object_manager.lookup_avatar(agent2_id) + self.assertEqual(agent2_avatar.GlobalPosition, Vector3(2, 126, math.inf)) def test_name_cache(self): # Receiving an update with a NameValue for an avatar should update NameCache @@ -378,12 +382,6 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): 'Text': None, 'TextColor': None, 'MediaURL': None, - 'ExtraParams': { - ExtraParamType.SCULPT: { - 'Texture': UUID('89556747-24cb-43ed-920b-47caed15465f'), - 'TypeData': SculptTypeData(Type=SculptType.NONE, Invert=True, Mirror=False) - } - }, 'Sound': None, 'SoundGain': None, 'SoundFlags': None, @@ -406,10 +404,12 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): 'PathSkew': 0, 'ProfileBegin': 0, 'ProfileEnd': 0, - 'ProfileHollow': 0 + 'ProfileHollow': 0, } filtered_normalized = {k: v for k, v in normalized.items() if k in expected} - self.assertEqual(filtered_normalized, expected) + self.assertDictEqual(filtered_normalized, expected) + sculpt_texture = normalized["ExtraParams"][ExtraParamType.SCULPT]["Texture"] + self.assertEqual(sculpt_texture, UUID('89556747-24cb-43ed-920b-47caed15465f')) self.assertIsNotNone(normalized['TextureAnim']) self.assertIsNotNone(normalized['TextureEntry']) @@ -425,6 +425,7 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): ]) cache_msg = Message( 'ObjectUpdateCached', + Block("RegionData", TimeDilation=102, RegionHandle=123), Block( "ObjectData", ID=1234, @@ -441,10 +442,8 @@ class ObjectManagerTests(ObjectManagerTestMixin, unittest.TestCase): # Flags from the ObjectUpdateCached should have been merged in 
self.assertEqual(obj.UpdateFlags, 4321) - -class AsyncObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncioTestCase): async def test_request_objects(self): - # request four objects, two of which won't receive an ObjectUpdate + # request five objects, three of which won't receive an ObjectUpdate futures = self.object_manager.request_objects((1234, 1235, 1236, 1237)) self._create_object(1234) self._create_object(1235) @@ -454,9 +453,6 @@ class AsyncObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncioTe self.assertEqual(set(o.LocalID for o in objects), {1234, 1235}) pending = list(pending) self.assertEqual(2, len(pending)) - # The other futures being resolved should have removed them from the dict - pending_futures = sum(len(x) for x in self.object_manager._update_futures.values()) - self.assertEqual(2, pending_futures) pending_1, pending_2 = pending # Timing out should cancel @@ -464,7 +460,94 @@ class AsyncObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncioTe await asyncio.wait_for(pending_1, 0.00001) self.assertTrue(pending_1.cancelled()) + fut = self.object_manager.request_objects(1238)[0] + self._kill_object(1238) + self.assertTrue(fut.cancelled()) + # Object manager being cleared due to region death should cancel self.assertFalse(pending_2.cancelled()) self.object_manager.clear() self.assertTrue(pending_2.cancelled()) + # The clear should have triggered the objects to be removed from the world view as well + self.assertEqual(0, len(self.session.objects)) + + +class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncioTestCase): + def setUp(self) -> None: + super().setUp() + self.second_region = self.session.register_region( + ("127.0.0.1", 9), "https://localhost:5", 124 + ) + self._setup_region_circuit(self.second_region) + + def test_get_fullid(self): + obj = self._create_object() + self.assertIs(self.session.objects.lookup_fullid(obj.FullID), obj) + self._kill_object(obj.LocalID) + 
self.assertIsNone(self.session.objects.lookup_fullid(obj.FullID)) + + def test_region_handle_change(self): + obj = self._create_object(region_handle=123) + self.assertEqual(obj.RegionHandle, 123) + self.assertIs(self.region.objects.lookup_fullid(obj.FullID), obj) + self.assertIs(self.region.objects.lookup_localid(obj.LocalID), obj) + + # Send an update moving the object to the new region + self._create_object(local_id=~obj.LocalID & 0xFFffFFff, full_id=obj.FullID, region_handle=124) + self.assertEqual(obj.RegionHandle, 124) + self.assertIsNone(self.region.objects.lookup_fullid(obj.FullID)) + self.assertIsNone(self.region.objects.lookup_localid(obj.LocalID)) + self.assertIs(self.second_region.objects.lookup_fullid(obj.FullID), obj) + self.assertIs(self.second_region.objects.lookup_localid(obj.LocalID), obj) + self.assertEqual(1, len(self.session.objects)) + self.assertEqual(0, len(self.region.objects)) + self.assertEqual(1, len(self.second_region.objects)) + + def test_linkset_region_handle_change(self): + parent = self._create_object(region_handle=123) + child = self._create_object(region_handle=123, parent_id=parent.LocalID) + self._create_object(local_id=~parent.LocalID & 0xFFffFFff, full_id=parent.FullID, region_handle=124) + # Children reference their parents, not the other way around. Moving this to a new region + # should have cleared the list because it now has no children in the same region. 
+ self.assertEqual([], parent.ChildIDs) + # Move the child to the same region + self._create_object( + local_id=child.LocalID, full_id=child.FullID, region_handle=124, parent_id=parent.LocalID) + # Child should be back in the children list + self.assertEqual([child.LocalID], parent.ChildIDs) + self.assertEqual(parent.LocalID, child.ParentID) + self.assertEqual(0, len(self.region.objects)) + self.assertEqual(2, len(self.second_region.objects)) + self.assertEqual(0, len(self.region.objects.missing_locals)) + self.assertEqual(0, len(self.second_region.objects.missing_locals)) + + def test_all_objects(self): + obj = self._create_object() + self.assertEqual([obj], list(self.session.objects.all_objects)) + + def test_all_avatars(self): + obj = self._create_object(pcode=PCode.AVATAR) + av_list = list(self.session.objects.all_avatars) + self.assertEqual(1, len(av_list)) + self.assertEqual(obj, av_list[0].Object) + + async def test_requesting_properties(self): + obj = self._create_object() + futs = self.session.objects.request_object_properties(obj) + self.region.message_handler.handle(Message( + "ObjectProperties", + Block("ObjectData", ObjectID=obj.FullID, Name="Foobar", TextureID=b""), + )) + await asyncio.wait_for(futs[0], timeout=0.0001) + self.assertEqual(obj.Name, "Foobar") + + async def test_ensure_ancestors_loaded(self): + child = self._create_object(region_handle=123, parent_id=1) + parentless = self._create_object(region_handle=123) + + async def _create_after(): + await asyncio.sleep(0.001) + self._create_object(region_handle=123, local_id=child.ParentID) + asyncio.create_task(_create_after()) + await self.session.objects.ensure_ancestors_loaded(child) + await self.session.objects.ensure_ancestors_loaded(parentless)