15 Commits

Author  SHA1  Message  Date
Salad Dais  4962d8e7bf  Add example addon for debugging object animations starting / stopping  2025-06-15 17:44:40 +00:00
Salad Dais  a652779cc5  Add object inventory helpers to region object manager  2025-06-15 17:44:03 +00:00
Salad Dais  d7092e7733  Track animations for avatars and objects  2025-06-14 23:33:53 +00:00
Salad Dais  8b5a7ebecf  Add RLV at home  2025-06-14 07:48:19 +00:00
Salad Dais  8effd431a6  Some typing fixups  2025-06-14 07:06:18 +00:00
Salad Dais  22fb44ef28  Move asset_type helper to WearableType, where it belongs  2025-06-14 03:21:44 +00:00
Salad Dais  c8dc67ea37  More inventory / wearables updates  2025-06-13 09:26:42 +00:00
Salad Dais  0dbba40fe1  Serialization template updates  2025-06-09 13:18:01 +00:00
Salad Dais  97e567be77  More inventory fixups  2025-06-09 13:17:42 +00:00
Salad Dais  76216ee390  More inventory code cleanup  2025-06-07 10:00:03 +00:00
Salad Dais  c60c2819ac  Add more AIS-related util functions  2025-06-06 12:43:57 +00:00
Salad Dais  7cbef457cf  Update inventory handling code  2025-06-05 16:33:26 +00:00
Salad Dais  4916bdc543  Relax UDP serialization behavior when previous var blocks missing  2025-06-05 16:08:55 +00:00
Salad Dais  bb0e88e460  Add more inventory-related utilities  2025-06-05 00:46:22 +00:00
Salad Dais  46e598cded  Don't use setup.py for bundling  2025-05-26 19:15:33 +00:00
17 changed files with 708 additions and 155 deletions

View File

@@ -27,11 +27,11 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip setuptools wheel
python -m pip install --upgrade pip setuptools wheel build
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Build
run: >-
python setup.py sdist bdist_wheel
python -m build
# We do this since failures on test.pypi aren't that bad
- name: Publish to Test PyPI
if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'

View File

@@ -0,0 +1,125 @@
"""
Debugger for detecting when animations within an object get started or stopped.
Useful for tracking down animation sequence-related bugs within your LSL scripts,
or for debugging automatic animation-stopping behavior in the viewer.
If an animation unexpectedly stops and nobody requested it be stopped, it's a potential viewer bug (or priority issue).
If an animation unexpectedly stops and the viewer requested it be stopped, it's also a potential viewer bug.
If an animation unexpectedly stops and only the server requested it be stopped, it's a potential script / server bug.
"""
from typing import *
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.addon_utils import show_message
class AnimTrackerAddon(BaseAddon):
should_track_anims: bool = SessionProperty(False)
anims_lookup: Dict[UUID, str] = SessionProperty(dict)
last_tracker_anims: Set[UUID] = SessionProperty(set)
def _format_anim_diffs(self, started_anims: Set[UUID], stopped_anims: Set[UUID]):
added_strs = [f"+{self.anims_lookup[x]!r}" for x in started_anims]
removed_strs = [f"-{self.anims_lookup[x]!r}" for x in stopped_anims]
return ", ".join(removed_strs + added_strs)
@handle_command()
async def track_anims(self, session: Session, region: ProxiedRegion):
"""Track when animations within this object get started or stopped"""
if self.should_track_anims:
self.last_tracker_anims.clear()
self.anims_lookup.clear()
selected = region.objects.lookup_localid(session.selected.object_local)
if not selected:
return
self.should_track_anims = True
object_items = await region.objects.request_object_inv(selected)
anims: Dict[UUID, str] = {}
for item in object_items:
if item.type != AssetType.ANIMATION:
continue
anims[item.true_asset_id] = item.name
self.anims_lookup = anims
@handle_command()
async def stop_tracking_anims(self, _session: Session, _region: ProxiedRegion):
"""Stop reporting differences"""
if self.should_track_anims:
self.should_track_anims = False
self.last_tracker_anims.clear()
self.anims_lookup.clear()
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
if not self.should_track_anims:
return
if message.name != "AgentAnimation" or message.direction != Direction.OUT:
# AgentAnimation is the message the viewer uses to manually request starting or stopping animations.
# We don't care about other messages; we're only interested in distinguishing cases where the viewer
# specifically requested something from cases where the server did something on its own.
return
av = region.objects.lookup_avatar(session.agent_id)
if not av or not av.Object:
print("Somehow didn't know about our own av object?")
return
current_anims = set([x for x in av.Object.Animations if x in self.anims_lookup])
started_anims: Set[UUID] = set()
stopped_anims: Set[UUID] = set()
for block in message["AnimationList"]:
anim_id = block["AnimID"]
if anim_id not in self.anims_lookup:
continue
start_anim = block["StartAnim"]
already_started = anim_id in current_anims
if start_anim == already_started:
# No change
continue
if start_anim:
started_anims.add(anim_id)
else:
stopped_anims.add(anim_id)
if started_anims or stopped_anims:
show_message("Viewer Requested Anims: " + self._format_anim_diffs(started_anims, stopped_anims))
def handle_object_updated(self, session: Session, region: ProxiedRegion,
obj: Object, updated_props: Set[str], msg: Optional[Message]):
if not self.should_track_anims:
return
if obj.FullID != session.agent_id:
return
if "Animations" not in updated_props:
return
current_anims = set([x for x in obj.Animations if x in self.anims_lookup])
started_anims = current_anims - self.last_tracker_anims
stopped_anims = self.last_tracker_anims - current_anims
self.last_tracker_anims.clear()
self.last_tracker_anims.update(current_anims)
if started_anims or stopped_anims:
show_message("Anim Diffs: " + self._format_anim_diffs(started_anims, stopped_anims))
addons = [AnimTrackerAddon()]
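
Note (not part of the diff): the core of the reporting above is just a pair of set differences against the last reported state. A minimal standalone sketch of that diffing, using made-up animation names:

# Minimal sketch: compare the previously reported animation set against the current
# one, then report what stopped and what started, in the same "-'x', +'y'" style.
last_anims = {"sit_pose"}
current_anims = {"typing_anim"}
started = current_anims - last_anims
stopped = last_anims - current_anims
print(", ".join([f"-{a!r}" for a in stopped] + [f"+{a!r}" for a in started]))
# -> -'sit_pose', +'typing_anim'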

View File

@@ -0,0 +1,53 @@
"""
You don't need RLV, we have RLV at home.
RLV at home:
"""
from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ChatType
from hippolyzer.lib.proxy.addon_utils import BaseAddon, send_chat
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
def send_rlv_chat(channel: int, message: str):
# Reply on the requested channel as normal local chat.
send_chat(channel=channel, message=message, chat_type=ChatType.NORMAL)
class RLVAtHomeAddon(BaseAddon):
"""
Addon for pretending to be an RLV-enabled viewer
Useful if you only want to support a specific subset of RLV behaviours rather than everything
RLV normally allows, or if you want to override some RLV builtins.
"""
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
behaviour: str, options: List[str], param: str) -> bool | None:
# print(behaviour, options, param)
if behaviour == "clear":
return True
elif behaviour in ("versionnum", "versionnew", "version"):
# People tend to just check that this returned anything at all. Just say we're 2.0.0 for all of these.
send_rlv_chat(int(param), "2.0.0")
return True
elif behaviour == "getinv":
# Pretend we don't have anything
send_rlv_chat(int(param), "")
return True
elif behaviour == "sit":
# Sure, we can sit on stuff, whatever.
region.circuit.send(Message(
'AgentRequestSit',
Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
Block('TargetObject', TargetID=UUID(options[0]), Offset=(0, 0, 0)),
))
return True
return None
addons = [RLVAtHomeAddon()]
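
For context (an illustrative sketch, not part of the diff): an RLV command arrives via llOwnerSay roughly as "@behaviour:options=param", and for query commands the param is the reply channel. A single-command parse looks something like this:

# Illustrative only -- rough shape of one RLV command string and how it maps onto the
# handle_rlv_command() arguments above. Real messages may chain several commands.
raw = "@versionnum=2222"
body, _, param = raw.lstrip("@").partition("=")
behaviour, _, option_str = body.partition(":")
options = option_str.split(";") if option_str else []
assert (behaviour, options, param) == ("versionnum", [], "2222")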

View File

@@ -163,9 +163,9 @@ class InventoryBase(SchemaBase):
writer.write("\t{\n")
# Make sure the ID field always comes first, if there is one.
fields_dict = {}
fields_dict: Dict[str, dataclasses.Field] = {}
if hasattr(self, "ID_ATTR"):
fields_dict = {getattr(self, "ID_ATTR"): None}
fields_dict = {getattr(self, "ID_ATTR"): dataclasses.field()}
# update()ing will put all fields that aren't yet in the dict after the ID attr.
fields_dict.update(self._get_fields_dict())
@@ -226,15 +226,17 @@ class InventoryModel(InventoryBase):
return model
@classmethod
def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> InventoryModel:
def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> Self:
model = cls()
for obj_dict in llsd_val:
obj = None
for inv_type in INVENTORY_TYPES:
if inv_type.ID_ATTR in obj_dict:
if (obj := inv_type.from_llsd(obj_dict, flavor)) is not None:
model.add(obj)
break
LOG.warning(f"Unknown object type {obj_dict!r}")
if obj is None:
LOG.warning(f"Unknown object type {obj_dict!r}")
return model
@property
@@ -258,7 +260,7 @@ class InventoryModel(InventoryBase):
def all_items(self) -> Iterable[InventoryItem]:
for node in self.nodes.values():
if not isinstance(node, InventoryContainerBase):
yield node
yield node # type: ignore
def __eq__(self, other):
if not isinstance(other, InventoryModel):
@@ -354,8 +356,20 @@ class InventoryModel(InventoryBase):
def __contains__(self, item: UUID):
return item in self.nodes
def get(self, item: UUID) -> Optional[InventoryNodeBase]:
return self.nodes.get(item)
def get(self, key: UUID) -> Optional[InventoryNodeBase]:
return self.nodes.get(key)
def get_category(self, key: UUID) -> InventoryCategory:
node = self.get(key)
if not isinstance(node, InventoryCategory):
raise ValueError(f"{node!r} is not a category")
return node
def get_item(self, key: UUID) -> InventoryItem:
node = self.get(key)
if not isinstance(node, InventoryItem):
raise ValueError(f"{node!r} is not an item")
return node
@dataclasses.dataclass
@@ -375,6 +389,21 @@ class InventoryPermissions(InventoryBase):
# It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
is_owner_group: Optional[int] = schema_field(SchemaInt, default=None, llsd_only=True)
@classmethod
def make_default(cls) -> Self:
return cls(
base_mask=0xFFffFFff,
owner_mask=0xFFffFFff,
group_mask=0,
everyone_mask=0,
next_owner_mask=0x82000,
creator_id=UUID.ZERO,
owner_id=UUID.ZERO,
last_owner_id=UUID.ZERO,
group_id=UUID.ZERO,
is_owner_group=None
)
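
A quick decode of that next_owner_mask default (hedged: the 1 << 19 "move" bit isn't shown in this excerpt, but it is the standard PERM_MOVE bit; TRANSFER is defined as 1 << 13 in the Permissions flags further down):

# 0x82000 == (1 << 19) | (1 << 13), i.e. "move" plus "transfer".
assert 0x82000 == (1 << 19) | (1 << 13)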
@dataclasses.dataclass
class InventorySaleInfo(InventoryBase):
@@ -383,17 +412,22 @@ class InventorySaleInfo(InventoryBase):
sale_type: SaleType = schema_field(SchemaEnumField(SaleType))
sale_price: int = schema_field(SchemaInt)
@classmethod
def make_default(cls) -> Self:
return cls(sale_type=SaleType.NOT, sale_price=10)
class _HasName(abc.ABC):
class _HasBaseNodeAttrs(abc.ABC):
"""
Only exists so we can declare that all subclasses have these attributes without forcing
a particular serialization order, as would happen if they were declared on InventoryNodeBase.
"""
name: str
type: AssetType
@dataclasses.dataclass
class InventoryNodeBase(InventoryBase, _HasName):
class InventoryNodeBase(InventoryBase, _HasBaseNodeAttrs):
ID_ATTR: ClassVar[str]
parent_id: Optional[UUID] = schema_field(SchemaUUID)
@@ -531,7 +565,7 @@ class InventoryCategory(InventoryContainerBase):
)
@classmethod
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
if flavor == "ais" and "type" not in inv_dict:
inv_dict = inv_dict.copy()
inv_dict["type"] = AssetType.CATEGORY
@@ -583,9 +617,9 @@ class InventoryItem(InventoryNodeBase):
return self.asset_id
return self.shadow_id ^ MAGIC_ID
def to_inventory_data(self) -> Block:
def to_inventory_data(self, block_name: str = "InventoryData") -> Block:
return Block(
"InventoryData",
block_name,
ItemID=self.item_id,
FolderID=self.parent_id,
CallbackID=0,
@@ -640,7 +674,7 @@ class InventoryItem(InventoryNodeBase):
),
name=block["Name"],
desc=block["Description"],
creation_date=block["CreationDate"],
creation_date=SchemaDate.from_llsd(block["CreationDate"], "legacy"),
)
def to_llsd(self, flavor: str = "legacy"):
@@ -657,7 +691,7 @@ class InventoryItem(InventoryNodeBase):
return val
@classmethod
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
if flavor == "ais" and "linked_id" in inv_dict:
# Links get represented differently than other items for whatever reason.
# This is incredibly annoying; under *NIX there's nothing really special about symlinks.

View File

@@ -164,7 +164,7 @@ def parse_schema_line(line: str):
@dataclasses.dataclass
class SchemaBase(abc.ABC):
@classmethod
def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
def _get_fields_dict(cls, llsd_flavor: Optional[str] = None) -> Dict[str, dataclasses.Field]:
fields_dict = {}
for field in dataclasses.fields(cls):
field_name = field.name
@@ -174,7 +174,7 @@ class SchemaBase(abc.ABC):
return fields_dict
@classmethod
def from_str(cls, text: str):
def from_str(cls, text: str) -> Self:
return cls.from_reader(StringIO(text))
@classmethod
@@ -183,17 +183,17 @@ class SchemaBase(abc.ABC):
pass
@classmethod
def from_bytes(cls, data: bytes):
def from_bytes(cls, data: bytes) -> Self:
return cls.from_str(data.decode("utf8"))
@classmethod
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy") -> Self:
fields = cls._get_fields_dict(llsd_flavor=flavor)
obj_dict = {}
try:
for key, val in inv_dict.items():
if key in fields:
field: dataclasses.Field = fields[key]
field = fields[key]
key = field.name
spec = field.metadata.get("spec")
# Not a real key, an internal var on our dataclass
@@ -262,5 +262,5 @@ class SchemaBase(abc.ABC):
pass
@classmethod
def _obj_from_dict(cls, obj_dict: Dict):
def _obj_from_dict(cls, obj_dict: Dict) -> Self:
return cls(**obj_dict) # type: ignore

View File

@@ -35,6 +35,12 @@ class HippoLLSDXMLFormatter(base_llsd.serde_xml.LLSDXMLFormatter, HippoLLSDBaseF
def __init__(self):
super().__init__()
def _generate(self, something):
if isinstance(something, int) and type(something) is not int:
# The lookup in the underlying library will fail if we don't convert IntEnums to actual ints.
something = int(something)
return super()._generate(something)
class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, HippoLLSDBaseFormatter):
def __init__(self):

View File

@@ -57,7 +57,7 @@ class MessageHandler(Generic[_T, _K]):
@contextlib.contextmanager
def subscribe_async(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
take: Optional[bool] = None) -> ContextManager[Callable[[], Awaitable[_T]]]:
take: Optional[bool] = None) -> Generator[Callable[[], Awaitable[_T]], None, None]:
"""
Subscribe to a set of message matching predicate while within a block
@@ -92,6 +92,7 @@ class MessageHandler(Generic[_T, _K]):
finally:
for n in notifiers:
n.unsubscribe(_handler_wrapper)
return None
def wait_for(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
timeout: Optional[float] = None, take: Optional[bool] = None) -> Awaitable[_T]:

View File

@@ -75,7 +75,7 @@ class UDPMessageSerializer:
# We're going to pop off keys as we go, so shallow copy the dict.
blocks = copy.copy(msg.blocks)
missing_block = None
missing_blocks: List[MessageTemplateBlock] = []
# Iterate based on the order of the blocks in the message template
for tmpl_block in current_template.blocks:
block_list = blocks.pop(tmpl_block.name, None)
@@ -83,13 +83,21 @@ class UDPMessageSerializer:
# omitted by SL. Not an error unless another block containing data follows it.
# Keep track.
if block_list is None:
missing_block = tmpl_block.name
missing_blocks.append(tmpl_block)
logger.debug("No block %s, bailing out" % tmpl_block.name)
continue
# Had a missing block before, but we found one later in the template?
elif missing_block:
raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_block}")
self._serialize_block(body_writer, tmpl_block, block_list)
# Had a missing block before, but we specified one defined later in the template?
elif missing_blocks:
if not all(x.block_type == MsgBlockType.MBT_VARIABLE for x in missing_blocks):
raise ValueError(f"Unexpected {tmpl_block.name} block after missing {missing_blocks!r}")
# This is okay; we just need to write empty block lists for all the variable blocks that came before.
# Normally we'd omit these entirely to match SL behavior, but in this case we need the
# empty blocks so the decoder will decode the later blocks as the correct block type.
for missing_block in missing_blocks:
self._serialize_block_list(body_writer, missing_block, MsgBlockList())
missing_blocks.clear()
self._serialize_block_list(body_writer, tmpl_block, block_list)
if blocks:
raise KeyError(f"Unexpected {tuple(blocks.keys())!r} blocks in {msg.name}")
@@ -105,8 +113,8 @@ class UDPMessageSerializer:
writer.write(se.U8, len(msg.acks))
return writer.copy_buffer()
def _serialize_block(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
block_list: MsgBlockList):
def _serialize_block_list(self, writer: se.BufferWriter, tmpl_block: MessageTemplateBlock,
block_list: MsgBlockList):
block_count = len(block_list)
# Multiple block type means there is a static number of blocks
if tmpl_block.block_type == MsgBlockType.MBT_MULTIPLE:
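
A note on what the relaxed behavior above amounts to on the wire (a sketch, assuming variable block lists are prefixed by a single U8 repeat count as in the LLUDP template system):

# Sketch: a skipped variable block list is now written out as just its count byte (0),
# so later blocks that were supplied still decode as the correct block type.
import struct
empty_variable_block_list = struct.pack("B", 0)  # count = 0, no block bodies follow
assert empty_variable_block_list == b"\x00"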

View File

@@ -125,12 +125,14 @@ class Object(recordclass.RecordClass, use_weakref=True): # type: ignore
SitName: Optional[str] = None
TextureID: Optional[List[UUID]] = None
RegionHandle: Optional[int] = None
Animations: Optional[List[UUID]] = None
def __init__(self, **_kwargs):
""" set up the object attributes """
self.ExtraParams = self.ExtraParams or {} # Variable 1
self.ObjectCosts = self.ObjectCosts or {}
self.ChildIDs = []
self.Animations = self.Animations or []
# Same as parent, contains weakref proxies.
self.Children: List[Object] = []
@@ -253,7 +255,7 @@ def normalize_object_update(block: Block, handle: int):
# OwnerID is only set in this packet if a sound is playing. Don't allow
# ObjectUpdates to clobber _real_ OwnerIDs we had from ObjectProperties
# with a null UUID.
if object_data["OwnerID"] == UUID():
if object_data["OwnerID"] == UUID.ZERO:
del object_data["OwnerID"]
del object_data["Flags"]
del object_data["Gain"]
@@ -309,7 +311,7 @@ def normalize_object_update_compressed_data(data: bytes):
compressed["SoundFlags"] = 0
compressed["SoundGain"] = 0.0
compressed["SoundRadius"] = 0.0
compressed["Sound"] = UUID()
compressed["Sound"] = UUID.ZERO
if compressed["TextureEntry"] is None:
compressed["TextureEntry"] = tmpls.TextureEntryCollection()
@@ -323,7 +325,7 @@ def normalize_object_update_compressed_data(data: bytes):
# Don't clobber OwnerID in case the object has a proper one from
# a previous ObjectProperties. OwnerID isn't expected to be populated
# on ObjectUpdates unless an attached sound is playing.
if object_data["OwnerID"] == UUID():
if object_data["OwnerID"] == UUID.ZERO:
del object_data["OwnerID"]
return object_data

View File

@@ -16,7 +16,7 @@ import numpy as np
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.helpers import BiDiDict
from hippolyzer.lib.base.namevalue import NameValuesSerializer
from hippolyzer.lib.base.serialization import ParseContext
@@ -60,6 +60,7 @@ _ASSET_TYPE_BIDI: BiDiDict[str] = BiDiDict({
@se.enum_field_serializer("RezObject", "InventoryData", "Type")
@se.enum_field_serializer("RezScript", "InventoryBlock", "Type")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "Type")
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "Type")
class AssetType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
@@ -145,6 +146,8 @@ _INV_TYPE_BIDI: BiDiDict[str] = BiDiDict({
@se.enum_field_serializer("RezObject", "InventoryData", "InvType")
@se.enum_field_serializer("RezScript", "InventoryBlock", "InvType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "InvType")
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "InvType")
@se.enum_field_serializer("BulkUpdateInventory", "FolderData", "Type")
class InventoryType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
@@ -282,6 +285,12 @@ class WearableType(IntEnum):
PHYSICS = 15
UNIVERSAL = 16
@property
def asset_type(self) -> AssetType:
if self in (WearableType.HAIR, WearableType.SKIN, WearableType.EYES, WearableType.SHAPE):
return AssetType.BODYPART
return AssetType.CLOTHING
def _register_permissions_flags(message_name, block_name):
def _wrapper(flag_cls):
@@ -301,6 +310,7 @@ def _register_permissions_flags(message_name, block_name):
@_register_permissions_flags("RezObject", "InventoryData")
@_register_permissions_flags("RezScript", "InventoryBlock")
@_register_permissions_flags("RezMultipleAttachmentsFromInv", "ObjectData")
@_register_permissions_flags("BulkUpdateInventory", "ItemData")
class Permissions(IntFlag):
TRANSFER = (1 << 13)
MODIFY = (1 << 14)
@@ -322,6 +332,7 @@ _SALE_TYPE_LEGACY_NAMES = ("not", "orig", "copy", "cntn")
@se.enum_field_serializer("ObjectSaleInfo", "ObjectData", "SaleType")
@se.enum_field_serializer("BulkUpdateInventory", "ItemData", "SaleType")
@se.enum_field_serializer("ObjectProperties", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectPropertiesFamily", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectBuy", "ObjectData", "SaleType")
@@ -343,6 +354,32 @@ class SaleType(LookupIntEnum):
return _SALE_TYPE_LEGACY_NAMES[int(self.value)]
class AggregatePermissionType(IntEnum):
EMPTY = 0
NONE = 1
SOME = 2
ALL = 3
def _make_agg_perms_field():
return se.bitfield_field(bits=2, adapter=se.IntEnum(AggregatePermissionType))
@dataclasses.dataclass
class AggregatePerms(se.BitfieldDataclass):
Copy: AggregatePermissionType = _make_agg_perms_field()
Modify: AggregatePermissionType = _make_agg_perms_field()
Transfer: AggregatePermissionType = _make_agg_perms_field()
@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePerms")
@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePermTextures")
@se.subfield_serializer("ObjectProperties", "ObjectData", "AggregatePermTexturesOwner")
class AggregatePermsSerializer(se.AdapterSubfieldSerializer):
ORIG_INLINE = True
ADAPTER = se.BitfieldDataclass(AggregatePerms)
@se.flag_field_serializer("ParcelInfoReply", "Data", "Flags")
class ParcelInfoFlags(IntFlag):
MATURE = 1 << 0
@@ -365,6 +402,7 @@ class MapImageFlags(IntFlag):
@se.enum_field_serializer("MapBlockReply", "Data", "Access")
@se.enum_field_serializer("RegionInfo", "RegionInfo", "SimAccess")
@se.enum_field_serializer("RegionHandshake", "RegionInfo", "SimAccess")
class SimAccess(IntEnum):
# Treated as 'unknown', usually ends up being SIM_ACCESS_PG
MIN = 0
@@ -2243,13 +2281,14 @@ class MeanCollisionType(IntEnum):
@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
class CreationDateSerializer(se.AdapterSubfieldSerializer):
class ObjectCreationDateSerializer(se.AdapterSubfieldSerializer):
ADAPTER = DateAdapter(1_000_000)
ORIG_INLINE = True
@se.subfield_serializer("MeanCollisionAlert", "MeanCollision", "Time")
@se.subfield_serializer("ParcelProperties", "ParcelData", "ClaimDate")
@se.subfield_serializer("BulkUpdateInventory", "ItemData", "CreationDate")
class DateSerializer(se.AdapterSubfieldSerializer):
ADAPTER = DateAdapter()
ORIG_INLINE = True
@@ -2307,7 +2346,9 @@ class BitmapAdapter(se.Adapter):
@se.subfield_serializer("ParcelProperties", "ParcelData", "Bitmap")
class ParcelPropertiesBitmapSerializer(se.AdapterSubfieldSerializer):
"""Bitmap that describes which grids a parcel occupies"""
ADAPTER = BitmapAdapter((256 // 4, 256 // 4))
REGION_METERS = 256
METERS_PER_CELL = 4
ADAPTER = BitmapAdapter((REGION_METERS // METERS_PER_CELL, REGION_METERS // METERS_PER_CELL))
@se.enum_field_serializer("ParcelProperties", "ParcelData", "LandingType")
@@ -2374,69 +2415,3 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
# 15 bit window size, gzip wrapped
deser["navmesh_data"] = zlib.decompress(deser["navmesh_data"], wbits=15 | 32)
return deser
# Beta puppetry stuff, subject to change!
class PuppetryEventMask(IntFlag):
POSITION = 1 << 0
POSITION_IN_PARENT_FRAME = 1 << 1
ROTATION = 1 << 2
ROTATION_IN_PARENT_FRAME = 1 << 3
SCALE = 1 << 4
DISABLE_CONSTRAINT = 1 << 7
class PuppetryOption(se.OptionalFlagged):
def __init__(self, flag_val, spec):
super().__init__("mask", se.IntFlag(PuppetryEventMask, se.U8), flag_val, spec)
# Range to use for puppetry's quantized floats when converting to<->from U16
LL_PELVIS_OFFSET_RANGE = (-5.0, 5.0)
@dataclasses.dataclass
class PuppetryJointData:
# Where does this number come from? `avatar_skeleton.xml`?
joint_id: int = se.dataclass_field(se.S16)
# Determines which fields will follow
mask: PuppetryEventMask = se.dataclass_field(se.IntFlag(PuppetryEventMask, se.U8))
rotation: Optional[Quaternion] = se.dataclass_field(
# These are very odd scales for a quantized quaternion, but that's what they are.
PuppetryOption(PuppetryEventMask.ROTATION, se.PackedQuat(se.Vector3U16(*LL_PELVIS_OFFSET_RANGE))),
)
position: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.POSITION, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)
scale: Optional[Vector3] = se.dataclass_field(
PuppetryOption(PuppetryEventMask.SCALE, se.Vector3U16(*LL_PELVIS_OFFSET_RANGE)),
)
@dataclasses.dataclass
class PuppetryEventData:
time: int = se.dataclass_field(se.S32)
# Must be set manually due to below issue
num_joints: int = se.dataclass_field(se.U16)
# This field is packed in the least helpful way possible. The length field
# is in between the collection count and the collection data, but the length
# field essentially only tells you how many bytes until the end of the buffer
# proper, which you already know from msgsystem. Why is this here?
joints: List[PuppetryJointData] = se.dataclass_field(se.TypedByteArray(
se.U32,
# Just treat contents as a greedy collection, tries to keep reading until EOF
se.Collection(None, se.Dataclass(PuppetryJointData)),
))
@se.subfield_serializer("AgentAnimation", "PhysicalAvatarEventList", "TypeData")
@se.subfield_serializer("AvatarAnimation", "PhysicalAvatarEventList", "TypeData")
class PuppetryEventDataSerializer(se.SimpleSubfieldSerializer):
# You can have multiple joint events packed in right after the other, implicitly.
# They may _or may not_ be split into separate PhysicalAvatarEventList blocks?
# This doesn't seem to be handled specifically in the decoder, is this a
# serialization bug in the viewer?
TEMPLATE = se.Collection(None, se.Dataclass(PuppetryEventData))
EMPTY_IS_NONE = True

View File

@@ -84,6 +84,7 @@ class VisualParam:
name: str
value_min: float
value_max: float
value_default: float
# These might be `None` if the param isn't meant to be directly edited
edit_group: Optional[str]
wearable: Optional[str]
@@ -102,6 +103,7 @@ class VisualParams(List[VisualParam]):
wearable=param.get("wearable"),
value_min=float(param.attrib["value_min"]),
value_max=float(param.attrib["value_max"]),
value_default=float(param.attrib.get("value_default", 0.0))
))
def by_name(self, name: str) -> VisualParam:
@@ -120,6 +122,34 @@ class VisualParams(List[VisualParam]):
VISUAL_PARAMS = VisualParams(get_resource_filename("lib/base/data/avatar_lad.xml"))
# See `llpaneleditwearable.cpp`; which TE slots should be set for each wearable type is hardcoded
# in the viewer.
WEARABLE_TEXTURE_SLOTS: Dict[WearableType, Sequence[AvatarTEIndex]] = {
WearableType.SHAPE: (),
WearableType.SKIN: (AvatarTEIndex.HEAD_BODYPAINT, AvatarTEIndex.UPPER_BODYPAINT, AvatarTEIndex.LOWER_BODYPAINT),
WearableType.HAIR: (AvatarTEIndex.HAIR,),
WearableType.EYES: (AvatarTEIndex.EYES_IRIS,),
WearableType.SHIRT: (AvatarTEIndex.UPPER_SHIRT,),
WearableType.PANTS: (AvatarTEIndex.LOWER_PANTS,),
WearableType.SHOES: (AvatarTEIndex.LOWER_SHOES,),
WearableType.SOCKS: (AvatarTEIndex.LOWER_SOCKS,),
WearableType.JACKET: (AvatarTEIndex.UPPER_JACKET, AvatarTEIndex.LOWER_JACKET),
WearableType.GLOVES: (AvatarTEIndex.UPPER_GLOVES,),
WearableType.UNDERSHIRT: (AvatarTEIndex.UPPER_UNDERSHIRT,),
WearableType.UNDERPANTS: (AvatarTEIndex.LOWER_UNDERPANTS,),
WearableType.SKIRT: (AvatarTEIndex.SKIRT,),
WearableType.ALPHA: (AvatarTEIndex.LOWER_ALPHA, AvatarTEIndex.UPPER_ALPHA,
AvatarTEIndex.HEAD_ALPHA, AvatarTEIndex.EYES_ALPHA, AvatarTEIndex.HAIR_ALPHA),
WearableType.TATTOO: (AvatarTEIndex.LOWER_TATTOO, AvatarTEIndex.UPPER_TATTOO, AvatarTEIndex.HEAD_TATTOO),
WearableType.UNIVERSAL: (AvatarTEIndex.HEAD_UNIVERSAL_TATTOO, AvatarTEIndex.UPPER_UNIVERSAL_TATTOO,
AvatarTEIndex.LOWER_UNIVERSAL_TATTOO, AvatarTEIndex.SKIRT_TATTOO,
AvatarTEIndex.HAIR_TATTOO, AvatarTEIndex.EYES_TATTOO, AvatarTEIndex.LEFT_ARM_TATTOO,
AvatarTEIndex.LEFT_LEG_TATTOO, AvatarTEIndex.AUX1_TATTOO, AvatarTEIndex.AUX2_TATTOO,
AvatarTEIndex.AUX3_TATTOO),
WearableType.PHYSICS: (),
}
@dataclasses.dataclass
class Wearable(SchemaBase):
name: str
@@ -128,7 +158,7 @@ class Wearable(SchemaBase):
sale_info: InventorySaleInfo
# VisualParam ID -> val
parameters: Dict[int, float]
# TextureEntry ID -> texture ID
# TextureEntry ID -> texture UUID
textures: Dict[int, UUID]
@classmethod
@@ -203,3 +233,22 @@ class Wearable(SchemaBase):
writer.write(f"textures {len(self.textures)}\n")
for te_id, texture_id in self.textures.items():
writer.write(f"{te_id} {texture_id}\n")
@classmethod
def make_default(cls, w_type: WearableType) -> Self:
instance = cls(
name="New " + w_type.name.replace("_", " ").title(),
permissions=InventoryPermissions.make_default(),
sale_info=InventorySaleInfo.make_default(),
parameters={},
textures={},
wearable_type=w_type,
)
for te_idx in WEARABLE_TEXTURE_SLOTS[w_type]:
instance.textures[te_idx] = DEFAULT_WEARABLE_TEX
for param in VISUAL_PARAMS.by_wearable(w_type.name.lower()):
instance.parameters[param.id] = param.value_default
return instance
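
A hypothetical use of the helper above (assumes the module-level names from this diff, such as WearableType and AvatarTEIndex, are importable from their usual modules):

# Hypothetical usage: build a fresh default shirt wearable.
shirt = Wearable.make_default(WearableType.SHIRT)
assert shirt.name == "New Shirt"
assert AvatarTEIndex.UPPER_SHIRT in shirt.textures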

View File

@@ -1,22 +1,35 @@
from __future__ import annotations
import asyncio
import dataclasses
import gzip
import itertools
import logging
from pathlib import Path
from typing import Union, List, Tuple, Set
from typing import Union, List, Tuple, Set, Sequence
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import AssetType, FolderType
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem, InventoryNodeBase
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import AssetType, FolderType, InventoryType, Permissions
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.base.templates import WearableType
LOG = logging.getLogger(__name__)
class CannotMoveError(Exception):
def __init__(self):
pass
def _get_node_id(node_or_id: InventoryNodeBase | UUID) -> UUID:
if isinstance(node_or_id, UUID):
return node_or_id
return node_or_id.node_id
class InventoryManager:
def __init__(self, session: BaseClientSession):
self._session = session
@@ -25,7 +38,9 @@ class InventoryManager:
self._session.message_handler.subscribe("BulkUpdateInventory", self._handle_bulk_update_inventory)
self._session.message_handler.subscribe("UpdateCreateInventoryItem", self._handle_update_create_inventory_item)
self._session.message_handler.subscribe("RemoveInventoryItem", self._handle_remove_inventory_item)
self._session.message_handler.subscribe("RemoveInventoryFolder", self._handle_remove_inventory_folder)
self._session.message_handler.subscribe("MoveInventoryItem", self._handle_move_inventory_item)
self._session.message_handler.subscribe("MoveInventoryFolder", self._handle_move_inventory_folder)
def _load_skeleton(self):
assert not self.model.nodes
@@ -85,7 +100,7 @@ class InventoryManager:
if cached_item.item_id in self.model:
continue
# The parent category didn't have a cache hit against the inventory skeleton, can't add!
# We don't even know if this item would be in the current version of it's parent cat!
# We don't even know if this item would be in the current version of its parent cat!
if cached_item.parent_id not in loaded_cat_ids:
continue
self.model.add(cached_item)
@@ -173,6 +188,14 @@ class InventoryManager:
node.name = str(inventory_block["NewName"])
node.parent_id = inventory_block['FolderID']
def _handle_move_inventory_folder(self, msg: Message):
for inventory_block in msg["InventoryData"]:
node = self.model.get(inventory_block["FolderID"])
if not node:
LOG.warning(f"Missing inventory folder {inventory_block['FolderID']}")
continue
node.parent_id = inventory_block['ParentID']
def process_aisv3_response(self, payload: dict):
if "name" in payload:
# Just a rough guess. Assume this response is updating something if there's
@@ -195,6 +218,11 @@ class InventoryManager:
for link_llsd in embedded_dict.get("links", {}).values():
self.model.upsert(InventoryItem.from_llsd(link_llsd, flavor="ais"))
for cat_id, version in payload.get("_updated_category_versions", {}).items():
# The key will be a string, so convert to UUID first
cat_node = self.model.get_category(UUID(cat_id))
cat_node.version = version
# Get rid of anything we were asked to
for node_id in itertools.chain(
payload.get("_broken_links_removed", ()),
@@ -206,3 +234,115 @@ class InventoryManager:
if node:
# Presumably this list is exhaustive, so don't unlink children.
self.model.unlink(node, single_only=True)
async def make_ais_request(
self,
method: str,
path: str,
params: dict,
payload: dict | Sequence | dataclasses.MISSING = dataclasses.MISSING,
) -> dict:
caps_client = self._session.main_region.caps_client
async with caps_client.request(method, "InventoryAPIv3", path=path, params=params, llsd=payload) as resp:
if resp.ok or resp.status == 400:
data = await resp.read_llsd()
if err_desc := data.get("error_description", ""):
err_desc: str
if err_desc.startswith("Cannot change parent_id."):
raise CannotMoveError()
resp.raise_for_status()
self.process_aisv3_response(data)
else:
resp.raise_for_status()
return data
async def create_folder(
self,
parent: InventoryCategory | UUID,
name: str,
pref_type: int = AssetType.NONE,
cat_id: UUID | None = None
) -> InventoryCategory:
parent_id = _get_node_id(parent)
payload = {
"categories": [
{
"category_id": cat_id,
"name": name,
"type_default": pref_type,
"parent_id": parent_id
}
]
}
data = await self.make_ais_request("POST", f"/category/{parent_id}", {"tid": UUID.random()}, payload)
return self.model.get_category(data["_created_categories"][0])
async def create_item(
self,
parent: UUID | InventoryCategory,
name: str,
type: AssetType,
inv_type: InventoryType,
wearable_type: WearableType,
transaction_id: UUID,
next_mask: int | Permissions = 0x0008e000,
description: str = '',
) -> InventoryItem:
parent_id = _get_node_id(parent)
with self._session.main_region.message_handler.subscribe_async(
("UpdateCreateInventoryItem",),
predicate=lambda x: x["AgentData"]["TransactionID"] == transaction_id,
take=False,
) as get_msg:
await self._session.main_region.circuit.send_reliable(
Message(
'CreateInventoryItem',
Block('AgentData', AgentID=self._session.agent_id, SessionID=self._session.id),
Block(
'InventoryBlock',
CallbackID=0,
FolderID=parent_id,
TransactionID=transaction_id,
NextOwnerMask=next_mask,
Type=type,
InvType=inv_type,
WearableType=wearable_type,
Name=name,
Description=description,
)
)
)
msg = await asyncio.wait_for(get_msg(), 5.0)
# We assume that _handle_update_create_inventory_item() has already been called internally
# by the time the `await` returns, given asyncio scheduling.
return self.model.get_item(msg["InventoryData"]["ItemID"])
async def move(self, node: InventoryNodeBase, new_parent: UUID | InventoryCategory) -> None:
# AIS error messages suggest using the MOVE HTTP method instead of setting a new parent
# via PATCH. MOVE is not implemented in AIS. Instead, we do what the viewer does and use
# legacy UDP messages for reparenting things.
new_parent = _get_node_id(new_parent)
msg = Message(
"MoveInventoryFolder",
Block("AgentData", AgentID=self._session.agent_id, SessionID=self._session.id, Stamp=0),
)
if isinstance(node, InventoryItem):
msg.add_block(Block("InventoryData", ItemID=node.node_id, FolderID=new_parent, NewName=b''))
else:
msg.add_block(Block("InventoryData", FolderID=node.node_id, ParentID=new_parent))
# No message to say if this even succeeded. Great.
# TODO: probably need to update category versions for both source and target
await self._session.main_region.circuit.send_reliable(msg)
node.parent_id = new_parent
async def update(self, node: InventoryNodeBase, data: dict) -> None:
path = f"/category/{node.node_id}"
if isinstance(node, InventoryItem):
path = f"/item/{node.node_id}"
await self.make_ais_request("PATCH", path, {}, data)

View File

@@ -15,6 +15,7 @@ from typing import *
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.inventory import InventoryItem, InventoryModel
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.msgtypes import PacketFlags
@@ -27,7 +28,7 @@ from hippolyzer.lib.base.objects import (
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer, XferFilePath
from hippolyzer.lib.base import llsd
if TYPE_CHECKING:
@@ -45,6 +46,7 @@ class ObjectUpdateType(enum.IntEnum):
FAMILY = enum.auto()
COSTS = enum.auto()
KILL = enum.auto()
ANIMATIONS = enum.auto()
class ClientObjectManager:
@@ -132,7 +134,7 @@ class ClientObjectManager:
# Need to wait until we get our reply
fut = self.state.register_future(local_id, ObjectUpdateType.PROPERTIES)
else:
# This was selected so we should already have up to date info
# This was selected so we should already have up-to-date info
fut = asyncio.Future()
fut.set_result(self.lookup_localid(local_id))
futures.append(fut)
@@ -216,6 +218,38 @@ class ClientObjectManager:
for entry in entries:
self.state.materials[UUID(bytes=entry["ID"])] = entry["Material"]
async def request_object_inv(self, obj: Object) -> List[InventoryItem]:
if "RequestTaskInventory" in self._region.cap_urls:
return await self.request_object_inv_via_cap(obj)
else:
return await self.request_object_inv_via_xfer(obj)
async def request_object_inv_via_cap(self, obj: Object) -> List[InventoryItem]:
async with self._region.caps_client.get("RequestTaskInventory", params={"task_id": obj.FullID}) as resp:
resp.raise_for_status()
return [InventoryItem.from_llsd(x) for x in (await resp.read_llsd())["contents"]]
async def request_object_inv_via_xfer(self, obj: Object) -> List[InventoryItem]:
session = self._region.session()
with self._region.message_handler.subscribe_async(
('ReplyTaskInventory',), predicate=lambda x: x["InventoryData"]["TaskID"] == obj.FullID
) as get_msg:
await self._region.circuit.send_reliable(Message(
'RequestTaskInventory',
# If no session is passed in we'll use the active session when the coro was created
Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
Block('InventoryData', LocalID=obj.LocalID),
))
inv_message = await asyncio.wait_for(get_msg(), timeout=5.0)
# Xfer doesn't need to be immediately awaited; multiple signals can be waited on.
xfer = await self._region.xfer_manager.request(
file_name=inv_message["InventoryData"]["Filename"], file_path=XferFilePath.CACHE)
inv_model = InventoryModel.from_bytes(xfer.reassemble_chunks())
return list(inv_model.all_items)
class ObjectEvent:
__slots__ = ("object", "updated", "update_type")
@@ -261,6 +295,10 @@ class ClientWorldObjectManager:
self._handle_object_properties_generic)
message_handler.subscribe("ObjectPropertiesFamily",
self._handle_object_properties_generic)
message_handler.subscribe("AvatarAnimation",
self._handle_animation_message)
message_handler.subscribe("ObjectAnimation",
self._handle_animation_message)
def lookup_fullid(self, full_id: UUID) -> Optional[Object]:
return self._fullid_lookup.get(full_id, None)
@@ -274,7 +312,7 @@ class ClientWorldObjectManager:
@property
def all_avatars(self) -> Iterable[Avatar]:
return tuple(self._avatars.values())
return list(self._avatars.values())
def __len__(self):
return len(self._fullid_lookup)
@@ -293,7 +331,7 @@ class ClientWorldObjectManager:
def untrack_region_objects(self, handle: int):
"""Handle signal that a region object manager was just cleared"""
# Make sure they're gone from our lookup table
for obj in tuple(self._fullid_lookup.values()):
for obj in list(self._fullid_lookup.values()):
if obj.RegionHandle == handle:
del self._fullid_lookup[obj.FullID]
if handle in self._region_managers:
@@ -609,6 +647,33 @@ class ClientWorldObjectManager:
region_state.coarse_locations.update(coarse_locations)
self._rebuild_avatar_objects()
def _handle_animation_message(self, message: Message):
sender_id = message["Sender"]["ID"]
if message.name == "AvatarAnimation":
avatar = self._avatars.get(sender_id)
if not avatar:
LOG.warning(f"Received AvatarAnimation for unknown avatar {sender_id}")
return
if not avatar.Object:
LOG.warning(f"Received AvatarAnimation for avatar with no object {sender_id}")
return
obj = avatar.Object
elif message.name == "ObjectAnimation":
obj = self.lookup_fullid(sender_id)
if not obj:
LOG.warning(f"Received AvatarAnimation for avatar with no object {sender_id}")
return
else:
LOG.error(f"Unknown animation message type: {message.name}")
return
obj.Animations.clear()
for block in message["AnimationList"]:
obj.Animations.append(block["AnimID"])
self._run_object_update_hooks(obj, {"Animations"}, ObjectUpdateType.ANIMATIONS, message)
def _process_get_object_cost_response(self, parsed: dict):
if "error" in parsed:
return
@@ -887,8 +952,6 @@ class Avatar:
self.FullID: UUID = full_id
self.Object: Optional["Object"] = obj
self.RegionHandle: int = region_handle
# TODO: Allow hooking into getZOffsets FS bridge response
# to fill in the Z axis if it's infinite
self.CoarseLocation = coarse_location
self.Valid = True
self.GuessedZ: Optional[float] = None

View File

@@ -17,6 +17,7 @@ from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
@@ -27,6 +28,7 @@ class BaseClientRegion(ConnectionHolder, abc.ABC):
# Actually a weakref
session: Callable[[], BaseClientSession]
objects: ClientObjectManager
xfer_manager: XferManager
caps_client: CapsClient
cap_urls: multidict.MultiDict[str]
circuit_addr: ADDR_TUPLE

View File

@@ -9,6 +9,12 @@ from hippolyzer.lib.base.helpers import get_mtime, create_logged_task
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryCategory, InventoryNodeBase
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.inventory import InventoryItem
from hippolyzer.lib.base.templates import AssetType, InventoryType, WearableType, Permissions
from hippolyzer.lib.base.network.transport import Direction
if TYPE_CHECKING:
from hippolyzer.lib.proxy.sessions import Session
@@ -39,6 +45,9 @@ class ProxyInventoryManager(InventoryManager):
self._handle_move_inventory_item = self._wrap_with_cache_defer(
self._handle_move_inventory_item
)
self._handle_move_inventory_folder = self._wrap_with_cache_defer(
self._handle_move_inventory_folder
)
self.process_aisv3_response = self._wrap_with_cache_defer(
self.process_aisv3_response
)
@@ -105,3 +114,75 @@ class ProxyInventoryManager(InventoryManager):
# Try and add anything from the response into the model
self.process_aisv3_response(llsd.parse(flow.response.content))
async def create_folder(
self,
parent: InventoryCategory | UUID,
name: str,
pref_type: int = AssetType.NONE,
cat_id: UUID | None = None
) -> InventoryCategory:
cat = await super().create_folder(parent, name, pref_type, cat_id)
await self._session.main_region.circuit.send_reliable(self._craft_update_message(cat))
return cat
async def create_item(
self,
parent: UUID | InventoryCategory,
name: str,
type: AssetType,
inv_type: InventoryType,
wearable_type: WearableType,
transaction_id: UUID,
next_mask: int | Permissions = 0x0008e000,
description: str = '',
) -> InventoryItem:
item = await super().create_item(
parent=parent,
name=name,
type=type,
inv_type=inv_type,
wearable_type=wearable_type,
transaction_id=transaction_id,
next_mask=next_mask,
description=description,
)
await self._session.main_region.circuit.send_reliable(self._craft_update_message(item))
return item
async def update(self, node: InventoryNodeBase, data: dict) -> None:
await super().update(node, data)
await self._session.main_region.circuit.send_reliable(self._craft_update_message(node))
async def move(self, node: InventoryNodeBase, new_parent: UUID | InventoryCategory) -> None:
await super().move(node, new_parent)
await self._session.main_region.circuit.send_reliable(self._craft_update_message(node))
def _craft_removal_message(self, node: InventoryNodeBase) -> Message:
is_folder = True
if isinstance(node, InventoryItem):
is_folder = False
msg = Message(
"RemoveInventoryFolder" if is_folder else "RemoveInventoryItem",
Block("AgentData", AgentID=self._session.agent_id, SessionID=self._session.id),
direction=Direction.IN,
)
if is_folder:
msg.add_block(Block("FolderData", FolderID=node.node_id))
else:
msg.add_block(Block("InventoryData", ItemID=node.node_id))
return msg
def _craft_update_message(self, node: InventoryNodeBase):
msg = Message(
"BulkUpdateInventory",
Block("AgentData", AgentID=self._session.agent_id, TransactionID=UUID.random()),
direction=Direction.IN,
)
if isinstance(node, InventoryItem):
msg.add_block(node.to_inventory_data("ItemData"))
elif isinstance(node, InventoryCategory):
msg.add_block(node.to_folder_data())
return msg

View File

@@ -152,6 +152,15 @@ class TestDatatypes(unittest.TestCase):
def test_str_llsd_serialization(self):
self.assertEqual(b"'foo\\nbar'", llsd.format_notation("foo\nbar"))
def test_int_enum_llsd_serialization(self):
class SomeIntEnum(IntEnum):
FOO = 4
orig = SomeIntEnum.FOO
val = llsd.parse_xml(llsd.format_xml(orig))
self.assertIsInstance(val, int)
self.assertEqual(orig, val)
def test_jank_stringy_bytes(self):
val = JankStringyBytes(b"foo\x00")
self.assertTrue("o" in val)

View File

@@ -1,8 +1,9 @@
import copy
import datetime as dt
import unittest
from hippolyzer.lib.base.datatypes import *
from hippolyzer.lib.base.inventory import InventoryModel, SaleType
from hippolyzer.lib.base.inventory import InventoryModel, SaleType, InventoryItem
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
SIMPLE_INV = """\tinv_object\t0
@@ -47,6 +48,42 @@ SIMPLE_INV = """\tinv_object\t0
\t}
"""
SIMPLE_INV_PARSED = [
{
'name': 'Contents',
'obj_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
'type': 'category'
},
{
'asset_id': UUID('00000000-0000-0000-0000-000000000000'),
'created_at': 1587367239,
'desc': '2020-04-20 04:20:39 lsl2 script',
'flags': b'\x00\x00\x00\x00',
'inv_type': 'script',
'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
'name': 'New Script',
'metadata': {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")},
'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'permissions': {
'base_mask': 2147483647,
'creator_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'everyone_mask': 0,
'group_id': UUID('00000000-0000-0000-0000-000000000000'),
'group_mask': 0,
'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'next_owner_mask': 581632,
'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'owner_mask': 2147483647,
},
'sale_info': {
'sale_price': 10,
'sale_type': 'not'
},
'type': 'lsltext'
}
]
INV_CATEGORY = """\tinv_category\t0
\t{
\t\tcat_id\tf4d91477-def1-487a-b4f3-6fa201c17376
@@ -122,44 +159,12 @@ class TestLegacyInv(unittest.TestCase):
self.assertEqual(item, item_copy)
def test_llsd_serialization(self):
self.assertEqual(
self.model.to_llsd(),
[
{
'name': 'Contents',
'obj_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
'type': 'category'
},
{
'asset_id': UUID('00000000-0000-0000-0000-000000000000'),
'created_at': 1587367239,
'desc': '2020-04-20 04:20:39 lsl2 script',
'flags': b'\x00\x00\x00\x00',
'inv_type': 'script',
'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
'name': 'New Script',
'metadata': {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")},
'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'permissions': {
'base_mask': 2147483647,
'creator_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'everyone_mask': 0,
'group_id': UUID('00000000-0000-0000-0000-000000000000'),
'group_mask': 0,
'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'next_owner_mask': 581632,
'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'owner_mask': 2147483647,
},
'sale_info': {
'sale_price': 10,
'sale_type': 'not'
},
'type': 'lsltext'
}
]
)
self.assertEqual(self.model.to_llsd(), SIMPLE_INV_PARSED)
def test_llsd_date_parsing(self):
model = InventoryModel.from_llsd(SIMPLE_INV_PARSED)
item: InventoryItem = model.nodes.get(UUID("dd163122-946b-44df-99f6-a6030e2b9597")) # type: ignore
self.assertEqual(item.creation_date, dt.datetime(2020, 4, 20, 7, 20, 39, tzinfo=dt.timezone.utc))
def test_llsd_serialization_ais(self):
model = InventoryModel.from_str(INV_CATEGORY)