Compare commits
14 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 5ef9b5354a |  |
|  | 34ca7d54be |  |
|  | cb316f1992 |  |
|  | da05a6cf1f |  |
|  | f06c31e225 |  |
|  | b4e5596ca2 |  |
|  | 49a54ce099 |  |
|  | 0349fd9078 |  |
|  | 118ef2813a |  |
|  | 256f74b71a |  |
|  | 4a84453ca4 |  |
|  | 34316cb166 |  |
|  | 0f7d35cdca |  |
|  | 2ee8a6f008 |  |
@@ -20,6 +20,7 @@ bulk upload, like changing priority or removing a joint.
"""

import asyncio
import logging
import pathlib
from abc import abstractmethod
from typing import *

@@ -106,7 +107,10 @@ class LocalAnimAddon(BaseAddon):
                if not anim_id:
                    continue
                # is playing right now, check if there's a newer version
                self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
                try:
                    self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
                except Exception:
                    logging.exception("Exploded while replaying animation")
            await asyncio.sleep(1.0)

    def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
@@ -175,7 +179,6 @@ class LocalAnimAddon(BaseAddon):
        if only_if_changed and old_mtime == mtime:
            return

        cls.local_anim_mtimes[anim_name] = mtime
        # file might not even exist anymore if mtime is `None`,
        # anim will automatically stop if that happens.
        if mtime:
@@ -187,6 +190,7 @@ class LocalAnimAddon(BaseAddon):
            with open(anim_path, "rb") as f:
                anim_data = f.read()
                anim_data = cls._mangle_anim(anim_data)
            cls.local_anim_mtimes[anim_name] = mtime
        else:
            print(f"Unknown anim {anim_name!r}")
        cls.apply_local_anim(session, region, anim_name, new_data=anim_data)

@@ -11,12 +11,11 @@
# * * Collada tooling sucks and even LL is moving away from it
# * * Ensuring LLMesh->Collada and LLMesh->GLTF conversion don't differ semantically is easy via assimp.

import collections
import logging
import os.path
import secrets
import statistics
import sys
from typing import Dict, List, Iterable, Optional
from typing import Dict, List, Optional, Union, Sequence

import collada
import collada.source
@@ -25,13 +24,27 @@ from lxml import etree
import numpy as np
import transformations

from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.serialization import BufferReader
from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict

LOG = logging.getLogger(__name__)
DIR = os.path.dirname(os.path.realpath(__file__))


def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
    return np.array(mat).reshape((4, 4), order='F')


def mat4_to_llsd(mat: np.ndarray) -> List[float]:
    return list(mat.flatten(order='F'))


def mat4_to_collada(mat: np.ndarray) -> np.ndarray:
    return mat.flatten(order='C')
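These three helpers centralize the matrix-order bookkeeping that the rest of this diff swaps in for ad-hoc `reshape`/`flatten` calls: LLSD stores matrices as flat column-major lists, while Collada wants them flat row-major. A quick self-check (plain numpy, nothing hippolyzer-specific) that the new path matches the old one:

```python
import numpy as np

llsd_mat = list(range(16))  # 16 floats as they'd appear in an LLSD matrix list

# Old path seen elsewhere in this diff: reshape row-major, emit column-major
old = np.array(llsd_mat).reshape((4, 4)).flatten('F')
# New path: llsd_to_mat4() followed by mat4_to_collada()
new = np.array(llsd_mat).reshape((4, 4), order='F').flatten(order='C')

assert (old == new).all()  # the refactor is behavior-preserving
```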

def mesh_to_collada(ll_mesh: MeshAsset, include_skin=True) -> collada.Collada:
    dae = collada.Collada()
    axis = collada.asset.UP_AXIS.Z_UP
@@ -52,7 +65,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
    skin_seg = ll_mesh.segments.get('skin')
    bind_shape_matrix = None
    if include_skin and skin_seg:
        bind_shape_matrix = np.array(skin_seg["bind_shape_matrix"]).reshape((4, 4))
        bind_shape_matrix = llsd_to_mat4(skin_seg["bind_shape_matrix"])
        should_skin = True
    # Transform from the skin will be applied on the controller, not the node
    node_transform = np.identity(4)
@@ -119,9 +132,8 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
        accessor.set('source', f"#{accessor.get('source')}")

        flattened_bind_poses = []
        # LLMesh matrices are row-major, convert to col-major for Collada.
        for bind_pose in skin_seg['inverse_bind_matrix']:
            flattened_bind_poses.append(np.array(bind_pose).reshape((4, 4)).flatten('F'))
            flattened_bind_poses.append(mat4_to_collada(llsd_to_mat4(bind_pose)))
        flattened_bind_poses = np.array(flattened_bind_poses)
        inv_bind_source = _create_mat4_source(f"bind-poses{sub_uniq}", flattened_bind_poses, "TRANSFORM")

@@ -142,7 +154,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
        # in SL, with their own distinct sets of weights and vertex data.
        controller_node = E.controller(
            E.skin(
                E.bind_shape_matrix(' '.join(str(x) for x in bind_shape_matrix.flatten('F'))),
                E.bind_shape_matrix(' '.join(str(x) for x in mat4_to_collada(bind_shape_matrix))),
                joints_source.xmlnode,
                inv_bind_source.xmlnode,
                weights_source.xmlnode,
@@ -173,7 +185,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
    node = collada.scene.Node(
        node_name,
        children=geom_nodes,
        transforms=[collada.scene.MatrixTransform(np.array(node_transform.flatten('F')))],
        transforms=[collada.scene.MatrixTransform(mat4_to_collada(node_transform))],
    )
    if should_skin:
        # We need a skeleton per _mesh asset_ because you could have incongruous skeletons
@@ -208,7 +220,8 @@ def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_
        joint_nodes[skel_node.get('name')] = collada.scene.Node.load(dae, skel_node, {})
    for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
        joint_node = joint_nodes[joint_name]
        joint_node.matrix = np.array(matrix).reshape((4, 4)).flatten('F')
        joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
        joint_node.matrix = mat4_to_collada(transformations.compose_matrix(translate=joint_decomp[3]))
        # Update the underlying XML element with the new transform matrix
        joint_node.save()

@@ -251,7 +264,7 @@ def _create_mat4_source(name: str, data: np.ndarray, semantic: str):

def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
    """
    Fix weird-looking bind matrices to have normal scaling
    Fix weird-looking bind matrices to have normal scaling and rotations

    Not sure why these even happen (weird mesh authoring programs?)
    Sometimes get enormous inverse bind matrices (each component 10k+) and tiny
@@ -259,38 +272,38 @@ def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
    with weird scales and tries to set them to what they "should" be without
    the weird inverted scaling.
    """
    axis_counters = [collections.Counter() for _ in range(3)]
    for joint_inv in skin_seg['inverse_bind_matrix']:
        joint_mat = np.array(joint_inv).reshape((4, 4))
        joint_scale = transformations.decompose_matrix(joint_mat)[0]
        for axis_counter, axis_val in zip(axis_counters, joint_scale):
            axis_counter[axis_val] += 1
    most_common_inv_scale = []
    for axis_counter in axis_counters:
        most_common_inv_scale.append(axis_counter.most_common(1)[0][0])
    scale_fixup = Vector3(1, 1, 1)
    angle_fixup = Vector3(0, 0, 0)
    have_fixups = False
    # Totally non-scientific method of detecting odd bind matrices based on squinting very,
    # very hard at a random sample of assets.
    for joint_name, joint_inv in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
        if not joint_name.startswith("m"):
            # We can't make very good guesses based on collision volume scales and rotations,
            # skip anything but the "m" joints.
            continue
        joint_mat = llsd_to_mat4(joint_inv)
        joint_scale, _, joint_angle, _, _ = transformations.decompose_matrix(joint_mat)
        # If the scale component of an mJointName joint isn't roughly <1,1,1>, we likely have
        # scaling applied to the inverse bind matrices rather than the bind matrix. Figure out
        # what the fixup should be so that we can reverse it.
        if abs(3.0 - sum(joint_scale)) > 0.5:
            scale_fixup = Vector3(1, 1, 1) / Vector3(*joint_scale)
            have_fixups = True
        # I wouldn't expect mJointName joints to be rotated at all in their inverse bind matrices.
        # Is this a rotation that should've been applied to the bind shape matrix instead?
        # In any event, all joints are likely rotated by this amount, so calculate the inverse.
        if abs(sum(joint_angle)) > 0.05:
            angle_fixup = -Vector3(*joint_angle)
            have_fixups = True

    if abs(1.0 - statistics.fmean(most_common_inv_scale)) > 1.0:
    if have_fixups:
        LOG.warning("Detected weird matrices in mesh! scale fixup: %r, angle fixup: %r", scale_fixup, angle_fixup)
        # The magnitude of the scales in the inverse bind matrices look very strange.
        # The bind matrix itself is probably messed up as well, try to fix it.
        skin_seg['bind_shape_matrix'] = fix_llsd_matrix_scale(skin_seg['bind_shape_matrix'], most_common_inv_scale)
        if joint_positions := skin_seg.get('alt_inverse_bind_matrix', None):
            fix_matrix_list_scale(joint_positions, most_common_inv_scale)
        rev_scale = tuple(1.0 / x for x in most_common_inv_scale)
        fix_matrix_list_scale(skin_seg['inverse_bind_matrix'], rev_scale)


def fix_matrix_list_scale(source: List[List[float]], scale_fixup: Iterable[float]):
    for i, alt_inv_matrix in enumerate(source):
        source[i] = fix_llsd_matrix_scale(alt_inv_matrix, scale_fixup)


def fix_llsd_matrix_scale(source: List[float], scale_fixup: Iterable[float]):
    matrix = np.array(source).reshape((4, 4))
    decomposed = list(transformations.decompose_matrix(matrix))
    # Need to handle both the scale and translation matrices
    for idx in (0, 3):
        decomposed[idx] = tuple(x * y for x, y in zip(decomposed[idx], scale_fixup))
    return list(transformations.compose_matrix(*decomposed).flatten('C'))
    # TODO: DON'T MESS WITH INVERSE TRANSLATION!!!! Only bind shape gets its translation scaled.
    # TODO: put this back in, the previous logic was totally wrong-headed..
    pass
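For readers unfamiliar with the `transformations` module used throughout this file: `decompose_matrix()` splits a 4x4 matrix into `(scale, shear, angles, translate, perspective)` and `compose_matrix()` reassembles it, which is what lets `fix_llsd_matrix_scale` rescale only the scale and translation parts. A minimal sketch of that round trip (whether translation should be touched at all is exactly what the TODO above questions):

```python
import numpy as np
import transformations

# A matrix with a known scale and translation baked in
mat = transformations.compose_matrix(scale=(2.0, 2.0, 2.0), translate=(4.0, 8.0, 12.0))

# decompose_matrix() -> (scale, shear, angles, translate, perspective)
scale, shear, angles, translate, perspective = transformations.decompose_matrix(mat)

# Apply a 0.5 fixup to scale and translation, as fix_llsd_matrix_scale() does
fixup = (0.5, 0.5, 0.5)
scale = tuple(s * f for s, f in zip(scale, fixup))
translate = tuple(t * f for t, f in zip(translate, fixup))
fixed = transformations.compose_matrix(scale, shear, angles, translate, perspective)

assert np.allclose(transformations.decompose_matrix(fixed)[0], (1.0, 1.0, 1.0))
assert np.allclose(transformations.decompose_matrix(fixed)[3], (2.0, 4.0, 6.0))
```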


def main():

@@ -29,6 +29,7 @@ import math
from typing import *

import recordclass
import transformations

logger = getLogger('hippolyzer.lib.base.datatypes')

@@ -220,6 +221,15 @@ class Quaternion(TupleCoord):
            )
        return super().__mul__(other)

    @classmethod
    def from_transformations(cls, coord) -> Quaternion:
        """Convert from the transformations lib's W (S) first form to W (S) last form"""
        return cls(coord[1], coord[2], coord[3], coord[0])

    def to_transformations(self) -> Tuple[float, float, float, float]:
        """Convert to W (S) first form for use with the transformations lib"""
        return self.W, self.X, self.Y, self.Z
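These two adapters exist because hippolyzer's `Quaternion` stores components XYZW while the `transformations` library expects WXYZ; passing one layout to the other silently produces a wrong rotation rather than an error. A short usage sketch (component values arbitrary):

```python
from hippolyzer.lib.base.datatypes import Quaternion

quat = Quaternion(0.1, 0.2, 0.3, 0.9)      # stored as X, Y, Z, W
wxyz = quat.to_transformations()            # (0.9, 0.1, 0.2, 0.3) for the transformations lib
assert Quaternion.from_transformations(wxyz) == quat  # lossless round trip
```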

    @classmethod
    def from_euler(cls, roll, pitch, yaw, degrees=False):
        if degrees:
@@ -241,6 +251,9 @@ class Quaternion(TupleCoord):

        return cls(X=x, Y=y, Z=z, W=w)

    def to_euler(self) -> Vector3:
        return Vector3(*transformations.euler_from_quaternion(self.to_transformations()))

    def data(self, wanted_components=None):
        if wanted_components == 3:
            return self.X, self.Y, self.Z

@@ -117,6 +117,8 @@ class InventoryBase(SchemaBase):
            # Not meant to be serialized
            if not spec:
                continue
            if field.metadata.get("llsd_only"):
                continue

            val = getattr(self, field_name)
            if val is None:
@@ -166,16 +168,11 @@ class InventoryModel(InventoryBase):
    def from_llsd(cls, llsd_val: List[Dict]) -> InventoryModel:
        model = cls()
        for obj_dict in llsd_val:
            if InventoryCategory.ID_ATTR in obj_dict:
                if (obj := InventoryCategory.from_llsd(obj_dict)) is not None:
                    model.add(obj)
            elif InventoryObject.ID_ATTR in obj_dict:
                if (obj := InventoryObject.from_llsd(obj_dict)) is not None:
                    model.add(obj)
            elif InventoryItem.ID_ATTR in obj_dict:
                if (obj := InventoryItem.from_llsd(obj_dict)) is not None:
                    model.add(obj)
            for inv_type in INVENTORY_TYPES:
                if inv_type.ID_ATTR in obj_dict:
                    if (obj := inv_type.from_llsd(obj_dict)) is not None:
                        model.add(obj)
                    break
            else:
                LOG.warning(f"Unknown object type {obj_dict!r}")
        return model

@@ -218,13 +215,13 @@ class InventoryModel(InventoryBase):
        self.root = node
        node.model = weakref.proxy(self)

    def unlink(self, node: InventoryNodeBase) -> Sequence[InventoryNodeBase]:
    def unlink(self, node: InventoryNodeBase, single_only: bool = False) -> Sequence[InventoryNodeBase]:
        """Unlink a node and its descendants from the tree, returning the removed nodes"""
        assert node.model == self
        if node == self.root:
            self.root = None
        unlinked = [node]
        if isinstance(node, InventoryContainerBase):
        if isinstance(node, InventoryContainerBase) and not single_only:
            for child in node.children:
                unlinked.extend(self.unlink(child))
        self.nodes.pop(node.node_id, None)
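The new `single_only` flag lets a caller swap out one node without unlinking its whole subtree; the `InventoryManager.load_cache()` added later in this diff uses it to replace a placeholder skeleton category with the cached one while its children stay attached. A hedged sketch (`model`, `cat_id`, and `fresh_cat` are stand-in names, not names from the diff):

```python
# Replace a category node in place while keeping its children linked
old_cat = model.get(cat_id)               # placeholder loaded from the login skeleton
model.unlink(old_cat, single_only=True)   # removes just this node, children stay in model.nodes
model.add(fresh_cat)                      # cached category with the same cat_id and a real version
```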
@@ -257,6 +254,15 @@ class InventoryModel(InventoryBase):
            removed=removed_in_other,
        )

    def __getitem__(self, item: UUID) -> InventoryNodeBase:
        return self.nodes[item]

    def __contains__(self, item: UUID):
        return item in self.nodes

    def get(self, item: UUID) -> Optional[InventoryNodeBase]:
        return self.nodes.get(item)


@dataclasses.dataclass
class InventoryPermissions(InventoryBase):
@@ -271,6 +277,9 @@ class InventoryPermissions(InventoryBase):
    owner_id: UUID = schema_field(SchemaUUID)
    last_owner_id: UUID = schema_field(SchemaUUID)
    group_id: UUID = schema_field(SchemaUUID)
    # Nothing actually cares about this, but it could be there.
    # It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
    is_owner_group: int = schema_field(SchemaInt, default=0, llsd_only=True)


@dataclasses.dataclass
@@ -384,6 +393,7 @@ class InventoryObject(InventoryContainerBase):
class InventoryCategory(InventoryContainerBase):
    ID_ATTR: ClassVar[str] = "cat_id"
    SCHEMA_NAME: ClassVar[str] = "inv_category"
    VERSION_NONE: ClassVar[int] = -1

    cat_id: UUID = schema_field(SchemaUUID)
    pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
@@ -417,3 +427,6 @@ class InventoryItem(InventoryNodeBase):
        if self.asset_id is not None:
            return self.asset_id
        return self.shadow_id ^ MAGIC_ID


INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)

@@ -111,10 +111,10 @@ class SchemaUUID(SchemaFieldSerializer[UUID]):


def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
                 repr=True, hash=None, compare=True, llsd_name=None) -> dataclasses.Field:  # noqa
                 repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False) -> dataclasses.Field:  # noqa
    """Describe a field in the inventory schema and the shape of its value"""
    return dataclasses.field(
        metadata={"spec": spec, "llsd_name": llsd_name}, default=default,
    return dataclasses.field(  # noqa
        metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only}, default=default,
        init=init, repr=repr, hash=hash, compare=compare,
    )

@@ -1,4 +1,3 @@
import datetime
import typing
import zlib

@@ -47,6 +46,12 @@ class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBase):
    def __init__(self):
        super().__init__()

    def STRING(self, v):
        # llbase's notation LLSD encoder isn't suitable for generating line-delimited
        # LLSD because the string formatter leaves \n unencoded, unlike indra's llcommon.
        # Add our own escaping rule.
        return super().STRING(v).replace(b"\n", b"\\n")
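This override matters for the line-delimited inventory cache reader added later in this diff: one unescaped `\n` inside a string value would split a record across two lines and corrupt every record after it. The new unit test at the bottom of this diff pins the behavior:

```python
# Newlines inside strings must come out escaped, or a line-at-a-time
# notation reader would see two half-records instead of one.
assert format_notation("foo\nbar") == b"'foo\\nbar'"
```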


def format_notation(val: typing.Any):
    return HippoLLSDNotationFormatter().format(val)

@@ -63,10 +68,9 @@ def format_binary(val: typing.Any, with_header=True):
# With a few minor changes to make serialization round-trip correctly. It's evil.
def _format_binary_recurse(something) -> bytes:
    """Binary formatter workhorse."""
    def _format_list(something):
        array_builder = []
        array_builder.append(b'[' + struct.pack('!i', len(something)))
        for item in something:
    def _format_list(list_something):
        array_builder = [b'[' + struct.pack('!i', len(list_something))]
        for item in list_something:
            array_builder.append(_format_binary_recurse(item))
        array_builder.append(b']')
        return b''.join(array_builder)

@@ -108,8 +112,7 @@ def _format_binary_recurse(something) -> bytes:
    elif isinstance(something, (list, tuple)):
        return _format_list(something)
    elif isinstance(something, dict):
        map_builder = []
        map_builder.append(b'{' + struct.pack('!i', len(something)))
        map_builder = [b'{' + struct.pack('!i', len(something))]
        for key, value in something.items():
            if isinstance(key, str):
                key = key.encode("utf8")

@@ -27,6 +27,14 @@ class _Unserializable:
        return False


class MissingType:
    """Simple sentinel type like dataclasses._MISSING_TYPE"""
    pass


MISSING = MissingType()


UNSERIALIZABLE = _Unserializable()
_T = TypeVar("_T")
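The dedicated `MISSING` sentinel replaces the borrowed `dataclasses.MISSING` throughout the serialization module. The comment a few hunks down gives the reason: `None` can be a legitimate default value, so "no default" needs its own identity-checked marker. A compact illustration of the pattern (names here are illustrative stand-ins, not the module's):

```python
class _Missing:
    """Illustrative stand-in for MissingType above"""
    __slots__ = ()

_MISSING = _Missing()

def default_or(value, fallback):
    # Checked with `is`: there is exactly one _MISSING instance
    return fallback if value is _MISSING else value

assert default_or(None, 42) is None       # None is a real value and is kept
assert default_or(_MISSING, 42) == 42     # only the sentinel triggers the fallback
```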

@@ -288,7 +296,7 @@ class SerializableBase(abc.ABC):
    @classmethod
    def default_value(cls) -> Any:
        # None may be a valid default, so return MISSING as a sentinel val
        return dataclasses.MISSING
        return MISSING


class Adapter(SerializableBase, abc.ABC):
@@ -328,18 +336,18 @@ class ForwardSerializable(SerializableBase):
    def __init__(self, func: Callable[[], SERIALIZABLE_TYPE]):
        super().__init__()
        self._func = func
        self._wrapped = dataclasses.MISSING
        self._wrapped: Union[MissingType, SERIALIZABLE_TYPE] = MISSING

    def _ensure_evaled(self):
        if self._wrapped is dataclasses.MISSING:
        if self._wrapped is MISSING:
            self._wrapped = self._func()

    def __getattr__(self, attr):
        return getattr(self._wrapped, attr)

    def default_value(self) -> Any:
        if self._wrapped is dataclasses.MISSING:
            return dataclasses.MISSING
        if self._wrapped is MISSING:
            return MISSING
        return self._wrapped.default_value()

    def serialize(self, val, writer: BufferWriter, ctx: Optional[ParseContext]):
@@ -357,10 +365,10 @@ class Template(SerializableBase):
    def __init__(self, template_spec: Dict[str, SERIALIZABLE_TYPE], skip_missing=False):
        self._template_spec = template_spec
        self._skip_missing = skip_missing
        self._size = dataclasses.MISSING
        self._size = MISSING

    def calc_size(self):
        if self._size is not dataclasses.MISSING:
        if self._size is not MISSING:
            return self._size
        sum_bytes = 0
        for _, field_type in self._template_spec.items():
@@ -1196,9 +1204,9 @@ class ContextMixin(Generic[_T]):
    def _choose_option(self, ctx: Optional[ParseContext]) -> _T:
        idx = self._fun(ctx)
        if idx not in self._options:
            if dataclasses.MISSING not in self._options:
            if MISSING not in self._options:
                raise KeyError(f"{idx!r} not found in {self._options!r}")
            idx = dataclasses.MISSING
            idx = MISSING
        return self._options[idx]


@@ -1442,7 +1450,7 @@ class StringEnumAdapter(Adapter):
class FixedPoint(SerializableBase):
    def __init__(self, ser_spec, int_bits, frac_bits, signed=False):
        # Should never be used due to how this handles signs :/
        assert(not ser_spec.is_signed)
        assert (not ser_spec.is_signed)

        self._ser_spec: SerializablePrimitive = ser_spec
        self._signed = signed
@@ -1452,7 +1460,7 @@ class FixedPoint(SerializableBase):
        self._min_val = ((1 << int_bits) * -1) if signed else 0
        self._max_val = 1 << int_bits

        assert(required_bits == (ser_spec.calc_size() * 8))
        assert (required_bits == (ser_spec.calc_size() * 8))

    def deserialize(self, reader: Reader, ctx):
        fixed_val = float(self._ser_spec.deserialize(reader, ctx))
@@ -1482,8 +1490,8 @@ def _make_undefined_raiser():
    return f


def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=dataclasses.MISSING,
                    default_factory=dataclasses.MISSING, init=True, repr=True,  # noqa
def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default: Any = dataclasses.MISSING,
                    default_factory: Any = dataclasses.MISSING, init=True, repr=True,  # noqa
                    hash=None, compare=True) -> dataclasses.Field:  # noqa
    enrich_factory = False
    # Lambda, need to defer evaluation of spec until it's actually used.
@@ -1504,7 +1512,7 @@ def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=datacla
        metadata={"spec": spec}, default=default, default_factory=default_factory, init=init,
        repr=repr, hash=hash, compare=compare
    )
    # Need to stuff this on so it knows which field went unspecified.
    # Need to stuff this on, so it knows which field went unspecified.
    if enrich_factory:
        default_factory.field = field
    return field

@@ -5,9 +5,6 @@ Serialization templates for structures used in LLUDP and HTTP bodies.
import abc
import collections
import dataclasses
import enum
import importlib
import logging
import math
import zlib
from typing import *
@@ -17,11 +14,6 @@ from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.namevalue import NameValuesSerializer

try:
    importlib.reload(se)  # type: ignore
except:
    logging.exception("Failed to reload serialization lib")


@se.enum_field_serializer("RequestXfer", "XferID", "VFileType")
@se.enum_field_serializer("AssetUploadRequest", "AssetBlock", "Type")
@@ -143,6 +135,7 @@ class InventoryType(IntEnum):
        lower = self.name.lower()
        return {
            "callingcard": "callcard",
            "none": "-1",
        }.get(lower, lower)


@@ -359,10 +352,10 @@ class PermissionType(IntEnum):
@se.enum_field_serializer("TransferRequest", "TransferInfo", "SourceType")
class TransferSourceType(IntEnum):
    UNKNOWN = 0
    FILE = enum.auto()
    ASSET = enum.auto()
    SIM_INV_ITEM = enum.auto()
    SIM_ESTATE = enum.auto()
    FILE = 1
    ASSET = 2
    SIM_INV_ITEM = 3
    SIM_ESTATE = 4
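Replacing `enum.auto()` with explicit integers across these serializer enums is more than style: the numbers are wire-protocol values, and `auto()` silently renumbers everything below an inserted member. A small demonstration of the hazard:

```python
import enum

class Fragile(enum.IntEnum):
    UNKNOWN = 0
    FILE = enum.auto()   # 1 today, but a member inserted above FILE renumbers it
    ASSET = enum.auto()  # and every later member shifts too

class Pinned(enum.IntEnum):
    UNKNOWN = 0
    FILE = 1             # the on-the-wire value is explicit and auditable
    ASSET = 2

assert Fragile.ASSET == Pinned.ASSET == 2
```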


class EstateAssetType(IntEnum):
@@ -425,15 +418,15 @@ class TransferParamsSerializer(se.EnumSwitchedSubfieldSerializer):
@se.enum_field_serializer("TransferInfo", "TransferInfo", "ChannelType")
class TransferChannelType(IntEnum):
    UNKNOWN = 0
    MISC = enum.auto()
    ASSET = enum.auto()
    MISC = 1
    ASSET = 2


@se.enum_field_serializer("TransferInfo", "TransferInfo", "TargetType")
class TransferTargetType(IntEnum):
    UNKNOWN = 0
    FILE = enum.auto()
    VFILE = enum.auto()
    FILE = 1
    VFILE = 2


@se.enum_field_serializer("TransferInfo", "TransferInfo", "Status")
@@ -540,45 +533,45 @@ class SendXferPacketIDSerializer(se.AdapterSubfieldSerializer):
@se.enum_field_serializer("ViewerEffect", "Effect", "Type")
class ViewerEffectType(IntEnum):
    TEXT = 0
    ICON = enum.auto()
    CONNECTOR = enum.auto()
    FLEXIBLE_OBJECT = enum.auto()
    ANIMAL_CONTROLS = enum.auto()
    LOCAL_ANIMATION_OBJECT = enum.auto()
    CLOTH = enum.auto()
    EFFECT_BEAM = enum.auto()
    EFFECT_GLOW = enum.auto()
    EFFECT_POINT = enum.auto()
    EFFECT_TRAIL = enum.auto()
    EFFECT_SPHERE = enum.auto()
    EFFECT_SPIRAL = enum.auto()
    EFFECT_EDIT = enum.auto()
    EFFECT_LOOKAT = enum.auto()
    EFFECT_POINTAT = enum.auto()
    EFFECT_VOICE_VISUALIZER = enum.auto()
    NAME_TAG = enum.auto()
    EFFECT_BLOB = enum.auto()
    ICON = 1
    CONNECTOR = 2
    FLEXIBLE_OBJECT = 3
    ANIMAL_CONTROLS = 4
    LOCAL_ANIMATION_OBJECT = 5
    CLOTH = 6
    EFFECT_BEAM = 7
    EFFECT_GLOW = 8
    EFFECT_POINT = 9
    EFFECT_TRAIL = 10
    EFFECT_SPHERE = 11
    EFFECT_SPIRAL = 12
    EFFECT_EDIT = 13
    EFFECT_LOOKAT = 14
    EFFECT_POINTAT = 15
    EFFECT_VOICE_VISUALIZER = 16
    NAME_TAG = 17
    EFFECT_BLOB = 18


class LookAtTarget(IntEnum):
    NONE = 0
    IDLE = enum.auto()
    AUTO_LISTEN = enum.auto()
    FREELOOK = enum.auto()
    RESPOND = enum.auto()
    HOVER = enum.auto()
    CONVERSATION = enum.auto()
    SELECT = enum.auto()
    FOCUS = enum.auto()
    MOUSELOOK = enum.auto()
    CLEAR = enum.auto()
    IDLE = 1
    AUTO_LISTEN = 2
    FREELOOK = 3
    RESPOND = 4
    HOVER = 5
    CONVERSATION = 6
    SELECT = 7
    FOCUS = 8
    MOUSELOOK = 9
    CLEAR = 10


class PointAtTarget(IntEnum):
    NONE = 0
    SELECT = enum.auto()
    GRAB = enum.auto()
    CLEAR = enum.auto()
    SELECT = 1
    GRAB = 2
    CLEAR = 3


@se.subfield_serializer("ViewerEffect", "Effect", "TypeData")
@@ -943,7 +936,7 @@ class ObjectStateAdapter(se.ContextAdapter):
        PCode.AVATAR: se.IntFlag(AgentState),
        PCode.PRIMITIVE: AttachmentStateAdapter(None),
        # Other cases are probably just a number (tree species ID or something.)
        dataclasses.MISSING: se.IdentityAdapter(),
        se.MISSING: se.IdentityAdapter(),
    }
)

@@ -1146,9 +1139,15 @@ class TEExceptionField(se.SerializableBase):
        return dict


_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]
_TE_DICT = Dict[_TE_FIELD_KEY, _T]


def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
              default_factory=dataclasses.MISSING, default=dataclasses.MISSING):
    if default_factory is not dataclasses.MISSING:
              default_factory: Union[se.MissingType, Callable[[], _T]] = se.MISSING,
              default: Union[se.MissingType, _T] = se.MISSING):
    if default_factory is not se.MISSING:
        new_default_factory = lambda: {None: default_factory()}
    elif default is not None:
        new_default_factory = lambda: {None: default}
@@ -1160,9 +1159,6 @@ def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
    )


_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]

# If this seems weird it's because it is. TE offsets are S16s with `0` as the actual 0
# point, and LL divides by `0x7FFF` to convert back to float. Negative S16s can
# actually go to -0x8000 due to two's complement, creating a larger range for negatives.
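A worked version of the asymmetry the comment describes, as a standalone sketch (the real serializer is `TE_S16_COORD`; the exact rounding and clamping there may differ):

```python
def te_offset_decode(s16: int) -> float:
    return s16 / 0x7FFF  # LL divides by 0x7FFF on decode

def te_offset_encode(val: float) -> int:
    return max(-0x8000, min(0x7FFF, round(val * 0x7FFF)))

assert te_offset_decode(0x7FFF) == 1.0   # positive range tops out at exactly 1.0
assert te_offset_decode(-0x8000) < -1.0  # ~-1.00003: the "larger range for negatives"
assert te_offset_encode(0.5) == 0x4000
```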
@@ -1221,22 +1217,22 @@ MAX_TES = 45

@dataclasses.dataclass
class TextureEntryCollection:
    Textures: Dict[_TE_FIELD_KEY, UUID] = _te_field(
    Textures: _TE_DICT[UUID] = _te_field(
        # Plywood texture
        se.UUID, first=True, default=UUID('89556747-24cb-43ed-920b-47caed15465f'))
    # Bytes are inverted so fully opaque white is \x00\x00\x00\x00
    Color: Dict[_TE_FIELD_KEY, bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
    ScalesS: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
    ScalesT: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
    OffsetsS: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
    OffsetsT: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
    Rotation: Dict[_TE_FIELD_KEY, float] = _te_field(PackedTERotation(), default=0.0)
    BasicMaterials: Dict[_TE_FIELD_KEY, "BasicMaterials"] = _te_field(
    Color: _TE_DICT[bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
    ScalesS: _TE_DICT[float] = _te_field(se.F32, default=1.0)
    ScalesT: _TE_DICT[float] = _te_field(se.F32, default=1.0)
    OffsetsS: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
    OffsetsT: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
    Rotation: _TE_DICT[float] = _te_field(PackedTERotation(), default=0.0)
    BasicMaterials: _TE_DICT["BasicMaterials"] = _te_field(
        BUMP_SHINY_FULLBRIGHT, default_factory=BasicMaterials,
    )
    MediaFlags: Dict[_TE_FIELD_KEY, "MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
    Glow: Dict[_TE_FIELD_KEY, float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
    Materials: Dict[_TE_FIELD_KEY, UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)
    MediaFlags: _TE_DICT["MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
    Glow: _TE_DICT[float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
    Materials: _TE_DICT[UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)

    def unwrap(self):
        """Return `self` regardless of whether this is lazy wrapped object or not"""
@@ -1733,28 +1729,28 @@ class NameValueSerializer(se.SimpleSubfieldSerializer):
@se.enum_field_serializer("SetFollowCamProperties", "CameraProperty", "Type")
class CameraPropertyType(IntEnum):
    PITCH = 0
    FOCUS_OFFSET = enum.auto()
    FOCUS_OFFSET_X = enum.auto()
    FOCUS_OFFSET_Y = enum.auto()
    FOCUS_OFFSET_Z = enum.auto()
    POSITION_LAG = enum.auto()
    FOCUS_LAG = enum.auto()
    DISTANCE = enum.auto()
    BEHINDNESS_ANGLE = enum.auto()
    BEHINDNESS_LAG = enum.auto()
    POSITION_THRESHOLD = enum.auto()
    FOCUS_THRESHOLD = enum.auto()
    ACTIVE = enum.auto()
    POSITION = enum.auto()
    POSITION_X = enum.auto()
    POSITION_Y = enum.auto()
    POSITION_Z = enum.auto()
    FOCUS = enum.auto()
    FOCUS_X = enum.auto()
    FOCUS_Y = enum.auto()
    FOCUS_Z = enum.auto()
    POSITION_LOCKED = enum.auto()
    FOCUS_LOCKED = enum.auto()
    FOCUS_OFFSET = 1
    FOCUS_OFFSET_X = 2
    FOCUS_OFFSET_Y = 3
    FOCUS_OFFSET_Z = 4
    POSITION_LAG = 5
    FOCUS_LAG = 6
    DISTANCE = 7
    BEHINDNESS_ANGLE = 8
    BEHINDNESS_LAG = 9
    POSITION_THRESHOLD = 10
    FOCUS_THRESHOLD = 11
    ACTIVE = 12
    POSITION = 13
    POSITION_X = 14
    POSITION_Y = 15
    POSITION_Z = 16
    FOCUS = 17
    FOCUS_X = 18
    FOCUS_Y = 19
    FOCUS_Z = 20
    POSITION_LOCKED = 21
    FOCUS_LOCKED = 22


@se.enum_field_serializer("DeRezObject", "AgentBlock", "Destination")
@@ -1869,30 +1865,33 @@ class GroupPowerFlags(IntFlag):
    # Roles
    ROLE_CREATE = 1 << 4  # Create new roles
    ROLE_DELETE = 1 << 5  # Delete roles
    ROLE_PROPERTIES = 1 << 6  # Change Role Names, Titles, and Descriptions (Of roles the user is in, only, or any role in group?)
    ROLE_PROPERTIES = 1 << 6  # Change Role Names, Titles, and Descriptions
    ROLE_ASSIGN_MEMBER_LIMITED = 1 << 7  # Assign Member to a Role that the assigner is in
    ROLE_ASSIGN_MEMBER = 1 << 8  # Assign Member to Role
    ROLE_REMOVE_MEMBER = 1 << 9  # Remove Member from Role
    ROLE_CHANGE_ACTIONS = 1 << 10  # Change actions a role can perform

    # Group Identity
    GROUP_CHANGE_IDENTITY = 1 << 11  # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', all 'Show Member In Group Profile' checkboxes
    GROUP_CHANGE_IDENTITY = 1 << 11  # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', etc.

    # Parcel Management
    LAND_DEED = 1 << 12  # Deed Land and Buy Land for Group
    LAND_RELEASE = 1 << 13  # Release Land (to Gov. Linden)
    LAND_SET_SALE_INFO = 1 << 14  # Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
    # Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
    LAND_SET_SALE_INFO = 1 << 14
    LAND_DIVIDE_JOIN = 1 << 15  # Divide and Join Parcels

    # Parcel Identity
    LAND_FIND_PLACES = 1 << 17  # Toggle "Show in Find Places" and Set Category.
    LAND_CHANGE_IDENTITY = 1 << 18  # Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
    # Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
    LAND_CHANGE_IDENTITY = 1 << 18
    LAND_SET_LANDING_POINT = 1 << 19  # Set Landing Point

    # Parcel Settings
    LAND_CHANGE_MEDIA = 1 << 20  # Change Media Settings
    LAND_EDIT = 1 << 21  # Toggle Edit Land
    LAND_OPTIONS = 1 << 22  # Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
    # Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
    LAND_OPTIONS = 1 << 22

    # Parcel Powers
    LAND_ALLOW_EDIT_LAND = 1 << 23  # Bypass Edit Land Restriction
hippolyzer/lib/client/inventory_manager.py (new file, 192 lines)

@@ -0,0 +1,192 @@
from __future__ import annotations

import gzip
import logging
import secrets
from pathlib import Path
from typing import Union, List, Tuple, Set

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.client.state import BaseClientSession


LOG = logging.getLogger(__name__)


class InventoryManager:
    def __init__(self, session: BaseClientSession):
        self._session = session
        self.model: InventoryModel = InventoryModel()
        self._load_skeleton()

    def _load_skeleton(self):
        assert not self.model.nodes
        skel_cats: List[dict] = self._session.login_data.get('inventory-skeleton', [])
        for skel_cat in skel_cats:
            self.model.add(InventoryCategory(
                name=skel_cat["name"],
                cat_id=UUID(skel_cat["folder_id"]),
                parent_id=UUID(skel_cat["parent_id"]),
                # Don't use the version from the skeleton, this flags the inventory as needing
                # completion from the inventory cache. This matches indra's behavior.
                version=InventoryCategory.VERSION_NONE,
                type="category",
                pref_type=skel_cat.get("type_default", -1),
                owner_id=self._session.agent_id,
            ))

    def load_cache(self, path: Union[str, Path]):
        # Per indra, rough flow for loading inv on login is:
        # 1. Look at inventory skeleton from login response
        # 2. Pre-populate model with categories from the skeleton, including their versions
        # 3. Read the inventory cache, tracking categories and items separately
        # 4. Walk the list of categories in our cache. If the cat exists in the skeleton and the versions
        #    match, then we may load the category and its descendants from cache.
        # 5. Any categories in the skeleton but not in the cache, or those with mismatched versions must be fetched.
        #    The viewer does this by setting the local version of the cats to -1 and forcing a descendent fetch
        #    over AIS.
        #
        # By the time you call this function, you should have already loaded the inventory skeleton
        # into the model and set its inventory category versions to VERSION_NONE.

        skel_cats: List[dict] = self._session.login_data['inventory-skeleton']
        # UUID -> version map for inventory skeleton
        skel_versions = {UUID(cat["folder_id"]): cat["version"] for cat in skel_cats}
        LOG.info(f"Parsing inv cache at {path}")
        cached_categories, cached_items = self._parse_cache(path)
        LOG.info(f"Done parsing inv cache at {path}")
        loaded_cat_ids: Set[UUID] = set()

        for cached_cat in cached_categories:
            existing_cat: InventoryCategory = self.model.get(cached_cat.cat_id)  # noqa
            # Don't clobber an existing cat unless it just has a placeholder version,
            # maybe from loading the skeleton?
            if existing_cat and existing_cat.version != InventoryCategory.VERSION_NONE:
                continue
            # Cached cat isn't the same as what the inv server says it should be, can't use it.
            if cached_cat.version != skel_versions.get(cached_cat.cat_id):
                continue
            if existing_cat:
                # Remove the category so that we can replace it, but leave any children in place
                self.model.unlink(existing_cat, single_only=True)
            self.model.add(cached_cat)
            # Any items in this category in our cache file are usable and should be added
            loaded_cat_ids.add(cached_cat.cat_id)

        for cached_item in cached_items:
            # The skeleton doesn't have any items, so if we run into any items they should be exactly the
            # same as what we're trying to add. No point clobbering.
            if cached_item.item_id in self.model:
                continue
            # The parent category didn't have a cache hit against the inventory skeleton, can't add!
            if cached_item.parent_id not in loaded_cat_ids:
                continue
            self.model.add(cached_item)

    def _parse_cache(self, path: Union[str, Path]) -> Tuple[List[InventoryCategory], List[InventoryItem]]:
        categories: List[InventoryCategory] = []
        items: List[InventoryItem] = []
        # Parse our cached items and categories out of the compressed inventory cache
        first_line = True
        with gzip.open(path, "rb") as f:
            # Line-delimited LLSD notation!
            for line in f.readlines():
                # TODO: Parsing of invcache is dominated by `parse_notation()`. It's stupidly inefficient.
                node_llsd = llsd.parse_notation(line)
                if first_line:
                    # First line is the file header
                    first_line = False
                    if node_llsd['inv_cache_version'] != 2:
                        raise ValueError(f"Unknown cache version: {node_llsd!r}")
                    continue

                if InventoryCategory.ID_ATTR in node_llsd:
                    if (cat_node := InventoryCategory.from_llsd(node_llsd)) is not None:
                        categories.append(cat_node)
                elif InventoryItem.ID_ATTR in node_llsd:
                    if (item_node := InventoryItem.from_llsd(node_llsd)) is not None:
                        items.append(item_node)
                else:
                    LOG.warning(f"Unknown node type in inv cache: {node_llsd!r}")
        return categories, items
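For reference, the cache file this parses is a gzipped stream of one notation-LLSD map per line, with a version header first. The shape below is illustrative only (not captured from a real viewer); the key names follow the `ID_ATTR` checks above:

```
{'inv_cache_version':i2}
{'cat_id':u..., 'parent_id':u..., 'name':'Objects', 'version':i42, ...}
{'item_id':u..., 'parent_id':u..., 'name':'My Shirt', ...}
```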


# Thankfully we have 9 billion different ways to represent inventory data.
def ais_item_to_inventory_data(ais_item: dict) -> Block:
    return Block(
        "InventoryData",
        ItemID=ais_item["item_id"],
        FolderID=ais_item["parent_id"],
        CallbackID=0,
        CreatorID=ais_item["permissions"]["creator_id"],
        OwnerID=ais_item["permissions"]["owner_id"],
        GroupID=ais_item["permissions"]["group_id"],
        BaseMask=ais_item["permissions"]["base_mask"],
        OwnerMask=ais_item["permissions"]["owner_mask"],
        GroupMask=ais_item["permissions"]["group_mask"],
        EveryoneMask=ais_item["permissions"]["everyone_mask"],
        NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
        GroupOwned=0,
        AssetID=ais_item["asset_id"],
        Type=ais_item["type"],
        InvType=ais_item["inv_type"],
        Flags=ais_item["flags"],
        SaleType=ais_item["sale_info"]["sale_type"],
        SalePrice=ais_item["sale_info"]["sale_price"],
        Name=ais_item["name"],
        Description=ais_item["desc"],
        CreationDate=ais_item["created_at"],
        # Meaningless here
        CRC=secrets.randbits(32),
    )


def inventory_data_to_ais_item(inventory_data: Block) -> dict:
    return dict(
        item_id=inventory_data["ItemID"],
        parent_id=inventory_data["ParentID"],
        permissions=dict(
            creator_id=inventory_data["CreatorID"],
            owner_id=inventory_data["OwnerID"],
            group_id=inventory_data["GroupID"],
            base_mask=inventory_data["BaseMask"],
            owner_mask=inventory_data["OwnerMask"],
            group_mask=inventory_data["GroupMask"],
            everyone_mask=inventory_data["EveryoneMask"],
            next_owner_mask=inventory_data["NextOwnerMask"],
        ),
        asset_id=inventory_data["AssetID"],
        type=inventory_data["Type"],
        inv_type=inventory_data["InvType"],
        flags=inventory_data["Flags"],
        sale_info=dict(
            sale_type=inventory_data["SaleType"],
            sale_price=inventory_data["SalePrice"],
        ),
        name=inventory_data["Name"],
        # Keyed "desc" / "created_at" so this round-trips through
        # ais_item_to_inventory_data() above, which reads those keys.
        desc=inventory_data["Description"],
        created_at=inventory_data["CreationDate"],
    )


def ais_folder_to_inventory_data(ais_folder: dict) -> Block:
    return Block(
        "FolderData",
        FolderID=ais_folder["cat_id"],
        ParentID=ais_folder["parent_id"],
        CallbackID=0,
        Type=ais_folder["preferred_type"],
        Name=ais_folder["name"],
    )


def inventory_data_to_ais_folder(inventory_data: Block) -> dict:
    return dict(
        cat_id=inventory_data["FolderID"],
        parent_id=inventory_data["ParentID"],
        preferred_type=inventory_data["Type"],
        name=inventory_data["Name"],
    )
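A hypothetical end-to-end use of the new manager, assuming `session` is a connected `BaseClientSession` whose `login_data` carries an `inventory-skeleton` list, and that `model.nodes` is the UUID-to-node mapping used above (the cache path shown is a placeholder):

```python
manager = InventoryManager(session)  # loads the skeleton; every category starts at VERSION_NONE
manager.load_cache("/path/to/<agent_id>.inv.llsd.gz")  # hypothetical cache location

# Categories still at VERSION_NONE missed the cache and need an AIS descendant fetch
stale = [
    node for node in manager.model.nodes.values()
    if isinstance(node, InventoryCategory) and node.version == InventoryCategory.VERSION_NONE
]
```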

@@ -36,3 +36,4 @@ class BaseClientSession(abc.ABC):
    region_by_handle: Callable[[int], Optional[BaseClientRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
    objects: ClientWorldObjectManager
    login_data: Dict[str, Any]

@@ -7,7 +7,6 @@ import copy
import dataclasses
import multiprocessing
import pickle
import secrets
import warnings

from hippolyzer.lib.base.datatypes import UUID, Vector3
@@ -103,46 +102,6 @@ def send_chat(message: Union[bytes, str], channel=0, chat_type=ChatType.NORMAL,
    ))


def ais_item_to_inventory_data(ais_item: dict):
    return Block(
        "InventoryData",
        ItemID=ais_item["item_id"],
        FolderID=ais_item["parent_id"],
        CallbackID=0,
        CreatorID=ais_item["permissions"]["creator_id"],
        OwnerID=ais_item["permissions"]["owner_id"],
        GroupID=ais_item["permissions"]["group_id"],
        BaseMask=ais_item["permissions"]["base_mask"],
        OwnerMask=ais_item["permissions"]["owner_mask"],
        GroupMask=ais_item["permissions"]["group_mask"],
        EveryoneMask=ais_item["permissions"]["everyone_mask"],
        NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
        GroupOwned=0,
        AssetID=ais_item["asset_id"],
        Type=ais_item["type"],
        InvType=ais_item["inv_type"],
        Flags=ais_item["flags"],
        SaleType=ais_item["sale_info"]["sale_type"],
        SalePrice=ais_item["sale_info"]["sale_price"],
        Name=ais_item["name"],
        Description=ais_item["desc"],
        CreationDate=ais_item["created_at"],
        # Meaningless here
        CRC=secrets.randbits(32),
    )


def ais_folder_to_inventory_data(ais_folder: dict):
    return Block(
        "FolderData",
        FolderID=ais_folder["cat_id"],
        ParentID=ais_folder["parent_id"],
        CallbackID=0,
        Type=ais_folder["preferred_type"],
        Name=ais_folder["name"],
    )


class MetaBaseAddon(abc.ABCMeta):
    """
    Metaclass for BaseAddon that prevents class member assignments from clobbering descriptors

@@ -432,22 +432,34 @@ class AddonManager:
        chat_type: int = message["ChatData"]["ChatType"]
        # RLV-style OwnerSay?
        if chat and chat.startswith("@") and chat_type == 8:
            # RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
            options, _, param = chat.rpartition("=")
            cmd, _, options = options.lstrip("@").partition(":")
            options = options.split(";")
            source = message["ChatData"]["SourceID"]
            try:
                with addon_ctx.push(session, region):
                    handled = cls._call_all_addon_hooks("handle_rlv_command",
                                                        session, region, source, cmd, options, param)
                    if handled:
                        region.circuit.drop_message(message)
                        return True
            except:
                LOG.exception(f"Failed while handling command {chat!r}")
                if not cls._SWALLOW_ADDON_EXCEPTIONS:
                    raise
            # RLV allows putting multiple commands into one message, blindly splitting on ",".
            chat = chat.lstrip("@")
            all_cmds_handled = True
            for command_str in chat.split(","):
                if not command_str:
                    continue
                # RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
                options, _, param = command_str.partition("=")
                cmd, _, options = options.partition(":")
                # TODO: Not always correct, commands can specify their own parsing for the option field
                options = options.split(";") if options else []
                source = message["ChatData"]["SourceID"]
                try:
                    with addon_ctx.push(session, region):
                        handled = cls._call_all_addon_hooks("handle_rlv_command",
                                                            session, region, source, cmd, options, param)
                        if handled:
                            region.circuit.drop_message(message)
                        else:
                            all_cmds_handled = False
                except:
                    LOG.exception(f"Failed while handling command {command_str!r}")
                    all_cmds_handled = False
                    if not cls._SWALLOW_ADDON_EXCEPTIONS:
                        raise
            # Drop the chat message if all commands it contained were handled by an addon
            if all_cmds_handled:
                return True

        with addon_ctx.push(session, region):
            return cls._call_all_addon_hooks("handle_lludp_message", session, region, message)
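The rewritten handler now honors RLV's comma-separated command batching. The old code also ran `rpartition("=")` over the whole message, so in a batched message a later command's `=` could be mistaken for the first command's parameter. The split/parse logic, extracted as a standalone sketch:

```python
def parse_rlv_message(chat: str):
    """Parse `@cmd1:opt1;opt2=param,cmd2=param` into (cmd, options, param) tuples."""
    commands = []
    for command_str in chat.lstrip("@").split(","):
        if not command_str:
            continue
        options, _, param = command_str.partition("=")
        cmd, _, options = options.partition(":")
        commands.append((cmd, options.split(";") if options else [], param))
    return commands

assert parse_rlv_message("@detach=n,clear") == [
    ("detach", [], "n"),
    ("clear", [], ""),
]
```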

@@ -2,7 +2,7 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.proxy.addon_utils import ais_item_to_inventory_data
from hippolyzer.lib.client.inventory_manager import ais_item_to_inventory_data


class ProxyAssetUploader(AssetUploader):

hippolyzer/lib/proxy/inventory_manager.py (new file, 28 lines)

@@ -0,0 +1,28 @@
import datetime as dt

from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs


class ProxyInventoryManager(InventoryManager):
    def __init__(self, session: BaseClientSession):
        super().__init__(session)
        newest_cache = None
        newest_timestamp = dt.datetime(year=1970, month=1, day=1, tzinfo=dt.timezone.utc)
        # Look for the newest version of the cached inventory and use that.
        # Not foolproof, but close enough if we're not sure what viewer is being used.
        for cache_dir in iter_viewer_cache_dirs():
            inv_cache_path = cache_dir / (str(session.agent_id) + ".inv.llsd.gz")
            if inv_cache_path.exists():
                mod = get_mtime(inv_cache_path)
                if not mod:
                    continue
                mod_ts = dt.datetime.fromtimestamp(mod, dt.timezone.utc)
                if mod_ts <= newest_timestamp:
                    continue
                # Track the timestamp along with the path, so older caches found later lose
                newest_timestamp = mod_ts
                newest_cache = inv_cache_path

        if newest_cache:
            self.load_cache(newest_cache)
@@ -401,7 +401,7 @@ class AbstractMessageLogEntry(abc.ABC):
            beautified = minidom.parseString(content).toprettyxml(indent=" ")
            # kill blank lines. will break cdata sections. meh.
            beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
            return re.sub(r'<([\w]+)>\s*</\1>', r'<\1></\1>',
            return re.sub(r'<(\w+)>\s*</\1>', r'<\1></\1>',
                          beautified, flags=re.MULTILINE)


@@ -522,7 +522,7 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
            buf.write(bytes(headers).decode("utf8", errors="replace"))
            buf.write("\r\n")

        buf.write(message_body)
        buf.write(message_body or "")
        return buf.getvalue()

    def request(self, beautify=False, replacements=None):
@@ -549,6 +549,12 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
        return self._summary

    def _guess_content_type(self, message):
        # SL's login service lies and says that its XML-RPC response is LLSD+XML.
        # It is not, and it blows up the parser. It's been broken ever since the
        # login rewrite and a fix is likely not forthcoming. I'm sick of seeing
        # the traceback, so just hack around it.
        if self.name == "LoginRequest":
            return "application/xml"
        content_type = message.headers.get("Content-Type", "")
        if not message.content or content_type.startswith("application/llsd"):
            return content_type

@@ -10,6 +10,7 @@ from typing import *
from weakref import ref

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.client.state import BaseClientSession
@@ -18,6 +19,7 @@ from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext
from hippolyzer.lib.proxy.caps import is_asset_server_cap_name, CapData, CapType
from hippolyzer.lib.proxy.inventory_manager import ProxyInventoryManager
from hippolyzer.lib.proxy.namecache import ProxyNameCache
from hippolyzer.lib.proxy.object_manager import ProxyWorldObjectManager
from hippolyzer.lib.proxy.region import ProxiedRegion
@@ -47,6 +49,7 @@ class Session(BaseClientSession):
        self.message_handler: MessageHandler[Message, str] = MessageHandler()
        self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
        self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
        self.inventory = ProxyInventoryManager(proxify(self))
        # Base path of a newview type cache directory for this session
        self.cache_dir: Optional[str] = None
        self._main_region = None
setup.py

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages

here = path.abspath(path.dirname(__file__))

version = '0.11.3'
version = '0.12.0'

with open(path.join(here, 'README.md')) as readme_fh:
    readme = readme_fh.read()

@@ -79,6 +79,20 @@ class TestDatatypes(unittest.TestCase):
        quat = Quaternion(X=128.0, Y=128.0, Z=22.0)
        self.assertEqual(quat, (128.0, 128.0, 22.0, 0.0))

    def test_quaternion_euler_roundtrip(self):
        orig_vec = Vector3(0.0, -1.0, 2.0)
        quat = Quaternion.from_euler(*orig_vec)
        for orig_comp, new_comp in zip(orig_vec, quat.to_euler()):
            self.assertAlmostEqual(orig_comp, new_comp)

    def test_quaternion_transformations(self):
        quat = Quaternion(0.4034226801113349, -0.2590347239999257, 0.7384602626041288, 0.4741598817790379)
        expected_trans = (0.4741598817790379, 0.4034226801113349, -0.2590347239999257, 0.7384602626041288)
        trans_quat = quat.to_transformations()
        self.assertSequenceEqual(expected_trans, trans_quat)
        new_quat = Quaternion.from_transformations(trans_quat)
        self.assertEqual(quat, new_quat)

    def test_uuid_from_bytes(self):
        tmp_uuid = uuid.UUID('2b7f7a6e-32c5-dbfd-e2c7-926d1a9f0aca')
        tmp_uuid2 = uuid.UUID('1dd5efe2-faaf-1864-5ac9-bc61c5d8d7ea')
@@ -135,6 +149,9 @@ class TestDatatypes(unittest.TestCase):
        self.assertIsInstance(val, UUID)
        self.assertEqual(orig, val)

    def test_str_llsd_serialization(self):
        self.assertEqual(b"'foo\\nbar'", llsd.format_notation("foo\nbar"))

    def test_jank_stringy_bytes(self):
        val = JankStringyBytes(b"foo\x00")
        self.assertTrue("o" in val)

@@ -122,7 +122,8 @@ class TestLegacyInv(unittest.TestCase):
            'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
            'next_owner_mask': 581632,
            'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
            'owner_mask': 2147483647
            'owner_mask': 2147483647,
            'is_owner_group': 0,
        },
        'sale_info': {
            'sale_price': 10,