26 Commits

Author SHA1 Message Date
Salad Dais
5ef9b5354a v0.12.0 2022-08-18 15:13:02 +00:00
Salad Dais
34ca7d54be Support formatting SL's busted login endpoint responses 2022-08-18 14:40:33 +00:00
Salad Dais
cb316f1992 Only load the newest version of an agent's inventory cache
This isn't entirely correct, but without a cross-platform way to
map the requesting viewer to a specific cache directory, this
is the least annoying thing we can do.
2022-08-18 14:39:49 +00:00
Salad Dais
da05a6cf1f Begin reshuffling inventory management code 2022-08-18 14:30:42 +00:00
Salad Dais
f06c31e225 Greatly improve matrix handling logic in collada code 2022-08-18 14:29:28 +00:00
Salad Dais
b4e5596ca2 Add more utils for converting between quat and euler 2022-08-08 00:38:09 +00:00
Salad Dais
49a54ce099 Fix anim mangler exceptions causing reload to fail 2022-08-07 04:42:06 +00:00
Salad Dais
0349fd9078 Fix RLV command parser to better match RLV's actual behavior 2022-08-02 08:18:28 +00:00
Salad Dais
118ef2813a Fix new flake8 lint errors 2022-08-01 01:41:15 +00:00
Salad Dais
256f74b71a Add InventoryManager to proxy Session object 2022-07-31 18:31:56 +00:00
Salad Dais
4a84453ca4 Add start of proxy inventory manager 2022-07-31 16:54:57 +00:00
Salad Dais
34316cb166 Fix LLSD notation serialization with embedded newline 2022-07-30 14:39:48 +00:00
Salad Dais
0f7d35cdca Handle HTTP messages with missing (not just empty) body 2022-07-30 00:37:35 +00:00
Salad Dais
2ee8a6f008 Clean up typing to appease the linter 2022-07-28 18:26:05 +00:00
Salad Dais
848a6745c0 v0.11.3 2022-07-28 03:55:22 +00:00
Salad Dais
0cbbedd27b Make assignments on BaseAddon class objects work as expected
The descriptors had been silently clobbered for a while, and
I never noticed. Oops!
2022-07-28 03:39:53 +00:00
Salad Dais
e951a5b5c3 Make datetime objects (de)serialize in binary LLSD more accurately
Fixes some precision issues in LLBase's LLSD serialization
where the microseconds component was dropped. May still get some
off-by-one-microsecond serialization differences due to rounding.
2022-07-27 22:42:58 +00:00
Salad Dais
68bf3ba4a2 More comments in mesh module 2022-07-27 22:21:42 +00:00
Salad Dais
5b4f8f03dc Use same compression ratio for LLSD as indra 2022-07-27 22:16:31 +00:00
Salad Dais
d7c2215cbc Remove special Firestorm section from readme
The new Firestorm release added proxy configuration back in.
2022-07-27 02:50:06 +00:00
Salad Dais
629e59d3f9 Add option to upload mesh deformer directly 2022-07-26 04:13:15 +00:00
Salad Dais
8f68bc219e Split up deformer helper a little 2022-07-26 03:44:32 +00:00
Salad Dais
ba296377de Save mesh deformers as files rather than uploading directly 2022-07-26 02:12:54 +00:00
Salad Dais
e34927a996 Improve AssetUploader API, make uploader example addon use it 2022-07-26 00:11:37 +00:00
Salad Dais
3c6a917550 Add command to deformer_helper addon that uploads mesh deformers
Sometimes these are preferable to deformer anims.
2022-07-25 23:11:15 +00:00
Salad Dais
dbae2acf27 Add basic AssetUploader class
Should make it less annoying to upload procedurally generated mesh
outside of local mesh mode.
2022-07-25 22:08:28 +00:00
27 changed files with 1009 additions and 332 deletions

View File

@@ -83,27 +83,9 @@ SOCKS 5 works correctly on these platforms, so you can just configure it through
the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm`.
* Log in!
##### Firestorm
The proxy selection dialog in the most recent Firestorm release is non-functional, as
https://bitbucket.org/lindenlab/viewer/commits/454c7f4543688126b2fa5c0560710f5a1733702e was not pulled in.
As a workaround, you can go to `Debug -> Show Debug Settings` and enter the following values:
| Name | Value |
|---------------------|-----------|
| HttpProxyType | Web |
| BrowserProxyAddress | 127.0.0.1 |
| BrowserProxyEnabled | TRUE |
| BrowserProxyPort | 9062 |
| Socks5ProxyEnabled | TRUE |
| Socks5ProxyHost | 127.0.0.1 |
| Socks5ProxyPort | 9061 |
Or, if you're on Linux, you can also use [LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy).
Connections from the in-viewer browser will likely _not_ be run through Hippolyzer when using either of
these workarounds.
Or, if you're on Linux, you can instead use [LinHippoAutoProxy](https://github.com/SaladDais/LinHippoAutoProxy)
to launch your viewer, which will configure everything for you. Note that connections from the in-viewer browser will
likely _not_ be run through Hippolyzer when using LinHippoAutoProxy.
### Filtering

View File

@@ -4,8 +4,13 @@ Helper for making deformer anims. This could have a GUI I guess.
import dataclasses
from typing import *
import numpy as np
import transformations
from hippolyzer.lib.base.datatypes import Vector3, Quaternion, UUID
from hippolyzer.lib.base.llanim import Joint, Animation, PosKeyframe, RotKeyframe
from hippolyzer.lib.base.mesh import MeshAsset, SegmentHeaderDict, SkinSegmentDict, LLMeshSerializer
from hippolyzer.lib.base.serialization import BufferWriter
from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon, SessionProperty
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.commands import handle_command, Parameter
@@ -45,6 +50,58 @@ def build_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
return anim.to_bytes()
def build_mesh_deformer(joints: Dict[str, DeformerJoint]) -> bytes:
skin_seg = SkinSegmentDict(
joint_names=[],
bind_shape_matrix=identity_mat4(),
inverse_bind_matrix=[],
alt_inverse_bind_matrix=[],
pelvis_offset=0.0,
lock_scale_if_joint_position=False
)
for joint_name, joint in joints.items():
# We can only represent joint translations, so skip this joint if it doesn't have one.
if not joint.pos:
continue
skin_seg['joint_names'].append(joint_name)
# Inverse bind matrix isn't actually used, so we can just give it a placeholder value of the
# identity mat4. This might break things in weird ways because the matrix isn't actually sensible.
skin_seg['inverse_bind_matrix'].append(identity_mat4())
# Create a flattened mat4 that only has a translation component of our joint pos
# The viewer ignores any other component of these matrices so no point putting shear
# or perspective or whatever :)
joint_mat4 = pos_to_mat4(joint.pos)
# Ask the viewer to override this joint's usual parent-relative position with our matrix
skin_seg['alt_inverse_bind_matrix'].append(joint_mat4)
# Make a dummy mesh and shove our skin segment onto it. None of the tris are rigged, so the
# viewer will freak out and refuse to display the tri; only the joint translations will be used.
# Supposedly a mesh with a `skin` segment but no weights on the material should just result in an
# effectively unrigged material, but that's not the case. Oh well.
mesh = MeshAsset.make_triangle()
mesh.header['skin'] = SegmentHeaderDict(offset=0, size=0)
mesh.segments['skin'] = skin_seg
writer = BufferWriter("!")
writer.write(LLMeshSerializer(), mesh)
return writer.copy_buffer()
def identity_mat4() -> List[float]:
"""
Return an "Identity" mat4
Effectively represents a transform of no rot, no translation, no shear, no perspective
and scaling by 1.0 on every axis.
"""
return list(np.identity(4).flatten('F'))
def pos_to_mat4(pos: Vector3) -> List[float]:
"""Convert a position Vector3 to a Translation Mat4"""
return list(transformations.compose_matrix(translate=tuple(pos)).flatten('F'))
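A quick, illustrative sanity check of what the two helpers above produce, assuming only the numpy/transformations imports already at the top of this file:
def _check_mat4_helpers():
    mat = np.array(pos_to_mat4(Vector3(0.0, 0.0, 0.5))).reshape((4, 4), order='F')
    # compose_matrix() puts the translation in the last column of the mat4
    assert list(mat[:3, 3]) == [0.0, 0.0, 0.5]
    # identity_mat4() is just a flattened 4x4 identity
    assert identity_mat4() == list(np.identity(4).flatten('F'))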
class DeformerAddon(BaseAddon):
deform_joints: Dict[str, DeformerJoint] = SessionProperty(dict)
@@ -118,5 +175,41 @@ class DeformerAddon(BaseAddon):
self._reapply_deformer(session, region)
return True
@handle_command()
async def save_deformer_as_mesh(self, _session: Session, _region: ProxiedRegion):
"""
Export the deformer as a crafted rigged mesh rather than an animation
Mesh deformers have the advantage that they don't cause your joints to "stick"
the way animations with pos keyframes do.
"""
filename = await AddonManager.UI.save_file(filter_str="LL Mesh (*.llmesh)")
if not filename:
return
with open(filename, "wb") as f:
f.write(build_mesh_deformer(self.deform_joints))
@handle_command()
async def upload_deformer_as_mesh(self, _session: Session, region: ProxiedRegion):
"""Same as save_deformer_as_mesh, but uploads the mesh directly to SL."""
mesh_bytes = build_mesh_deformer(self.deform_joints)
try:
# Send off mesh to calculate upload cost
upload_token = await region.asset_uploader.initiate_mesh_upload("deformer", mesh_bytes)
except Exception as e:
show_message(e)
raise
if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
return
# Do the actual upload
try:
await region.asset_uploader.complete_upload(upload_token)
except Exception as e:
show_message(e)
raise
addons = [DeformerAddon()]

View File

@@ -20,6 +20,7 @@ bulk upload, like changing priority or removing a joint.
"""
import asyncio
import logging
import pathlib
from abc import abstractmethod
from typing import *
@@ -106,7 +107,10 @@ class LocalAnimAddon(BaseAddon):
if not anim_id:
continue
# is playing right now, check if there's a newer version
self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
try:
self.apply_local_anim_from_file(session, region, anim_name, only_if_changed=True)
except Exception:
logging.exception("Exploded while replaying animation")
await asyncio.sleep(1.0)
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
@@ -175,7 +179,6 @@ class LocalAnimAddon(BaseAddon):
if only_if_changed and old_mtime == mtime:
return
cls.local_anim_mtimes[anim_name] = mtime
# The file might not even exist anymore if mtime is `None`;
# the anim will automatically stop if that happens.
if mtime:
@@ -187,6 +190,7 @@ class LocalAnimAddon(BaseAddon):
with open(anim_path, "rb") as f:
anim_data = f.read()
anim_data = cls._mangle_anim(anim_data)
cls.local_anim_mtimes[anim_name] = mtime
else:
print(f"Unknown anim {anim_name!r}")
cls.apply_local_anim(session, region, anim_name, new_data=anim_data)

View File

@@ -2,21 +2,15 @@
Example of how to upload assets, assumes assets are already encoded
in the appropriate format.
/524 upload <asset type>
/524 upload_asset <asset type>
"""
import pprint
from pathlib import Path
from typing import *
import aiohttp
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import ais_item_to_inventory_data, show_message, BaseAddon
from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon
from hippolyzer.lib.proxy.commands import handle_command, Parameter
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
@@ -29,7 +23,6 @@ class UploaderAddon(BaseAddon):
async def upload_asset(self, _session: Session, region: ProxiedRegion,
asset_type: AssetType, flags: Optional[int] = None):
"""Upload a raw asset with optional flags"""
inv_type = asset_type.inventory_type
file = await AddonManager.UI.open_file()
if not file:
return
@@ -42,67 +35,29 @@ class UploaderAddon(BaseAddon):
with open(file, "rb") as f:
file_body = f.read()
params = {
"asset_type": asset_type.human_name,
"description": "(No Description)",
"everyone_mask": 0,
"group_mask": 0,
"folder_id": UUID(), # Puts it in the default folder, I guess. Undocumented.
"inventory_type": inv_type.human_name,
"name": name,
"next_owner_mask": 581632,
}
if flags is not None:
params['flags'] = flags
try:
if asset_type == AssetType.MESH:
# Kicking off a mesh upload works a little differently internally
upload_token = await region.asset_uploader.initiate_mesh_upload(
name, file_body, flags=flags
)
else:
upload_token = await region.asset_uploader.initiate_asset_upload(
name, asset_type, file_body, flags=flags,
)
except Exception as e:
show_message(e)
raise
caps = region.caps_client
async with aiohttp.ClientSession() as sess:
async with caps.post('NewFileAgentInventory', llsd=params, session=sess) as resp:
parsed = await resp.read_llsd()
if "uploader" not in parsed:
show_message(f"Upload error!: {parsed!r}")
return
print("Got upload URL, uploading...")
if not await AddonManager.UI.confirm("Upload", f"Spend {upload_token.linden_cost}L on upload?"):
return
async with caps.post(parsed["uploader"], data=file_body, session=sess) as resp:
upload_parsed = await resp.read_llsd()
if "new_inventory_item" not in upload_parsed:
show_message(f"Got weird upload resp: {pprint.pformat(upload_parsed)}")
return
await self._force_inv_update(region, upload_parsed['new_inventory_item'])
@handle_command(item_id=UUID)
async def force_inv_update(self, _session: Session, region: ProxiedRegion, item_id: UUID):
"""Force an inventory update for a given item id"""
await self._force_inv_update(region, item_id)
async def _force_inv_update(self, region: ProxiedRegion, item_id: UUID):
session = region.session()
ais_req_data = {
"items": [
{
"owner_id": session.agent_id,
"item_id": item_id,
}
]
}
async with region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
ais_item = (await resp.read_llsd())["items"][0]
message = Message(
"UpdateCreateInventoryItem",
Block(
"AgentData",
AgentID=session.agent_id,
SimApproved=1,
TransactionID=UUID.random(),
),
ais_item_to_inventory_data(ais_item),
direction=Direction.IN
)
region.circuit.send(message)
# Do the actual upload
try:
await region.asset_uploader.complete_upload(upload_token)
except Exception as e:
show_message(e)
raise
addons = [UploaderAddon()]

View File

@@ -11,12 +11,11 @@
# * * Collada tooling sucks and even LL is moving away from it
# * * Ensuring LLMesh->Collada and LLMesh->GLTF conversion don't differ semantically is easy via assimp.
import collections
import logging
import os.path
import secrets
import statistics
import sys
from typing import Dict, List, Iterable, Optional
from typing import Dict, List, Optional, Union, Sequence
import collada
import collada.source
@@ -25,13 +24,27 @@ from lxml import etree
import numpy as np
import transformations
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.serialization import BufferReader
from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset, positions_from_domain, SkinSegmentDict
LOG = logging.getLogger(__name__)
DIR = os.path.dirname(os.path.realpath(__file__))
def llsd_to_mat4(mat: Union[np.ndarray, Sequence[float]]) -> np.ndarray:
return np.array(mat).reshape((4, 4), order='F')
def mat4_to_llsd(mat: np.ndarray) -> List[float]:
return list(mat.flatten(order='F'))
def mat4_to_collada(mat: np.ndarray) -> np.ndarray:
return mat.flatten(order='C')
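The two flatten orders are easy to mix up, so here's a hedged sanity sketch: LLSD-style mat4s are flattened column-major ('F') while pycollada wants row-major ('C'), meaning LLSD -> ndarray -> Collada amounts to a transpose of the flat ordering.
flat = [float(i) for i in range(16)]
assert mat4_to_llsd(llsd_to_mat4(flat)) == flat  # lossless round-trip
# reshape 'F' then flatten 'C' transposes the flat list
assert list(mat4_to_collada(llsd_to_mat4(flat))) == list(np.array(flat).reshape((4, 4)).T.flatten())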
def mesh_to_collada(ll_mesh: MeshAsset, include_skin=True) -> collada.Collada:
dae = collada.Collada()
axis = collada.asset.UP_AXIS.Z_UP
@@ -52,7 +65,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
skin_seg = ll_mesh.segments.get('skin')
bind_shape_matrix = None
if include_skin and skin_seg:
bind_shape_matrix = np.array(skin_seg["bind_shape_matrix"]).reshape((4, 4))
bind_shape_matrix = llsd_to_mat4(skin_seg["bind_shape_matrix"])
should_skin = True
# Transform from the skin will be applied on the controller, not the node
node_transform = np.identity(4)
@@ -119,9 +132,8 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
accessor.set('source', f"#{accessor.get('source')}")
flattened_bind_poses = []
# LLMesh matrices are row-major, convert to col-major for Collada.
for bind_pose in skin_seg['inverse_bind_matrix']:
flattened_bind_poses.append(np.array(bind_pose).reshape((4, 4)).flatten('F'))
flattened_bind_poses.append(mat4_to_collada(llsd_to_mat4(bind_pose)))
flattened_bind_poses = np.array(flattened_bind_poses)
inv_bind_source = _create_mat4_source(f"bind-poses{sub_uniq}", flattened_bind_poses, "TRANSFORM")
@@ -142,7 +154,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
# in SL, with their own distinct sets of weights and vertex data.
controller_node = E.controller(
E.skin(
E.bind_shape_matrix(' '.join(str(x) for x in bind_shape_matrix.flatten('F'))),
E.bind_shape_matrix(' '.join(str(x) for x in mat4_to_collada(bind_shape_matrix))),
joints_source.xmlnode,
inv_bind_source.xmlnode,
weights_source.xmlnode,
@@ -173,7 +185,7 @@ def llmesh_to_node(ll_mesh: MeshAsset, dae: collada.Collada, uniq=None,
node = collada.scene.Node(
node_name,
children=geom_nodes,
transforms=[collada.scene.MatrixTransform(np.array(node_transform.flatten('F')))],
transforms=[collada.scene.MatrixTransform(mat4_to_collada(node_transform))],
)
if should_skin:
# We need a skeleton per _mesh asset_ because you could have incongruous skeletons
@@ -208,7 +220,8 @@ def transform_skeleton(skel_root: etree.ElementBase, dae: collada.Collada, skin_
joint_nodes[skel_node.get('name')] = collada.scene.Node.load(dae, skel_node, {})
for joint_name, matrix in zip(skin_seg['joint_names'], skin_seg.get('alt_inverse_bind_matrix', [])):
joint_node = joint_nodes[joint_name]
joint_node.matrix = np.array(matrix).reshape((4, 4)).flatten('F')
joint_decomp = transformations.decompose_matrix(llsd_to_mat4(matrix))
joint_node.matrix = mat4_to_collada(transformations.compose_matrix(translate=joint_decomp[3]))
# Update the underlying XML element with the new transform matrix
joint_node.save()
@@ -251,7 +264,7 @@ def _create_mat4_source(name: str, data: np.ndarray, semantic: str):
def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
"""
Fix weird-looking bind matrices to have normal scaling
Fix weird-looking bind matrices to have normal scaling and rotations
Not sure why these even happen (weird mesh authoring programs?)
Sometimes get enormous inverse bind matrices (each component 10k+) and tiny
@@ -259,38 +272,38 @@ def fix_weird_bind_matrices(skin_seg: SkinSegmentDict):
with weird scales and tries to set them to what they "should" be without
the weird inverted scaling.
"""
axis_counters = [collections.Counter() for _ in range(3)]
for joint_inv in skin_seg['inverse_bind_matrix']:
joint_mat = np.array(joint_inv).reshape((4, 4))
joint_scale = transformations.decompose_matrix(joint_mat)[0]
for axis_counter, axis_val in zip(axis_counters, joint_scale):
axis_counter[axis_val] += 1
most_common_inv_scale = []
for axis_counter in axis_counters:
most_common_inv_scale.append(axis_counter.most_common(1)[0][0])
scale_fixup = Vector3(1, 1, 1)
angle_fixup = Vector3(0, 0, 0)
have_fixups = False
# Totally non-scientific method of detecting odd bind matrices based on squinting very,
# very hard at a random sample of assets.
for joint_name, joint_inv in zip(skin_seg['joint_names'], skin_seg['inverse_bind_matrix']):
if not joint_name.startswith("m"):
# We can't make very good guesses based on collision volume scales and rotations,
# skip anything but the "m" joints.
continue
joint_mat = llsd_to_mat4(joint_inv)
joint_scale, _, joint_angle, _, _ = transformations.decompose_matrix(joint_mat)
# If the scale component of an mJointName joint isn't roughly <1,1,1>, we likely have
# scaling applied to the inverse bind matrices rather than the bind matrix. Figure out
# what the fixup should be so that we can reverse it.
if abs(3.0 - sum(joint_scale)) > 0.5:
scale_fixup = Vector3(1, 1, 1) / Vector3(*joint_scale)
have_fixups = True
# I wouldn't expect mJointName joints to be rotated at all in their inverse bind matrices.
# Is this a rotation that should've been applied to the bind shape matrix instead?
# In any event, all joints are likely rotated by this amount, so calculate the inverse.
if abs(sum(joint_angle)) > 0.05:
angle_fixup = -Vector3(*joint_angle)
have_fixups = True
if abs(1.0 - statistics.fmean(most_common_inv_scale)) > 1.0:
if have_fixups:
LOG.warning("Detected weird matrices in mesh!", scale_fixup, angle_fixup)
# The magnitude of the scales in the inverse bind matrices look very strange.
# The bind matrix itself is probably messed up as well, try to fix it.
skin_seg['bind_shape_matrix'] = fix_llsd_matrix_scale(skin_seg['bind_shape_matrix'], most_common_inv_scale)
if joint_positions := skin_seg.get('alt_inverse_bind_matrix', None):
fix_matrix_list_scale(joint_positions, most_common_inv_scale)
rev_scale = tuple(1.0 / x for x in most_common_inv_scale)
fix_matrix_list_scale(skin_seg['inverse_bind_matrix'], rev_scale)
def fix_matrix_list_scale(source: List[List[float]], scale_fixup: Iterable[float]):
for i, alt_inv_matrix in enumerate(source):
source[i] = fix_llsd_matrix_scale(alt_inv_matrix, scale_fixup)
def fix_llsd_matrix_scale(source: List[float], scale_fixup: Iterable[float]):
matrix = np.array(source).reshape((4, 4))
decomposed = list(transformations.decompose_matrix(matrix))
# Need to handle both the scale and translation matrices
for idx in (0, 3):
decomposed[idx] = tuple(x * y for x, y in zip(decomposed[idx], scale_fixup))
return list(transformations.compose_matrix(*decomposed).flatten('C'))
# TODO: DON'T MESS WITH INVERSE TRANSLATION!!!! Only bind shape gets its translation scaled.
# TODO: put this back in; the previous logic was totally wrong-headed.
pass
def main():

View File

@@ -29,6 +29,7 @@ import math
from typing import *
import recordclass
import transformations
logger = getLogger('hippolyzer.lib.base.datatypes')
@@ -220,6 +221,15 @@ class Quaternion(TupleCoord):
)
return super().__mul__(other)
@classmethod
def from_transformations(cls, coord) -> Quaternion:
"""Convert to W (S) last form"""
return cls(coord[1], coord[2], coord[3], coord[0])
def to_transformations(self) -> Tuple[float, float, float, float]:
"""Convert to W (S) first form for use with the transformations lib"""
return self.W, self.X, self.Y, self.Z
@classmethod
def from_euler(cls, roll, pitch, yaw, degrees=False):
if degrees:
@@ -241,6 +251,9 @@ class Quaternion(TupleCoord):
return cls(X=x, Y=y, Z=z, W=w)
def to_euler(self) -> Vector3:
return Vector3(*transformations.euler_from_quaternion(self.to_transformations()))
def data(self, wanted_components=None):
if wanted_components == 3:
return self.X, self.Y, self.Z
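A small hedged sketch of the new component-order helpers; the transformations lib is W-first while Quaternion stores X, Y, Z, W (this assumes TupleCoord's usual equality semantics):
q = Quaternion.from_euler(0.0, 0.0, 90.0, degrees=True)
# Round-trip through the transformations lib's (w, x, y, z) ordering
assert Quaternion.from_transformations(q.to_transformations()) == q
# to_euler() is now just euler_from_quaternion() over the W-first form
roll, pitch, yaw = transformations.euler_from_quaternion(q.to_transformations())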

View File

@@ -117,6 +117,8 @@ class InventoryBase(SchemaBase):
# Not meant to be serialized
if not spec:
continue
if field.metadata.get("llsd_only"):
continue
val = getattr(self, field_name)
if val is None:
@@ -166,16 +168,11 @@ class InventoryModel(InventoryBase):
def from_llsd(cls, llsd_val: List[Dict]) -> InventoryModel:
model = cls()
for obj_dict in llsd_val:
if InventoryCategory.ID_ATTR in obj_dict:
if (obj := InventoryCategory.from_llsd(obj_dict)) is not None:
model.add(obj)
elif InventoryObject.ID_ATTR in obj_dict:
if (obj := InventoryObject.from_llsd(obj_dict)) is not None:
model.add(obj)
elif InventoryItem.ID_ATTR in obj_dict:
if (obj := InventoryItem.from_llsd(obj_dict)) is not None:
model.add(obj)
else:
for inv_type in INVENTORY_TYPES:
if inv_type.ID_ATTR in obj_dict:
if (obj := inv_type.from_llsd(obj_dict)) is not None:
model.add(obj)
break
LOG.warning(f"Unknown object type {obj_dict!r}")
return model
@@ -218,13 +215,13 @@ class InventoryModel(InventoryBase):
self.root = node
node.model = weakref.proxy(self)
def unlink(self, node: InventoryNodeBase) -> Sequence[InventoryNodeBase]:
def unlink(self, node: InventoryNodeBase, single_only: bool = False) -> Sequence[InventoryNodeBase]:
"""Unlink a node and its descendants from the tree, returning the removed nodes"""
assert node.model == self
if node == self.root:
self.root = None
unlinked = [node]
if isinstance(node, InventoryContainerBase):
if isinstance(node, InventoryContainerBase) and not single_only:
for child in node.children:
unlinked.extend(self.unlink(child))
self.nodes.pop(node.node_id, None)
@@ -257,6 +254,15 @@ class InventoryModel(InventoryBase):
removed=removed_in_other,
)
def __getitem__(self, item: UUID) -> InventoryNodeBase:
return self.nodes[item]
def __contains__(self, item: UUID):
return item in self.nodes
def get(self, item: UUID) -> Optional[InventoryNodeBase]:
return self.nodes.get(item)
@dataclasses.dataclass
class InventoryPermissions(InventoryBase):
@@ -271,6 +277,9 @@ class InventoryPermissions(InventoryBase):
owner_id: UUID = schema_field(SchemaUUID)
last_owner_id: UUID = schema_field(SchemaUUID)
group_id: UUID = schema_field(SchemaUUID)
# Nothing actually cares about this, but it could be there.
# It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
is_owner_group: int = schema_field(SchemaInt, default=0, llsd_only=True)
@dataclasses.dataclass
@@ -384,6 +393,7 @@ class InventoryObject(InventoryContainerBase):
class InventoryCategory(InventoryContainerBase):
ID_ATTR: ClassVar[str] = "cat_id"
SCHEMA_NAME: ClassVar[str] = "inv_category"
VERSION_NONE: ClassVar[int] = -1
cat_id: UUID = schema_field(SchemaUUID)
pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
@@ -417,3 +427,6 @@ class InventoryItem(InventoryNodeBase):
if self.asset_id is not None:
return self.asset_id
return self.shadow_id ^ MAGIC_ID
INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)

View File

@@ -111,10 +111,10 @@ class SchemaUUID(SchemaFieldSerializer[UUID]):
def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
repr=True, hash=None, compare=True, llsd_name=None) -> dataclasses.Field: # noqa
repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False) -> dataclasses.Field: # noqa
"""Describe a field in the inventory schema and the shape of its value"""
return dataclasses.field(
metadata={"spec": spec, "llsd_name": llsd_name}, default=default,
return dataclasses.field( # noqa
metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only}, default=default,
init=init, repr=repr, hash=hash, compare=compare,
)

View File

@@ -46,22 +46,103 @@ class HippoLLSDNotationFormatter(llbase.llsd.LLSDNotationFormatter, HippoLLSDBas
def __init__(self):
super().__init__()
def STRING(self, v):
# llbase's notation LLSD encoder isn't suitable for generating line-delimited
# LLSD because the string formatter leaves \n unencoded, unlike indra's llcommon.
# Add our own escaping rule.
return super().STRING(v).replace(b"\n", b"\\n")
def format_notation(val: typing.Any):
return HippoLLSDNotationFormatter().format(val)
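A hedged round-trip sketch of the escaping fix, assuming parse_notation() (defined later in this module) understands the \n escape the way indra's notation parser does:
val = {'note': 'line one\nline two'}
encoded = format_notation(val)
assert b'\n' not in encoded  # now safe to emit as line-delimited LLSD
assert parse_notation(encoded) == val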
def format_binary(val: typing.Any, with_header=True):
val = llbase.llsd.format_binary(val)
if not with_header:
return val.split(b"\n", 1)[1]
val = _format_binary_recurse(val)
if with_header:
return b'<?llsd/binary?>\n' + val
return val
# This is copied almost wholesale from https://bitbucket.org/lindenlab/llbase/src/master/llbase/llsd.py
# With a few minor changes to make serialization round-trip correctly. It's evil.
def _format_binary_recurse(something) -> bytes:
"""Binary formatter workhorse."""
def _format_list(list_something):
array_builder = [b'[' + struct.pack('!i', len(list_something))]
for item in list_something:
array_builder.append(_format_binary_recurse(item))
array_builder.append(b']')
return b''.join(array_builder)
if something is None:
return b'!'
elif isinstance(something, LLSD):
return _format_binary_recurse(something.thing)
elif isinstance(something, bool):
if something:
return b'1'
else:
return b'0'
elif is_integer(something):
try:
return b'i' + struct.pack('!i', something)
except (OverflowError, struct.error) as exc:
raise LLSDSerializationError(str(exc), something)
elif isinstance(something, float):
try:
return b'r' + struct.pack('!d', something)
except SystemError as exc:
raise LLSDSerializationError(str(exc), something)
elif isinstance(something, uuid.UUID):
return b'u' + something.bytes
elif isinstance(something, binary):
return b'b' + struct.pack('!i', len(something)) + something
elif is_string(something):
if is_unicode(something):
something = something.encode("utf8")
return b's' + struct.pack('!i', len(something)) + something
elif isinstance(something, uri):
return b'l' + struct.pack('!i', len(something)) + something.encode("utf8")
elif isinstance(something, datetime.datetime):
return b'd' + struct.pack('<d', something.timestamp())
elif isinstance(something, datetime.date):
seconds_since_epoch = calendar.timegm(something.timetuple())
return b'd' + struct.pack('<d', seconds_since_epoch)
elif isinstance(something, (list, tuple)):
return _format_list(something)
elif isinstance(something, dict):
map_builder = [b'{' + struct.pack('!i', len(something))]
for key, value in something.items():
if isinstance(key, str):
key = key.encode("utf8")
map_builder.append(b'k' + struct.pack('!i', len(key)) + key)
map_builder.append(_format_binary_recurse(value))
map_builder.append(b'}')
return b''.join(map_builder)
else:
try:
return _format_list(list(something))
except TypeError:
raise LLSDSerializationError(
"Cannot serialize unknown type: %s (%s)" %
(type(something), something))
class HippoLLSDBinaryParser(llbase.llsd.LLSDBinaryParser):
def __init__(self):
super().__init__()
self._dispatch[ord('u')] = lambda: UUID(bytes=self._getc(16))
self._dispatch[ord('d')] = self._parse_date
def _parse_date(self):
seconds = struct.unpack("<d", self._getc(8))[0]
try:
return datetime.datetime.fromtimestamp(seconds, tz=datetime.timezone.utc)
except OverflowError as exc:
# A garbage seconds value can cause fromtimestamp() to raise
# OverflowError: timestamp out of range for platform time_t
self._error(exc, -8)
def _parse_string(self):
# LLSD's C++ API lets you stuff binary in a string field even though it's only
@@ -89,7 +170,7 @@ def parse_notation(data: bytes):
def zip_llsd(val: typing.Any):
return zlib.compress(format_binary(val, with_header=False))
return zlib.compress(format_binary(val, with_header=False), level=zlib.Z_BEST_COMPRESSION)
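A minimal sketch of the microsecond-accurate date round-trip, assuming llbase's LLSDBinaryParser.parse() API:
stamp = datetime.datetime(2022, 7, 27, 22, 42, 58, 123456, tzinfo=datetime.timezone.utc)
parsed = HippoLLSDBinaryParser().parse(format_binary(stamp, with_header=False))
# May still be off by a microsecond due to float rounding (see commit message)
assert abs(parsed - stamp) <= datetime.timedelta(microseconds=1)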
def unzip_llsd(data: bytes):

View File

@@ -26,6 +26,50 @@ class MeshAsset:
segments: MeshSegmentDict = dataclasses.field(default_factory=dict)
raw_segments: Dict[str, bytes] = dataclasses.field(default_factory=dict)
@classmethod
def make_triangle(cls) -> MeshAsset:
"""Make an asset representing an un-rigged single-sided mesh triangle"""
inst = cls()
inst.header = {
"version": 1,
"high_lod": {"offset": 0, "size": 0},
"physics_mesh": {"offset": 0, "size": 0},
"physics_convex": {"offset": 0, "size": 0},
}
base_lod: LODSegmentDict = {
'Normal': [
Vector3(-0.0, -0.0, -1.0),
Vector3(-0.0, -0.0, -1.0),
Vector3(-0.0, -0.0, -1.0)
],
'PositionDomain': {'Max': [0.5, 0.5, 0.0], 'Min': [-0.5, -0.5, 0.0]},
'Position': [
Vector3(0.0, 0.0, 0.0),
Vector3(1.0, 0.0, 0.0),
Vector3(0.5, 1.0, 0.0)
],
'TexCoord0Domain': {'Max': [1.0, 1.0], 'Min': [0.0, 0.0]},
'TexCoord0': [
Vector2(0.0, 0.0),
Vector2(1.0, 0.0),
Vector2(0.5, 1.0)
],
'TriangleList': [[0, 1, 2]],
}
inst.segments['physics_mesh'] = [deepcopy(base_lod)]
inst.segments['high_lod'] = [deepcopy(base_lod)]
convex_segment: PhysicsConvexSegmentDict = {
'BoundingVerts': [
Vector3(-0.0, 1.0, -1.0),
Vector3(-1.0, -1.0, -1.0),
Vector3(1.0, -1.0, -1.0)
],
'Max': [0.5, 0.5, 0.0],
'Min': [-0.5, -0.5, 0.0]
}
inst.segments['physics_convex'] = convex_segment
return inst
def iter_lods(self) -> Generator[List[LODSegmentDict], None, None]:
for lod_name, lod_val in self.segments.items():
if lod_name.endswith("_lod"):
@@ -135,20 +179,26 @@ class VertexWeight(recordclass.datatuple): # type: ignore
class SkinSegmentDict(TypedDict, total=False):
"""Rigging information"""
joint_names: List[str]
# model -> world transform matrix for model
# model -> world transform mat4 for model
bind_shape_matrix: List[float]
# world -> joint local transform matrices
# world -> joint local transform mat4s
inverse_bind_matrix: List[List[float]]
# offset matrices for joints, translation-only.
# Not sure what these are relative to, base joint or model <0,0,0>.
# Transform mat4s for the joint nodes themselves.
# The matrices may have scale or other components, but only the
# translation component will be used by the viewer.
# All translations are relative to the joint's parent.
alt_inverse_bind_matrix: List[List[float]]
lock_scale_if_joint_position: bool
pelvis_offset: float
class PhysicsConvexSegmentDict(DomainDict, total=False):
"""Data for convex hull collisions, populated by the client"""
# Min / Max domain vals are inline, unlike for LODs
"""
Data for convex hull collisions, populated by the client
Min / Max pos domain vals are inline, unlike for LODs, so this inherits from DomainDict
"""
# Indices into the Positions list
HullList: List[int]
# -1.0 - 1.0, dequantized from binary field of U16s
Positions: List[Vector3]
@@ -158,13 +208,13 @@ class PhysicsConvexSegmentDict(DomainDict, total=False):
class PhysicsHavokSegmentDict(TypedDict, total=False):
"""Cached data for Havok collisions, populated by sim and not used by client."""
HullMassProps: MassPropsDict
MOPP: MOPPDict
MeshDecompMassProps: MassPropsDict
HullMassProps: HavokMassPropsDict
MOPP: HavokMOPPDict
MeshDecompMassProps: HavokMassPropsDict
WeldingData: bytes
class MassPropsDict(TypedDict, total=False):
class HavokMassPropsDict(TypedDict, total=False):
# Vec, center of mass
CoM: List[float]
# 9 floats, Mat3?
@@ -173,7 +223,7 @@ class MassPropsDict(TypedDict, total=False):
volume: float
class MOPPDict(TypedDict, total=False):
class HavokMOPPDict(TypedDict, total=False):
"""Memory Optimized Partial Polytope"""
BuildType: int
MoppData: bytes
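A short usage sketch for the new MeshAsset.make_triangle() helper above, mirroring how the deformer addon serializes it (BufferWriter and LLMeshSerializer as imported elsewhere in this diff):
from hippolyzer.lib.base.serialization import BufferWriter

tri = MeshAsset.make_triangle()
writer = BufferWriter("!")
writer.write(LLMeshSerializer(), tri)
llmesh_bytes = writer.copy_buffer()  # ready to save or upload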

View File

@@ -27,6 +27,14 @@ class _Unserializable:
return False
class MissingType:
"""Simple sentinel type like dataclasses._MISSING_TYPE"""
pass
MISSING = MissingType()
UNSERIALIZABLE = _Unserializable()
_T = TypeVar("_T")
@@ -288,7 +296,7 @@ class SerializableBase(abc.ABC):
@classmethod
def default_value(cls) -> Any:
# None may be a valid default, so return MISSING as a sentinel val
return dataclasses.MISSING
return MISSING
class Adapter(SerializableBase, abc.ABC):
@@ -328,18 +336,18 @@ class ForwardSerializable(SerializableBase):
def __init__(self, func: Callable[[], SERIALIZABLE_TYPE]):
super().__init__()
self._func = func
self._wrapped = dataclasses.MISSING
self._wrapped: Union[MissingType, SERIALIZABLE_TYPE] = MISSING
def _ensure_evaled(self):
if self._wrapped is dataclasses.MISSING:
if self._wrapped is MISSING:
self._wrapped = self._func()
def __getattr__(self, attr):
return getattr(self._wrapped, attr)
def default_value(self) -> Any:
if self._wrapped is dataclasses.MISSING:
return dataclasses.MISSING
if self._wrapped is MISSING:
return MISSING
return self._wrapped.default_value()
def serialize(self, val, writer: BufferWriter, ctx: Optional[ParseContext]):
@@ -357,10 +365,10 @@ class Template(SerializableBase):
def __init__(self, template_spec: Dict[str, SERIALIZABLE_TYPE], skip_missing=False):
self._template_spec = template_spec
self._skip_missing = skip_missing
self._size = dataclasses.MISSING
self._size = MISSING
def calc_size(self):
if self._size is not dataclasses.MISSING:
if self._size is not MISSING:
return self._size
sum_bytes = 0
for _, field_type in self._template_spec.items():
@@ -1196,9 +1204,9 @@ class ContextMixin(Generic[_T]):
def _choose_option(self, ctx: Optional[ParseContext]) -> _T:
idx = self._fun(ctx)
if idx not in self._options:
if dataclasses.MISSING not in self._options:
if MISSING not in self._options:
raise KeyError(f"{idx!r} not found in {self._options!r}")
idx = dataclasses.MISSING
idx = MISSING
return self._options[idx]
@@ -1442,7 +1450,7 @@ class StringEnumAdapter(Adapter):
class FixedPoint(SerializableBase):
def __init__(self, ser_spec, int_bits, frac_bits, signed=False):
# Should never be used due to how this handles signs :/
assert(not ser_spec.is_signed)
assert (not ser_spec.is_signed)
self._ser_spec: SerializablePrimitive = ser_spec
self._signed = signed
@@ -1452,7 +1460,7 @@ class FixedPoint(SerializableBase):
self._min_val = ((1 << int_bits) * -1) if signed else 0
self._max_val = 1 << int_bits
assert(required_bits == (ser_spec.calc_size() * 8))
assert (required_bits == (ser_spec.calc_size() * 8))
def deserialize(self, reader: Reader, ctx):
fixed_val = float(self._ser_spec.deserialize(reader, ctx))
@@ -1482,8 +1490,8 @@ def _make_undefined_raiser():
return f
def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=dataclasses.MISSING,
default_factory=dataclasses.MISSING, init=True, repr=True, # noqa
def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default: Any = dataclasses.MISSING,
default_factory: Any = dataclasses.MISSING, init=True, repr=True, # noqa
hash=None, compare=True) -> dataclasses.Field: # noqa
enrich_factory = False
# Lambda, need to defer evaluation of spec until it's actually used.
@@ -1504,7 +1512,7 @@ def dataclass_field(spec: Union[SERIALIZABLE_TYPE, Callable], *, default=datacla
metadata={"spec": spec}, default=default, default_factory=default_factory, init=init,
repr=repr, hash=hash, compare=compare
)
# Need to stuff this on so it knows which field went unspecified.
# Need to stuff this on, so it knows which field went unspecified.
if enrich_factory:
default_factory.field = field
return field

View File

@@ -5,9 +5,6 @@ Serialization templates for structures used in LLUDP and HTTP bodies.
import abc
import collections
import dataclasses
import enum
import importlib
import logging
import math
import zlib
from typing import *
@@ -17,11 +14,6 @@ from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3
from hippolyzer.lib.base.namevalue import NameValuesSerializer
try:
importlib.reload(se) # type: ignore
except:
logging.exception("Failed to reload serialization lib")
@se.enum_field_serializer("RequestXfer", "XferID", "VFileType")
@se.enum_field_serializer("AssetUploadRequest", "AssetBlock", "Type")
@@ -143,6 +135,7 @@ class InventoryType(IntEnum):
lower = self.name.lower()
return {
"callingcard": "callcard",
"none": "-1",
}.get(lower, lower)
@@ -359,10 +352,10 @@ class PermissionType(IntEnum):
@se.enum_field_serializer("TransferRequest", "TransferInfo", "SourceType")
class TransferSourceType(IntEnum):
UNKNOWN = 0
FILE = enum.auto()
ASSET = enum.auto()
SIM_INV_ITEM = enum.auto()
SIM_ESTATE = enum.auto()
FILE = 1
ASSET = 2
SIM_INV_ITEM = 3
SIM_ESTATE = 4
class EstateAssetType(IntEnum):
@@ -425,15 +418,15 @@ class TransferParamsSerializer(se.EnumSwitchedSubfieldSerializer):
@se.enum_field_serializer("TransferInfo", "TransferInfo", "ChannelType")
class TransferChannelType(IntEnum):
UNKNOWN = 0
MISC = enum.auto()
ASSET = enum.auto()
MISC = 1
ASSET = 2
@se.enum_field_serializer("TransferInfo", "TransferInfo", "TargetType")
class TransferTargetType(IntEnum):
UNKNOWN = 0
FILE = enum.auto()
VFILE = enum.auto()
FILE = 1
VFILE = 2
@se.enum_field_serializer("TransferInfo", "TransferInfo", "Status")
@@ -540,45 +533,45 @@ class SendXferPacketIDSerializer(se.AdapterSubfieldSerializer):
@se.enum_field_serializer("ViewerEffect", "Effect", "Type")
class ViewerEffectType(IntEnum):
TEXT = 0
ICON = enum.auto()
CONNECTOR = enum.auto()
FLEXIBLE_OBJECT = enum.auto()
ANIMAL_CONTROLS = enum.auto()
LOCAL_ANIMATION_OBJECT = enum.auto()
CLOTH = enum.auto()
EFFECT_BEAM = enum.auto()
EFFECT_GLOW = enum.auto()
EFFECT_POINT = enum.auto()
EFFECT_TRAIL = enum.auto()
EFFECT_SPHERE = enum.auto()
EFFECT_SPIRAL = enum.auto()
EFFECT_EDIT = enum.auto()
EFFECT_LOOKAT = enum.auto()
EFFECT_POINTAT = enum.auto()
EFFECT_VOICE_VISUALIZER = enum.auto()
NAME_TAG = enum.auto()
EFFECT_BLOB = enum.auto()
ICON = 1
CONNECTOR = 2
FLEXIBLE_OBJECT = 3
ANIMAL_CONTROLS = 4
LOCAL_ANIMATION_OBJECT = 5
CLOTH = 6
EFFECT_BEAM = 7
EFFECT_GLOW = 8
EFFECT_POINT = 9
EFFECT_TRAIL = 10
EFFECT_SPHERE = 11
EFFECT_SPIRAL = 12
EFFECT_EDIT = 13
EFFECT_LOOKAT = 14
EFFECT_POINTAT = 15
EFFECT_VOICE_VISUALIZER = 16
NAME_TAG = 17
EFFECT_BLOB = 18
class LookAtTarget(IntEnum):
NONE = 0
IDLE = enum.auto()
AUTO_LISTEN = enum.auto()
FREELOOK = enum.auto()
RESPOND = enum.auto()
HOVER = enum.auto()
CONVERSATION = enum.auto()
SELECT = enum.auto()
FOCUS = enum.auto()
MOUSELOOK = enum.auto()
CLEAR = enum.auto()
IDLE = 1
AUTO_LISTEN = 2
FREELOOK = 3
RESPOND = 4
HOVER = 5
CONVERSATION = 6
SELECT = 7
FOCUS = 8
MOUSELOOK = 9
CLEAR = 10
class PointAtTarget(IntEnum):
NONE = 0
SELECT = enum.auto()
GRAB = enum.auto()
CLEAR = enum.auto()
SELECT = 1
GRAB = 2
CLEAR = 3
@se.subfield_serializer("ViewerEffect", "Effect", "TypeData")
@@ -943,7 +936,7 @@ class ObjectStateAdapter(se.ContextAdapter):
PCode.AVATAR: se.IntFlag(AgentState),
PCode.PRIMITIVE: AttachmentStateAdapter(None),
# Other cases are probably just a number (tree species ID or something.)
dataclasses.MISSING: se.IdentityAdapter(),
se.MISSING: se.IdentityAdapter(),
}
)
@@ -1146,9 +1139,15 @@ class TEExceptionField(se.SerializableBase):
return dict
_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]
_TE_DICT = Dict[_TE_FIELD_KEY, _T]
def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
default_factory=dataclasses.MISSING, default=dataclasses.MISSING):
if default_factory is not dataclasses.MISSING:
default_factory: Union[se.MissingType, Callable[[], _T]] = se.MISSING,
default: Union[se.MissingType, _T] = se.MISSING):
if default_factory is not se.MISSING:
new_default_factory = lambda: {None: default_factory()}
elif default is not None:
new_default_factory = lambda: {None: default}
@@ -1160,9 +1159,6 @@ def _te_field(spec: se.SERIALIZABLE_TYPE, first=False, optional=False,
)
_T = TypeVar("_T")
_TE_FIELD_KEY = Optional[Sequence[int]]
# If this seems weird it's because it is. TE offsets are S16s with `0` as the actual 0
# point, and LL divides by `0x7FFF` to convert back to float. Negative S16s can
# actually go to -0x8000 due to two's complement, creating a larger range for negatives.
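A worked example of that asymmetry (the real logic lives in TE_S16_COORD; this helper is purely illustrative):
def te_s16_to_float(val: int) -> float:
    # LL divides by 0x7FFF on the way back to float...
    return val / 0x7FFF

assert te_s16_to_float(0x7FFF) == 1.0
assert te_s16_to_float(0) == 0.0
# ...so two's complement gives negatives slightly more range
assert te_s16_to_float(-0x8000) < -1.0  # ~ -1.0000305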
@@ -1221,22 +1217,22 @@ MAX_TES = 45
@dataclasses.dataclass
class TextureEntryCollection:
Textures: Dict[_TE_FIELD_KEY, UUID] = _te_field(
Textures: _TE_DICT[UUID] = _te_field(
# Plywood texture
se.UUID, first=True, default=UUID('89556747-24cb-43ed-920b-47caed15465f'))
# Bytes are inverted so fully opaque white is \x00\x00\x00\x00
Color: Dict[_TE_FIELD_KEY, bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
ScalesS: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
ScalesT: Dict[_TE_FIELD_KEY, float] = _te_field(se.F32, default=1.0)
OffsetsS: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
OffsetsT: Dict[_TE_FIELD_KEY, float] = _te_field(TE_S16_COORD, default=0.0)
Rotation: Dict[_TE_FIELD_KEY, float] = _te_field(PackedTERotation(), default=0.0)
BasicMaterials: Dict[_TE_FIELD_KEY, "BasicMaterials"] = _te_field(
Color: _TE_DICT[bytes] = _te_field(Color4(invert_bytes=True), default=b"\xff\xff\xff\xff")
ScalesS: _TE_DICT[float] = _te_field(se.F32, default=1.0)
ScalesT: _TE_DICT[float] = _te_field(se.F32, default=1.0)
OffsetsS: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
OffsetsT: _TE_DICT[float] = _te_field(TE_S16_COORD, default=0.0)
Rotation: _TE_DICT[float] = _te_field(PackedTERotation(), default=0.0)
BasicMaterials: _TE_DICT["BasicMaterials"] = _te_field(
BUMP_SHINY_FULLBRIGHT, default_factory=BasicMaterials,
)
MediaFlags: Dict[_TE_FIELD_KEY, "MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
Glow: Dict[_TE_FIELD_KEY, float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
Materials: Dict[_TE_FIELD_KEY, UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)
MediaFlags: _TE_DICT["MediaFlags"] = _te_field(MEDIA_FLAGS, default_factory=MediaFlags)
Glow: _TE_DICT[float] = _te_field(se.QuantizedFloat(se.U8, 0.0, 1.0), default=0.0)
Materials: _TE_DICT[UUID] = _te_field(se.UUID, optional=True, default=UUID.ZERO)
def unwrap(self):
"""Return `self` regardless of whether this is lazy wrapped object or not"""
@@ -1733,28 +1729,28 @@ class NameValueSerializer(se.SimpleSubfieldSerializer):
@se.enum_field_serializer("SetFollowCamProperties", "CameraProperty", "Type")
class CameraPropertyType(IntEnum):
PITCH = 0
FOCUS_OFFSET = enum.auto()
FOCUS_OFFSET_X = enum.auto()
FOCUS_OFFSET_Y = enum.auto()
FOCUS_OFFSET_Z = enum.auto()
POSITION_LAG = enum.auto()
FOCUS_LAG = enum.auto()
DISTANCE = enum.auto()
BEHINDNESS_ANGLE = enum.auto()
BEHINDNESS_LAG = enum.auto()
POSITION_THRESHOLD = enum.auto()
FOCUS_THRESHOLD = enum.auto()
ACTIVE = enum.auto()
POSITION = enum.auto()
POSITION_X = enum.auto()
POSITION_Y = enum.auto()
POSITION_Z = enum.auto()
FOCUS = enum.auto()
FOCUS_X = enum.auto()
FOCUS_Y = enum.auto()
FOCUS_Z = enum.auto()
POSITION_LOCKED = enum.auto()
FOCUS_LOCKED = enum.auto()
FOCUS_OFFSET = 1
FOCUS_OFFSET_X = 2
FOCUS_OFFSET_Y = 3
FOCUS_OFFSET_Z = 4
POSITION_LAG = 5
FOCUS_LAG = 6
DISTANCE = 7
BEHINDNESS_ANGLE = 8
BEHINDNESS_LAG = 9
POSITION_THRESHOLD = 10
FOCUS_THRESHOLD = 11
ACTIVE = 12
POSITION = 13
POSITION_X = 14
POSITION_Y = 15
POSITION_Z = 16
FOCUS = 17
FOCUS_X = 18
FOCUS_Y = 19
FOCUS_Z = 20
POSITION_LOCKED = 21
FOCUS_LOCKED = 22
@se.enum_field_serializer("DeRezObject", "AgentBlock", "Destination")
@@ -1869,30 +1865,33 @@ class GroupPowerFlags(IntFlag):
# Roles
ROLE_CREATE = 1 << 4 # Create new roles
ROLE_DELETE = 1 << 5 # Delete roles
ROLE_PROPERTIES = 1 << 6 # Change Role Names, Titles, and Descriptions (Of roles the user is in, only, or any role in group?)
ROLE_PROPERTIES = 1 << 6 # Change Role Names, Titles, and Descriptions
ROLE_ASSIGN_MEMBER_LIMITED = 1 << 7 # Assign Member to a Role that the assigner is in
ROLE_ASSIGN_MEMBER = 1 << 8 # Assign Member to Role
ROLE_REMOVE_MEMBER = 1 << 9 # Remove Member from Role
ROLE_CHANGE_ACTIONS = 1 << 10 # Change actions a role can perform
# Group Identity
GROUP_CHANGE_IDENTITY = 1 << 11 # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', all 'Show Member In Group Profile' checkboxes
GROUP_CHANGE_IDENTITY = 1 << 11 # Charter, insignia, 'Show In Group List', 'Publish on the web', 'Mature', etc.
# Parcel Management
LAND_DEED = 1 << 12 # Deed Land and Buy Land for Group
LAND_RELEASE = 1 << 13 # Release Land (to Gov. Linden)
LAND_SET_SALE_INFO = 1 << 14 # Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
# Set for sale info (Toggle "For Sale", Set Price, Set Target, Toggle "Sell objects with the land")
LAND_SET_SALE_INFO = 1 << 14
LAND_DIVIDE_JOIN = 1 << 15 # Divide and Join Parcels
# Parcel Identity
LAND_FIND_PLACES = 1 << 17 # Toggle "Show in Find Places" and Set Category.
LAND_CHANGE_IDENTITY = 1 << 18 # Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
# Change Parcel Identity: Parcel Name, Parcel Description, Snapshot, 'Publish on the web', and 'Mature' checkbox
LAND_CHANGE_IDENTITY = 1 << 18
LAND_SET_LANDING_POINT = 1 << 19 # Set Landing Point
# Parcel Settings
LAND_CHANGE_MEDIA = 1 << 20 # Change Media Settings
LAND_EDIT = 1 << 21 # Toggle Edit Land
LAND_OPTIONS = 1 << 22 # Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
# Toggle Set Home Point, Fly, Outside Scripts, Create/Edit Objects, Landmark, and Damage checkboxes
LAND_OPTIONS = 1 << 22
# Parcel Powers
LAND_ALLOW_EDIT_LAND = 1 << 23 # Bypass Edit Land Restriction
@@ -1997,6 +1996,35 @@ class ModifyLandAction(IntEnum):
REVERT = 5
@se.flag_field_serializer("RevokePermissions", "Data", "ObjectPermissions")
@se.flag_field_serializer("ScriptQuestion", "Data", "Questions")
@se.flag_field_serializer("ScriptAnswerYes", "Data", "Questions")
class ScriptPermissions(IntFlag):
# "1" itself seems to be unused?
TAKE_MONEY = 1 << 1
TAKE_CONTROLS = 1 << 2
# Doesn't seem to be used?
REMAP_CONTROLS = 1 << 3
TRIGGER_ANIMATIONS = 1 << 4
ATTACH = 1 << 5
# Doesn't seem to be used?
RELEASE_OWNERSHIP = 1 << 6
CHANGE_LINKS = 1 << 7
# Object joints don't exist anymore
CHANGE_JOINTS = 1 << 8
# Change its own permissions? Doesn't seem to be used.
CHANGE_PERMISSIONS = 1 << 9
TRACK_CAMERA = 1 << 10
CONTROL_CAMERA = 1 << 11
TELEPORT = 1 << 12
JOIN_EXPERIENCE = 1 << 13
MANAGE_ESTATE_ACCESS = 1 << 14
ANIMATION_OVERRIDE = 1 << 15
RETURN_OBJECTS = 1 << 16
FORCE_SIT = 1 << 17
CHANGE_ENVIRONMENT = 1 << 18
@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
@classmethod

View File

@@ -0,0 +1,127 @@
from typing import NamedTuple, Union, Optional
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.mesh import MeshAsset, LLMeshSerializer
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.client.state import BaseClientRegion
class UploadError(Exception):
pass
class UploadToken(NamedTuple):
linden_cost: int
uploader_url: str
payload: bytes
class AssetUploader:
def __init__(self, region: BaseClientRegion):
self._region = region
async def initiate_asset_upload(self, name: str, asset_type: AssetType,
body: bytes, flags: Optional[int] = None) -> UploadToken:
payload = {
"asset_type": asset_type.human_name,
"description": "(No Description)",
"everyone_mask": 0,
"group_mask": 0,
"folder_id": UUID.ZERO, # Puts it in the default folder, I guess. Undocumented.
"inventory_type": asset_type.inventory_type.human_name,
"name": name,
"next_owner_mask": 581632,
}
if flags is not None:
payload['flags'] = flags
resp_payload = await self._make_newfileagentinventory_req(payload)
return UploadToken(resp_payload["upload_price"], resp_payload["uploader"], body)
async def _make_newfileagentinventory_req(self, payload: dict):
async with self._region.caps_client.post("NewFileAgentInventory", llsd=payload) as resp:
resp.raise_for_status()
resp_payload = await resp.read_llsd()
# Need to sniff the resp payload for this because SL sends a 200 status code on error
if "error" in resp_payload:
raise UploadError(resp_payload)
return resp_payload
async def complete_upload(self, token: UploadToken) -> dict:
async with self._region.caps_client.post(token.uploader_url, data=token.payload) as resp:
resp.raise_for_status()
resp_payload = await resp.read_llsd()
# The actual upload endpoints return 200 on error, have to sniff the payload to figure
# out if it actually failed...
if "error" in resp_payload:
raise UploadError(resp_payload)
await self._handle_upload_complete(resp_payload)
return resp_payload
async def _handle_upload_complete(self, resp_payload: dict):
"""
Generic hook called when any asset upload completes.
Could trigger an AIS fetch to send the viewer details about the item we just created,
assuming we were in proxy context.
"""
pass
# The mesh upload flow is a little special, so it gets its own methods
async def initiate_mesh_upload(self, name: str, mesh: Union[bytes, MeshAsset],
flags: Optional[int] = None) -> UploadToken:
"""
Very basic LL-serialized mesh uploader
Currently only handles a single mesh with a single face and no associated textures.
"""
if isinstance(mesh, MeshAsset):
writer = se.BufferWriter("!")
writer.write(LLMeshSerializer(), mesh)
mesh = writer.copy_buffer()
asset_resources = self._build_asset_resources(name, mesh)
payload = {
'asset_resources': asset_resources,
'asset_type': 'mesh',
'description': '(No Description)',
'everyone_mask': 0,
'folder_id': UUID.ZERO,
'group_mask': 0,
'inventory_type': 'object',
'name': name,
'next_owner_mask': 581632,
'texture_folder_id': UUID.ZERO
}
if flags is not None:
payload['flags'] = flags
resp_payload = await self._make_newfileagentinventory_req(payload)
upload_body = llsd.format_xml(asset_resources)
return UploadToken(resp_payload["upload_price"], resp_payload["uploader"], upload_body)
def _build_asset_resources(self, name: str, mesh: bytes) -> dict:
return {
'instance_list': [
{
'face_list': [
{
'diffuse_color': [1.0, 1.0, 1.0, 1.0],
'fullbright': False
}
],
'material': 3,
'mesh': 0,
'mesh_name': name,
'physics_shape_type': 2,
'position': [0.0, 0.0, 0.0],
'rotation': [0.7071067690849304, 0.0, 0.0, 0.7071067690849304],
'scale': [1.0, 1.0, 1.0]
}
],
'mesh_list': [mesh],
'metric': 'MUT_Unspecified',
'texture_list': []
}
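A minimal end-to-end sketch of the two-phase flow above, as an addon might drive it; the "new_inventory_item" key is assumed here based on the old NewFileAgentInventory flow:
async def upload_triangle(region: BaseClientRegion):
    token = await region.asset_uploader.initiate_mesh_upload(
        "example", MeshAsset.make_triangle())
    # Only a price quote so far; nothing is charged until complete_upload()
    print(f"Upload will cost {token.linden_cost}L")
    resp = await region.asset_uploader.complete_upload(token)
    print("New item:", resp.get("new_inventory_item"))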

View File

@@ -0,0 +1,192 @@
from __future__ import annotations
import gzip
import logging
import secrets
from pathlib import Path
from typing import Union, List, Tuple, Set
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.client.state import BaseClientSession
LOG = logging.getLogger(__name__)
class InventoryManager:
def __init__(self, session: BaseClientSession):
self._session = session
self.model: InventoryModel = InventoryModel()
self._load_skeleton()
def _load_skeleton(self):
assert not self.model.nodes
skel_cats: List[dict] = self._session.login_data.get('inventory-skeleton', [])
for skel_cat in skel_cats:
self.model.add(InventoryCategory(
name=skel_cat["name"],
cat_id=UUID(skel_cat["folder_id"]),
parent_id=UUID(skel_cat["parent_id"]),
# Don't use the version from the skeleton; this flags the inventory as needing
# completion from the inventory cache. This matches indra's behavior.
version=InventoryCategory.VERSION_NONE,
type="category",
pref_type=skel_cat.get("type_default", -1),
owner_id=self._session.agent_id,
))
def load_cache(self, path: Union[str, Path]):
# Per indra, rough flow for loading inv on login is:
# 1. Look at inventory skeleton from login response
# 2. Pre-populate model with categories from the skeleton, including their versions
# 3. Read the inventory cache, tracking categories and items separately
# 4. Walk the list of categories in our cache. If the cat exists in the skeleton and the versions
# match, then we may load the category and its descendants from cache.
# 5. Any categories in the skeleton but not in the cache, or those with mismatched versions must be fetched.
# The viewer does this by setting the local version of the cats to -1 and forcing a descendent fetch
# over AIS.
#
# By the time you call this function, you should have already loaded the inventory skeleton
# into the model and set its inventory category versions to VERSION_NONE.
skel_cats: List[dict] = self._session.login_data['inventory-skeleton']
# UUID -> version map for inventory skeleton
skel_versions = {UUID(cat["folder_id"]): cat["version"] for cat in skel_cats}
LOG.info(f"Parsing inv cache at {path}")
cached_categories, cached_items = self._parse_cache(path)
LOG.info(f"Done parsing inv cache at {path}")
loaded_cat_ids: Set[UUID] = set()
for cached_cat in cached_categories:
existing_cat: InventoryCategory = self.model.get(cached_cat.cat_id) # noqa
# Don't clobber an existing cat unless it just has a placeholder version,
# maybe from loading the skeleton?
if existing_cat and existing_cat.version != InventoryCategory.VERSION_NONE:
continue
# Cached cat isn't the same as what the inv server says it should be, can't use it.
if cached_cat.version != skel_versions.get(cached_cat.cat_id):
continue
if existing_cat:
# Remove the category so that we can replace it, but leave any children in place
self.model.unlink(existing_cat, single_only=True)
self.model.add(cached_cat)
# Any items in this category in our cache file are usable and should be added
loaded_cat_ids.add(cached_cat.cat_id)
for cached_item in cached_items:
# The skeleton doesn't have any items, so if we run into any items they should be exactly the
# same as what we're trying to add. No point clobbering.
if cached_item.item_id in self.model:
continue
# The parent category didn't have a cache hit against the inventory skeleton, can't add!
if cached_item.parent_id not in loaded_cat_ids:
continue
self.model.add(cached_item)
def _parse_cache(self, path: Union[str, Path]) -> Tuple[List[InventoryCategory], List[InventoryItem]]:
categories: List[InventoryCategory] = []
items: List[InventoryItem] = []
# Parse our cached items and categories out of the compressed inventory cache
first_line = True
with gzip.open(path, "rb") as f:
# Line-delimited LLSD notation!
for line in f.readlines():
# TODO: Parsing of invcache is dominated by `parse_notation()`. It's stupidly inefficient.
node_llsd = llsd.parse_notation(line)
if first_line:
# First line is the file header
first_line = False
if node_llsd['inv_cache_version'] != 2:
raise ValueError(f"Unknown cache version: {node_llsd!r}")
continue
if InventoryCategory.ID_ATTR in node_llsd:
if (cat_node := InventoryCategory.from_llsd(node_llsd)) is not None:
categories.append(cat_node)
elif InventoryItem.ID_ATTR in node_llsd:
if (item_node := InventoryItem.from_llsd(node_llsd)) is not None:
items.append(item_node)
else:
LOG.warning(f"Unknown node type in inv cache: {node_llsd!r}")
return categories, items
# Thankfully we have 9 billion different ways to represent inventory data.
def ais_item_to_inventory_data(ais_item: dict) -> Block:
return Block(
"InventoryData",
ItemID=ais_item["item_id"],
FolderID=ais_item["parent_id"],
CallbackID=0,
CreatorID=ais_item["permissions"]["creator_id"],
OwnerID=ais_item["permissions"]["owner_id"],
GroupID=ais_item["permissions"]["group_id"],
BaseMask=ais_item["permissions"]["base_mask"],
OwnerMask=ais_item["permissions"]["owner_mask"],
GroupMask=ais_item["permissions"]["group_mask"],
EveryoneMask=ais_item["permissions"]["everyone_mask"],
NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
GroupOwned=0,
AssetID=ais_item["asset_id"],
Type=ais_item["type"],
InvType=ais_item["inv_type"],
Flags=ais_item["flags"],
SaleType=ais_item["sale_info"]["sale_type"],
SalePrice=ais_item["sale_info"]["sale_price"],
Name=ais_item["name"],
Description=ais_item["desc"],
CreationDate=ais_item["created_at"],
# Meaningless here
CRC=secrets.randbits(32),
)
def inventory_data_to_ais_item(inventory_data: Block) -> dict:
return dict(
item_id=inventory_data["ItemID"],
parent_id=inventory_data["FolderID"],  # InventoryData blocks carry FolderID, not ParentID
permissions=dict(
creator_id=inventory_data["CreatorID"],
owner_id=inventory_data["OwnerID"],
group_id=inventory_data["GroupID"],
base_mask=inventory_data["BaseMask"],
owner_mask=inventory_data["OwnerMask"],
group_mask=inventory_data["GroupMask"],
everyone_mask=inventory_data["EveryoneMask"],
next_owner_mask=inventory_data["NextOwnerMask"],
),
asset_id=inventory_data["AssetID"],
type=inventory_data["Type"],
inv_type=inventory_data["InvType"],
flags=inventory_data["Flags"],
sale_info=dict(
sale_type=inventory_data["SaleType"],
sale_price=inventory_data["SalePrice"],
),
name=inventory_data["Name"],
desc=inventory_data["Description"],
created_at=inventory_data["CreationDate"],
)
def ais_folder_to_inventory_data(ais_folder: dict) -> Block:
return Block(
"FolderData",
FolderID=ais_folder["cat_id"],
ParentID=ais_folder["parent_id"],
CallbackID=0,
Type=ais_folder["preferred_type"],
Name=ais_folder["name"],
)
def inventory_data_to_ais_folder(inventory_data: Block) -> dict:
return dict(
cat_id=inventory_data["FolderID"],
parent_id=inventory_data["ParentID"],
preferred_type=inventory_data["Type"],
name=inventory_data["Name"],
)
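
These converters are meant to mirror each other, so a round-trip should preserve the
AIS-side fields. A quick illustrative check (every value made up; the trip is lossy by
design, since `GroupOwned`, `CallbackID`, and the random `CRC` have no AIS equivalent):

    ais_item = {
        "item_id": UUID.random(), "parent_id": UUID.random(),
        "asset_id": UUID.random(),
        "permissions": {
            "creator_id": UUID.random(), "owner_id": UUID.random(),
            "group_id": UUID.random(), "base_mask": 0x7FFFFFFF,
            "owner_mask": 0x7FFFFFFF, "group_mask": 0,
            "everyone_mask": 0, "next_owner_mask": 581632,
        },
        "type": 0, "inv_type": 0, "flags": 0,
        "sale_info": {"sale_type": 0, "sale_price": 10},
        "name": "Example item", "desc": "", "created_at": 0,
    }
    round_tripped = inventory_data_to_ais_item(ais_item_to_inventory_data(ais_item))
    assert round_tripped["name"] == "Example item"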


@@ -36,3 +36,4 @@ class BaseClientSession(abc.ABC):
region_by_handle: Callable[[int], Optional[BaseClientRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
objects: ClientWorldObjectManager
login_data: Dict[str, Any]


@@ -1,11 +1,13 @@
from __future__ import annotations
-from typing import *
import abc
import copy
import dataclasses
import multiprocessing
import pickle
import secrets
import warnings
+from typing import *
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.message.message import Block, Message
@@ -14,10 +16,11 @@ from hippolyzer.lib.proxy import addon_ctx
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.base.network.transport import UDPPacket, Direction
-from hippolyzer.lib.proxy.region import ProxiedRegion
-from hippolyzer.lib.proxy.sessions import SessionManager, Session
from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope
from hippolyzer.lib.base.templates import ChatSourceType, ChatType
if TYPE_CHECKING:
+from hippolyzer.lib.proxy.sessions import SessionManager, Session
+from hippolyzer.lib.proxy.region import ProxiedRegion
class AssetAliasTracker:
@@ -99,47 +102,34 @@ def send_chat(message: Union[bytes, str], channel=0, chat_type=ChatType.NORMAL,
))
-def ais_item_to_inventory_data(ais_item: dict):
-    return Block(
-        "InventoryData",
-        ItemID=ais_item["item_id"],
-        FolderID=ais_item["parent_id"],
-        CallbackID=0,
-        CreatorID=ais_item["permissions"]["creator_id"],
-        OwnerID=ais_item["permissions"]["owner_id"],
-        GroupID=ais_item["permissions"]["group_id"],
-        BaseMask=ais_item["permissions"]["base_mask"],
-        OwnerMask=ais_item["permissions"]["owner_mask"],
-        GroupMask=ais_item["permissions"]["group_mask"],
-        EveryoneMask=ais_item["permissions"]["everyone_mask"],
-        NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
-        GroupOwned=0,
-        AssetID=ais_item["asset_id"],
-        Type=ais_item["type"],
-        InvType=ais_item["inv_type"],
-        Flags=ais_item["flags"],
-        SaleType=ais_item["sale_info"]["sale_type"],
-        SalePrice=ais_item["sale_info"]["sale_price"],
-        Name=ais_item["name"],
-        Description=ais_item["desc"],
-        CreationDate=ais_item["created_at"],
-        # Meaningless here
-        CRC=secrets.randbits(32),
-    )
+class MetaBaseAddon(abc.ABCMeta):
+    """
+    Metaclass for BaseAddon that prevents class member assignments from clobbering descriptors.
+
+    Without this, things like:
+
+        class Foo(BaseAddon):
+            bar: int = GlobalProperty(0)
+        Foo.bar = 2
+
+    won't work as you expect!
+    """
+    def __setattr__(self, key: str, value):
+        # TODO: Keep track of AddonProperties in __new__ or something?
+        try:
+            existing = object.__getattribute__(self, key)
+        except AttributeError:
+            # If the attribute doesn't exist, it's fine to use the base setattr.
+            super().__setattr__(key, value)
+            return
+        if existing and isinstance(existing, BaseAddonProperty):
+            existing.__set__(self, value)
+            return
+        super().__setattr__(key, value)
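
The clobbering this metaclass guards against is stock Python behavior: assignment through
a class never consults data descriptors stored in that class's own dict, only instance
assignment does. A self-contained illustration with a toy descriptor (not the real
BaseAddonProperty):

    class Prop:
        def __init__(self, default):
            self.value = default

        def __get__(self, obj, objtype=None):
            return self.value

        def __set__(self, obj, value):
            self.value = value

    class Plain:
        bar = Prop(0)

    Plain.bar = 2  # silently replaces the descriptor with the int 2
    assert not isinstance(vars(Plain)["bar"], Prop)
    # Under MetaBaseAddon the same assignment is routed through Prop.__set__,
    # so the descriptor stays in place.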
-def ais_folder_to_inventory_data(ais_folder: dict):
-    return Block(
-        "FolderData",
-        FolderID=ais_folder["cat_id"],
-        ParentID=ais_folder["parent_id"],
-        CallbackID=0,
-        Type=ais_folder["preferred_type"],
-        Name=ais_folder["name"],
-    )
-class BaseAddon(abc.ABC):
+class BaseAddon(metaclass=MetaBaseAddon):
def _schedule_task(self, coro: Coroutine, session=None,
region_scoped=False, session_scoped=True, addon_scoped=True):
session = session or addon_ctx.session.get(None) or None
@@ -208,7 +198,7 @@ class BaseAddon(abc.ABC):
_T = TypeVar("_T")
-_U = TypeVar("_U", Session, SessionManager)
+_U = TypeVar("_U", "Session", "SessionManager")
class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
@@ -257,7 +247,7 @@ class BaseAddonProperty(abc.ABC, Generic[_T, _U]):
self._get_context_obj().addon_ctx[self.name] = value
-class SessionProperty(BaseAddonProperty[_T, Session]):
+class SessionProperty(BaseAddonProperty[_T, "Session"]):
"""
Property tied to the current session context
@@ -267,7 +257,7 @@ class SessionProperty(BaseAddonProperty[_T, Session]):
return addon_ctx.session.get()
-class GlobalProperty(BaseAddonProperty[_T, SessionManager]):
+class GlobalProperty(BaseAddonProperty[_T, "SessionManager"]):
"""
Property tied to the global SessionManager context


@@ -432,22 +432,34 @@ class AddonManager:
chat_type: int = message["ChatData"]["ChatType"]
# RLV-style OwnerSay?
if chat and chat.startswith("@") and chat_type == 8:
-            # RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
-            options, _, param = chat.rpartition("=")
-            cmd, _, options = options.lstrip("@").partition(":")
-            options = options.split(";")
-            source = message["ChatData"]["SourceID"]
-            try:
-                with addon_ctx.push(session, region):
-                    handled = cls._call_all_addon_hooks("handle_rlv_command",
-                                                        session, region, source, cmd, options, param)
-                if handled:
-                    region.circuit.drop_message(message)
-                    return True
-            except:
-                LOG.exception(f"Failed while handling command {chat!r}")
-                if not cls._SWALLOW_ADDON_EXCEPTIONS:
-                    raise
+            # RLV allows putting multiple commands into one message, blindly splitting on ",".
+            chat = chat.lstrip("@")
+            all_cmds_handled = True
+            for command_str in chat.split(","):
+                if not command_str:
+                    continue
+                # RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
+                options, _, param = command_str.partition("=")
+                cmd, _, options = options.partition(":")
+                # TODO: Not always correct, commands can specify their own parsing for the option field
+                options = options.split(";") if options else []
+                source = message["ChatData"]["SourceID"]
+                try:
+                    with addon_ctx.push(session, region):
+                        handled = cls._call_all_addon_hooks("handle_rlv_command",
+                                                            session, region, source, cmd, options, param)
+                    if handled:
+                        region.circuit.drop_message(message)
+                    else:
+                        all_cmds_handled = False
+                except:
+                    LOG.exception(f"Failed while handling command {command_str!r}")
+                    all_cmds_handled = False
+                    if not cls._SWALLOW_ADDON_EXCEPTIONS:
+                        raise
+            # Drop the chat message if all commands it contained were handled by an addon
+            if all_cmds_handled:
+                return True
with addon_ctx.push(session, region):
return cls._call_all_addon_hooks("handle_lludp_message", session, region, message)
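
To make the new splitting behavior concrete, here is how one composite message would
parse under the logic above (the commands are just plausible RLV examples):

    chat = "@detach=n,remoutfit:shirt;pants=force".lstrip("@")
    for command_str in chat.split(","):
        options, _, param = command_str.partition("=")
        cmd, _, options = options.partition(":")
        options = options.split(";") if options else []
        print(cmd, options, param)
    # detach [] n
    # remoutfit ['shirt', 'pants'] force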


@@ -0,0 +1,39 @@
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import ais_item_to_inventory_data
class ProxyAssetUploader(AssetUploader):
async def _handle_upload_complete(self, resp_payload: dict):
# Check if this is a failure response first, raising if it is
await super()._handle_upload_complete(resp_payload)
# Fetch enough data from AIS to tell the viewer about the new inventory item
session = self._region.session()
item_id = resp_payload["new_inventory_item"]
ais_req_data = {
"items": [
{
"owner_id": session.agent_id,
"item_id": item_id,
}
]
}
async with self._region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
ais_item = (await resp.read_llsd())["items"][0]
# Got it, ship it off to the viewer
message = Message(
"UpdateCreateInventoryItem",
Block(
"AgentData",
AgentID=session.agent_id,
SimApproved=1,
TransactionID=UUID.random(),
),
ais_item_to_inventory_data(ais_item),
direction=Direction.IN
)
self._region.circuit.send(message)


@@ -0,0 +1,28 @@
import datetime as dt
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs
class ProxyInventoryManager(InventoryManager):
def __init__(self, session: BaseClientSession):
super().__init__(session)
newest_cache = None
newest_timestamp = dt.datetime(year=1970, month=1, day=1, tzinfo=dt.timezone.utc)
# Look for the newest version of the cached inventory and use that.
# Not foolproof, but close enough if we're not sure what viewer is being used.
for cache_dir in iter_viewer_cache_dirs():
inv_cache_path = cache_dir / (str(session.agent_id) + ".inv.llsd.gz")
if inv_cache_path.exists():
mod = get_mtime(inv_cache_path)
if not mod:
continue
mod_ts = dt.datetime.fromtimestamp(mod, dt.timezone.utc)
if mod_ts <= newest_timestamp:
    continue
newest_timestamp = mod_ts
newest_cache = inv_cache_path
if newest_cache:
self.load_cache(newest_cache)


@@ -401,7 +401,7 @@ class AbstractMessageLogEntry(abc.ABC):
beautified = minidom.parseString(content).toprettyxml(indent=" ")
# kill blank lines. will break cdata sections. meh.
beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
-        return re.sub(r'<([\w]+)>\s*</\1>',
+        return re.sub(r'<(\w+)>\s*</\1>',
beautified, flags=re.MULTILINE)
@@ -522,7 +522,7 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
buf.write(bytes(headers).decode("utf8", errors="replace"))
buf.write("\r\n")
-        buf.write(message_body)
+        buf.write(message_body or "")
return buf.getvalue()
def request(self, beautify=False, replacements=None):
@@ -549,6 +549,12 @@ class HTTPMessageLogEntry(AbstractMessageLogEntry):
return self._summary
def _guess_content_type(self, message):
# SL's login service lies and says that its XML-RPC response is LLSD+XML.
# It is not, and it blows up the parser. It's been broken ever since the
# login rewrite and a fix is likely not forthcoming. I'm sick of seeing
# the traceback, so just hack around it.
if self.name == "LoginRequest":
return "application/xml"
content_type = message.headers.get("Content-Type", "")
if not message.content or content_type.startswith("application/llsd"):
return content_type


@@ -22,6 +22,7 @@ from hippolyzer.lib.proxy.caps import CapType
from hippolyzer.lib.proxy.object_manager import ProxyObjectManager
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.proxy.asset_uploader import ProxyAssetUploader
if TYPE_CHECKING:
from hippolyzer.lib.proxy.sessions import Session
@@ -66,6 +67,7 @@ class ProxiedRegion(BaseClientRegion):
self.objects: ProxyObjectManager = ProxyObjectManager(self, may_use_vo_cache=True)
self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
self.asset_uploader = ProxyAssetUploader(proxify(self))
self._recalc_caps()
@property


@@ -10,6 +10,7 @@ from typing import *
from weakref import ref
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.client.state import BaseClientSession
@@ -18,6 +19,7 @@ from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext
from hippolyzer.lib.proxy.caps import is_asset_server_cap_name, CapData, CapType
from hippolyzer.lib.proxy.inventory_manager import ProxyInventoryManager
from hippolyzer.lib.proxy.namecache import ProxyNameCache
from hippolyzer.lib.proxy.object_manager import ProxyWorldObjectManager
from hippolyzer.lib.proxy.region import ProxiedRegion
@@ -47,6 +49,7 @@ class Session(BaseClientSession):
self.message_handler: MessageHandler[Message, str] = MessageHandler()
self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
self.inventory = ProxyInventoryManager(proxify(self))
# Base path of a newview type cache directory for this session
self.cache_dir: Optional[str] = None
self._main_region = None


@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
-version = '0.11.2'
+version = '0.12.0'
with open(path.join(here, 'README.md')) as readme_fh:
readme = readme_fh.read()


@@ -79,6 +79,20 @@ class TestDatatypes(unittest.TestCase):
quat = Quaternion(X=128.0, Y=128.0, Z=22.0)
self.assertEqual(quat, (128.0, 128.0, 22.0, 0.0))
def test_quaternion_euler_roundtrip(self):
orig_vec = Vector3(0.0, -1.0, 2.0)
quat = Quaternion.from_euler(*orig_vec)
for orig_comp, new_comp in zip(orig_vec, quat.to_euler()):
self.assertAlmostEqual(orig_comp, new_comp)
def test_quaternion_transformations(self):
quat = Quaternion(0.4034226801113349, -0.2590347239999257, 0.7384602626041288, 0.4741598817790379)
expected_trans = (0.4741598817790379, 0.4034226801113349, -0.2590347239999257, 0.7384602626041288)
trans_quat = quat.to_transformations()
self.assertSequenceEqual(expected_trans, trans_quat)
new_quat = Quaternion.from_transformations(trans_quat)
self.assertEqual(quat, new_quat)
def test_uuid_from_bytes(self):
tmp_uuid = uuid.UUID('2b7f7a6e-32c5-dbfd-e2c7-926d1a9f0aca')
tmp_uuid2 = uuid.UUID('1dd5efe2-faaf-1864-5ac9-bc61c5d8d7ea')
@@ -135,6 +149,9 @@ class TestDatatypes(unittest.TestCase):
self.assertIsInstance(val, UUID)
self.assertEqual(orig, val)
def test_str_llsd_serialization(self):
self.assertEqual(b"'foo\\nbar'", llsd.format_notation("foo\nbar"))
def test_jank_stringy_bytes(self):
val = JankStringyBytes(b"foo\x00")
self.assertTrue("o" in val)


@@ -122,7 +122,8 @@ class TestLegacyInv(unittest.TestCase):
'last_owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'next_owner_mask': 581632,
'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
-    'owner_mask': 2147483647
+    'owner_mask': 2147483647,
+    'is_owner_group': 0,
},
'sale_info': {
'sale_price': 10,


@@ -62,3 +62,8 @@ class TestMesh(unittest.TestCase):
mat_list = list(mesh.iter_lod_materials())
self.assertEqual(4, len(mat_list))
self.assertIsInstance(mat_list[0], dict)
def test_make_default_triangle(self):
tri = MeshAsset.make_triangle()
self.assertEqual(0.5, tri.segments['high_lod'][0]['Position'][2].X)
self.assertEqual(1, tri.header['version'])


@@ -33,10 +33,11 @@ class MockAddon(BaseAddon):
PARENT_ADDON_SOURCE = """
-from hippolyzer.lib.proxy.addon_utils import BaseAddon
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, GlobalProperty
class ParentAddon(BaseAddon):
baz = None
quux: int = GlobalProperty(0)
@classmethod
def foo(cls):
@@ -136,3 +137,16 @@ class AddonIntegrationTests(BaseProxyTest):
AddonManager.unload_addon_from_path(str(self.parent_path), reload=True)
await asyncio.sleep(0.001)
self.assertNotIn('hippolyzer.user_addon_parent_addon', sys.modules)
async def test_global_property_access_and_set(self):
with open(self.parent_path, "w") as f:
f.write(PARENT_ADDON_SOURCE)
AddonManager.load_addon_from_path(str(self.parent_path), reload=True)
# Wait for the init hooks to run
await asyncio.sleep(0.001)
self.assertFalse("quux" in self.session_manager.addon_ctx)
parent_addon_mod = AddonManager.FRESH_ADDON_MODULES['hippolyzer.user_addon_parent_addon']
self.assertEqual(0, parent_addon_mod.ParentAddon.quux)
self.assertEqual(0, self.session_manager.addon_ctx["quux"])
parent_addon_mod.ParentAddon.quux = 1
self.assertEqual(1, self.session_manager.addon_ctx["quux"])