109 Commits

Author SHA1 Message Date
Salad Dais
a35aa9046e v0.14.3 2024-01-07 08:00:21 +00:00
Salad Dais
6c32da878d Handle (and ignore by default) the new GenericStreamingMessage
This is _enormously_ spammy, good god. Apparently related to PBR.
2024-01-07 07:51:52 +00:00
Salad Dais
49c54bc896 Automatically request all materials by default 2024-01-06 21:50:29 +00:00
Salad Dais
4c9fa38ffb Move material management to ClientObjectManager 2024-01-06 21:40:49 +00:00
Salad Dais
2856e78f16 Start adding MaterialManager for RenderMaterials 2024-01-06 20:40:04 +00:00
Salad Dais
33884925f4 enum.IntFlag -> IntFlag 2024-01-06 20:39:29 +00:00
Salad Dais
a11ef96d9a Serve inbound Xfers reliably 2024-01-05 02:53:05 +00:00
Salad Dais
7b6239d66a Add more parcel enums 2024-01-05 02:49:51 +00:00
Salad Dais
2c3bd140ff Update MapImageFlags 2024-01-04 22:24:36 +00:00
Salad Dais
9d2087a0fb Add ParcelManager to HippoClient 2024-01-04 21:45:54 +00:00
Salad Dais
67db8110a1 Fix ParcelOverlay data template 2024-01-04 20:01:32 +00:00
Salad Dais
ab1c56ff3e Start writing client parcel manager 2024-01-04 19:51:47 +00:00
Salad Dais
142f2e42ca Clean up message template code 2024-01-04 19:08:09 +00:00
Salad Dais
e7764c1665 Display templated EQ messages as templated messages
This makes them less annoying to read, and allows us to use
subfield serializers to pretty-print their contents.
2024-01-04 18:00:14 +00:00
Salad Dais
582cfea47c Send AgentUpdate after connecting to main region 2024-01-03 07:53:47 +00:00
Salad Dais
6f38d84a1c Add ParcelOverlay serializers 2024-01-03 07:51:51 +00:00
Salad Dais
1fc46e66bc Support __add__ and __radd__ on JankStringyBytes 2023-12-31 15:58:05 +00:00
Salad Dais
167673aa08 Be nicer about zero-length strings in Messages 2023-12-31 15:52:15 +00:00
Salad Dais
5ad8ee986f Keep track of user's groups in their session 2023-12-31 15:28:00 +00:00
Salad Dais
e9d7ee7e8e ObjectUpdateType.OBJECT_UPDATE -> ObjectUpdateType.UPDATE 2023-12-31 14:57:28 +00:00
Salad Dais
d21c3ec004 Update templates 2023-12-31 14:55:46 +00:00
Salad Dais
01c6931d53 v0.14.2 2023-12-24 18:05:05 +00:00
Salad Dais
493563bb6f Add a few asset type lookups 2023-12-24 06:47:04 +00:00
Salad Dais
ca5c71402b Bump Python requirement to 3.9 2023-12-24 05:57:14 +00:00
Salad Dais
ad765a1ede Load inventory cache in a background thread
llsd.parse_notation() is slow as hell, no way around it.
2023-12-24 05:55:56 +00:00
Salad Dais
9adee14e0f Allow non-byte legacy schema flag fields 2023-12-23 15:40:00 +00:00
Salad Dais
57c4bd0e7c Improve AIS support 2023-12-22 21:25:05 +00:00
Salad Dais
1085dbc8ab v0.14.1 2023-12-22 04:38:30 +00:00
Salad Dais
fb9740003e Fix a couple AIS cases 2023-12-22 04:38:30 +00:00
Salad Dais
087f16fbc5 Simplify Inventory/AssetType legacy conversion 2023-12-22 03:57:36 +00:00
Salad Dais
fa96e80590 Simplify AIS<->InventoryData conversion 2023-12-22 02:40:53 +00:00
Salad Dais
539d38fb4a Fix legacy serialization for categories 2023-12-21 22:09:48 +00:00
Salad Dais
caaf0b0e13 Add tests for legacy category parsing 2023-12-21 20:12:41 +00:00
Salad Dais
16958e516d More enumification in inventory code 2023-12-21 19:18:58 +00:00
Salad Dais
74e4e0c4ec Start supporting enums in inventory schema 2023-12-21 14:55:14 +00:00
Salad Dais
3efeb46500 Add notes about inventory compatibility issues 2023-12-21 06:41:47 +00:00
Salad Dais
0f2e933be1 Make legacy input schema round-trip correctly 2023-12-20 22:26:03 +00:00
Salad Dais
a7f40b0d15 Properly handle inventory metadata field 2023-12-20 03:23:03 +00:00
Salad Dais
e6ac99458f v0.14.0 2023-12-20 01:38:31 +00:00
Salad Dais
92cadf26e9 Support inventory cache v3 2023-12-20 01:21:54 +00:00
Salad Dais
305038a31d Add HippoClient.main_caps_client convenience property 2023-12-20 00:58:12 +00:00
Salad Dais
bd67d6f19f Split out RLV handling 2023-12-20 00:49:16 +00:00
Salad Dais
81eae4edbf Make default log level less insane 2023-12-19 18:43:08 +00:00
Salad Dais
776ef71574 Fix participant removal on session close 2023-12-19 18:41:46 +00:00
Salad Dais
31125ca489 Defer returning from join_session() until we're a participant 2023-12-19 06:38:35 +00:00
Salad Dais
29ab108764 Store capture and render device info for voice 2023-12-19 05:30:21 +00:00
Salad Dais
61820f1670 Better handling of client start locations 2023-12-19 04:24:47 +00:00
Salad Dais
7fafb8b5ae message_handler -> event_handler 2023-12-19 01:31:49 +00:00
Salad Dais
28e84c0c5a Clean up session joining code 2023-12-18 23:32:57 +00:00
Salad Dais
e629214bef Switch voice stuff to use MessageHandler for events 2023-12-18 23:18:25 +00:00
Salad Dais
5e9433b4a4 3d_position -> 3d_pos 2023-12-18 21:34:39 +00:00
Salad Dais
5f2082c6e9 Minor cleanup of asyncio usage 2023-12-18 21:32:25 +00:00
Salad Dais
12c0deadee Add tests for setting voice region pos 2023-12-18 21:16:35 +00:00
Salad Dais
6da766ef22 Add test for joining voice session 2023-12-18 20:11:21 +00:00
Salad Dais
f278a4bfcf Use asyncio.Event when events should be re-awaitable 2023-12-18 18:34:14 +00:00
Salad Dais
631fe91049 Correct coveragerc exclude_lines 2023-12-18 07:27:35 +00:00
Salad Dais
159f39227a Add more voice client tests 2023-12-18 07:08:37 +00:00
Salad Dais
670acef0b4 Add tests for voice connector setup 2023-12-18 06:10:51 +00:00
Salad Dais
1165769aca Start writing voice client tests 2023-12-18 05:34:33 +00:00
Salad Dais
613dd32a40 Add tests for voice stuff 2023-12-18 03:29:40 +00:00
Salad Dais
d7a88f904e Add voice-related tooling 2023-12-18 02:02:39 +00:00
Salad Dais
a8344a231b Make hippolyzer events awaitable 2023-12-17 23:37:10 +00:00
Salad Dais
11043e365a On second thought, don't handle EnableSimulator at all 2023-12-16 21:51:56 +00:00
Salad Dais
ad34ba78ea Handle EnableSimulator correctly in client 2023-12-16 20:53:38 +00:00
Salad Dais
f9b4ae1308 Get rid of decorator so we don't mess up type signature 2023-12-16 20:34:10 +00:00
Salad Dais
7fee8f6bfe Fix Python 3.8 2023-12-16 20:08:09 +00:00
Salad Dais
2e0ca3649c Use Future instead of Event for connected signal 2023-12-16 17:29:35 +00:00
Salad Dais
e0d44741e9 Better teleport request handling 2023-12-16 04:44:49 +00:00
Salad Dais
008d59c7d6 Fix Python 3.8 2023-12-15 21:34:45 +00:00
Salad Dais
ed03b0d49f Add a teleport method to client 2023-12-15 21:32:45 +00:00
Salad Dais
4cc1513e58 Correct type signatures in MessageHandler 2023-12-15 19:07:17 +00:00
Salad Dais
c768aeaf40 Be smarter about clearing out ObjectManagers 2023-12-15 17:18:35 +00:00
Salad Dais
42ebb0e915 Fix multi-region connections 2023-12-15 17:08:00 +00:00
Salad Dais
31ba9635eb WIP multi-region support for client 2023-12-15 00:55:14 +00:00
Salad Dais
dc58512ee6 Better handle sim disconnects in client 2023-12-14 23:22:32 +00:00
Salad Dais
4a58731441 Make client circuits easier to work with 2023-12-14 12:33:23 +00:00
Salad Dais
c2b92d2d7d Add test for non-templated EQ events 2023-12-14 10:10:41 +00:00
Salad Dais
640b384d27 Add tests for resend suppression 2023-12-14 09:31:19 +00:00
Salad Dais
a2ef3d9f8e More client refactoring 2023-12-14 09:14:07 +00:00
Salad Dais
0456b4b62d Make main region caps less annoying to work with 2023-12-14 02:19:11 +00:00
Salad Dais
92c9c82e73 Move some things from session to region 2023-12-14 02:08:12 +00:00
Salad Dais
c5ed1cff24 Handle non-templated EQ events in client 2023-12-14 01:23:57 +00:00
Salad Dais
0710735546 Make client handle ping checks 2023-12-13 22:01:34 +00:00
Salad Dais
7869df224e Simplify chat client example 2023-12-13 20:42:21 +00:00
Salad Dais
6f6274ec7d Add client example 2023-12-13 19:19:14 +00:00
Salad Dais
40da130066 Update docs related to client 2023-12-13 17:57:48 +00:00
Salad Dais
5947d52c8d Add inventory manager to client 2023-12-13 17:52:03 +00:00
Salad Dais
e4b73a7196 Don't take by default in client messagehandlers 2023-12-13 04:18:49 +00:00
Salad Dais
1ded1180dc Clean up client tests 2023-12-13 04:10:43 +00:00
Salad Dais
5517d60e7a Use correct user-agent for hippolyzer client 2023-12-12 22:20:39 +00:00
Salad Dais
ed7e42625e Add Hippolyzer proxy support to client 2023-12-12 22:15:28 +00:00
Salad Dais
d5cde896fb Add tests for client EQ handling 2023-12-12 21:47:34 +00:00
Salad Dais
007c79f4a7 Add basic EQ handling to client 2023-12-12 21:17:47 +00:00
Salad Dais
f1b523b5de Support client seed cap, support async message handlers 2023-12-11 21:47:15 +00:00
Salad Dais
c42e0d7291 Make client login testable 2023-12-11 19:08:01 +00:00
Salad Dais
1ee1b9acc6 Basic working client 2023-12-10 23:55:19 +00:00
Salad Dais
9904633a99 More client work 2023-12-10 23:26:28 +00:00
Salad Dais
c8791db75e Start adding client-related lib files 2023-12-10 19:52:24 +00:00
Salad Dais
21d1c7ebfe v0.13.4 2023-12-07 18:47:43 +00:00
Salad Dais
996a43be5b Add option to allow insecure upstream SSL connections 2023-12-07 18:44:10 +00:00
Salad Dais
9e8127e577 Don't use asyncio.get_running_loop() 2023-12-06 20:35:55 +00:00
Salad Dais
cfcd324a11 Pin to Werkzeug under 3.0 2023-12-06 20:35:39 +00:00
Salad Dais
6872634bf4 Be more resilient when faced with no cap_data 2023-12-06 20:35:18 +00:00
Salad Dais
091090c6fd Reparent avatars correctly when recalculating linksets 2023-12-03 23:51:11 +00:00
Salad Dais
bd4fff4200 Add support for PBR / reflection probes 2023-12-03 23:50:32 +00:00
Salad Dais
52dfd0be05 v0.13.3 2023-10-10 23:23:57 +00:00
Salad Dais
60f1737115 Appease new flake8 rules 2023-10-10 23:20:43 +00:00
Salad Dais
7a5d6baf02 Make failing to load invcache non-fatal 2023-10-10 23:15:15 +00:00
Salad Dais
44a332a77b Handle failing to load an addon correctly 2023-10-10 23:14:59 +00:00
73 changed files with 3920 additions and 576 deletions

View File

@@ -9,4 +9,4 @@ exclude_lines =
def __repr__
raise AssertionError
assert False
pass
^\s*pass\b

View File

@@ -1,6 +1,12 @@
name: Run Python Tests
on: [push, pull_request]
on:
push:
paths-ignore:
- '*.md'
pull_request:
paths-ignore:
- '*.md'
jobs:
build:
@@ -8,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.11"]
python-version: ["3.9", "3.11"]
steps:
- uses: actions/checkout@v2
@@ -20,10 +26,11 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pip wheel
pip install -r requirements.txt
pip install -r requirements-test.txt
sudo apt-get install libopenjp2-7
pip install -e .
- name: Run Flake8
run: |
flake8 .

View File

@@ -325,7 +325,7 @@ The REPL is fully async aware and allows awaiting events without blocking:
```python
>>> from hippolyzer.lib.client.object_manager import ObjectUpdateType
>>> evt = await session.objects.events.wait_for((ObjectUpdateType.OBJECT_UPDATE,), timeout=2.0)
>>> evt = await session.objects.events.wait_for((ObjectUpdateType.UPDATE,), timeout=2.0)
>>> evt.updated
{'Position'}
```
@@ -402,9 +402,13 @@ above is your only option.
### Should I use this library to make an SL client in Python?
No. If you just want to write a client in Python, you should instead look at using
Probably not. If you just want to write a client in Python, you should instead look at using
[libremetaverse](https://github.com/cinderblocks/libremetaverse/) via pythonnet.
I removed the client-related code inherited from PyOGP because libremetaverse's was simply better.
I removed the client-related code inherited from PyOGP because libremetaverse's was simply better
for general use.
<https://github.com/CasperTech/node-metaverse/> also looks like a good, modern wrapper if you
prefer TypeScript.
There is, however, a very low-level `HippoClient` class provided for testing, but it's unlikely
to be what you want for writing a general-purpose bot.

View File

@@ -152,7 +152,7 @@ class DeformerAddon(BaseAddon):
local_anim.LocalAnimAddon.apply_local_anim(session, region, "deformer_addon", anim_data)
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
cmd: str, options: List[str], param: str):
behaviour: str, options: List[str], param: str):
# An object in-world can also tell the client how to deform itself via
# RLV-style commands.
@@ -160,9 +160,9 @@ class DeformerAddon(BaseAddon):
if param != "force":
return
if cmd == "stop_deforming":
if behaviour == "stop_deforming":
self.deform_joints.clear()
elif cmd == "deform_joints":
elif behaviour == "deform_joints":
self.deform_joints.clear()
for joint_data in options:
joint_split = joint_data.split("|")

View File

@@ -17,7 +17,7 @@ from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryObject
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import XferFilePath
from hippolyzer.lib.base.templates import XferFilePath, AssetType
from hippolyzer.lib.proxy import addon_ctx
from hippolyzer.lib.proxy.webapp_cap_addon import WebAppCapAddon
@@ -64,7 +64,7 @@ async def get_task_inventory():
InventoryObject(
name="Contents",
parent_id=UUID.ZERO,
type="category",
type=AssetType.CATEGORY,
obj_id=obj_id
).to_llsd()
],

View File

@@ -114,15 +114,15 @@ class LocalAnimAddon(BaseAddon):
await asyncio.sleep(1.0)
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
cmd: str, options: List[str], param: str):
behaviour: str, options: List[str], param: str):
# We only handle commands
if param != "force":
return
if cmd == "stop_local_anim":
if behaviour == "stop_local_anim":
self.apply_local_anim(session, region, options[0], new_data=None)
return True
elif cmd == "start_local_anim":
elif behaviour == "start_local_anim":
self.apply_local_anim_from_file(session, region, options[0])
return True

View File

@@ -72,14 +72,13 @@ class PixelArtistAddon(BaseAddon):
# Watch for any newly created prims, this is basically what the viewer does to find
# prims that it just created with the build tool.
with session.objects.events.subscribe_async(
(ObjectUpdateType.OBJECT_UPDATE,),
(ObjectUpdateType.UPDATE,),
predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated
) as get_events:
# Create a pool of prims to use for building the pixel art
for _ in range(needed_prims):
# TODO: We don't track the land group or user's active group, so
# "anyone can build" must be on for rezzing to work.
group_id = UUID()
# TODO: Can't get land group atm, just tries to rez with the user's active group
group_id = session.active_group
region.circuit.send(Message(
'ObjectAdd',
Block('AgentData', AgentID=session.agent_id, SessionID=session.id, GroupID=group_id),

View File

@@ -0,0 +1,53 @@
"""
A simple client that just says hello to people
"""
import asyncio
import pprint
from contextlib import aclosing
import os
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import ChatType, ChatSourceType
from hippolyzer.lib.client.hippo_client import HippoClient
async def amain():
client = HippoClient()
async def _respond_to_chat(message: Message):
if message["ChatData"]["SourceID"] == client.session.agent_id:
return
if message["ChatData"]["SourceType"] != ChatSourceType.AGENT:
return
if "hello" not in message["ChatData"]["Message"].lower():
return
await client.send_chat(f'Hello {message["ChatData"]["FromName"]}!', chat_type=ChatType.SHOUT)
async with aclosing(client):
await client.login(
username=os.environ["HIPPO_USERNAME"],
password=os.environ["HIPPO_PASSWORD"],
start_location=os.environ.get("HIPPO_START_LOCATION", "last"),
)
print("I'm here")
# Wait until we have details about parcels and print them
await client.main_region.parcel_manager.parcels_downloaded.wait()
pprint.pprint(client.main_region.parcel_manager.parcels)
await client.send_chat("Hello World!", chat_type=ChatType.SHOUT)
client.session.message_handler.subscribe("ChatFromSimulator", _respond_to_chat)
# Example of how to work with caps
async with client.main_caps_client.get("SimulatorFeatures") as features_resp:
print("Features:", await features_resp.read_llsd())
while True:
try:
await asyncio.sleep(0.001)
except (KeyboardInterrupt, asyncio.CancelledError):
await client.send_chat("Goodbye World!", chat_type=ChatType.SHOUT)
return
if __name__ == "__main__":
asyncio.run(amain())

View File

@@ -16,7 +16,7 @@ from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.ca_utils import setup_ca
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
from hippolyzer.lib.base.message.message import Message
@@ -77,6 +77,15 @@ class SelectionManagerAddon(BaseAddon):
selected.task_item = parsed["item-id"]
class AgentUpdaterAddon(BaseAddon):
def handle_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
if event['message'] != 'AgentGroupDataUpdate':
return
session.groups.clear()
for group in event['body']['GroupData']:
session.groups.add(group['GroupID'])
class REPLAddon(BaseAddon):
@handle_command()
async def spawn_repl(self, session: Session, region: ProxiedRegion):
@@ -85,12 +94,12 @@ class REPLAddon(BaseAddon):
AddonManager.spawn_repl()
def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext):
def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowContext, ssl_insecure=False):
mitm_loop = asyncio.new_event_loop()
asyncio.set_event_loop(mitm_loop)
async def mitmproxy_loop():
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context, ssl_insecure=ssl_insecure)
gc.freeze()
await mitmproxy_master.run()
@@ -98,11 +107,12 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo
def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
extra_addon_paths: Optional[list] = None, proxy_host=None):
extra_addon_paths: Optional[list] = None, proxy_host=None, ssl_insecure=False):
extra_addons = extra_addons or []
extra_addon_paths = extra_addon_paths or []
extra_addons.append(SelectionManagerAddon())
extra_addons.append(REPLAddon())
extra_addons.append(AgentUpdaterAddon())
root_log = logging.getLogger()
root_log.addHandler(logging.StreamHandler())
@@ -123,17 +133,13 @@ def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] =
# TODO: argparse
if len(sys.argv) == 3:
if sys.argv[1] == "--setup-ca":
try:
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
except mitmproxy.exceptions.MitmproxyException:
# Proxy already running, create the master so we don't try to bind to a port
mitmproxy_master = create_proxy_master(proxy_host, http_proxy_port, flow_context)
mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
setup_ca(sys.argv[2], mitmproxy_master)
return sys.exit(0)
http_proc = multiprocessing.Process(
target=run_http_proxy_process,
args=(proxy_host, http_proxy_port, flow_context),
args=(proxy_host, http_proxy_port, flow_context, ssl_insecure),
daemon=True,
)
http_proc.start()

View File

@@ -39,10 +39,11 @@ from hippolyzer.lib.base.settings import SettingDescriptor
from hippolyzer.lib.base.ui_helpers import loadUi
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
from hippolyzer.lib.client.state import BaseClientSessionManager
from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, HTTPFlowContext
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry, WrappingMessageLogger, \
import_log_entries, export_log_entries
from hippolyzer.lib.proxy.region import ProxiedRegion
@@ -71,6 +72,7 @@ class GUISessionManager(SessionManager, QtCore.QObject):
regionRemoved = QtCore.Signal(ProxiedRegion)
def __init__(self, settings):
BaseClientSessionManager.__init__(self)
SessionManager.__init__(self, settings)
QtCore.QObject.__init__(self)
self.all_regions = []
@@ -232,7 +234,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
"ParcelDwellReply ParcelAccessListReply AttachedSoundGainChange " \
"ParcelPropertiesRequest ParcelProperties GetObjectCost GetObjectPhysicsData ObjectImage " \
"ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply " \
"AgentFOV".split(" ")
"AgentFOV GenericStreamingMessage".split(" ")
DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"
textRequest: QtWidgets.QTextEdit
@@ -275,9 +277,11 @@ class MessageLogWindow(QtWidgets.QMainWindow):
self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)
self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
self.actionProxySSLInsecure.setChecked(self.settings.SSL_INSECURE)
self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
self.actionProxySSLInsecure.triggered.connect(self._setProxySSLInsecure)
self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)
self.actionOpenNewMessageLogWindow.triggered.connect(self._openNewMessageLogWindow)
@@ -458,7 +462,7 @@ class MessageLogWindow(QtWidgets.QMainWindow):
if clicked_btn is not yes_btn:
return
master = create_proxy_master("127.0.0.1", -1, HTTPFlowContext())
master = create_http_proxy("127.0.0.1", -1, HTTPFlowContext())
dirs = setup_ca_everywhere(master)
msg = QtWidgets.QMessageBox()
@@ -474,6 +478,12 @@ class MessageLogWindow(QtWidgets.QMainWindow):
msg.setText("Remote accessibility setting changes will take effect on next run")
msg.exec()
def _setProxySSLInsecure(self, checked: bool):
self.sessionManager.settings.SSL_INSECURE = checked
msg = QtWidgets.QMessageBox()
msg.setText("SSL security setting changes will take effect on next run")
msg.exec()
def _setUseViewerObjectCache(self, checked: bool):
self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked
@@ -566,7 +576,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
message_names = sorted(x.name for x in self.templateDict)
for message_name in message_names:
if self.templateDict[message_name].msg_trust:
if self.templateDict[message_name].trusted:
self.comboTrusted.addItem(message_name)
else:
self.comboUntrusted.addItem(message_name)
@@ -937,6 +947,7 @@ def gui_main():
session_manager=window.sessionManager,
extra_addon_paths=window.getAddonList(),
proxy_host=http_host,
ssl_insecure=settings.SSL_INSECURE,
)

View File

@@ -245,7 +245,7 @@
<x>0</x>
<y>0</y>
<width>700</width>
<height>22</height>
<height>29</height>
</rect>
</property>
<widget class="QMenu" name="menuFile">
@@ -268,6 +268,7 @@
<addaction name="actionProxyRemotelyAccessible"/>
<addaction name="actionUseViewerObjectCache"/>
<addaction name="actionRequestMissingObjects"/>
<addaction name="actionProxySSLInsecure"/>
</widget>
<addaction name="menuFile"/>
</widget>
@@ -342,6 +343,17 @@
<string>Export Log Entries</string>
</property>
</action>
<action name="actionProxySSLInsecure">
<property name="checkable">
<bool>true</bool>
</property>
<property name="text">
<string>Allow Insecure SSL Connections</string>
</property>
<property name="toolTip">
<string>Allow invalid SSL certificates from upstream connections</string>
</property>
</action>
</widget>
<resources/>
<connections/>

View File

@@ -317,6 +317,22 @@ class JankStringyBytes(bytes):
return item in str(self)
return item in bytes(self)
def __add__(self, other):
if isinstance(other, bytes):
return bytes(self) + other
return str(self) + other
def __radd__(self, other):
if isinstance(other, bytes):
return other + bytes(self)
return other + str(self)
def lower(self):
return str(self).lower()
def upper(self):
return str(self).upper()
class RawBytes(bytes):
__slots__ = ()

View File

@@ -18,7 +18,7 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import asyncio
from logging import getLogger
logger = getLogger('utilities.events')
@@ -54,13 +54,22 @@ class Event:
def notify(self, args):
for handler in self.subscribers[:]:
instance, inner_args, kwargs, one_shot, predicate = handler
handler, inner_args, kwargs, one_shot, predicate = handler
if predicate and not predicate(args):
continue
if one_shot:
self.unsubscribe(instance, *inner_args, **kwargs)
if instance(args, *inner_args, **kwargs) and not one_shot:
self.unsubscribe(instance, *inner_args, **kwargs)
self.unsubscribe(handler, *inner_args, **kwargs)
if asyncio.iscoroutinefunction(handler):
# Note that unsubscription may be delayed due to asyncio scheduling :)
async def _run_handler_wrapper():
unsubscribe = await handler(args, *inner_args, **kwargs)
if unsubscribe:
_ = self.unsubscribe(handler, *inner_args, **kwargs)
asyncio.create_task(_run_handler_wrapper())
else:
if handler(args, *inner_args, **kwargs) and not one_shot:
self.unsubscribe(handler, *inner_args, **kwargs)
def __len__(self):
return len(self.subscribers)

View File

@@ -132,6 +132,13 @@ def proxify(obj: Union[Callable[[], _T], weakref.ReferenceType, _T]) -> _T:
return obj
class BiDiDict(Generic[_T]):
"""Dictionary for bidirectional lookups"""
def __init__(self, values: Dict[_T, _T]):
self.forward = {**values}
self.backward = {value: key for (key, value) in values.items()}
def bytes_unescape(val: bytes) -> bytes:
# Only in CPython. bytes -> bytes with escape decoding.
# https://stackoverflow.com/a/23151714

View File

@@ -3,13 +3,20 @@ Parse the horrible legacy inventory-related format.
It's typically only used for object contents now.
"""
# TODO: Maybe handle CRC calculation? Does anything care about that?
# I don't think anything in the viewer actually looks at the result
# of the CRC check for UDP stuff.
from __future__ import annotations
import abc
import dataclasses
import datetime as dt
import inspect
import logging
import secrets
import struct
import typing
import weakref
from io import StringIO
from typing import *
@@ -22,12 +29,15 @@ from hippolyzer.lib.base.legacy_schema import (
SchemaFieldSerializer,
SchemaHexInt,
SchemaInt,
SchemaLLSD,
SchemaMultilineStr,
SchemaParsingError,
SchemaStr,
SchemaUUID,
schema_field,
)
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.templates import SaleType, InventoryType, LookupIntEnum, AssetType, FolderType
MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")
LOG = logging.getLogger(__name__)
@@ -37,12 +47,42 @@ _T = TypeVar("_T")
class SchemaFlagField(SchemaHexInt):
"""Like a hex int, but must be serialized as bytes in LLSD due to being a U32"""
@classmethod
def from_llsd(cls, val: Any) -> int:
return struct.unpack("!I", val)[0]
def from_llsd(cls, val: Any, flavor: str) -> int:
# Sometimes values in S32 range will just come through normally
if isinstance(val, int):
return val
if flavor == "legacy":
return struct.unpack("!I", val)[0]
return val
@classmethod
def to_llsd(cls, val: int) -> Any:
return struct.pack("!I", val)
def to_llsd(cls, val: int, flavor: str) -> Any:
if flavor == "legacy":
return struct.pack("!I", val)
return val
class SchemaEnumField(SchemaStr, Generic[_T]):
def __init__(self, enum_cls: Type[LookupIntEnum]):
super().__init__()
self._enum_cls = enum_cls
def deserialize(self, val: str) -> _T:
return self._enum_cls.from_lookup_name(val)
def serialize(self, val: _T) -> str:
return self._enum_cls(val).to_lookup_name()
def from_llsd(self, val: Union[str, int], flavor: str) -> _T:
if flavor == "legacy":
return self.deserialize(val)
return self._enum_cls(val)
def to_llsd(self, val: _T, flavor: str) -> Union[int, str]:
if flavor == "legacy":
return self.serialize(val)
return int(val)
def _yield_schema_tokens(reader: StringIO):
@@ -98,10 +138,14 @@ class InventoryBase(SchemaBase):
if not spec:
LOG.warning(f"Internal key {key!r}")
continue
spec_cls = spec
if not inspect.isclass(spec_cls):
spec_cls = spec_cls.__class__
# some kind of nested structure like sale_info
if issubclass(spec, SchemaBase):
if issubclass(spec_cls, SchemaBase):
obj_dict[key] = spec.from_reader(reader)
elif issubclass(spec, SchemaFieldSerializer):
elif issubclass(spec_cls, SchemaFieldSerializer):
obj_dict[key] = spec.deserialize(val)
else:
raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
@@ -110,9 +154,21 @@ class InventoryBase(SchemaBase):
return cls._obj_from_dict(obj_dict)
def to_writer(self, writer: StringIO):
writer.write(f"\t{self.SCHEMA_NAME}\t0\n")
writer.write(f"\t{self.SCHEMA_NAME}")
if self.SCHEMA_NAME == "permissions":
writer.write(" 0\n")
else:
writer.write("\t0\n")
writer.write("\t{\n")
for field_name, field in self._get_fields_dict().items():
# Make sure the ID field always comes first, if there is one.
fields_dict = {}
if hasattr(self, "ID_ATTR"):
fields_dict = {getattr(self, "ID_ATTR"): None}
# update()ing will put all fields that aren't yet in the dict after the ID attr.
fields_dict.update(self._get_fields_dict())
for field_name, field in fields_dict.items():
spec = field.metadata.get("spec")
# Not meant to be serialized
if not spec:
@@ -121,20 +177,23 @@ class InventoryBase(SchemaBase):
continue
val = getattr(self, field_name)
if val is None:
if val is None and not field.metadata.get("include_none"):
continue
spec_cls = spec
if not inspect.isclass(spec_cls):
spec_cls = spec_cls.__class__
# Some kind of nested structure like sale_info
if isinstance(val, SchemaBase):
val.to_writer(writer)
elif issubclass(spec, SchemaFieldSerializer):
elif issubclass(spec_cls, SchemaFieldSerializer):
writer.write(f"\t\t{field_name}\t{spec.serialize(val)}\n")
else:
raise ValueError(f"Bad inventory spec {spec!r}")
writer.write("\t}\n")
class InventoryDifferences(typing.NamedTuple):
class InventoryDifferences(NamedTuple):
changed: List[InventoryNodeBase]
removed: List[InventoryNodeBase]
@@ -165,12 +224,12 @@ class InventoryModel(InventoryBase):
return model
@classmethod
def from_llsd(cls, llsd_val: List[Dict]) -> InventoryModel:
def from_llsd(cls, llsd_val: List[Dict], flavor: str = "legacy") -> InventoryModel:
model = cls()
for obj_dict in llsd_val:
for inv_type in INVENTORY_TYPES:
if inv_type.ID_ATTR in obj_dict:
if (obj := inv_type.from_llsd(obj_dict)) is not None:
if (obj := inv_type.from_llsd(obj_dict, flavor)) is not None:
model.add(obj)
break
LOG.warning(f"Unknown object type {obj_dict!r}")
@@ -202,8 +261,8 @@ class InventoryModel(InventoryBase):
for node in self.ordered_nodes:
node.to_writer(writer)
def to_llsd(self):
return list(node.to_llsd() for node in self.ordered_nodes)
def to_llsd(self, flavor: str = "legacy"):
return list(node.to_llsd(flavor) for node in self.ordered_nodes)
def add(self, node: InventoryNodeBase):
if node.node_id in self.nodes:
@@ -279,24 +338,31 @@ class InventoryPermissions(InventoryBase):
group_id: UUID = schema_field(SchemaUUID)
# Nothing actually cares about this, but it could be there.
# It's kind of redundant since it just means owner_id == NULL_KEY && group_id != NULL_KEY.
is_owner_group: int = schema_field(SchemaInt, default=0, llsd_only=True)
is_owner_group: Optional[int] = schema_field(SchemaInt, default=None, llsd_only=True)
@dataclasses.dataclass
class InventorySaleInfo(InventoryBase):
SCHEMA_NAME: ClassVar[str] = "sale_info"
sale_type: str = schema_field(SchemaStr)
sale_type: SaleType = schema_field(SchemaEnumField(SaleType))
sale_price: int = schema_field(SchemaInt)
@dataclasses.dataclass
class InventoryNodeBase(InventoryBase):
ID_ATTR: ClassVar[str]
class _HasName(abc.ABC):
"""
Only exists so that we can assert that all subclasses should have this without forcing
a particular serialization order, as would happen if this was present on InventoryNodeBase.
"""
name: str
@dataclasses.dataclass
class InventoryNodeBase(InventoryBase, _HasName):
ID_ATTR: ClassVar[str]
parent_id: Optional[UUID] = schema_field(SchemaUUID)
model: Optional[InventoryModel] = dataclasses.field(
default=None, init=False, hash=False, compare=False, repr=False
)
@@ -337,8 +403,7 @@ class InventoryNodeBase(InventoryBase):
@dataclasses.dataclass
class InventoryContainerBase(InventoryNodeBase):
type: str = schema_field(SchemaStr)
name: str = schema_field(SchemaMultilineStr)
type: AssetType = schema_field(SchemaEnumField(AssetType))
@property
def children(self) -> Sequence[InventoryNodeBase]:
@@ -367,8 +432,8 @@ class InventoryContainerBase(InventoryNodeBase):
name=name,
cat_id=UUID.random(),
parent_id=self.node_id,
type="category",
pref_type="-1",
type=AssetType.CATEGORY,
pref_type=FolderType.NONE,
owner_id=getattr(self, 'owner_id', UUID.ZERO),
version=1,
)
@@ -385,6 +450,8 @@ class InventoryObject(InventoryContainerBase):
ID_ATTR: ClassVar[str] = "obj_id"
obj_id: UUID = schema_field(SchemaUUID)
name: str = schema_field(SchemaMultilineStr)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
__hash__ = InventoryNodeBase.__hash__
@@ -396,9 +463,43 @@ class InventoryCategory(InventoryContainerBase):
VERSION_NONE: ClassVar[int] = -1
cat_id: UUID = schema_field(SchemaUUID)
pref_type: str = schema_field(SchemaStr, llsd_name="preferred_type")
owner_id: UUID = schema_field(SchemaUUID)
version: int = schema_field(SchemaInt)
pref_type: FolderType = schema_field(SchemaEnumField(FolderType), llsd_name="preferred_type")
name: str = schema_field(SchemaMultilineStr)
owner_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
version: int = schema_field(SchemaInt, default=VERSION_NONE, llsd_only=True)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=False)
def to_folder_data(self) -> Block:
return Block(
"FolderData",
FolderID=self.cat_id,
ParentID=self.parent_id,
CallbackID=0,
Type=self.pref_type,
Name=self.name,
)
@classmethod
def from_folder_data(cls, block: Block):
return cls(
cat_id=block["FolderID"],
parent_id=block["ParentID"],
pref_type=block["Type"],
name=block["Name"],
type=AssetType.CATEGORY,
)
@classmethod
def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
fields = super()._get_fields_dict(llsd_flavor)
if llsd_flavor == "ais":
# AIS is smart enough to know that all categories are asset type category...
fields.pop("type")
# These have different names though
fields["type_default"] = fields.pop("preferred_type")
fields["agent_id"] = fields.pop("owner_id")
fields["category_id"] = fields.pop("cat_id")
return fields
__hash__ = InventoryNodeBase.__hash__
@@ -409,16 +510,17 @@ class InventoryItem(InventoryNodeBase):
ID_ATTR: ClassVar[str] = "item_id"
item_id: UUID = schema_field(SchemaUUID)
type: str = schema_field(SchemaStr)
inv_type: str = schema_field(SchemaStr)
flags: int = schema_field(SchemaFlagField)
name: str = schema_field(SchemaMultilineStr)
desc: str = schema_field(SchemaMultilineStr)
creation_date: dt.datetime = schema_field(SchemaDate, llsd_name="created_at")
permissions: InventoryPermissions = schema_field(InventoryPermissions)
sale_info: InventorySaleInfo = schema_field(InventorySaleInfo)
asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
type: Optional[AssetType] = schema_field(SchemaEnumField(AssetType), default=None)
inv_type: Optional[InventoryType] = schema_field(SchemaEnumField(InventoryType), default=None)
flags: Optional[int] = schema_field(SchemaFlagField, default=None)
sale_info: Optional[InventorySaleInfo] = schema_field(InventorySaleInfo, default=None)
name: Optional[str] = schema_field(SchemaMultilineStr, default=None)
desc: Optional[str] = schema_field(SchemaMultilineStr, default=None)
metadata: Optional[Dict[str, Any]] = schema_field(SchemaLLSD, default=None, include_none=True)
creation_date: Optional[dt.datetime] = schema_field(SchemaDate, llsd_name="created_at", default=None)
__hash__ = InventoryNodeBase.__hash__
@@ -428,5 +530,68 @@ class InventoryItem(InventoryNodeBase):
return self.asset_id
return self.shadow_id ^ MAGIC_ID
def to_inventory_data(self) -> Block:
return Block(
"InventoryData",
ItemID=self.item_id,
FolderID=self.parent_id,
CallbackID=0,
CreatorID=self.permissions.creator_id,
OwnerID=self.permissions.owner_id,
GroupID=self.permissions.group_id,
BaseMask=self.permissions.base_mask,
OwnerMask=self.permissions.owner_mask,
GroupMask=self.permissions.group_mask,
EveryoneMask=self.permissions.everyone_mask,
NextOwnerMask=self.permissions.next_owner_mask,
GroupOwned=self.permissions.owner_id == UUID.ZERO and self.permissions.group_id != UUID.ZERO,
AssetID=self.true_asset_id,
Type=self.type,
InvType=self.inv_type,
Flags=self.flags,
SaleType=self.sale_info.sale_type,
SalePrice=self.sale_info.sale_price,
Name=self.name,
Description=self.desc,
CreationDate=SchemaDate.to_llsd(self.creation_date, "legacy"),
# Meaningless here
CRC=secrets.randbits(32),
)
@classmethod
def from_inventory_data(cls, block: Block):
return cls(
item_id=block["ItemID"],
parent_id=block["ParentID"],
permissions=InventoryPermissions(
creator_id=block["CreatorID"],
owner_id=block["OwnerID"],
group_id=block["GroupID"],
base_mask=block["BaseMask"],
owner_mask=block["OwnerMask"],
group_mask=block["GroupMask"],
everyone_mask=block["EveryoneMask"],
next_owner_mask=block["NextOwnerMask"],
),
asset_id=block["AssetID"],
type=AssetType(block["Type"]),
inv_type=InventoryType(block["InvType"]),
flags=block["Flags"],
sale_info=InventorySaleInfo(
sale_type=SaleType(block["SaleType"]),
sale_price=block["SalePrice"],
),
name=block["Name"],
desc=block["Description"],
creation_date=block["CreationDate"],
)
def to_llsd(self, flavor: str = "legacy"):
val = super().to_llsd(flavor=flavor)
if flavor == "ais":
# There's little chance this differs from owner ID, just place it.
val["agent_id"] = val["permissions"]["owner_id"]
return val
INVENTORY_TYPES: Tuple[Type[InventoryNodeBase], ...] = (InventoryCategory, InventoryObject, InventoryItem)

View File

@@ -9,11 +9,14 @@ import abc
import calendar
import dataclasses
import datetime as dt
import inspect
import logging
import re
from io import StringIO
from typing import *
import hippolyzer.lib.base.llsd as llsd
from hippolyzer.lib.base.datatypes import UUID
LOG = logging.getLogger(__name__)
@@ -32,11 +35,11 @@ class SchemaFieldSerializer(abc.ABC, Generic[_T]):
pass
@classmethod
def from_llsd(cls, val: Any) -> _T:
def from_llsd(cls, val: Any, flavor: str) -> _T:
return val
@classmethod
def to_llsd(cls, val: _T) -> Any:
def to_llsd(cls, val: _T, flavor: str) -> Any:
return val
@@ -50,11 +53,11 @@ class SchemaDate(SchemaFieldSerializer[dt.datetime]):
return str(calendar.timegm(val.utctimetuple()))
@classmethod
def from_llsd(cls, val: Any) -> dt.datetime:
def from_llsd(cls, val: Any, flavor: str) -> dt.datetime:
return dt.datetime.utcfromtimestamp(val)
@classmethod
def to_llsd(cls, val: dt.datetime):
def to_llsd(cls, val: dt.datetime, flavor: str):
return calendar.timegm(val.utctimetuple())
@@ -101,6 +104,13 @@ class SchemaStr(SchemaFieldSerializer[str]):
class SchemaUUID(SchemaFieldSerializer[UUID]):
@classmethod
def from_llsd(cls, val: Any, flavor: str) -> UUID:
# FetchInventory2 will return a string, but we want a UUID. It's not an issue
# for us to return a UUID later there because it'll just cast to string if
# that's what it wants
return UUID(val)
@classmethod
def deserialize(cls, val: str) -> UUID:
return UUID(val)
@@ -110,12 +120,28 @@ class SchemaUUID(SchemaFieldSerializer[UUID]):
return str(val)
def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False) -> dataclasses.Field: # noqa
class SchemaLLSD(SchemaFieldSerializer[_T]):
"""Arbitrary LLSD embedded in a field"""
@classmethod
def deserialize(cls, val: str) -> _T:
return llsd.parse_xml(val.partition("|")[0].encode("utf8"))
@classmethod
def serialize(cls, val: _T) -> str:
# Don't include the XML header
return llsd.format_xml(val).split(b">", 1)[1].decode("utf8") + "\n|"
_SCHEMA_SPEC = Union[Type[Union["SchemaBase", SchemaFieldSerializer]], SchemaFieldSerializer]
def schema_field(spec: _SCHEMA_SPEC, *, default=dataclasses.MISSING, init=True,
repr=True, hash=None, compare=True, llsd_name=None, llsd_only=False,
include_none=False) -> dataclasses.Field: # noqa
"""Describe a field in the inventory schema and the shape of its value"""
return dataclasses.field( # noqa
metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only}, default=default,
init=init, repr=repr, hash=hash, compare=compare,
metadata={"spec": spec, "llsd_name": llsd_name, "llsd_only": llsd_only, "include_none": include_none},
default=default, init=init, repr=repr, hash=hash, compare=compare,
)
@@ -138,11 +164,11 @@ def parse_schema_line(line: str):
@dataclasses.dataclass
class SchemaBase(abc.ABC):
@classmethod
def _get_fields_dict(cls, llsd=False):
def _get_fields_dict(cls, llsd_flavor: Optional[str] = None):
fields_dict = {}
for field in dataclasses.fields(cls):
field_name = field.name
if llsd:
if llsd_flavor:
field_name = field.metadata.get("llsd_name") or field_name
fields_dict[field_name] = field
return fields_dict
@@ -161,8 +187,8 @@ class SchemaBase(abc.ABC):
return cls.from_str(data.decode("utf8"))
@classmethod
def from_llsd(cls, inv_dict: Dict):
fields = cls._get_fields_dict(llsd=True)
def from_llsd(cls, inv_dict: Dict, flavor: str = "legacy"):
fields = cls._get_fields_dict(llsd_flavor=flavor)
obj_dict = {}
for key, val in inv_dict.items():
if key in fields:
@@ -173,15 +199,23 @@ class SchemaBase(abc.ABC):
if not spec:
LOG.warning(f"Internal key {key!r}")
continue
spec_cls = spec
if not inspect.isclass(spec_cls):
spec_cls = spec_cls.__class__
# some kind of nested structure like sale_info
if issubclass(spec, SchemaBase):
obj_dict[key] = spec.from_llsd(val)
elif issubclass(spec, SchemaFieldSerializer):
obj_dict[key] = spec.from_llsd(val)
if issubclass(spec_cls, SchemaBase):
obj_dict[key] = spec.from_llsd(val, flavor)
elif issubclass(spec_cls, SchemaFieldSerializer):
obj_dict[key] = spec.from_llsd(val, flavor)
else:
raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
else:
LOG.warning(f"Unknown key {key!r}")
if flavor != "ais":
# AIS has a number of different fields that are irrelevant depending on
# what exactly sent the payload
LOG.warning(f"Unknown key {key!r}")
return cls._obj_from_dict(obj_dict)
def to_bytes(self) -> bytes:
@@ -193,9 +227,9 @@ class SchemaBase(abc.ABC):
writer.seek(0)
return writer.read()
def to_llsd(self):
def to_llsd(self, flavor: str = "legacy"):
obj_dict = {}
for field_name, field in self._get_fields_dict(llsd=True).items():
for field_name, field in self._get_fields_dict(llsd_flavor=flavor).items():
spec = field.metadata.get("spec")
# Not meant to be serialized
if not spec:
@@ -205,11 +239,15 @@ class SchemaBase(abc.ABC):
if val is None:
continue
spec_cls = spec
if not inspect.isclass(spec_cls):
spec_cls = spec_cls.__class__
# Some kind of nested structure like sale_info
if isinstance(val, SchemaBase):
val = val.to_llsd()
elif issubclass(spec, SchemaFieldSerializer):
val = spec.to_llsd(val)
val = val.to_llsd(flavor)
elif issubclass(spec_cls, SchemaFieldSerializer):
val = spec.to_llsd(val, flavor)
else:
raise ValueError(f"Bad inventory spec {spec!r}")
obj_dict[field_name] = val

View File

@@ -39,11 +39,11 @@ class HippoLLSDXMLPrettyFormatter(base_llsd.serde_xml.LLSDXMLPrettyFormatter, Hi
super().__init__()
def format_pretty_xml(val: typing.Any):
def format_pretty_xml(val: typing.Any) -> bytes:
return HippoLLSDXMLPrettyFormatter().format(val)
def format_xml(val: typing.Any):
def format_xml(val: typing.Any) -> bytes:
return HippoLLSDXMLFormatter().format(val)
@@ -58,11 +58,11 @@ class HippoLLSDNotationFormatter(base_llsd.serde_notation.LLSDNotationFormatter,
return super().STRING(v).replace(b"\n", b"\\n")
def format_notation(val: typing.Any):
def format_notation(val: typing.Any) -> bytes:
return HippoLLSDNotationFormatter().format(val)
def format_binary(val: typing.Any, with_header=True):
def format_binary(val: typing.Any, with_header=True) -> bytes:
val = _format_binary_recurse(val)
if with_header:
return b'<?llsd/binary?>\n' + val

View File

@@ -6,6 +6,7 @@ import copy
import dataclasses
import datetime as dt
import logging
from collections import deque
from typing import *
from typing import Optional
@@ -25,16 +26,23 @@ class ReliableResendInfo:
class Circuit:
def __init__(self, near_host: Optional[ADDR_TUPLE], far_host: ADDR_TUPLE, transport):
def __init__(
self,
near_host: Optional[ADDR_TUPLE],
far_host: ADDR_TUPLE,
transport: Optional[AbstractUDPTransport] = None,
):
self.near_host: Optional[ADDR_TUPLE] = near_host
self.host: ADDR_TUPLE = far_host
self.is_alive = True
self.transport: Optional[AbstractUDPTransport] = transport
self.transport = transport
self.serializer = UDPMessageSerializer()
self.last_packet_at = dt.datetime.now()
self.packet_id_base = 0
self.unacked_reliable: Dict[Tuple[Direction, int], ReliableResendInfo] = {}
self.resend_every: float = 3.0
# Reliable messages that we've already seen and handled, for resend suppression
self.seen_reliable: deque[int] = deque(maxlen=1_000)
def _send_prepared_message(self, message: Message, transport=None):
try:
@@ -44,6 +52,11 @@ class Circuit:
raise
return self.send_datagram(serialized, message.direction, transport=transport)
def disconnect(self):
self.packet_id_base = 0
self.unacked_reliable.clear()
self.is_alive = False
def send_datagram(self, data: bytes, direction: Direction, transport=None):
self.last_packet_at = dt.datetime.now()
src_addr, dst_addr = self.host, self.near_host
@@ -66,6 +79,7 @@ class Circuit:
# If it was queued, it's not anymore
message.queued = False
message.finalized = True
return True
def send(self, message: Message, transport=None) -> UDPPacket:
if self.prepare_message(message):
@@ -120,6 +134,13 @@ class Circuit:
message.direction = direction
self.send(message)
def track_reliable(self, packet_id: int) -> bool:
"""Tracks a reliable packet, returning if it's a new message"""
if packet_id in self.seen_reliable:
return False
self.seen_reliable.append(packet_id)
return True
def __repr__(self):
return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)

View File

@@ -5802,6 +5802,25 @@ version 2.0
}
}
// GenericStreamingMessage
// Optimized generic message for streaming arbitrary data to viewer
// Avoid payloads over 7KB (8KB ceiling)
// Method -- magic number indicating method to use to decode payload:
// 0x4175 - GLTF material override data
// Payload -- data to be decoded
{
GenericStreamingMessage High 31 Trusted Unencoded
{
MethodData Single
{ Method U16 }
}
{
DataBlock Single
{ Data Variable 2 }
}
}
// LargeGenericMessage
// Similar to the above messages, but can handle larger payloads and serialized
// LLSD. Uses HTTP transport

View File

@@ -29,7 +29,10 @@ from hippolyzer.lib.base.message.msgtypes import MsgType
PACKER = Callable[[Any], bytes]
UNPACKER = Callable[[bytes], Any]
LLSD_PACKER = Callable[[Any], Any]
LLSD_UNPACKER = Callable[[Any], Any]
SPEC = Tuple[UNPACKER, PACKER]
LLSD_SPEC = Tuple[LLSD_UNPACKER, LLSD_PACKER]
def _pack_string(pack_string):
@@ -64,6 +67,21 @@ def _make_tuplecoord_spec(typ: Type[TupleCoord], struct_fmt: str,
return lambda x: typ(*struct_obj.unpack(x)), _packer
def _make_llsd_tuplecoord_spec(typ: Type[TupleCoord], needed_elems: Optional[int] = None):
if needed_elems is None:
# Number of elems needed matches the number in the coord type
def _packer(x):
return list(x)
else:
# Special case, we only want to pack some of the components.
# Mostly for Quaternion since we don't actually need to send W.
def _packer(x):
if isinstance(x, TupleCoord):
x = x.data()
return list(x.data(needed_elems))
return lambda x: typ(*x), _packer
def _unpack_specs(cls):
cls.UNPACKERS = {k: v[0] for (k, v) in cls.SPECS.items()}
cls.PACKERS = {k: v[1] for (k, v) in cls.SPECS.items()}
@@ -110,10 +128,15 @@ class TemplateDataPacker:
class LLSDDataPacker(TemplateDataPacker):
# Some template var types aren't directly representable in LLSD, so they
# get encoded to binary fields.
SPECS = {
SPECS: Dict[MsgType, LLSD_SPEC] = {
MsgType.MVT_IP_ADDR: (socket.inet_ntoa, socket.inet_aton),
# LLSD ints are technically bound to S32 range.
MsgType.MVT_U32: _make_struct_spec('!I'),
MsgType.MVT_U64: _make_struct_spec('!Q'),
MsgType.MVT_S64: _make_struct_spec('!q'),
# These are arrays in LLSD, we need to turn them into coords.
MsgType.MVT_LLVector3: _make_llsd_tuplecoord_spec(Vector3),
MsgType.MVT_LLVector3d: _make_llsd_tuplecoord_spec(Vector3),
MsgType.MVT_LLVector4: _make_llsd_tuplecoord_spec(Vector4),
MsgType.MVT_LLQuaternion: _make_llsd_tuplecoord_spec(Quaternion, needed_elems=3)
}

View File

@@ -222,7 +222,7 @@ class Message:
def add_blocks(self, block_list):
# can have a list of blocks if it is multiple or variable
for block in block_list:
if type(block) == list:
if type(block) is list:
for bl in block:
self.add_block(bl)
else:

View File

@@ -31,7 +31,8 @@ _T = TypeVar("_T")
_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
MESSAGE_NAMES = Iterable[_K]
# TODO: Can't do `Iterable[Union[_K, Literal["*"]]` apparently?
MESSAGE_NAMES = Iterable[Union[_K, str]]
class MessageHandler(Generic[_T, _K]):
@@ -43,10 +44,9 @@ class MessageHandler(Generic[_T, _K]):
LOG.debug('Creating a monitor for %s' % message_name)
return self.handlers.setdefault(message_name, Event())
def subscribe(self, message_name: _K, handler: MESSAGE_HANDLER) -> Event:
def subscribe(self, message_name: Union[_K, Literal["*"]], handler: MESSAGE_HANDLER):
notifier = self.register(message_name)
notifier.subscribe(handler)
return notifier
def _subscribe_all(self, message_names: MESSAGE_NAMES, handler: MESSAGE_HANDLER,
predicate: Optional[PREDICATE] = None) -> List[Event]:
@@ -145,7 +145,7 @@ class MessageHandler(Generic[_T, _K]):
# Always try to call wildcard handlers
self._handle_type('*', message)
def _handle_type(self, name: _K, message: _T):
def _handle_type(self, name: Union[_K, Literal["*"]], message: _T):
handler = self.handlers.get(name)
if not handler:
return

View File

@@ -47,7 +47,6 @@ class MsgBlockType:
MBT_SINGLE = 0
MBT_MULTIPLE = 1
MBT_VARIABLE = 2
MBT_String_List = ['Single', 'Multiple', 'Variable']
class PacketFlags(enum.IntFlag):
@@ -55,6 +54,8 @@ class PacketFlags(enum.IntFlag):
RELIABLE = 0x40
RESENT = 0x20
ACK = 0x10
# Not a real flag, just used for display.
EQ = 1 << 10
# frequency for messages
@@ -62,28 +63,23 @@ class PacketFlags(enum.IntFlag):
# = '\xFF\xFF'
# = '\xFF'
# = ''
class MsgFrequency:
FIXED_FREQUENCY_MESSAGE = -1 # marking it
LOW_FREQUENCY_MESSAGE = 4
MEDIUM_FREQUENCY_MESSAGE = 2
HIGH_FREQUENCY_MESSAGE = 1
class MsgFrequency(enum.IntEnum):
FIXED = -1 # marking it
LOW = 4
MEDIUM = 2
HIGH = 1
class MsgTrust:
LL_NOTRUST = 0
LL_TRUSTED = 1
class MsgEncoding(enum.IntEnum):
UNENCODED = 0
ZEROCODED = 1
class MsgEncoding:
LL_UNENCODED = 0
LL_ZEROCODED = 1
class MsgDeprecation:
LL_DEPRECATED = 0
LL_UDPDEPRECATED = 1
LL_UDPBLACKLISTED = 2
LL_NOTDEPRECATED = 3
class MsgDeprecation(enum.IntEnum):
DEPRECATED = 0
UDPDEPRECATED = 1
UDPBLACKLISTED = 2
NOTDEPRECATED = 3
# message variable types

View File

@@ -21,7 +21,7 @@ Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import typing
from .msgtypes import MsgType, MsgBlockType
from .msgtypes import MsgType, MsgBlockType, MsgFrequency
from ..datatypes import UUID
@@ -105,26 +105,19 @@ class MessageTemplateBlock:
return self.variable_map[name]
class MessageTemplate(object):
frequency_strings = {-1: 'fixed', 1: 'high', 2: 'medium', 4: 'low'} # strings for printout
deprecation_strings = ["Deprecated", "UDPDeprecated", "UDPBlackListed", "NotDeprecated"] # using _as_string methods
encoding_strings = ["Unencoded", "Zerocoded"] # etc
trusted_strings = ["Trusted", "NotTrusted"] # etc LDE 24oct2008
class MessageTemplate:
def __init__(self, name):
self.blocks: typing.List[MessageTemplateBlock] = []
self.block_map: typing.Dict[str, MessageTemplateBlock] = {}
# this is the function or object that will handle this type of message
self.received_count = 0
self.name = name
self.frequency = None
self.msg_num = 0
self.msg_freq_num_bytes = None
self.msg_trust = None
self.msg_deprecation = None
self.msg_encoding = None
self.frequency: typing.Optional[MsgFrequency] = None
self.num = 0
# Frequency + msg num as bytes
self.freq_num_bytes = None
self.trusted = False
self.deprecation = None
self.encoding = None
def add_block(self, block):
self.block_map[block.name] = block
@@ -134,12 +127,6 @@ class MessageTemplate(object):
return self.block_map[name]
def get_msg_freq_num_len(self):
if self.frequency == -1:
if self.frequency == MsgFrequency.FIXED:
return 4
return self.frequency
def get_frequency_as_string(self):
return MessageTemplate.frequency_strings[self.frequency]
def get_deprecation_as_string(self):
return MessageTemplate.deprecation_strings[self.msg_deprecation]

View File

@@ -68,32 +68,32 @@ class TemplateDictionary:
# do a mapping of type to a string for easier reference
frequency_str = ''
if template.frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
if template.frequency == MsgFrequency.FIXED:
frequency_str = "Fixed"
elif template.frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
elif template.frequency == MsgFrequency.LOW:
frequency_str = "Low"
elif template.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
elif template.frequency == MsgFrequency.MEDIUM:
frequency_str = "Medium"
elif template.frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
elif template.frequency == MsgFrequency.HIGH:
frequency_str = "High"
self.message_dict[(frequency_str,
template.msg_num)] = template
template.num)] = template
def build_message_ids(self):
for template in list(self.message_templates.values()):
frequency = template.frequency
num_bytes = None
if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
if frequency == MsgFrequency.FIXED:
# have to do this because Fixed messages are stored as a long in the template
num_bytes = b'\xff\xff\xff' + struct.pack("B", template.msg_num)
elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
num_bytes = b'\xff\xff' + struct.pack("!H", template.msg_num)
elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
num_bytes = b'\xff' + struct.pack("B", template.msg_num)
elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
num_bytes = struct.pack("B", template.msg_num)
template.msg_freq_num_bytes = num_bytes
num_bytes = b'\xff\xff\xff' + struct.pack("B", template.num)
elif frequency == MsgFrequency.LOW:
num_bytes = b'\xff\xff' + struct.pack("!H", template.num)
elif frequency == MsgFrequency.MEDIUM:
num_bytes = b'\xff' + struct.pack("B", template.num)
elif frequency == MsgFrequency.HIGH:
num_bytes = struct.pack("B", template.num)
template.freq_num_bytes = num_bytes
def get_template_by_name(self, template_name) -> typing.Optional[MessageTemplate]:
return self.message_templates.get(template_name)

View File

@@ -22,7 +22,7 @@ import struct
import re
from . import template
from .msgtypes import MsgFrequency, MsgTrust, MsgEncoding
from .msgtypes import MsgFrequency, MsgEncoding
from .msgtypes import MsgDeprecation, MsgBlockType, MsgType
from ..exc import MessageTemplateParsingError, MessageTemplateNotFound
@@ -112,67 +112,69 @@ class MessageTemplateParser:
frequency = None
freq_str = match.group(2)
if freq_str == 'Low':
frequency = MsgFrequency.LOW_FREQUENCY_MESSAGE
frequency = MsgFrequency.LOW
elif freq_str == 'Medium':
frequency = MsgFrequency.MEDIUM_FREQUENCY_MESSAGE
frequency = MsgFrequency.MEDIUM
elif freq_str == 'High':
frequency = MsgFrequency.HIGH_FREQUENCY_MESSAGE
frequency = MsgFrequency.HIGH
elif freq_str == 'Fixed':
frequency = MsgFrequency.FIXED_FREQUENCY_MESSAGE
frequency = MsgFrequency.FIXED
new_template.frequency = frequency
msg_num = int(match.group(3), 0)
if frequency == MsgFrequency.FIXED_FREQUENCY_MESSAGE:
if frequency == MsgFrequency.FIXED:
# have to do this because Fixed messages are stored as a long in the template
msg_num &= 0xff
msg_num_bytes = struct.pack('!BBBB', 0xff, 0xff, 0xff, msg_num)
elif frequency == MsgFrequency.LOW_FREQUENCY_MESSAGE:
elif frequency == MsgFrequency.LOW:
msg_num_bytes = struct.pack('!BBH', 0xff, 0xff, msg_num)
elif frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE:
elif frequency == MsgFrequency.MEDIUM:
msg_num_bytes = struct.pack('!BB', 0xff, msg_num)
elif frequency == MsgFrequency.HIGH_FREQUENCY_MESSAGE:
elif frequency == MsgFrequency.HIGH:
msg_num_bytes = struct.pack('!B', msg_num)
else:
raise Exception("don't know about frequency %s" % frequency)
new_template.msg_num = msg_num
new_template.msg_freq_num_bytes = msg_num_bytes
new_template.num = msg_num
new_template.freq_num_bytes = msg_num_bytes
msg_trust = None
msg_trust_str = match.group(4)
if msg_trust_str == 'Trusted':
msg_trust = MsgTrust.LL_TRUSTED
msg_trust = True
elif msg_trust_str == 'NotTrusted':
msg_trust = MsgTrust.LL_NOTRUST
msg_trust = False
else:
raise ValueError(f"Invalid trust {msg_trust_str}")
new_template.msg_trust = msg_trust
new_template.trusted = msg_trust
msg_encoding = None
msg_encoding_str = match.group(5)
if msg_encoding_str == 'Unencoded':
msg_encoding = MsgEncoding.LL_UNENCODED
msg_encoding = MsgEncoding.UNENCODED
elif msg_encoding_str == 'Zerocoded':
msg_encoding = MsgEncoding.LL_ZEROCODED
msg_encoding = MsgEncoding.ZEROCODED
else:
raise ValueError(f"Invalid encoding {msg_encoding_str}")
new_template.msg_encoding = msg_encoding
new_template.encoding = msg_encoding
msg_dep = None
msg_dep_str = match.group(7)
if msg_dep_str:
if msg_dep_str == 'Deprecated':
msg_dep = MsgDeprecation.LL_DEPRECATED
msg_dep = MsgDeprecation.DEPRECATED
elif msg_dep_str == 'UDPDeprecated':
msg_dep = MsgDeprecation.LL_UDPDEPRECATED
msg_dep = MsgDeprecation.UDPDEPRECATED
elif msg_dep_str == 'UDPBlackListed':
msg_dep = MsgDeprecation.LL_UDPBLACKLISTED
msg_dep = MsgDeprecation.UDPBLACKLISTED
elif msg_dep_str == 'NotDeprecated':
msg_dep = MsgDeprecation.LL_NOTDEPRECATED
msg_dep = MsgDeprecation.NOTDEPRECATED
else:
msg_dep = MsgDeprecation.LL_NOTDEPRECATED
msg_dep = MsgDeprecation.NOTDEPRECATED
if msg_dep is None:
raise MessageTemplateParsingError("Unknown msg_dep field %s" % match.group(0))
new_template.msg_deprecation = msg_dep
new_template.deprecation = msg_dep
return new_template

View File

@@ -220,11 +220,17 @@ class UDPMessageDeserializer:
if tmpl_variable.probably_binary:
return unpacked_data
# Truncated strings need to be treated carefully
if tmpl_variable.probably_text and unpacked_data.endswith(b"\x00"):
try:
return unpacked_data.decode("utf8").rstrip("\x00")
except UnicodeDecodeError:
return JankStringyBytes(unpacked_data)
if tmpl_variable.probably_text:
# If it has a null terminator, let's try to decode it first.
# We don't want to do this if there isn't one, because that may change
# the meaning of the data.
if unpacked_data.endswith(b"\x00"):
try:
return unpacked_data.decode("utf8").rstrip("\x00")
except UnicodeDecodeError:
pass
# Failed, return jank stringy bytes
return JankStringyBytes(unpacked_data)
elif tmpl_variable.type in {MsgType.MVT_FIXED, MsgType.MVT_VARIABLE}:
# No idea if this should be bytes or a string... make an object that's sort of both.
return JankStringyBytes(unpacked_data)

View File

@@ -69,7 +69,7 @@ class UDPMessageSerializer:
# frequency and message number. The template stores it because it doesn't
# change per template.
body_writer = se.BufferWriter("<")
body_writer.write_bytes(current_template.msg_freq_num_bytes)
body_writer.write_bytes(current_template.freq_num_bytes)
body_writer.write_bytes(msg.extra)
# We're going to pop off keys as we go, so shallow copy the dict.

View File

@@ -82,8 +82,9 @@ CAPS_DICT = Union[
class CapsClient:
def __init__(self, caps: Optional[CAPS_DICT] = None):
def __init__(self, caps: Optional[CAPS_DICT] = None, session: Optional[aiohttp.ClientSession] = None) -> None:
self._caps = caps
self._session = session
def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
return cap_or_url, headers, proxy, ssl
@@ -117,6 +118,7 @@ class CapsClient:
session_owned = False
# Use an existing session if we have one to take advantage of connection pooling
# otherwise create one
session = session or self._session
if session is None:
session_owned = True
session = aiohttp.ClientSession(

View File

@@ -1580,8 +1580,16 @@ def bitfield_field(bits: int, *, adapter: Optional[Adapter] = None, default=0, i
class BitfieldDataclass(DataclassAdapter):
def __init__(self, data_cls: Type,
prim_spec: Optional[SerializablePrimitive] = None, shift: bool = True):
PRIM_SPEC: ClassVar[Optional[SerializablePrimitive]] = None
def __init__(self, data_cls: Optional[Type] = None,
prim_spec: Optional[SerializablePrimitive] = None, shift: Optional[bool] = None):
if not dataclasses.is_dataclass(data_cls):
raise ValueError(f"{data_cls!r} is not a dataclass")
if prim_spec is None:
prim_spec = getattr(data_cls, 'PRIM_SPEC', None)
if shift is None:
shift = getattr(data_cls, 'SHIFT', True)
super().__init__(data_cls, prim_spec)
self._shift = shift
self._bitfield_spec = self._build_bitfield(data_cls)
@@ -1917,7 +1925,7 @@ class IntEnumSubfieldSerializer(AdapterInstanceSubfieldSerializer):
val = super().deserialize(ctx_obj, val, pod=pod)
# Don't pretend we were able to deserialize this if we
# had to fall through to the `int` case.
if pod and type(val) == int:
if pod and type(val) is int:
return UNSERIALIZABLE
return val

View File

@@ -15,9 +15,40 @@ from typing import *
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, IntEnum, IntFlag, Vector3, Quaternion
from hippolyzer.lib.base.helpers import BiDiDict
from hippolyzer.lib.base.namevalue import NameValuesSerializer
class LookupIntEnum(IntEnum):
"""
Used for enums that have legacy string names, may be used in the legacy schema
Generally this is the string returned by `LLWhateverType::lookup()` in indra
"""
@abc.abstractmethod
def to_lookup_name(self) -> str:
raise NotImplementedError()
@classmethod
def from_lookup_name(cls, legacy_name: str):
raise NotImplementedError()
_ASSET_TYPE_BIDI: BiDiDict[str] = BiDiDict({
"animation": "animatn",
"callingcard": "callcard",
"lsl_text": "lsltext",
"lsl_bytecode": "lslbyte",
"texture_tga": "txtr_tga",
"image_tga": "img_tga",
"image_jpeg": "jpg",
"sound_wav": "snd_wav",
"folder_link": "link_f",
"unknown": "invalid",
"none": "-1",
})
@se.enum_field_serializer("RequestXfer", "XferID", "VFileType")
@se.enum_field_serializer("AssetUploadRequest", "AssetBlock", "Type")
@se.enum_field_serializer("AssetUploadComplete", "AssetBlock", "Type")
@@ -26,7 +57,7 @@ from hippolyzer.lib.base.namevalue import NameValuesSerializer
@se.enum_field_serializer("RezObject", "InventoryData", "Type")
@se.enum_field_serializer("RezScript", "InventoryBlock", "Type")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "Type")
class AssetType(IntEnum):
class AssetType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -47,7 +78,7 @@ class AssetType(IntEnum):
GESTURE = 21
SIMSTATE = 22
LINK = 24
LINK_FOLDER = 25
FOLDER_LINK = 25
MARKETPLACE_FOLDER = 26
WIDGET = 40
PERSON = 45
@@ -62,16 +93,14 @@ class AssetType(IntEnum):
UNKNOWN = 255
NONE = -1
@property
def human_name(self):
def to_lookup_name(self) -> str:
lower = self.name.lower()
return {
"animation": "animatn",
"callingcard": "callcard",
"texture_tga": "txtr_tga",
"image_tga": "img_tga",
"sound_wav": "snd_wav",
}.get(lower, lower)
return _ASSET_TYPE_BIDI.forward.get(lower, lower)
@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _ASSET_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]
@property
def inventory_type(self):
@@ -99,12 +128,19 @@ class AssetType(IntEnum):
}.get(self, AssetType.NONE)
# Irregular InventoryType name mappings: lowercased member name -> legacy
# schema name (and back via `.backward`). Unlisted names map to themselves.
_INV_TYPE_BIDI: BiDiDict[str] = BiDiDict({
    "callingcard": "callcard",
    "attachment": "attach",
    "none": "-1",
})
@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "InvType")
@se.enum_field_serializer("CreateInventoryItem", "InventoryBlock", "InvType")
@se.enum_field_serializer("RezObject", "InventoryData", "InvType")
@se.enum_field_serializer("RezScript", "InventoryBlock", "InvType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "InvType")
class InventoryType(IntEnum):
class InventoryType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -133,16 +169,37 @@ class InventoryType(IntEnum):
UNKNOWN = 255
NONE = -1
@property
def human_name(self):
def to_lookup_name(self) -> str:
lower = self.name.lower()
return {
"callingcard": "callcard",
"none": "-1",
}.get(lower, lower)
return _INV_TYPE_BIDI.forward.get(lower, lower)
@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _INV_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]
class FolderType(IntEnum):
# Irregular FolderType name mappings: lowercased member name -> legacy schema
# name (and back via `.backward`). Unlisted names map to themselves.
# NOTE: ENSEMBLE_START and ENSEMBLE_END share the legacy name "ensemble", so
# the backward mapping can only recover one of them.
_FOLDER_TYPE_BIDI: BiDiDict[str] = BiDiDict({
    "callingcard": "callcard",
    "lsl_text": "lsltext",
    "animation": "animatn",
    "snapshot_category": "snapshot",
    "lost_and_found": "lstndfnd",
    "ensemble_start": "ensemble",
    "ensemble_end": "ensemble",
    "current_outfit": "current",
    "my_outfits": "my_otfts",
    "basic_root": "basic_rt",
    "marketplace_listings": "merchant",
    "marketplace_stock": "stock",
    "marketplace_version": "version",
    "my_suitcase": "suitcase",
    "root_inventory": "root_inv",
    "none": "-1",
})
class FolderType(LookupIntEnum):
TEXTURE = 0
SOUND = 1
CALLINGCARD = 2
@@ -161,6 +218,7 @@ class FolderType(IntEnum):
ANIMATION = 20
GESTURE = 21
FAVORITE = 23
# The "ensemble" values aren't used, no idea what they were for.
ENSEMBLE_START = 26
ENSEMBLE_END = 45
# This range is reserved for special clothing folder types.
@@ -177,7 +235,7 @@ class FolderType(IntEnum):
# Note: We actually *never* create folders with that type. This is used for icon override only.
MARKETPLACE_VERSION = 55
SETTINGS = 56
# Firestorm folders, may not actually exist
# Firestorm folders, may not actually exist in legacy schema
FIRESTORM = 57
PHOENIX = 58
RLV = 59
@@ -185,6 +243,15 @@ class FolderType(IntEnum):
MY_SUITCASE = 100
NONE = -1
def to_lookup_name(self) -> str:
lower = self.name.lower()
return _FOLDER_TYPE_BIDI.forward.get(lower, lower)
@classmethod
def from_lookup_name(cls, legacy_name: str):
reg_name = _FOLDER_TYPE_BIDI.backward.get(legacy_name, legacy_name).upper()
return cls[reg_name]
@se.enum_field_serializer("AgentIsNowWearing", "WearableData", "WearableType")
@se.enum_field_serializer("AgentWearablesUpdate", "WearableData", "WearableType")
@@ -244,6 +311,9 @@ class Permissions(IntFlag):
RESERVED = 1 << 31
# Legacy schema names for SaleType, indexed by the enum's integer value.
_SALE_TYPE_LEGACY_NAMES = ("not", "orig", "copy", "cntn")
@se.enum_field_serializer("ObjectSaleInfo", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectProperties", "ObjectData", "SaleType")
@se.enum_field_serializer("ObjectPropertiesFamily", "ObjectData", "SaleType")
@@ -252,12 +322,19 @@ class Permissions(IntFlag):
@se.enum_field_serializer("RezObject", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateTaskInventory", "InventoryData", "SaleType")
@se.enum_field_serializer("UpdateCreateInventoryItem", "InventoryData", "SaleType")
class SaleInfo(IntEnum):
class SaleType(LookupIntEnum):
    """How an object or inventory item is offered for sale."""
    NOT = 0
    ORIGINAL = 1
    COPY = 2
    CONTENTS = 3

    def to_lookup_name(self) -> str:
        """Legacy schema name for this value, e.g. "orig" for ORIGINAL."""
        return _SALE_TYPE_LEGACY_NAMES[int(self.value)]

    @classmethod
    def from_lookup_name(cls, legacy_name: str):
        """Resolve a member from its legacy schema name; ValueError if unknown."""
        idx = _SALE_TYPE_LEGACY_NAMES.index(legacy_name)
        return cls(idx)
@se.flag_field_serializer("ParcelInfoReply", "Data", "Flags")
class ParcelInfoFlags(IntFlag):
@@ -276,6 +353,7 @@ class ParcelInfoFlags(IntFlag):
class MapImageFlags(IntFlag):
# No clue, honestly. I guess there's potentially different image types you could request.
LAYER = 1 << 1
RETURN_NONEXISTENT = 0x10000
@se.enum_field_serializer("MapBlockReply", "Data", "Access")
@@ -1461,6 +1539,8 @@ class ExtraParamType(IntEnum):
RESERVED = 0x50
MESH = 0x60
EXTENDED_MESH = 0x70
RENDER_MATERIAL = 0x80
REFLECTION_PROBE = 0x90
class ExtendedMeshFlags(IntFlag):
@@ -1483,6 +1563,13 @@ class SculptTypeData:
Mirror: bool = se.bitfield_field(bits=1, adapter=se.BoolAdapter())
class ReflectionProbeFlags(IntFlag):
    """Bit flags for the REFLECTION_PROBE extra-param block."""
    BOX_VOLUME = 1 << 0  # influence volume is a box rather than a sphere
    DYNAMIC = 1 << 1     # dynamic objects (avatars) render into this probe
EXTRA_PARAM_TEMPLATES = {
ExtraParamType.FLEXIBLE: se.Template({
"Tension": se.BitField(se.U8, {"Tension": 6, "Softness1": 2}),
@@ -1514,6 +1601,15 @@ EXTRA_PARAM_TEMPLATES = {
ExtraParamType.EXTENDED_MESH: se.Template({
"Flags": se.IntFlag(ExtendedMeshFlags, se.U32),
}),
ExtraParamType.RENDER_MATERIAL: se.Collection(se.U8, se.Template({
"TEIdx": se.U8,
"TEID": se.UUID,
})),
ExtraParamType.REFLECTION_PROBE: se.Template({
"Ambiance": se.F32,
"ClipDistance": se.F32,
"Flags": se.IntFlag(ReflectionProbeFlags, se.U8),
}),
}
@@ -1857,8 +1953,8 @@ class AvatarPropertiesFlags(IntFlag):
@se.flag_field_serializer("AvatarGroupsReply", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AvatarDataUpdate", "AgentDataData", "GroupPowers")
@se.flag_field_serializer("AgentGroupDataUpdate", "GroupData", "GroupPowers")
@se.flag_field_serializer("AgentDataUpdate", "AgentData", "GroupPowers")
@se.flag_field_serializer("GroupProfileReply", "GroupData", "PowersMask")
@se.flag_field_serializer("GroupRoleDataReply", "RoleData", "Powers")
class GroupPowerFlags(IntFlag):
@@ -2039,6 +2135,43 @@ class ScriptPermissions(IntFlag):
CHANGE_ENVIRONMENT = 1 << 18
@se.flag_field_serializer("ParcelProperties", "ParcelData", "ParcelFlags")
class ParcelFlags(IntFlag):
    """Per-parcel behavior bits (ParcelProperties.ParcelData.ParcelFlags)."""
    ALLOW_FLY = 1 << 0  # Can start flying
    ALLOW_OTHER_SCRIPTS = 1 << 1  # Scripts by others can run.
    FOR_SALE = 1 << 2  # Can buy this land
    FOR_SALE_OBJECTS = 1 << 7  # Can buy all objects on this land
    ALLOW_LANDMARK = 1 << 3  # Always true/deprecated
    ALLOW_TERRAFORM = 1 << 4
    ALLOW_DAMAGE = 1 << 5
    CREATE_OBJECTS = 1 << 6
    # 7 is moved above
    USE_ACCESS_GROUP = 1 << 8
    USE_ACCESS_LIST = 1 << 9
    USE_BAN_LIST = 1 << 10
    USE_PASS_LIST = 1 << 11
    SHOW_DIRECTORY = 1 << 12
    ALLOW_DEED_TO_GROUP = 1 << 13
    CONTRIBUTE_WITH_DEED = 1 << 14
    SOUND_LOCAL = 1 << 15  # Hear sounds in this parcel only
    SELL_PARCEL_OBJECTS = 1 << 16  # Objects on land are included as part of the land when the land is sold
    ALLOW_PUBLISH = 1 << 17  # Allow publishing of parcel information on the web
    MATURE_PUBLISH = 1 << 18  # The information on this parcel is mature
    URL_WEB_PAGE = 1 << 19  # The "media URL" is an HTML page
    URL_RAW_HTML = 1 << 20  # The "media URL" is a raw HTML string like <H1>Foo</H1>
    RESTRICT_PUSHOBJECT = 1 << 21  # Restrict push object to either on agent or on scripts owned by parcel owner
    DENY_ANONYMOUS = 1 << 22  # Deny all non identified/transacted accounts
    # DENY_IDENTIFIED = 1 << 23  # Deny identified accounts (deprecated)
    # DENY_TRANSACTED = 1 << 24  # Deny transacted accounts (deprecated)
    ALLOW_GROUP_SCRIPTS = 1 << 25  # Allow scripts owned by group
    CREATE_GROUP_OBJECTS = 1 << 26  # Allow object creation by group members or objects
    ALLOW_ALL_OBJECT_ENTRY = 1 << 27  # Allow all objects to enter a parcel
    ALLOW_GROUP_OBJECT_ENTRY = 1 << 28  # Only allow group (and owner) objects to enter the parcel
    ALLOW_VOICE_CHAT = 1 << 29  # Allow residents to use voice chat on this parcel
    USE_ESTATE_VOICE_CHAN = 1 << 30
    DENY_AGEUNVERIFIED = 1 << 31  # Prevent residents who aren't age-verified
@se.enum_field_serializer("UpdateMuteListEntry", "MuteData", "MuteType")
class MuteType(IntEnum):
BY_NAME = 0
@@ -2069,20 +2202,106 @@ class MuteFlags(IntFlag):
return 0xF
class CreationDateAdapter(se.Adapter):
class DateAdapter(se.Adapter):
def __init__(self, multiplier: int = 1):
super(DateAdapter, self).__init__(None)
self._multiplier = multiplier
def decode(self, val: Any, ctx: Optional[se.ParseContext], pod: bool = False) -> Any:
return datetime.datetime.fromtimestamp(val / 1_000_000).isoformat()
return datetime.datetime.fromtimestamp(val / self._multiplier).isoformat()
def encode(self, val: Any, ctx: Optional[se.ParseContext]) -> Any:
return int(datetime.datetime.fromisoformat(val).timestamp() * 1_000_000)
return int(datetime.datetime.fromisoformat(val).timestamp() * self._multiplier)
@se.enum_field_serializer("MeanCollisionAlert", "MeanCollision", "Type")
class MeanCollisionType(IntEnum):
INVALID = 0
BUMP = enum.auto()
LLPUSHOBJECT = enum.auto()
SELECTED_OBJECT_COLLIDE = enum.auto()
SCRIPTED_OBJECT_COLLIDE = enum.auto()
PHYSICAL_OBJECT_COLLIDE = enum.auto()
@se.subfield_serializer("ObjectProperties", "ObjectData", "CreationDate")
class CreationDateSerializer(se.AdapterSubfieldSerializer):
ADAPTER = CreationDateAdapter(None)
ADAPTER = DateAdapter(1_000_000)
ORIG_INLINE = True
@se.subfield_serializer("MeanCollisionAlert", "MeanCollision", "Time")
@se.subfield_serializer("ParcelProperties", "ParcelData", "ClaimDate")
class DateSerializer(se.AdapterSubfieldSerializer):
    # Pretty-prints plain unix-seconds timestamp fields as ISO 8601 strings
    # (DateAdapter's default multiplier of 1 means seconds, not microseconds).
    ADAPTER = DateAdapter()
    ORIG_INLINE = True
class ParcelGridType(IntEnum):
    """Ownership category of a single ParcelOverlay grid cell."""
    PUBLIC = 0
    OWNED = 1  # Presumably non-linden owned land
    GROUP = 2
    SELF = 3
    FOR_SALE = 4
    AUCTION = 5
class ParcelGridFlags(IntFlag):
    """Status bits of a single ParcelOverlay grid cell (upper 5 bits of the byte)."""
    UNUSED = 1 << 3
    HIDDEN_AVS = 1 << 4
    SOUND_LOCAL = 1 << 5
    WEST_LINE = 1 << 6   # parcel border runs along this cell's west edge
    SOUTH_LINE = 1 << 7  # parcel border runs along this cell's south edge
@dataclasses.dataclass
class ParcelGridInfo(se.BitfieldDataclass):
    # One byte per grid cell in a ParcelOverlay message: 3 bits of ownership
    # type plus 5 bits of flags. SHIFT=False because the ParcelGridFlags
    # values are already positioned within the byte (0x8..0x80).
    PRIM_SPEC: ClassVar[se.SerializablePrimitive] = se.U8
    SHIFT: ClassVar[bool] = False
    Type: Union[ParcelGridType, int] = se.bitfield_field(bits=3, adapter=se.IntEnum(ParcelGridType))
    Flags: ParcelGridFlags = se.bitfield_field(bits=5, adapter=se.IntFlag(ParcelGridFlags))
@se.subfield_serializer("ParcelOverlay", "ParcelData", "Data")
class ParcelOverlaySerializer(se.SimpleSubfieldSerializer):
    # Unbounded run (no length prefix) of one ParcelGridInfo byte per cell.
    TEMPLATE = se.Collection(None, se.BitfieldDataclass(ParcelGridInfo))
@se.enum_field_serializer("ParcelProperties", "ParcelData", "LandingType")
class LandingType(IntEnum):
    """How teleports into a parcel are routed (ParcelProperties.ParcelData.LandingType).

    Matches indra's ELandingType. NONE was previously (incorrectly) 1, which
    made it an alias of LANDING_POINT and shadowed the real "no landing
    restriction" value.
    """
    NONE = 0
    LANDING_POINT = 1
    DIRECT = 2
@se.enum_field_serializer("ParcelProperties", "ParcelData", "Status")
class LandOwnershipStatus(IntEnum):
    """Parcel ownership status (ParcelProperties.ParcelData.Status)."""
    LEASED = 0         # parcel has an owner
    LEASE_PENDING = 1  # sale/lease in progress
    ABANDONED = 2      # owner gave the land up
    NONE = -1          # no status / not applicable
@se.enum_field_serializer("ParcelProperties", "ParcelData", "Category")
class LandCategory(IntEnum):
    """Directory category for a parcel (ParcelProperties.ParcelData.Category)."""
    # Explicit values instead of enum.auto() so wire values are self-evident.
    NONE = 0
    LINDEN = 1
    ADULT = 2
    ARTS = 3
    BUSINESS = 4
    EDUCATIONAL = 5
    GAMING = 6
    HANGOUT = 7
    NEWCOMER = 8
    PARK = 9
    RESIDENTIAL = 10
    SHOPPING = 11
    STAGE = 12
    OTHER = 13
    ANY = -1  # wildcard, only meaningful in searches
@se.http_serializer("RenderMaterials")
class RenderMaterialsSerializer(se.BaseHTTPSerializer):
@classmethod
@@ -2118,7 +2337,7 @@ class RetrieveNavMeshSrcSerializer(se.BaseHTTPSerializer):
# Beta puppetry stuff, subject to change!
class PuppetryEventMask(enum.IntFlag):
class PuppetryEventMask(IntFlag):
POSITION = 1 << 0
POSITION_IN_PARENT_FRAME = 1 << 1
ROTATION = 1 << 2

View File

@@ -0,0 +1,45 @@
import asyncio
from typing import Any, Optional, List, Tuple
from hippolyzer.lib.base.message.circuit import Circuit, ConnectionHolder
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, ADDR_TUPLE, UDPPacket
class MockTransport(AbstractUDPTransport):
    """UDP transport stand-in that records outbound packets instead of sending.

    Tests can inspect `self.packets` to see what would have gone on the wire.
    """

    def __init__(self):
        super().__init__()
        # (payload bytes, (host, port)) tuples, in send order
        self.packets: List[Tuple[bytes, Tuple[str, int]]] = []

    def send_packet(self, packet: UDPPacket) -> None:
        self.packets.append((packet.data, packet.dst_addr))

    def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
        pass  # no real socket to write to

    def abort(self) -> None:
        pass  # nothing to tear down

    def close(self) -> None:
        pass  # nothing to close
class MockHandlingCircuit(Circuit):
    """Circuit that loops outbound messages back into a local MessageHandler.

    Anything "sent" on this circuit is delivered to `handler` on the next
    event-loop iteration rather than hitting the network.
    """
    def __init__(self, handler: MessageHandler[Message, str]):
        # Dummy near/far addresses and no transport; nothing is really sent.
        super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
        self.handler = handler

    def _send_prepared_message(self, message: Message, transport=None):
        loop = asyncio.get_event_loop_policy().get_event_loop()
        # Deliver via call_soon so the send site can't re-enter the handler.
        loop.call_soon(self.handler.handle, message)
class MockConnectionHolder(ConnectionHolder):
    """Minimal ConnectionHolder pairing a circuit with a message handler for tests."""
    def __init__(self, circuit, message_handler):
        self.circuit = circuit
        self.message_handler = message_handler
async def soon(awaitable) -> Message:
    """Await `awaitable`, raising asyncio.TimeoutError after one second.

    Keeps a broken test from hanging the whole suite.
    """
    return await asyncio.wait_for(awaitable, timeout=1.0)

View File

@@ -269,12 +269,13 @@ class XferManager:
xfer.xfer_id = request_msg["XferID"]["ID"]
packet_id = 0
# TODO: No resend yet. If it's lost, it's lost.
while xfer.chunks:
chunk = xfer.chunks.pop(packet_id)
# EOF if there are no chunks left
packet_val = XferPacket(PacketID=packet_id, IsEOF=not bool(xfer.chunks))
self._connection_holder.circuit.send(Message(
# We just send reliably since I don't care to implement the Xfer-specific
# resend-on-unacked nastiness
_ = self._connection_holder.circuit.send_reliable(Message(
"SendXferPacket",
Block("XferID", ID=xfer.xfer_id, Packet_=packet_val),
Block("DataPacket", Data=chunk),

View File

@@ -30,12 +30,12 @@ class AssetUploader:
async def initiate_asset_upload(self, name: str, asset_type: AssetType,
body: bytes, flags: Optional[int] = None) -> UploadToken:
payload = {
"asset_type": asset_type.human_name,
"asset_type": asset_type.to_lookup_name(),
"description": "(No Description)",
"everyone_mask": 0,
"group_mask": 0,
"folder_id": UUID.ZERO, # Puts it in the default folder, I guess. Undocumented.
"inventory_type": asset_type.inventory_type.human_name,
"inventory_type": asset_type.inventory_type.to_lookup_name(),
"name": name,
"next_owner_mask": 581632,
}

View File

@@ -0,0 +1,775 @@
from __future__ import annotations
import asyncio
import hashlib
from importlib.metadata import version
import logging
import uuid
import weakref
import xmlrpc.client
from typing import *
import aiohttp
import multidict
from hippolyzer.lib.base.datatypes import Vector3, StringEnum
from hippolyzer.lib.base.helpers import proxify, get_resource_filename
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_dot_xml import MessageDotXML
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.caps_client import CapsClient, CAPS_DICT
from hippolyzer.lib.base.network.transport import ADDR_TUPLE, Direction, SocketUDPTransport, AbstractUDPTransport
from hippolyzer.lib.base.settings import Settings, SettingDescriptor
from hippolyzer.lib.base.templates import RegionHandshakeReplyFlags, ChatType
from hippolyzer.lib.base.transfer_manager import TransferManager
from hippolyzer.lib.base.xfer_manager import XferManager
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import InventoryManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.parcel_manager import ParcelManager
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion, BaseClientSessionManager
LOG = logging.getLogger(__name__)
class StartLocation(StringEnum):
    """Symbolic login start locations (anything else becomes a "uri:..." string)."""
    LAST = "last"
    HOME = "home"
class ClientSettings(Settings):
    """Tunable settings controlling HippoClient behavior."""
    SSL_VERIFY: bool = SettingDescriptor(False)
    """Off by default for now, the cert validation is a big mess due to LL using an internal CA."""
    SSL_CERT_PATH: str = SettingDescriptor(get_resource_filename("lib/base/network/data/ca-bundle.crt"))
    USER_AGENT: str = SettingDescriptor(f"Hippolyzer/v{version('hippolyzer')}")
    SEND_AGENT_UPDATES: bool = SettingDescriptor(True)
    """Generally you want to send these, lots of things will break if you don't send at least one."""
    AUTO_REQUEST_PARCELS: bool = SettingDescriptor(True)
    """Automatically request all parcel details when connecting to a region"""
    AUTO_REQUEST_MATERIALS: bool = SettingDescriptor(True)
    """Automatically request all materials when connecting to a region"""
class HippoCapsClient(CapsClient):
    """CapsClient that applies client settings (User-Agent, SSL policy) to every request."""
    def __init__(
            self,
            settings: ClientSettings,
            caps: Optional[CAPS_DICT] = None,
            session: Optional[aiohttp.ClientSession] = None,
    ) -> None:
        super().__init__(caps, session)
        self._settings = settings

    def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
        headers["User-Agent"] = self._settings.USER_AGENT
        # NOTE: the caller-provided `ssl` argument is deliberately discarded in
        # favor of the client-wide SSL_VERIFY setting.
        return cap_or_url, headers, proxy, self._settings.SSL_VERIFY
class HippoClientProtocol(asyncio.DatagramProtocol):
    """asyncio datagram protocol that decodes sim UDP packets and dispatches
    them to the session-level and region-level message handlers."""

    def __init__(self, session: HippoClientSession):
        self.session = proxify(session)
        self.message_xml = MessageDotXML()
        self.deserializer = UDPMessageDeserializer(
            settings=self.session.session_manager.settings,
        )

    def datagram_received(self, data, source_addr: ADDR_TUPLE):
        """Handle one inbound datagram from a sim circuit."""
        region = self.session.region_by_circuit_addr(source_addr)
        if not region:
            # Use the module logger for consistency (was logging.warning()).
            LOG.warning("Received packet from invalid address %s", source_addr)
            return
        message = self.deserializer.deserialize(data)
        message.direction = Direction.IN
        message.sender = source_addr
        if not self.message_xml.validate_udp_msg(message.name):
            LOG.warning(
                f"Received {message.name!r} over UDP, when it should come over the event queue. Discarding."
            )
            # NOTE(review): this propagates out of datagram_received() into the
            # event loop's exception handler rather than just dropping the
            # message -- presumably intentional so it's loud; confirm.
            raise PermissionError(f"UDPBanned message {message.name}")
        region.circuit.collect_acks(message)
        should_handle = True
        if message.reliable:
            # This is a bit crap. We send an ACK immediately through a PacketAck.
            # This is pretty wasteful, we should batch them up and send them on a timer.
            # We should ACK even if it's a resend of something we've already handled, maybe
            # they never got the ACK.
            region.circuit.send_acks((message.packet_id,))
            should_handle = region.circuit.track_reliable(message.packet_id)
        try:
            if should_handle:
                self.session.message_handler.handle(message)
        except Exception:
            # Narrowed from a bare `except:`, which would also have swallowed
            # KeyboardInterrupt / SystemExit. The old log text said "region
            # message handler" but this guards the *session* handler.
            LOG.exception("Failed in session message handler")
        # Region handler runs unguarded; its exceptions surface in the loop.
        region.message_handler.handle(message)
class HippoClientRegion(BaseClientRegion):
    """Client-side view of a single simulator.

    Owns the region's circuit address, its capability table and per-region
    managers (Xfer, Transfer, asset upload, parcels, objects), plus the
    EventQueueGet polling task once connected.
    """
    def __init__(self, circuit_addr, seed_cap: Optional[str], session: HippoClientSession, handle=None):
        super().__init__()
        self.caps = multidict.MultiDict()
        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
        self.circuit_addr = circuit_addr
        self.handle = handle
        if seed_cap:
            self.caps["Seed"] = seed_cap
        # Weakref to break the session <-> region reference cycle.
        self.session: Callable[[], HippoClientSession] = weakref.ref(session)
        self.caps_client = HippoCapsClient(session.session_manager.settings, self.caps, session.http_session)
        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
        self.asset_uploader = AssetUploader(proxify(self))
        self.parcel_manager = ParcelManager(proxify(self))
        self.objects = ClientObjectManager(self)
        self._llsd_serializer = LLSDMessageSerializer()
        self._eq_task: Optional[asyncio.Task] = None
        # Resolves (or gets an exception) when connect() finishes; replaced
        # with a fresh Future on reconnect.
        self.connected: asyncio.Future = asyncio.Future()
        self.message_handler.subscribe("StartPingCheck", self._handle_ping_check)

    def update_caps(self, caps: Mapping[str, str]) -> None:
        """Merge newly granted capability URLs into the region's cap table."""
        self.caps.update(caps)

    @property
    def cap_urls(self) -> multidict.MultiDict:
        # Copy so callers can't mutate the internal cap table.
        return self.caps.copy()

    async def connect(self, main_region: bool = False):
        """Open the circuit and bring the region fully up.

        Sends UseCircuitCode, waits for RegionHandshake, replies with
        RegionHandshakeReply and AgentThrottle (plus an initial AgentUpdate if
        enabled), fetches caps via the Seed cap, then starts event-queue
        polling. Pass `main_region=True` for the agent's current region so
        CompleteAgentMovement is sent first (the sim won't handshake without it).
        """
        # Disconnect first if we're already connected
        if self.circuit and self.circuit.is_alive:
            self.disconnect()
        if self.connected.done():
            self.connected = asyncio.Future()
        try:
            # TODO: What happens if a circuit code is invalid, again? Does it just refuse to ACK?
            await self.circuit.send_reliable(
                Message(
                    "UseCircuitCode",
                    Block(
                        "CircuitCode",
                        Code=self.session().circuit_code,
                        SessionID=self.session().id,
                        ID=self.session().agent_id,
                    ),
                )
            )
            self.circuit.is_alive = True
            # Clear out any old caps urls except the seed URL, we're about to fetch new caps.
            seed_url = self.caps["Seed"]
            self.caps.clear()
            self.caps["Seed"] = seed_url
            # Kick this off and await it later
            seed_resp_fut = self.caps_client.post("Seed", llsd=list(self.session().session_manager.SUPPORTED_CAPS))
            # Register first so we can handle it even if the ack happens after the message is sent
            region_handshake_fut = self.message_handler.wait_for(("RegionHandshake",))
            # If we're connecting to the main region, it won't even send us a RegionHandshake until we
            # first send a CompleteAgentMovement.
            if main_region:
                await self.complete_agent_movement()
            self.name = str((await region_handshake_fut)["RegionInfo"][0]["SimName"])
            self.session().objects.track_region_objects(self.handle)
            await self.circuit.send_reliable(
                Message(
                    "RegionHandshakeReply",
                    Block("AgentData", AgentID=self.session().agent_id, SessionID=self.session().id),
                    Block(
                        "RegionInfo",
                        Flags=(
                                RegionHandshakeReplyFlags.SUPPORTS_SELF_APPEARANCE
                                | RegionHandshakeReplyFlags.VOCACHE_IS_EMPTY
                        )
                    )
                )
            )
            await self.circuit.send_reliable(
                Message(
                    "AgentThrottle",
                    Block(
                        "AgentData",
                        AgentID=self.session().agent_id,
                        SessionID=self.session().id,
                        CircuitCode=self.session().circuit_code,
                    ),
                    Block(
                        "Throttle",
                        GenCounter=0,
                        # Reasonable defaults, I guess
                        Throttles_=[207360.0, 165376.0, 33075.19921875, 33075.19921875, 682700.75, 682700.75, 269312.0],
                    )
                )
            )
            if self.session().session_manager.settings.SEND_AGENT_UPDATES:
                # Usually we want to send at least one, since lots of messages will never be sent by the sim
                # until we send at least one AgentUpdate. For example, ParcelOverlay and LayerData.
                await self.circuit.send_reliable(
                    Message(
                        "AgentUpdate",
                        Block(
                            'AgentData',
                            AgentID=self.session().agent_id,
                            SessionID=self.session().id,
                            # Don't really care about the other fields.
                            fill_missing=True,
                        )
                    )
                )
            async with seed_resp_fut as seed_resp:
                seed_resp.raise_for_status()
                self.update_caps(await seed_resp.read_llsd())
            self._eq_task = asyncio.create_task(self._poll_event_queue())
            settings = self.session().session_manager.settings
            # Fire-and-forget background fetches; results land in the managers.
            if settings.AUTO_REQUEST_PARCELS:
                _ = asyncio.create_task(self.parcel_manager.request_dirty_parcels())
            if settings.AUTO_REQUEST_MATERIALS:
                _ = asyncio.create_task(self.objects.request_all_materials())
        except Exception as e:
            # Let consumers who were `await`ing the connected signal know there was an error
            if not self.connected.done():
                self.connected.set_exception(e)
            raise
        self.connected.set_result(None)

    def disconnect(self) -> None:
        """Simulator has gone away, disconnect. Should be synchronous"""
        if self._eq_task is not None:
            self._eq_task.cancel()
            self._eq_task = None
        self.circuit.disconnect()
        self.objects.clear()
        if self.connected.done():
            self.connected = asyncio.Future()
        # TODO: cancel XFers and Transfers and whatnot

    async def complete_agent_movement(self) -> None:
        """Tell the sim the agent now resides on it; marks it our main region."""
        await self.circuit.send_reliable(
            Message(
                "CompleteAgentMovement",
                Block(
                    "AgentData",
                    AgentID=self.session().agent_id,
                    SessionID=self.session().id,
                    CircuitCode=self.session().circuit_code
                ),
            )
        )
        self.session().main_region = self

    async def _poll_event_queue(self):
        """Long-poll the EventQueueGet cap forever, dispatching received events."""
        ack: Optional[int] = None
        while True:
            payload = {"ack": ack, "done": False}
            async with self.caps_client.post("EventQueueGet", llsd=payload) as resp:
                if resp.status != 200:
                    # Timed-out long poll or transient error; just poll again.
                    await asyncio.sleep(0.1)
                    continue
                polled = await resp.read_llsd()
                for event in polled["events"]:
                    if self._llsd_serializer.can_handle(event["message"]):
                        msg = self._llsd_serializer.deserialize(event)
                    else:
                        # If this isn't a templated message (like some EQ-only events are),
                        # then we wrap it in a synthetic `Message` so that the API for handling
                        # both EQ-only and templated message events can be the same. Ick.
                        msg = Message(event["message"])
                        if isinstance(event["body"], dict):
                            msg.add_block(Block("EventData", **event["body"]))
                        else:
                            # Shouldn't be any events that have anything other than a dict
                            # as a body, but just to be sure...
                            msg.add_block(Block("EventData", Data=event["body"]))
                        msg.synthetic = True
                    msg.sender = self.circuit_addr
                    msg.direction = Direction.IN
                    self.session().message_handler.handle(msg)
                    self.message_handler.handle(msg)
                ack = polled["id"]
            await asyncio.sleep(0.001)

    async def _handle_ping_check(self, message: Message):
        # Echo the ping ID back so the sim knows the circuit is still alive.
        self.circuit.send(
            Message(
                "CompletePingCheck",
                Block("PingID", PingID=message["PingID"]["PingID"]),
            )
        )
class HippoClientSession(BaseClientSession):
    """Represents a client's view of a remote session"""
    REGION_CLS = HippoClientRegion

    # Annotations narrowing BaseClientSession attributes to client-side types.
    region_by_handle: Callable[[int], Optional[HippoClientRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[HippoClientRegion]]
    regions: List[HippoClientRegion]
    session_manager: HippoClient
    main_region: Optional[HippoClientRegion]

    def __init__(self, id, secure_session_id, agent_id, circuit_code, session_manager: Optional[HippoClient] = None,
                 login_data=None):
        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager, login_data=login_data)
        self.http_session = session_manager.http_session
        self.objects = ClientWorldObjectManager(proxify(self), session_manager.settings, None)
        self.inventory_manager = InventoryManager(proxify(self))
        self.transport: Optional[SocketUDPTransport] = None
        self.protocol: Optional[HippoClientProtocol] = None
        self.message_handler.take_by_default = False
        # Tear down regions when the sim says they're going away...
        for msg_name in ("DisableSimulator", "CloseCircuit"):
            self.message_handler.subscribe(msg_name, lambda msg: self.unregister_region(msg.sender))
        # ...and register new ones when it tells us about them.
        for msg_name in ("TeleportFinish", "CrossedRegion", "EstablishAgentCommunication"):
            self.message_handler.subscribe(msg_name, self._handle_register_region_message)

    def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
                        handle: Optional[int] = None) -> HippoClientRegion:
        """Typed wrapper over BaseClientSession.register_region()."""
        return super().register_region(circuit_addr, seed_url, handle)  # type:ignore

    def unregister_region(self, circuit_addr: ADDR_TUPLE) -> None:
        """Disconnect and drop the region at `circuit_addr`; KeyError if unknown."""
        for i, region in enumerate(self.regions):
            if region.circuit_addr == circuit_addr:
                self.regions[i].disconnect()
                del self.regions[i]
                return
        raise KeyError(f"No such region for {circuit_addr!r}")

    def open_circuit(self, circuit_addr: ADDR_TUPLE):
        """Ensure a circuit object exists for a known region.

        Returns True if a circuit is present afterwards (newly created, marked
        not-alive until the UseCircuitCode handshake, or already open), False
        if the address doesn't belong to any registered region.
        """
        for region in self.regions:
            if region.circuit_addr == circuit_addr:
                valid_circuit = False
                if not region.circuit or not region.circuit.is_alive:
                    region.circuit = Circuit(("127.0.0.1", 0), circuit_addr, self.transport)
                    region.circuit.is_alive = False
                    valid_circuit = True
                if region.circuit and region.circuit.is_alive:
                    # Whatever, already open
                    logging.debug("Tried to re-open circuit for %r" % (circuit_addr,))
                    valid_circuit = True
                return valid_circuit
        return False

    def _handle_register_region_message(self, msg: Message):
        # Handle events that inform us about new regions
        sim_addr, sim_handle, sim_seed = None, None, None
        moving_to_region = False
        # Sim is asking us to talk to a neighbour
        if msg.name == "EstablishAgentCommunication":
            ip_split = msg["EventData"]["sim-ip-and-port"].split(":")
            sim_addr = (ip_split[0], int(ip_split[1]))
            sim_seed = msg["EventData"]["seed-capability"]
        # We teleported or cross region, opening comms to new sim
        elif msg.name in ("TeleportFinish", "CrossedRegion"):
            sim_block = msg.get_block("RegionData", msg.get_block("Info"))[0]
            sim_addr = (sim_block["SimIP"], sim_block["SimPort"])
            sim_handle = sim_block["RegionHandle"]
            sim_seed = sim_block["SeedCapability"]
            moving_to_region = True
        # Sim telling us about a neighbour
        # elif msg.name == "EnableSimulator":
        #     sim_block = msg["SimulatorInfo"][0]
        #     sim_addr = (sim_block["IP"], sim_block["Port"])
        #     sim_handle = sim_block["Handle"]
        # TODO: EnableSimulator is a little weird. It creates a region and establishes a
        #  circuit, but with no seed cap. The viewer will send UseCircuitCode and all that,
        #  but it's totally workable to just wait for an EstablishAgentCommunication to do that,
        #  since that's when the region actually shows up. I guess EnableSimulator just gives the
        #  viewer some lead time to set up the circuit before the region is actually shown through
        #  EstablishAgentCommunication? Either way, messing around with regions that don't have seed
        #  caps is annoying, so let's just not do it.
        # Register a region if this message was telling us about a new one
        if sim_addr is not None:
            region = self.register_region(sim_addr, handle=sim_handle, seed_url=sim_seed)
            # We can't actually connect without a sim seed, mind you, when we receive an EnableSimulator
            # we have to wait for the EstablishAgentCommunication to actually connect.
            need_connect = (region.circuit and region.circuit.is_alive) or moving_to_region
            self.open_circuit(sim_addr)
            if need_connect:
                asyncio.create_task(region.connect(main_region=moving_to_region))
            elif moving_to_region:
                # No need to connect, but we do need to complete agent movement.
                asyncio.create_task(region.complete_agent_movement())
class HippoClient(BaseClientSessionManager):
"""A simple client, only connects to one region at a time currently."""
SUPPORTED_CAPS: Set[str] = {
"AbuseCategories",
"AcceptFriendship",
"AcceptGroupInvite",
"AgentPreferences",
"AgentProfile",
"AgentState",
"AttachmentResources",
"AvatarPickerSearch",
"AvatarRenderInfo",
"CharacterProperties",
"ChatSessionRequest",
"CopyInventoryFromNotecard",
"CreateInventoryCategory",
"DeclineFriendship",
"DeclineGroupInvite",
"DispatchRegionInfo",
"DirectDelivery",
"EnvironmentSettings",
"EstateAccess",
"DispatchOpenRegionSettings",
"EstateChangeInfo",
"EventQueueGet",
"ExtEnvironment",
"FetchLib2",
"FetchLibDescendents2",
"FetchInventory2",
"FetchInventoryDescendents2",
"IncrementCOFVersion",
"InventoryAPIv3",
"LibraryAPIv3",
"InterestList",
"InventoryThumbnailUpload",
"GetDisplayNames",
"GetExperiences",
"AgentExperiences",
"FindExperienceByName",
"GetExperienceInfo",
"GetAdminExperiences",
"GetCreatorExperiences",
"ExperiencePreferences",
"GroupExperiences",
"UpdateExperience",
"IsExperienceAdmin",
"IsExperienceContributor",
"RegionExperiences",
"ExperienceQuery",
"GetMesh",
"GetMesh2",
"GetMetadata",
"GetObjectCost",
"GetObjectPhysicsData",
"GetTexture",
"GroupAPIv1",
"GroupMemberData",
"GroupProposalBallot",
"HomeLocation",
"LandResources",
"LSLSyntax",
"MapLayer",
"MapLayerGod",
"MeshUploadFlag",
"NavMeshGenerationStatus",
"NewFileAgentInventory",
"ObjectAnimation",
"ObjectMedia",
"ObjectMediaNavigate",
"ObjectNavMeshProperties",
"ParcelPropertiesUpdate",
"ParcelVoiceInfoRequest",
"ProductInfoRequest",
"ProvisionVoiceAccountRequest",
"ReadOfflineMsgs",
"RegionObjects",
"RemoteParcelRequest",
"RenderMaterials",
"RequestTextureDownload",
"ResourceCostSelected",
"RetrieveNavMeshSrc",
"SearchStatRequest",
"SearchStatTracking",
"SendPostcard",
"SendUserReport",
"SendUserReportWithScreenshot",
"ServerReleaseNotes",
"SetDisplayName",
"SimConsoleAsync",
"SimulatorFeatures",
"StartGroupProposal",
"TerrainNavMeshProperties",
"TextureStats",
"UntrustedSimulatorMessage",
"UpdateAgentInformation",
"UpdateAgentLanguage",
"UpdateAvatarAppearance",
"UpdateGestureAgentInventory",
"UpdateGestureTaskInventory",
"UpdateNotecardAgentInventory",
"UpdateNotecardTaskInventory",
"UpdateScriptAgent",
"UpdateScriptTask",
"UpdateSettingsAgentInventory",
"UpdateSettingsTaskInventory",
"UploadAgentProfileImage",
"UploadBakedTexture",
"UserInfo",
"ViewerAsset",
"ViewerBenefits",
"ViewerMetrics",
"ViewerStartAuction",
"ViewerStats",
}
DEFAULT_OPTIONS = {
"inventory-root",
"inventory-skeleton",
"inventory-lib-root",
"inventory-lib-owner",
"inventory-skel-lib",
"initial-outfit",
"gestures",
"display_names",
"event_notifications",
"classified_categories",
"adult_compliant",
"buddy-list",
"newuser-config",
"ui-config",
"advanced-mode",
"max-agent-groups",
"map-server-url",
"voice-config",
"tutorial_setting",
"login-flags",
"global-textures",
# Not an official option, just so this can be tracked.
"pyogp-client",
}
DEFAULT_LOGIN_URI = "https://login.agni.lindenlab.com/cgi-bin/login.cgi"
    def __init__(self, options: Optional[Set[str]] = None):
        """`options`: login option strings to request; defaults to DEFAULT_OPTIONS."""
        self._username: Optional[str] = None
        self._password: Optional[str] = None
        self._mac = uuid.getnode()  # host MAC, sent in the login payload
        self._options = options if options is not None else self.DEFAULT_OPTIONS
        # Shared HTTP session for login, caps, etc.; closed in aclose().
        self.http_session: Optional[aiohttp.ClientSession] = aiohttp.ClientSession(trust_env=True)
        self.session: Optional[HippoClientSession] = None
        self.settings = ClientSettings()
        self._resend_task: Optional[asyncio.Task] = None
@property
def main_region(self) -> "Optional[HippoClientRegion]":
    """Region the agent is primarily connected to, or None when logged out."""
    session = self.session
    return session.main_region if session else None
@property
def main_circuit(self) -> "Optional[Circuit]":
    """UDP circuit of the main region, or None when not connected."""
    region = self.main_region
    return region.circuit if region else None
@property
def main_caps_client(self) -> "Optional[CapsClient]":
    """Capability HTTP client of the main region, or None when not connected."""
    region = self.main_region
    return region.caps_client if region else None
async def aclose(self):
    """Log out (best-effort), then always release the HTTP session."""
    try:
        self.logout()
    finally:
        http_session = self.http_session
        if http_session:
            await http_session.close()
            self.http_session = None
def __del__(self):
    """Best-effort cleanup so we don't leak resources if someone was lazy."""
    try:
        self.logout()
    finally:
        if self.http_session:
            try:
                # The original passed the bound method itself to create_task(),
                # but create_task() requires a coroutine — it always raised
                # TypeError, silently swallowed below. Call close() so the
                # actual close coroutine gets scheduled.
                asyncio.create_task(self.http_session.close())
            except Exception:
                # No running event loop (e.g. interpreter shutdown);
                # nothing more we can do from a finalizer.
                pass
            self.http_session = None
async def _create_transport(self) -> "Tuple[AbstractUDPTransport, HippoClientProtocol]":
    """Create a UDP datagram endpoint on an ephemeral port for this session.

    :return: the wrapped transport and the protocol driving it.
    """
    # We're always called from within a coroutine, so use the running loop
    # directly instead of the deprecated
    # get_event_loop_policy().get_event_loop() dance.
    loop = asyncio.get_running_loop()
    transport, protocol = await loop.create_datagram_endpoint(
        lambda: HippoClientProtocol(self.session),
        local_addr=('0.0.0.0', 0))
    return SocketUDPTransport(transport), protocol
async def login(
    self,
    username: str,
    password: str,
    login_uri: Optional[str] = None,
    agree_to_tos: bool = False,
    start_location: Union[StartLocation, str, None] = StartLocation.LAST,
    connect: bool = True,
):
    """Log in to the grid via XML-RPC and set up the UDP session.

    :param username: "First Last" name; a single word implies "Resident".
    :param password: plaintext password (hashed before sending).
    :param login_uri: login endpoint; defaults to DEFAULT_LOGIN_URI.
    :param agree_to_tos: whether to flag agreement to the Terms of Service.
    :param start_location: symbolic StartLocation, "uri:..." string, or a
        bare sim name.
    :param connect: whether to immediately connect to the initial region.
    :raises RuntimeError: if a session already exists.
    """
    if self.session:
        raise RuntimeError("Already logged in!")
    if not login_uri:
        login_uri = self.DEFAULT_LOGIN_URI
    if start_location is None:
        start_location = StartLocation.LAST

    # This isn't a symbolic start location and isn't a URI, must be a sim name.
    if start_location not in iter(StartLocation) and not start_location.startswith("uri:"):
        start_location = f"uri:{start_location}&128&128&128"

    # Split only on the first space: a bare username gets the implicit
    # "Resident" last name, and names with more than one space keep the
    # remainder in the last name instead of raising on tuple unpack.
    first_name, _, last_name = username.partition(" ")
    if not last_name:
        last_name = "Resident"

    payload = {
        "address_size": 64,
        "agree_to_tos": int(agree_to_tos),
        "channel": "Hippolyzer",
        "extended_errors": 1,
        "first": first_name,
        "last": last_name,
        "host_id": "",
        "id0": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
        "mac": hashlib.md5(str(self._mac).encode("ascii")).hexdigest(),
        "mfa_hash": "",
        "passwd": "$1$" + hashlib.md5(str(password).encode("ascii")).hexdigest(),
        # TODO: actually get these
        "platform": "lnx",
        "platform_string": "Linux 6.6",
        # TODO: What is this?
        "platform_version": "2.38.0",
        "read_critical": 0,
        "start": str(start_location),
        "token": "",
        "version": version("hippolyzer"),
        "options": list(self._options),
    }

    async with self.http_session.post(
        login_uri,
        data=xmlrpc.client.dumps((payload,), "login_to_simulator"),
        headers={"Content-Type": "text/xml", "User-Agent": self.settings.USER_AGENT},
        ssl=self.settings.SSL_VERIFY,
    ) as resp:
        resp.raise_for_status()
        login_data = xmlrpc.client.loads((await resp.read()).decode("utf8"))[0][0]

    self.session = HippoClientSession.from_login_data(login_data, self)
    self.session.transport, self.session.protocol = await self._create_transport()
    self._resend_task = asyncio.create_task(self._attempt_resends())
    self.session.message_handler.subscribe("AgentDataUpdate", self._handle_agent_data_update)
    self.session.message_handler.subscribe("AgentGroupDataUpdate", self._handle_agent_group_data_update)
    # Login data describes the initial region; open its circuit first.
    assert self.session.open_circuit(self.session.regions[-1].circuit_addr)

    if connect:
        region = self.session.regions[-1]
        await region.connect(main_region=True)
def logout(self):
    """Send a best-effort LogoutRequest and tear down all region circuits."""
    if not self.session:
        return
    # Stop the background resend loop before tearing anything down.
    resend_task, self._resend_task = self._resend_task, None
    if resend_task:
        resend_task.cancel()
    circuit = self.main_circuit
    if circuit and circuit.is_alive:
        # Don't need to send reliably, there's a good chance the server won't ACK anyway.
        circuit.send(
            Message(
                "LogoutRequest",
                Block("AgentData", AgentID=self.session.agent_id, SessionID=self.session.id),
            )
        )
    old_session, self.session = self.session, None
    for region in old_session.regions:
        region.disconnect()
    old_session.transport.close()
def send_chat(self, message: Union[bytes, str], channel: int = 0, chat_type=ChatType.NORMAL) -> asyncio.Future:
    """Say something in chat, returning a Future for the reliable send."""
    session = self.session
    chat_msg = Message(
        "ChatFromViewer",
        Block("AgentData", SessionID=session.id, AgentID=session.agent_id),
        Block("ChatData", Message=message, Channel=channel, Type=chat_type),
    )
    return self.main_circuit.send_reliable(chat_msg)
def teleport(self, region_handle: int, local_pos=Vector3(0, 0, 0)) -> asyncio.Future:
    """Synchronously requests a teleport, returning a Future for teleport completion"""
    teleport_fut = asyncio.Future()
    # Send request synchronously, await asynchronously.
    send_fut = self.main_circuit.send_reliable(
        Message(
            'TeleportLocationRequest',
            Block('AgentData', AgentID=self.session.agent_id, SessionID=self.session.id),
            Block('Info', RegionHandle=region_handle, Position=local_pos, fill_missing=True),
        )
    )

    async def _handle_teleport():
        # Subscribe first, we may receive an event before we receive the packet ACK.
        with self.session.message_handler.subscribe_async(
            ("TeleportLocal", "TeleportFailed", "TeleportFinish"),
        ) as get_tp_done_msg:
            try:
                await send_fut
            except Exception as e:
                # Pass along error if we failed to send reliably.
                teleport_fut.set_exception(e)
                return
            # Wait for a message signalling that the teleport finished,
            # one way or another.
            msg = await get_tp_done_msg()
            if msg.name == "TeleportFailed":
                teleport_fut.set_exception(RuntimeError("Failed to teleport"))
            elif msg.name == "TeleportLocal":
                # Within the sim, nothing else we need to do
                teleport_fut.set_result(None)
            elif msg.name == "TeleportFinish":
                # Non-local TP, wait until we receive the AgentMovementComplete to
                # set the finished signal.
                # Region should be registered by this point, wait for it to connect
                try:
                    # just fail if it takes longer than 30 seconds for the handshake to complete
                    await asyncio.wait_for(self.session.region_by_handle(region_handle).connected, 30)
                except Exception as e:
                    teleport_fut.set_exception(e)
                    return
                teleport_fut.set_result(None)

    asyncio.create_task(_handle_teleport())
    return teleport_fut
async def _attempt_resends(self):
    """Periodically resend unACKed reliable packets on every live circuit."""
    while self.session is not None:
        for region in self.session.regions:
            circuit = region.circuit
            if circuit.is_alive:
                circuit.resend_unacked()
        await asyncio.sleep(0.5)
def _handle_agent_data_update(self, msg: Message):
    """Track the agent's currently active group from AgentDataUpdate."""
    agent_block = msg["AgentData"]
    self.session.active_group = agent_block["ActiveGroupID"]
def _handle_agent_group_data_update(self, msg: Message):
    """Replace our view of the agent's group memberships in-place."""
    groups = self.session.groups
    groups.clear()
    groups.update(block["GroupID"] for block in msg["GroupData"])

View File

@@ -2,14 +2,13 @@ from __future__ import annotations
import gzip
import logging
import secrets
from pathlib import Path
from typing import Union, List, Tuple, Set
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryModel, InventoryCategory, InventoryItem
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.templates import AssetType, FolderType
from hippolyzer.lib.client.state import BaseClientSession
@@ -33,8 +32,8 @@ class InventoryManager:
# Don't use the version from the skeleton, this flags the inventory as needing
# completion from the inventory cache. This matches indra's behavior.
version=InventoryCategory.VERSION_NONE,
type="category",
pref_type=skel_cat.get("type_default", -1),
type=AssetType.CATEGORY,
pref_type=FolderType(skel_cat.get("type_default", FolderType.NONE)),
owner_id=self._session.agent_id,
))
@@ -87,6 +86,7 @@ class InventoryManager:
self.model.add(cached_item)
def _parse_cache(self, path: Union[str, Path]) -> Tuple[List[InventoryCategory], List[InventoryItem]]:
"""Warning, may be incredibly slow due to llsd.parse_notation() behavior"""
categories: List[InventoryCategory] = []
items: List[InventoryItem] = []
# Parse our cached items and categories out of the compressed inventory cache
@@ -99,7 +99,7 @@ class InventoryManager:
if first_line:
# First line is the file header
first_line = False
if node_llsd['inv_cache_version'] != 2:
if node_llsd['inv_cache_version'] not in (2, 3):
raise ValueError(f"Unknown cache version: {node_llsd!r}")
continue
@@ -112,81 +112,3 @@ class InventoryManager:
else:
LOG.warning(f"Unknown node type in inv cache: {node_llsd!r}")
return categories, items
# Thankfully we have 9 billion different ways to represent inventory data.
def ais_item_to_inventory_data(ais_item: dict) -> Block:
return Block(
"InventoryData",
ItemID=ais_item["item_id"],
FolderID=ais_item["parent_id"],
CallbackID=0,
CreatorID=ais_item["permissions"]["creator_id"],
OwnerID=ais_item["permissions"]["owner_id"],
GroupID=ais_item["permissions"]["group_id"],
BaseMask=ais_item["permissions"]["base_mask"],
OwnerMask=ais_item["permissions"]["owner_mask"],
GroupMask=ais_item["permissions"]["group_mask"],
EveryoneMask=ais_item["permissions"]["everyone_mask"],
NextOwnerMask=ais_item["permissions"]["next_owner_mask"],
GroupOwned=0,
AssetID=ais_item["asset_id"],
Type=ais_item["type"],
InvType=ais_item["inv_type"],
Flags=ais_item["flags"],
SaleType=ais_item["sale_info"]["sale_type"],
SalePrice=ais_item["sale_info"]["sale_price"],
Name=ais_item["name"],
Description=ais_item["desc"],
CreationDate=ais_item["created_at"],
# Meaningless here
CRC=secrets.randbits(32),
)
def inventory_data_to_ais_item(inventory_data: Block) -> dict:
return dict(
item_id=inventory_data["ItemID"],
parent_id=inventory_data["ParentID"],
permissions=dict(
creator_id=inventory_data["CreatorID"],
owner_id=inventory_data["OwnerID"],
group_id=inventory_data["GroupID"],
base_mask=inventory_data["BaseMask"],
owner_mask=inventory_data["OwnerMask"],
group_mask=inventory_data["GroupMask"],
everyone_mask=inventory_data["EveryoneMask"],
next_owner_mask=inventory_data["NextOwnerMask"],
),
asset_id=inventory_data["AssetID"],
type=inventory_data["Type"],
inv_type=inventory_data["InvType"],
flags=inventory_data["Flags"],
sale_info=dict(
sale_type=inventory_data["SaleType"],
sale_price=inventory_data["SalePrice"],
),
name=inventory_data["Name"],
description=inventory_data["Description"],
creation_at=inventory_data["CreationDate"],
)
def ais_folder_to_inventory_data(ais_folder: dict) -> Block:
return Block(
"FolderData",
FolderID=ais_folder["cat_id"],
ParentID=ais_folder["parent_id"],
CallbackID=0,
Type=ais_folder["preferred_type"],
Name=ais_folder["name"],
)
def inventory_data_to_ais_folder(inventory_data: Block) -> dict:
return dict(
cat_id=inventory_data["FolderID"],
parent_id=inventory_data["ParentID"],
preferred_type=inventory_data["Type"],
name=inventory_data["Name"],
)

View File

@@ -27,16 +27,20 @@ from hippolyzer.lib.base.objects import (
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer
from hippolyzer.lib.base import llsd
if TYPE_CHECKING:
from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession
LOG = logging.getLogger(__name__)
OBJECT_OR_LOCAL = Union[Object, int]
MATERIAL_MAP_TYPE = Dict[UUID, dict]
class ObjectUpdateType(enum.IntEnum):
OBJECT_UPDATE = enum.auto()
UPDATE = enum.auto()
PROPERTIES = enum.auto()
FAMILY = enum.auto()
COSTS = enum.auto()
@@ -48,12 +52,13 @@ class ClientObjectManager:
Object manager for a specific region
"""
__slots__ = ("_region", "_world_objects", "state")
__slots__ = ("_region", "_world_objects", "state", "__weakref__", "_requesting_all_mats_lock")
def __init__(self, region: BaseClientRegion):
self._region: BaseClientRegion = proxify(region)
self._world_objects: ClientWorldObjectManager = proxify(region.session().objects)
self.state: RegionObjectsState = RegionObjectsState()
self._requesting_all_mats_lock = asyncio.Lock()
def __len__(self):
return len(self.state.localid_lookup)
@@ -71,7 +76,7 @@ class ClientObjectManager:
if self._region.handle is not None:
# We're tracked by the world object manager, tell it to untrack
# any objects that we owned
self._world_objects.clear_region_objects(self._region.handle)
self._world_objects.untrack_region_objects(self._region.handle)
def lookup_localid(self, localid: int) -> Optional[Object]:
return self.state.lookup_localid(localid)
@@ -161,9 +166,56 @@ class ClientObjectManager:
futures = []
for local_id in local_ids:
futures.append(self.state.register_future(local_id, ObjectUpdateType.OBJECT_UPDATE))
futures.append(self.state.register_future(local_id, ObjectUpdateType.UPDATE))
return futures
async def request_all_materials(self) -> MATERIAL_MAP_TYPE:
"""
Request all materials within the sim
Sigh, yes, this is best practice per indra :(
"""
if self._requesting_all_mats_lock.locked():
# We're already requesting all materials, wait until the lock is free
# and just return what was returned.
async with self._requesting_all_mats_lock:
return self.state.materials
async with self._requesting_all_mats_lock:
async with self._region.caps_client.get("RenderMaterials") as resp:
resp.raise_for_status()
# Clear out all previous materials, this is a complete response.
self.state.materials.clear()
self._process_materials_response(await resp.read())
return self.state.materials
async def request_materials(self, material_ids: Sequence[UUID]) -> MATERIAL_MAP_TYPE:
if self._requesting_all_mats_lock.locked():
# Just wait for the in-flight request for all materials to complete
# if we have one in flight.
async with self._requesting_all_mats_lock:
# Wait for the lock to be released
pass
not_found = set(x for x in material_ids if (x not in self.state.materials))
if not_found:
# Request any materials we don't already have, if there were any
data = {"Zipped": llsd.zip_llsd([x.bytes for x in material_ids])}
async with self._region.caps_client.post("RenderMaterials", data=data) as resp:
resp.raise_for_status()
self._process_materials_response(await resp.read())
# build up a dict of just the requested mats
mats = {}
for mat_id in material_ids:
mats[mat_id] = self.state.materials[mat_id]
return mats
def _process_materials_response(self, response: bytes):
entries = llsd.unzip_llsd(llsd.parse_xml(response)["Zipped"])
for entry in entries:
self.state.materials[UUID(bytes=entry["ID"])] = entry["Material"]
class ObjectEvent:
__slots__ = ("object", "updated", "update_type")
@@ -238,12 +290,14 @@ class ClientWorldObjectManager:
if self._get_region_manager(handle) is None:
self._region_managers[handle] = proxify(self._session.region_by_handle(handle).objects)
def clear_region_objects(self, handle: int):
def untrack_region_objects(self, handle: int):
"""Handle signal that a region object manager was just cleared"""
# Make sure they're gone from our lookup table
for obj in tuple(self._fullid_lookup.values()):
if obj.RegionHandle == handle:
del self._fullid_lookup[obj.FullID]
if handle in self._region_managers:
del self._region_managers[handle]
self._rebuild_avatar_objects()
def _get_region_manager(self, handle: int) -> Optional[ClientObjectManager]:
@@ -288,9 +342,9 @@ class ClientWorldObjectManager:
obj = obj.Parent
def clear(self):
for handle in tuple(self._region_managers.keys()):
self.untrack_region_objects(handle)
self._avatars.clear()
for region_mgr in self._region_managers.values():
region_mgr.clear()
if self._fullid_lookup:
LOG.warning(f"Had {len(self._fullid_lookup)} objects not tied to a region manager!")
self._fullid_lookup.clear()
@@ -357,7 +411,7 @@ class ClientWorldObjectManager:
if obj.PCode == PCode.AVATAR:
self._avatar_objects[obj.FullID] = obj
self._rebuild_avatar_objects()
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.OBJECT_UPDATE, msg)
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), ObjectUpdateType.UPDATE, msg)
def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
obj = region_state.lookup_localid(local_id)
@@ -409,7 +463,7 @@ class ClientWorldObjectManager:
# our view of the world then we want to move it to this region.
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
else:
if region_state is None:
continue
@@ -433,7 +487,7 @@ class ClientWorldObjectManager:
# Need the Object as context because decoding state requires PCode.
state_deserializer = ObjectStateSerializer.deserialize
object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
else:
if region_state:
region_state.missing_locals.add(object_data["LocalID"])
@@ -461,7 +515,7 @@ class ClientWorldObjectManager:
self._update_existing_object(obj, {
"UpdateFlags": update_flags,
"RegionHandle": handle,
}, ObjectUpdateType.OBJECT_UPDATE, msg)
}, ObjectUpdateType.UPDATE, msg)
continue
cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
@@ -500,7 +554,7 @@ class ClientWorldObjectManager:
LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
obj = self.lookup_fullid(object_data["FullID"])
if obj:
self._update_existing_object(obj, object_data, ObjectUpdateType.OBJECT_UPDATE, msg)
self._update_existing_object(obj, object_data, ObjectUpdateType.UPDATE, msg)
else:
if region_state is None:
continue
@@ -650,13 +704,14 @@ class RegionObjectsState:
__slots__ = (
"handle", "missing_locals", "_orphans", "localid_lookup", "coarse_locations",
"_object_futures"
"_object_futures", "materials"
)
def __init__(self):
self.missing_locals = set()
self.localid_lookup: Dict[int, Object] = {}
self.coarse_locations: Dict[UUID, Vector3] = {}
self.materials: MATERIAL_MAP_TYPE = {}
self._object_futures: Dict[Tuple[int, int], List[asyncio.Future]] = {}
self._orphans: Dict[int, List[int]] = collections.defaultdict(list)
@@ -669,6 +724,7 @@ class RegionObjectsState:
self.coarse_locations.clear()
self.missing_locals.clear()
self.localid_lookup.clear()
self.materials.clear()
def lookup_localid(self, localid: int) -> Optional[Object]:
return self.localid_lookup.get(localid)
@@ -762,7 +818,8 @@ class RegionObjectsState:
def handle_object_reparented(self, obj: Object, old_parent_id: int):
"""Recreate any links to ancestor Objects for obj due to parent changes"""
self._unparent_object(obj, old_parent_id)
self._parent_object(obj, insert_at_head=True)
# Avatars get sent to the _end_ of the child list when reparented
self._parent_object(obj, insert_at_head=obj.PCode != PCode.AVATAR)
def collect_orphans(self, parent_localid: int) -> Sequence[int]:
"""Take ownership of any orphan IDs belonging to parent_localid"""

View File

@@ -0,0 +1,211 @@
import asyncio
import dataclasses
from typing import *
import numpy as np
from hippolyzer.lib.base.datatypes import UUID, Vector3, Vector2
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ParcelGridFlags, ParcelFlags
from hippolyzer.lib.client.state import BaseClientRegion
@dataclasses.dataclass
class Parcel:
local_id: int
name: str
flags: ParcelFlags
group_id: UUID
# TODO: More properties
class ParcelManager:
# We expect to receive this number of ParcelOverlay messages
NUM_CHUNKS = 4
# No, we don't support varregion or whatever.
REGION_SIZE = 256
# Basically, the minimum parcel size is 4 on either axis so each "point" in the
# ParcelOverlay represents an area this size
GRID_STEP = 4
GRIDS_PER_EDGE = REGION_SIZE // GRID_STEP
def __init__(self, region: BaseClientRegion):
# dimensions are south to north, west to east
self.overlay = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint8)
# 1-indexed parcel list index
self.parcel_indices = np.zeros((self.GRIDS_PER_EDGE, self.GRIDS_PER_EDGE), dtype=np.uint16)
self.parcels: List[Optional[Parcel]] = []
self.overlay_chunks: List[Optional[bytes]] = [None] * self.NUM_CHUNKS
self.overlay_complete = asyncio.Event()
self.parcels_downloaded = asyncio.Event()
self._parcels_dirty: bool = True
self._region = region
self._next_seq = 1
self._region.message_handler.subscribe("ParcelOverlay", self._handle_parcel_overlay)
def _handle_parcel_overlay(self, message: Message):
self.add_overlay_chunk(message["ParcelData"]["Data"], message["ParcelData"]["SequenceID"])
def add_overlay_chunk(self, chunk: bytes, chunk_num: int) -> bool:
self.overlay_chunks[chunk_num] = chunk
# Still have some pending chunks, don't try to parse this yet
if not all(self.overlay_chunks):
return False
new_overlay_data = b"".join(self.overlay_chunks)
self.overlay_chunks = [None] * self.NUM_CHUNKS
self._parcels_dirty = False
if new_overlay_data != self.overlay.data[:]:
# If the raw data doesn't match, then we have to parse again
self.overlay.data = new_overlay_data
self._parse_overlay()
# We could optimize this by just marking specific squares dirty
# if the parcel indices have changed between parses, but I don't care
# to do that.
self._parcels_dirty = True
self.parcels_downloaded.clear()
if not self.overlay_complete.is_set():
self.overlay_complete.set()
return True
@classmethod
def _pos_to_grid_coords(cls, pos: Vector3) -> Tuple[int, int]:
return round(pos.Y // cls.GRID_STEP), round(pos.X // cls.GRID_STEP)
def _parse_overlay(self):
# Zero out all parcel indices
self.parcel_indices[:, :] = 0
next_parcel_idx = 1
for y in range(0, self.GRIDS_PER_EDGE):
for x in range(0, self.GRIDS_PER_EDGE):
# We already have a parcel index for this grid, continue
if self.parcel_indices[y, x]:
continue
# Fill all adjacent grids with this parcel index
self._flood_fill_parcel_index(y, x, next_parcel_idx)
# SL doesn't allow disjoint grids to be part of the same parcel, so
# whatever grid we find next without a parcel index must be a new parcel
next_parcel_idx += 1
# Should have found at least one parcel
assert next_parcel_idx >= 2
# The number of parcels changed, so we can't reuse the existing Parcel
# objects: it's unlikely that only the boundaries moved while the set of
# parcels stayed the same.
if len(self.parcels) != next_parcel_idx - 1:
# We don't know about any of these parcels yet, fill with none
self.parcels = [None] * (next_parcel_idx - 1)
def _flood_fill_parcel_index(self, start_y, start_x, parcel_idx):
"""Flood fill all neighboring grids with the parcel index, being mindful of parcel boundaries"""
# We know the start grid is assigned to this parcel index
self.parcel_indices[start_y, start_x] = parcel_idx
# Queue of grids to test the neighbors of, start with the start grid.
neighbor_test_queue: List[Tuple[int, int]] = [(start_y, start_x)]
while neighbor_test_queue:
to_test = neighbor_test_queue.pop(0)
test_grid = self.overlay[to_test]
for direction in ((-1, 0), (1, 0), (0, -1), (0, 1)):
new_pos = to_test[0] + direction[0], to_test[1] + direction[1]
if any(x < 0 or x >= self.GRIDS_PER_EDGE for x in new_pos):
# Outside bounds
continue
if self.parcel_indices[new_pos]:
# Already set, skip
continue
if direction[0] == -1 and test_grid & ParcelGridFlags.SOUTH_LINE:
# Test grid is already on a south line, can't go south.
continue
if direction[1] == -1 and test_grid & ParcelGridFlags.WEST_LINE:
# Test grid is already on a west line, can't go west.
continue
grid = self.overlay[new_pos]
if direction[0] == 1 and grid & ParcelGridFlags.SOUTH_LINE:
# Hit a south line going north, this is outside the current parcel
continue
if direction[1] == 1 and grid & ParcelGridFlags.WEST_LINE:
# Hit a west line going east, this is outside the current parcel
continue
# This grid is within the current parcel, set the parcel index
self.parcel_indices[new_pos] = parcel_idx
# Append the grid to the neighbour testing queue
neighbor_test_queue.append(new_pos)
async def request_dirty_parcels(self) -> Tuple[Parcel, ...]:
if self._parcels_dirty:
return await self.request_all_parcels()
return tuple(self.parcels)
async def request_all_parcels(self) -> Tuple[Parcel, ...]:
await self.overlay_complete.wait()
# Because of how we build up the parcel index map, it's safe for us to
# do this instead of keeping track of seen IDs in a set or similar
last_seen_parcel_index = 0
futs = []
for y in range(0, self.GRIDS_PER_EDGE):
for x in range(0, self.GRIDS_PER_EDGE):
parcel_index = self.parcel_indices[y, x]
assert parcel_index != 0
if parcel_index <= last_seen_parcel_index:
continue
assert parcel_index == last_seen_parcel_index + 1
last_seen_parcel_index = parcel_index
# Request a position within the parcel
futs.append(self.request_parcel_properties(
Vector2(x * self.GRID_STEP + 1.0, y * self.GRID_STEP + 1.0)
))
# Wait for all parcel properties to come in
await asyncio.gather(*futs)
self.parcels_downloaded.set()
self._parcels_dirty = False
return tuple(self.parcels)
async def request_parcel_properties(self, pos: Vector2) -> Parcel:
await self.overlay_complete.wait()
seq_id = self._next_seq
# Register a wait on a ParcelProperties matching this seq
parcel_props_fut = self._region.message_handler.wait_for(
("ParcelProperties",),
predicate=lambda msg: msg["ParcelData"]["SequenceID"] == seq_id,
timeout=10.0,
)
# We don't care about when we receive an ack, we only care about when we receive the parcel props
_ = self._region.circuit.send_reliable(Message(
"ParcelPropertiesRequest",
Block("AgentData", AgentID=self._region.session().agent_id, SessionID=self._region.session().id),
Block(
"ParcelData",
SequenceID=seq_id,
West=pos.X,
East=pos.X,
North=pos.Y,
South=pos.Y,
# What does this even mean?
SnapSelection=0,
),
))
self._next_seq += 1
parcel_props = await parcel_props_fut
data_block = parcel_props["ParcelData"][0]
# Parcel indices are one-indexed, convert to zero-indexed.
parcel_idx = self.parcel_indices[self._pos_to_grid_coords(pos)] - 1
assert len(self.parcels) > parcel_idx
self.parcels[parcel_idx] = parcel = Parcel(
local_id=data_block["LocalID"],
name=data_block["Name"],
flags=ParcelFlags(data_block["ParcelFlags"]),
group_id=data_block["GroupID"],
# Parcel UUID isn't in this response :/
)
return parcel

View File

@@ -0,0 +1,51 @@
from typing import NamedTuple, List, Sequence
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import ChatType
class RLVCommand(NamedTuple):
    """A single parsed RLV command."""
    behaviour: str
    param: str
    options: List[str]


class RLVParser:
    """Parses and formats RLV command strings sent over llOwnerSay chat."""

    @staticmethod
    def is_rlv_message(msg: "Message") -> bool:
        chat: str = msg["ChatData"]["Message"]
        chat_type: int = msg["ChatData"]["ChatType"]
        # Wrap in bool() so an empty chat string yields False rather than ""
        # (the declared return type is bool).
        return bool(chat) and chat.startswith("@") and chat_type == ChatType.OWNER

    @staticmethod
    def parse_chat(chat: str) -> List[RLVCommand]:
        """Parse an "@"-prefixed chat line into a list of RLVCommands."""
        assert chat.startswith("@")
        chat = chat.lstrip("@")
        commands = []
        # RLV-style command, `<cmd>(:<option1>;<option2>)?(=<param>)?`
        # Roughly (?<behaviour>[^:=]+)(:(?<option>[^=]*))?=(?<param>\w+)
        for command_str in chat.split(","):
            if not command_str:
                continue
            options, _, param = command_str.partition("=")
            behaviour, _, options = options.partition(":")
            # TODO: Not always correct, commands can specify their own parsing
            #  for the option field, maybe special-case these?
            options = options.split(";") if options else []
            commands.append(RLVCommand(behaviour, param, options))
        return commands

    @staticmethod
    def format_chat(commands: Sequence[RLVCommand]) -> str:
        """Format RLVCommands back into a single "@"-prefixed chat line."""
        assert commands
        pieces = []
        for command in commands:
            piece = command.behaviour
            if command.options:
                piece += ":" + ";".join(command.options)
            if command.param:
                piece += "=" + command.param
            pieces.append(piece)
        return "@" + ",".join(pieces)

View File

@@ -4,17 +4,21 @@ Base classes for common session-related state shared between clients and proxies
from __future__ import annotations
import abc
import logging
import weakref
from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import ConnectionHolder
import multidict
from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.message.circuit import ConnectionHolder, Circuit
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.base.objects import handle_to_global_pos
if TYPE_CHECKING:
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager
class BaseClientRegion(ConnectionHolder, abc.ABC):
@@ -24,6 +28,53 @@ class BaseClientRegion(ConnectionHolder, abc.ABC):
session: Callable[[], BaseClientSession]
objects: ClientObjectManager
caps_client: CapsClient
cap_urls: multidict.MultiDict[str]
circuit_addr: ADDR_TUPLE
circuit: Optional[Circuit]
_name: Optional[str]
def __init__(self):
self._name = None
self.circuit = None
@abc.abstractmethod
def update_caps(self, caps: Mapping[str, str]) -> None:
pass
@property
def name(self):
if self._name:
return self._name
return "Pending %r" % (self.circuit_addr,)
@name.setter
def name(self, val):
self._name = val
@property
def global_pos(self) -> Vector3:
if self.handle is None:
raise ValueError("Can't determine global region position without handle")
return handle_to_global_pos(self.handle)
@property
def is_alive(self):
if not self.circuit:
return False
return self.circuit.is_alive
def mark_dead(self):
logging.info("Marking %r dead" % self)
if self.circuit:
self.circuit.is_alive = False
self.objects.clear()
def __repr__(self):
return "<%s %s (%r)>" % (self.__class__.__name__, self.name, self.handle)
class BaseClientSessionManager:
pass
class BaseClientSession(abc.ABC):
@@ -31,9 +82,104 @@ class BaseClientSession(abc.ABC):
id: UUID
agent_id: UUID
secure_session_id: UUID
active_group: UUID
groups: Set[UUID]
message_handler: MessageHandler[Message, str]
regions: Sequence[BaseClientRegion]
regions: MutableSequence[BaseClientRegion]
region_by_handle: Callable[[int], Optional[BaseClientRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
objects: ClientWorldObjectManager
login_data: Dict[str, Any]
REGION_CLS = Type[BaseClientRegion]
def __init__(self, id, secure_session_id, agent_id, circuit_code,
session_manager: Optional[BaseClientSessionManager], login_data=None):
self.login_data = login_data or {}
self.pending = True
self.id: UUID = id
self.secure_session_id: UUID = secure_session_id
self.agent_id: UUID = agent_id
self.circuit_code = circuit_code
self.global_caps = {}
self.session_manager = session_manager
self.active_group: UUID = UUID.ZERO
self.groups: Set[UUID] = set()
self.regions = []
self._main_region = None
self.message_handler: MessageHandler[Message, str] = MessageHandler()
super().__init__()
@classmethod
def from_login_data(cls, login_data, session_manager):
sess = cls(
id=UUID(login_data["session_id"]),
secure_session_id=UUID(login_data["secure_session_id"]),
agent_id=UUID(login_data["agent_id"]),
circuit_code=int(login_data["circuit_code"]),
session_manager=session_manager,
login_data=login_data,
)
appearance_service = login_data.get("agent_appearance_service")
map_image_service = login_data.get("map-server-url")
if appearance_service:
sess.global_caps["AppearanceService"] = appearance_service
if map_image_service:
sess.global_caps["MapImageService"] = map_image_service
# Login data also has details about the initial sim
sess.register_region(
circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
handle=(login_data["region_x"] << 32) | login_data["region_y"],
seed_url=login_data["seed_capability"],
)
return sess
def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None, seed_url: Optional[str] = None,
handle: Optional[int] = None) -> BaseClientRegion:
if not any((circuit_addr, seed_url)):
raise ValueError("One of circuit_addr and seed_url must be defined!")
for region in self.regions:
if region.circuit_addr == circuit_addr:
if seed_url and region.cap_urls.get("Seed") != seed_url:
region.update_caps({"Seed": seed_url})
if handle:
region.handle = handle
return region
if seed_url and region.cap_urls.get("Seed") == seed_url:
return region
if not circuit_addr:
raise ValueError("Can't create region without circuit addr!")
logging.info("Registering region for %r" % (circuit_addr,))
region = self.REGION_CLS(circuit_addr, seed_url, self, handle=handle)
self.regions.append(region)
return region
@property
def main_region(self) -> Optional[BaseClientRegion]:
    """The region the agent is currently "in", if it's still registered."""
    ref = self._main_region
    region = ref() if ref is not None else None
    if region is not None and region in self.regions:
        return region
    return None
@main_region.setter
def main_region(self, val: BaseClientRegion):
    # Held as a weakref so being "main" doesn't keep a dead region alive.
    self._main_region = weakref.ref(val)
def transaction_to_assetid(self, transaction_id: UUID):
    # Asset IDs for uploads are deterministically derived from the upload's
    # transaction ID combined with this session's secure session ID.
    return UUID.combine(transaction_id, self.secure_session_id)
def region_by_circuit_addr(self, circuit_addr) -> Optional[BaseClientRegion]:
    """Return the region with a live circuit at `circuit_addr`, or None."""
    candidates = (
        region for region in self.regions
        if region.circuit_addr == circuit_addr and region.circuit
    )
    return next(candidates, None)
def region_by_handle(self, handle: int) -> Optional[BaseClientRegion]:
    """Return the region with the given region handle, or None if unknown."""
    return next((r for r in self.regions if r.handle == handle), None)
def __repr__(self):
    # Debug representation: concrete class name plus the session ID.
    return "<%s %s>" % (self.__class__.__name__, self.id)

View File

@@ -188,7 +188,7 @@ class BaseAddon(metaclass=MetaBaseAddon):
pass
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
cmd: str, options: List[str], param: str):
behaviour: str, options: List[str], param: str):
pass
def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,

View File

@@ -21,6 +21,7 @@ from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import get_mtime
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.client.rlv import RLVParser
from hippolyzer.lib.proxy import addon_ctx
from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope, TaskScheduler
@@ -174,7 +175,10 @@ class AddonManager:
def load_addon_from_path(cls, path, reload=False, raise_exceptions=True):
path = pathlib.Path(path).absolute()
mod_name = "hippolyzer.user_addon_%s" % path.stem
cls.BASE_ADDON_SPECS.append(importlib.util.spec_from_file_location(mod_name, path))
spec = importlib.util.spec_from_file_location(mod_name, path)
if not spec:
raise ValueError(f"Unable to load {path}")
cls.BASE_ADDON_SPECS.append(spec)
addon_dir = os.path.realpath(pathlib.Path(path).parent.absolute())
if addon_dir not in sys.path:
@@ -345,7 +349,7 @@ class AddonManager:
cls.SCHEDULER.kill_matching_tasks(lifetime_mask=TaskLifeScope.ADDON, creator=addon)
@classmethod
def _call_all_addon_hooks(cls, hook_name, *args, call_async=False, **kwargs):
def _call_all_addon_hooks(cls, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
for module in cls.FRESH_ADDON_MODULES.values():
if not module:
continue
@@ -388,7 +392,7 @@ class AddonManager:
return cls._try_call_hook(module, hook_name, *args, call_async=call_async, **kwargs)
@classmethod
def _try_call_hook(cls, addon, hook_name, *args, call_async=False, **kwargs):
def _try_call_hook(cls, addon, hook_name, *args, call_async=False, **kwargs) -> Optional[bool]:
if cls._SUBPROCESS:
return
@@ -449,32 +453,30 @@ class AddonManager:
raise
return True
if message.name == "ChatFromSimulator" and "ChatData" in message:
chat: str = message["ChatData"]["Message"]
chat_type: int = message["ChatData"]["ChatType"]
# RLV-style OwnerSay?
if chat and chat.startswith("@") and chat_type == 8:
if RLVParser.is_rlv_message(message):
# RLV allows putting multiple commands into one message, blindly splitting on ",".
chat = chat.lstrip("@")
all_cmds_handled = True
for command_str in chat.split(","):
if not command_str:
continue
# RLV-style command, `@<cmd>(:<option1>;<option2>)?(=<param>)?`
options, _, param = command_str.partition("=")
cmd, _, options = options.partition(":")
# TODO: Not always correct, commands can specify their own parsing for the option field
options = options.split(";") if options else []
source = message["ChatData"]["SourceID"]
chat: str = message["ChatData"]["Message"]
source = message["ChatData"]["SourceID"]
for command in RLVParser.parse_chat(chat):
try:
with addon_ctx.push(session, region):
handled = cls._call_all_addon_hooks("handle_rlv_command",
session, region, source, cmd, options, param)
handled = cls._call_all_addon_hooks(
"handle_rlv_command",
session,
region,
source,
command.behaviour,
command.options,
command.param,
)
if handled:
region.circuit.drop_message(message)
else:
all_cmds_handled = False
except:
LOG.exception(f"Failed while handling command {command_str!r}")
LOG.exception(f"Failed while handling command {command!r}")
all_cmds_handled = False
if not cls._SWALLOW_ADDON_EXCEPTIONS:
raise

View File

@@ -1,8 +1,8 @@
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.inventory import InventoryItem
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.client.asset_uploader import AssetUploader
from hippolyzer.lib.client.inventory_manager import ais_item_to_inventory_data
class ProxyAssetUploader(AssetUploader):
@@ -22,7 +22,7 @@ class ProxyAssetUploader(AssetUploader):
]
}
async with self._region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
ais_item = (await resp.read_llsd())["items"][0]
ais_item = InventoryItem.from_llsd((await resp.read_llsd())["items"][0], flavor="ais")
# Got it, ship it off to the viewer
message = Message(
@@ -33,7 +33,7 @@ class ProxyAssetUploader(AssetUploader):
SimApproved=1,
TransactionID=UUID.random(),
),
ais_item_to_inventory_data(ais_item),
ais_item.to_inventory_data(),
direction=Direction.IN
)
self._region.circuit.send(message)

View File

@@ -224,6 +224,11 @@ class MITMProxyEventManager:
status = flow.response.status_code
cap_data: Optional[CapData] = flow.metadata["cap_data"]
if not cap_data:
# Make sure there's always cap data attached to the flow, even if it's
# empty. Some consumers expect it to always be there, when it might not
# be if the proxy barfed while handling the request.
cap_data = flow.metadata["cap_data"] = CapData()
if status == 200 and cap_data and cap_data.cap_name == "FirestormBridge":
# Fake FirestormBridge cap based on a bridge-like response coming from

View File

@@ -236,7 +236,7 @@ class SLMITMMaster(mitmproxy.master.Master):
)
def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: no cover
def create_http_proxy(host, port, flow_context: HTTPFlowContext, ssl_insecure=False): # pragma: no cover
opts = mitmproxy.options.Options()
master = SLMITMMaster(flow_context, opts)
@@ -251,10 +251,6 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext): # pragma: n
ssl_verify_upstream_trusted_ca=ca_bundle,
listen_host=host,
listen_port=port,
ssl_insecure=ssl_insecure,
)
return master
def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext): # pragma: no cover
master = create_proxy_master(bind_host, port, flow_context)
return master

View File

@@ -1,3 +1,4 @@
import asyncio
import datetime as dt
from hippolyzer.lib.base.helpers import get_mtime
@@ -11,6 +12,8 @@ class ProxyInventoryManager(InventoryManager):
super().__init__(session)
newest_cache = None
newest_timestamp = dt.datetime(year=1970, month=1, day=1, tzinfo=dt.timezone.utc)
# So consumers know when the inventory should be complete
self.cache_loaded: asyncio.Event = asyncio.Event()
# Look for the newest version of the cached inventory and use that.
# Not foolproof, but close enough if we're not sure what viewer is being used.
for cache_dir in iter_viewer_cache_dirs():
@@ -25,4 +28,8 @@ class ProxyInventoryManager(InventoryManager):
newest_cache = inv_cache_path
if newest_cache:
self.load_cache(newest_cache)
cache_load_fut = asyncio.ensure_future(asyncio.to_thread(self.load_cache, newest_cache))
# Meh. Don't care if it fails.
cache_load_fut.add_done_callback(lambda *args: self.cache_loaded.set())
else:
self.cache_loaded.set()

View File

@@ -161,6 +161,8 @@ class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
region.mark_dead()
elif message.name == "RegionHandshake":
region.name = str(message["RegionInfo"][0]["SimName"])
elif message.name == "AgentDataUpdate" and self.session:
self.session.active_group = message["AgentData"]["ActiveGroupID"]
# Send the message if it wasn't explicitly dropped or sent before
if not message.finalized:

View File

@@ -16,10 +16,14 @@ import weakref
from defusedxml import minidom
from hippolyzer.lib.base import serialization as se, llsd
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
from hippolyzer.lib.base.helpers import bytes_escape
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.template_dict import DEFAULT_TEMPLATE_DICT
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
EnumFieldSpecifier, MatchResult
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
@@ -614,6 +618,19 @@ class EQMessageLogEntry(AbstractMessageLogEntry):
return "EQ"
def request(self, beautify=False, replacements=None):
# TODO: This is a bit of a hack! Templated messages can be sent over the EQ, so let's
# display them as template messages if that's what they are.
if self.event['message'] in DEFAULT_TEMPLATE_DICT.message_templates:
msg = LLSDMessageSerializer().deserialize(self.event)
msg.synthetic = True
msg.send_flags = PacketFlags.EQ
msg.direction = Direction.IN
# Annoyingly, templated messages sent over the EQ can have extra fields not specified
# in the template, and this is often the case. ParcelProperties has fields that aren't
# in the template. Luckily, we don't really care about extra fields, we just may not
# be able to automatically decode U32 and friends without the hint from the template
# that that is what they are.
return HumanMessageSerializer.to_human_string(msg, replacements, beautify)
return f'EQ {self.event["message"]}\n\n{self._format_llsd(self.event["body"])}'
@property

View File

@@ -1,6 +1,5 @@
from __future__ import annotations
import logging
import hashlib
import uuid
import weakref
@@ -9,12 +8,11 @@ import urllib.parse
import multidict
from hippolyzer.lib.base.datatypes import Vector3, UUID
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_global_pos
from hippolyzer.lib.client.state import BaseClientRegion
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
@@ -44,14 +42,15 @@ class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):
class ProxiedRegion(BaseClientRegion):
circuit: Optional[ProxiedCircuit]
def __init__(self, circuit_addr, seed_cap: str, session: Session, handle=None):
super().__init__()
# A client may make a Seed request twice, and may get back two (valid!) sets of
# Cap URIs. We need to be able to look up both, so MultiDict is necessary.
self.handle: Optional[int] = handle
self._name: Optional[str] = None
# TODO: when does this change?
self.cache_id: Optional[UUID] = None
self.circuit: Optional[ProxiedCircuit] = None
self.circuit_addr = circuit_addr
self.caps = CapsMultiDict()
# Reverse lookup for URL -> cap data
@@ -71,31 +70,9 @@ class ProxiedRegion(BaseClientRegion):
self._recalc_caps()
@property
def name(self):
if self._name:
return self._name
return "Pending %r" % (self.circuit_addr,)
@name.setter
def name(self, val):
self._name = val
@property
def cap_urls(self) -> multidict.MultiDict[str, str]:
def cap_urls(self) -> multidict.MultiDict[str]:
return multidict.MultiDict((x, y[1]) for x, y in self.caps.items())
@property
def global_pos(self) -> Vector3:
if self.handle is None:
raise ValueError("Can't determine global region position without handle")
return handle_to_global_pos(self.handle)
@property
def is_alive(self):
if not self.circuit:
return False
return self.circuit.is_alive
def update_caps(self, caps: Mapping[str, str]):
for cap_name, cap_url in caps.items():
if isinstance(cap_url, str) and cap_url.startswith('http'):
@@ -158,15 +135,9 @@ class ProxiedRegion(BaseClientRegion):
return None
def mark_dead(self):
logging.info("Marking %r dead" % self)
if self.circuit:
self.circuit.is_alive = False
self.objects.clear()
super().mark_dead()
self.eq_manager.clear()
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
class EventQueueManager:
def __init__(self, region: ProxiedRegion):

View File

@@ -6,7 +6,6 @@ import datetime
import functools
import logging
import multiprocessing
import weakref
from typing import *
from weakref import ref
@@ -14,9 +13,9 @@ from outleap import LEAPClient
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.client.state import BaseClientSession
from hippolyzer.lib.base.network.transport import ADDR_TUPLE
from hippolyzer.lib.client.state import BaseClientSession, BaseClientSessionManager
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
@@ -34,30 +33,34 @@ if TYPE_CHECKING:
class Session(BaseClientSession):
def __init__(self, session_id, secure_session_id, agent_id, circuit_code,
regions: MutableSequence[ProxiedRegion]
region_by_handle: Callable[[int], Optional[ProxiedRegion]]
region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[ProxiedRegion]]
main_region: Optional[ProxiedRegion]
REGION_CLS = ProxiedRegion
def __init__(self, id, secure_session_id, agent_id, circuit_code,
session_manager: Optional[SessionManager], login_data=None):
self.login_data = login_data or {}
self.pending = True
self.id: UUID = session_id
self.secure_session_id: UUID = secure_session_id
self.agent_id: UUID = agent_id
self.circuit_code = circuit_code
self.global_caps = {}
super().__init__(
id=id,
secure_session_id=secure_session_id,
agent_id=agent_id,
circuit_code=circuit_code,
session_manager=session_manager,
login_data=login_data,
)
# Bag of arbitrary data addons can use to persist data across addon reloads
# Each addon name gets its own separate dict within this dict
self.addon_ctx: Dict[str, Dict[str, Any]] = collections.defaultdict(dict)
self.session_manager: SessionManager = session_manager or None
self.session_manager: SessionManager = session_manager
self.selected: SelectionModel = SelectionModel()
self.regions: List[ProxiedRegion] = []
self.started_at = datetime.datetime.now()
self.message_handler: MessageHandler[Message, str] = MessageHandler()
self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
self.inventory = ProxyInventoryManager(proxify(self))
self.leap_client: Optional[LEAPClient] = None
# Base path of a newview type cache directory for this session
self.cache_dir: Optional[str] = None
self._main_region = None
@property
def global_addon_ctx(self):
@@ -65,77 +68,13 @@ class Session(BaseClientSession):
return {}
return self.session_manager.addon_ctx
@classmethod
def from_login_data(cls, login_data, session_manager):
sess = Session(
session_id=UUID(login_data["session_id"]),
secure_session_id=UUID(login_data["secure_session_id"]),
agent_id=UUID(login_data["agent_id"]),
circuit_code=int(login_data["circuit_code"]),
session_manager=session_manager,
login_data=login_data,
)
appearance_service = login_data.get("agent_appearance_service")
map_image_service = login_data.get("map-server-url")
if appearance_service:
sess.global_caps["AppearanceService"] = appearance_service
if map_image_service:
sess.global_caps["MapImageService"] = map_image_service
# Login data also has details about the initial sim
sess.register_region(
circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
handle=(login_data["region_x"] << 32) | login_data["region_y"],
seed_url=login_data["seed_capability"],
)
return sess
@property
def main_region(self) -> Optional[ProxiedRegion]:
if self._main_region and self._main_region() in self.regions:
return self._main_region()
return None
@main_region.setter
def main_region(self, val: ProxiedRegion):
self._main_region = weakref.ref(val)
def register_region(self, circuit_addr: Optional[Tuple[str, int]] = None,
def register_region(self, circuit_addr: Optional[ADDR_TUPLE] = None,
seed_url: Optional[str] = None,
handle: Optional[int] = None) -> ProxiedRegion:
if not any((circuit_addr, seed_url)):
raise ValueError("One of circuit_addr and seed_url must be defined!")
for region in self.regions:
if region.circuit_addr == circuit_addr:
if seed_url and region.cap_urls.get("Seed") != seed_url:
region.update_caps({"Seed": seed_url})
if handle:
region.handle = handle
return region
if seed_url and region.cap_urls.get("Seed") == seed_url:
return region
if not circuit_addr:
raise ValueError("Can't create region without circuit addr!")
logging.info("Registering region for %r" % (circuit_addr,))
region = ProxiedRegion(circuit_addr, seed_url, self, handle=handle)
self.regions.append(region)
region: ProxiedRegion = super().register_region(circuit_addr, seed_url, handle) # type: ignore
AddonManager.handle_region_registered(self, region)
return region
def region_by_circuit_addr(self, circuit_addr) -> Optional[ProxiedRegion]:
for region in self.regions:
if region.circuit_addr == circuit_addr and region.circuit:
return region
return None
def region_by_handle(self, handle: int) -> Optional[ProxiedRegion]:
for region in self.regions:
if region.handle == handle:
return region
return None
def open_circuit(self, near_addr, circuit_addr, transport):
for region in self.regions:
if region.circuit_addr == circuit_addr:
@@ -175,15 +114,10 @@ class Session(BaseClientSession):
return CapData(cap_name, ref(region), ref(self), base_url, cap_type)
return None
def transaction_to_assetid(self, transaction_id: UUID):
return UUID.combine(transaction_id, self.secure_session_id)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.id)
class SessionManager:
class SessionManager(BaseClientSessionManager):
def __init__(self, settings: ProxySettings):
BaseClientSessionManager.__init__(self)
self.settings: ProxySettings = settings
self.sessions: List[Session] = []
self.shutdown_signal = multiprocessing.Event()

View File

@@ -35,3 +35,4 @@ class ProxySettings(Settings):
AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
FILTERS: Dict[str, str] = SettingDescriptor(dict)
SSL_INSECURE: bool = SettingDescriptor(False)

View File

@@ -83,7 +83,7 @@ class SOCKS5Server:
try:
# UDP Associate
if cmd == 3:
loop = asyncio.get_running_loop()
loop = asyncio.get_event_loop_policy().get_event_loop()
transport, protocol = await loop.create_datagram_endpoint(
self._udp_protocol_creator(writer.get_extra_info("peername")),
local_addr=('0.0.0.0', 0))

View File

@@ -65,7 +65,7 @@ class TaskScheduler:
task.cancel()
try:
event_loop = asyncio.get_running_loop()
event_loop = asyncio.get_event_loop_policy().get_event_loop()
await_all = asyncio.gather(*(task for task_data, task in self.tasks))
event_loop.run_until_complete(await_all)
except RuntimeError:

View File

@@ -1,11 +1,11 @@
import asyncio
import unittest
from typing import Any, Optional, List, Tuple
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
from hippolyzer.lib.base.network.transport import UDPPacket, AbstractUDPTransport, ADDR_TUPLE
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.base.test_utils import MockTransport
from hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager
@@ -63,21 +63,3 @@ class BaseProxyTest(unittest.IsolatedAsyncioTestCase):
def _msg_to_datagram(self, msg: Message, src, dst, socks_header=True):
packet = self._msg_to_packet(msg, src, dst)
return SOCKS5UDPTransport.serialize(packet, force_socks_header=socks_header)
class MockTransport(AbstractUDPTransport):
def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
pass
def abort(self) -> None:
pass
def close(self) -> None:
pass
def __init__(self):
super().__init__()
self.packets: List[Tuple[bytes, Tuple[str, int]]] = []
def send_packet(self, packet: UDPPacket) -> None:
self.packets.append((packet.data, packet.dst_addr))

View File

View File

@@ -0,0 +1,484 @@
from __future__ import annotations
import asyncio
import base64
import json
import logging
import random
import subprocess
import tempfile
import urllib.parse
import uuid
from typing import Optional, Union, Any, Dict
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.base.events import Event
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import handle_to_gridxy
from .connection import VivoxConnection, VivoxMessage
LOG = logging.getLogger(__name__)
RESP_LOG = logging.getLogger(__name__ + ".responses")
def launch_slvoice(voice_path, args, env=None):
    """Spawn the SLVoice (Vivox SDK) daemon and return its Popen handle."""
    cmd = [voice_path, *args]
    return subprocess.Popen(cmd, env=env)
def uuid_to_vivox(val):
    """Encode an agent UUID as a Vivox account name ("x" + URL-safe base64).

    Accepts a `uuid.UUID` instance or any string form `uuid.UUID` accepts.
    Previously a `uuid.UUID` argument (which `login()` can legitimately pass
    through) raised, because `uuid.UUID(...)` rejects UUID instances.
    """
    if not isinstance(val, uuid.UUID):
        val = uuid.UUID(val)
    return (b"x" + base64.b64encode(val.bytes, b"-_")).decode("utf8")
def uuid_to_vivox_uri(val):
    """Return the full Vivox SIP URI for an agent UUID."""
    vivox_name = uuid_to_vivox(val)
    return "sip:%s@bhr.vivox.com" % vivox_name
def vivox_to_uuid(val):
    """Decode a Vivox SIP URI (or bare account name) back to a UUID string."""
    # Strip the scheme, then the host, then the leading "x" marker; what
    # remains is the URL-safe base64 of the agent UUID's raw bytes.
    after_scheme = val.rpartition(":")[2]
    encoded = after_scheme.partition("@")[0][1:]
    return str(uuid.UUID(bytes=base64.b64decode(encoded, b"-_")))
class VoiceClient:
SERVER_URL = "https://www.bhr.vivox.com/api2/"
def __init__(self, host: str, port: int):
    """Wire up state and event subscriptions for an SLVoice daemon at host:port.

    Must be called with an asyncio event loop running: the message polling
    task is started immediately.
    """
    self._host = host
    self._port = port
    # Lifecycle gates, in rough order of progression
    self.logged_in = asyncio.Event()
    self.ready = asyncio.Event()
    self.session_ready = asyncio.Event()
    # Observable events consumers can subscribe to
    self.session_added = Event()
    self.channel_info_updated = Event()
    self.participant_added = Event()
    self.participant_updated = Event()
    self.participant_removed = Event()
    self.capture_devices_received = Event()
    self.render_devices_received = Event()
    # Device maps, filled in by create_connector()
    self.render_devices = {}
    self.capture_devices = {}
    # request_id -> future resolved when the matching Response arrives
    self._pending_req_futures: dict[str, asyncio.Future] = {}
    # Handles returned by the daemon for the various Vivox API objects
    self._connector_handle: Optional[str] = None
    self._session_handle: Optional[str] = None
    self._session_group_handle: Optional[str] = None
    self._account_handle: Optional[str] = None
    self._account_uri: Optional[str] = None
    self._username: Optional[str] = None
    self._password: Optional[str] = None
    self._display_name: Optional[str] = None
    self._uri: Optional[str] = None
    # participant URI -> latest participant data dict
    self._participants: Dict[str, dict] = {}
    self._mic_muted = False
    # Reference grid position used for region <-> global coordinate conversion
    self._region_global_x = 0
    self._region_global_y = 0
    self._pos = Vector3(0, 0, 0)
    self.vivox_conn: Optional[VivoxConnection] = None
    # Background task that dispatches daemon events / resolves responses
    self._poll_task = asyncio.create_task(self._poll_messages())
    self.event_handler: MessageHandler[VivoxMessage, str] = MessageHandler(take_by_default=False)
    self.event_handler.subscribe(
        "VoiceServiceConnectionStateChangedEvent",
        self._handle_voice_service_connection_state_changed
    )
    self.event_handler.subscribe("AccountLoginStateChangeEvent", self._handle_account_login_state_change)
    self.event_handler.subscribe("SessionAddedEvent", self._handle_session_added)
    self.event_handler.subscribe("SessionRemovedEvent", self._handle_session_removed)
    self.event_handler.subscribe("ParticipantAddedEvent", self._handle_participant_added)
    self.event_handler.subscribe("ParticipantUpdatedEvent", self._handle_participant_updated)
    self.event_handler.subscribe("ParticipantRemovedEvent", self._handle_participant_removed)
@property
def username(self):
    """Vivox account name used for the last/current login attempt."""
    return self._username
@property
def password(self):
    """Password used for the last/current login attempt."""
    return self._password
@property
def display_name(self):
    """Display name from the login response (URL-decoded), if logged in."""
    return self._display_name
@property
def global_pos(self):
    """Last position passed to set_3d_pos(), in Vivox global coordinates."""
    return self._pos
@property
def region_pos(self):
    """Current position translated to region-local coordinates."""
    return self._global_to_region(self.global_pos)
@property
def uri(self):
    """URI of the currently-joined channel, or None when not in a session."""
    return self._uri
@property
def participants(self):
    """Mapping of participant URI -> latest participant data dict."""
    # TODO: wrap in something to make immutable
    return self._participants
def close(self):
    """Tear down the daemon connection and stop the polling task.

    Safe to call more than once: previously a second call raised
    AttributeError because `_poll_task` had already been cleared to None.
    """
    if self.vivox_conn is not None:
        self.vivox_conn.close()
    if self._poll_task is not None:
        self._poll_task.cancel()
        self._poll_task = None
async def aclose(self):
    """Gracefully log out (if logged in), then tear down the connection."""
    if self._account_handle:
        await self.logout()
    self.close()
@classmethod
async def simple_init(
    cls,
    voice_path: str,
    host: Optional[str] = None,
    port: Optional[int] = None,
    env: Optional[dict] = None
):
    """Simple initializer for standing up a client

    Launches an SLVoice daemon in component mode, connects to it, and waits
    for the connector to become ready before returning the client.
    """
    if not host:
        host = "127.0.0.1"
    if not port:
        # NOTE(review): a random port may collide with an existing listener
        port = random.randrange(40000, 60000)
    str_addr = "%s:%s" % (host, port)
    launch_slvoice(voice_path, ["-i", str_addr, "-m", "component"], env=env)
    # HACK: wait for the process to start listening
    await asyncio.sleep(0.2)
    client = cls(host, port)
    await client.create_vivox_connection()
    await client.ready.wait()
    return client
async def create_vivox_connection(self):
    """Open the TCP control connection to the SLVoice daemon."""
    reader, writer = await asyncio.open_connection(host=self._host, port=self._port)
    self.vivox_conn = VivoxConnection(reader, writer)
async def create_connector(self):
    """Enumerate audio devices, set audio defaults, then create the connector.

    Sets `self.ready` once a connector handle has been obtained.
    """
    # TODO: Move all this extra crap out of here
    devices = (await self.send_message("Aux.GetCaptureDevices.1", {}))["Results"]
    self.capture_devices_received.notify(devices)
    self.capture_devices.clear()
    self.capture_devices.update(devices)
    devices = (await self.send_message("Aux.GetRenderDevices.1", {}))["Results"]
    self.render_devices_received.notify(devices)
    self.render_devices.clear()
    self.render_devices.update(devices)
    # Default audio state: speakers on, mic muted
    await self.set_speakers_muted(False)
    await self.set_speaker_volume(62)
    await self.set_mic_muted(True)
    await self.set_mic_volume(50)
    connector_resp = await self.send_message("Connector.Create.1", {
        "ClientName": "V2 SDK",
        "AccountManagementServer": self.SERVER_URL,
        "Mode": "Normal",
        "MinimumPort": 30000,
        "MaximumPort": 50000,
        "Logging": {
            "Folder": tempfile.gettempdir(),
            "FileNamePrefix": "VivConnector",
            "FileNameSuffix": ".log",
            "LogLevel": 1
        },
        "Application": "",
        "MaxCalls": 12,
    })
    self._connector_handle = connector_resp['Results']['ConnectorHandle']
    self.ready.set()
async def login(self, username: Union[uuid.UUID, str], password: str):
    """Log the account in via the existing connector.

    `username` may be an agent UUID (instance, or its 36-char string form),
    which is converted to Vivox account-name format, or an already-converted
    Vivox account name. Raises on a missing connector or a failed login.
    """
    # UUID, convert to Vivox format
    if isinstance(username, uuid.UUID) or len(username) == 36:
        username = uuid_to_vivox(username)
    self._username = username
    self._password = password
    if not self._connector_handle:
        raise Exception("Need a connector handle to log in")
    # Only one account at a time; drop any existing login first.
    if self._account_handle:
        await self.logout()
    resp = await self.send_message("Account.Login.1", {
        "ConnectorHandle": self._connector_handle,
        "AccountName": username,
        "AccountPassword": password,
        "AudioSessionAnswerMode": "VerifyAnswer",
        "EnableBuddiesAndPresence": "false",
        "BuddyManagementMode": "Application",
        "ParticipantPropertyFrequency": 5,
    })
    if resp["ReturnCode"] != 0:
        raise Exception(resp)
    self._display_name = urllib.parse.unquote(resp["Results"]["DisplayName"])
    self._account_uri = resp["Results"]["Uri"]
    # AccountLoginStateChangeEvent sets this once login fully completes
    await self.logged_in.wait()
    return resp
async def logout(self):
    """Leave any active session, log the account out, and clear login state."""
    if self._session_handle:
        await self.leave_session()
    if self._account_handle:
        await self.send_message("Account.Logout.1", {
            "AccountHandle": self._account_handle,
        })
    self._account_handle = None
    self._account_uri = None
    self.logged_in.clear()
async def join_session(self, uri: str, region_handle: Optional[int] = None):
    """Join the voice channel at `uri`, leaving any current session first.

    `region_handle`, when given, sets the reference position used to convert
    region-local coordinates to Vivox global ones.
    """
    if self._session_handle:
        await self.leave_session()
    self.set_ref_region(region_handle)
    self._uri = uri
    await self.send_message("Session.Create.1", {
        "AccountHandle": self._account_handle,
        "URI": uri,
        "ConnectAudio": "true",
        "ConnectText": "false",
        "VoiceFontID": 0,
        "Name": ""
    })
    # wait until we're actually added
    await self.session_ready.wait()
async def leave_session(self):
    """Terminate the current session group and reset all per-session state."""
    await self.send_message("SessionGroup.Terminate.1", {
        "SessionGroupHandle": self._session_group_handle,
    })
    self.session_ready.clear()
    # TODO: refactor into a collection
    # Notify observers that everyone "left" before clearing the roster.
    for participant in self._participants.values():
        self.participant_removed.notify(participant)
    self._participants.clear()
    self._session_handle = None
    self._session_group_handle = None
    self._region_global_x = 0
    self._region_global_y = 0
    self._uri = None
def set_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
    """Set global 3D position, in Vivox coordinates"""
    self._pos = pos
    future = self.send_message("Session.Set3DPosition.1", {
        "SessionHandle": self._session_handle,
        "SpeakerPosition": self._build_position_dict(pos),
        "ListenerPosition": self._build_position_dict(pos, vel=vel),
    })
    # NOTE(review): `_channel_info_updated` is not defined in the visible part
    # of this class — presumably defined further down; confirm it exists (and
    # isn't meant to be `self.channel_info_updated.notify(...)`).
    self._channel_info_updated()
    return future
def set_region_3d_pos(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> asyncio.Future:
    """Set 3D position, in region-local coordinates"""
    # Swap axes for Vivox's Y-up coordinate system before converting
    vel = Vector3(vel[0], vel[2], -vel[1])
    return self.set_3d_pos(self._region_to_global(pos), vel=vel)
def set_speakers_muted(self, val: bool):
    """Mute or unmute local audio output (speakers)."""
    payload = {
        "Value": json.dumps(val),
        "ConnectorHandle": self._connector_handle,
    }
    return self.send_message("Connector.MuteLocalSpeaker.1", payload)
def set_mic_muted(self, val: bool):
    """Mute or unmute the local capture device, remembering the new state."""
    self._mic_muted = val
    payload = {
        "Value": json.dumps(val),
        "ConnectorHandle": self._connector_handle,
    }
    return self.send_message("Connector.MuteLocalMic.1", payload)
def set_mic_volume(self, vol: int):
    """Set the local capture (mic) volume."""
    payload = {
        "Value": vol,
        "ConnectorHandle": self._connector_handle,
    }
    return self.send_message("Connector.SetLocalMicVolume.1", payload)
def set_speaker_volume(self, vol: int):
    """Set the local playback (speaker) volume."""
    payload = {
        "Value": vol,
        "ConnectorHandle": self._connector_handle,
    }
    return self.send_message("Connector.SetLocalSpeakerVolume.1", payload)
def set_capture_device(self, device: str):
    """Select which capture (mic) device the daemon should use."""
    payload = {"CaptureDeviceSpecifier": device}
    return self.send_message("Aux.SetCaptureDevice.1", payload)
def set_participant_volume(self, participant: str, vol: int):
    """Set our local playback volume for a single participant."""
    payload = {
        "SessionHandle": self._session_handle,
        "ParticipantURI": participant,
        "Volume": vol,
    }
    return self.send_message("Session.SetParticipantVolumeForMe.1", payload)
async def get_channel_info(self, uri: str) -> dict:
    """Fetch channel metadata for `uri` via the logged-in account."""
    payload = {
        "AccountHandle": self._account_handle,
        "URI": uri,
    }
    return await self.send_message("Account.ChannelGetInfo.1", payload)
def send_web_call(self, rel_path: str, params: dict) -> asyncio.Future[dict]:
    """Make a call to a Vivox Web API"""
    payload = {
        "AccountHandle": self._account_handle,
        "RelativePath": rel_path,
        "Parameters": params,
    }
    return self.send_message("Account.WebCall.1", payload)
def send_message(self, msg_type: str, data: Any) -> asyncio.Future[dict]:
    """Send a request to the daemon; the future resolves with the Response data."""
    request_id = self._make_request_id()
    # This is apparently what the viewer does, not clear if
    # request_id has any semantic significance
    if msg_type == "Session.Create.1":
        request_id = data["URI"]
    RESP_LOG.debug("%s %s %s %r" % ("Request", request_id, msg_type, data))
    # Fire-and-forget the actual write; _poll_messages resolves the future
    # keyed by request_id when the matching Response arrives.
    asyncio.create_task(self.vivox_conn.send_request(request_id, msg_type, data))
    future = asyncio.Future()
    self._pending_req_futures[request_id] = future
    return future
def send_raw(self, data: bytes):
    """Write pre-serialized bytes straight to the daemon connection."""
    return self.vivox_conn.send_raw(data)
def set_ref_region(self, region_handle: Optional[int]):
    """Set reference position for region-local coordinates"""
    if region_handle is not None:
        self._region_global_x, self._region_global_y = handle_to_gridxy(region_handle)
    else:
        self._region_global_x, self._region_global_y = (0, 0)
    # NOTE(review): `_channel_info_updated` is not defined in the visible part
    # of this class — presumably defined further down; confirm it exists.
    self._channel_info_updated()
async def _poll_messages(self):
    """Background task: dispatch daemon Events and resolve Response futures."""
    # Busy-wait until create_vivox_connection() has attached a connection.
    while not self.vivox_conn:
        await asyncio.sleep(0.001)
    async for msg in self.vivox_conn.read_messages():
        try:
            RESP_LOG.debug(repr(msg))
            if msg.type == "Event":
                self.event_handler.handle(msg)
            elif msg.type == "Response":
                # Might not have this request ID if it was sent directly via the socket
                if msg.request_id in self._pending_req_futures:
                    self._pending_req_futures[msg.request_id].set_result(msg.data)
                    del self._pending_req_futures[msg.request_id]
        except Exception:
            # Keep polling even if one handler blows up.
            LOG.exception("Error in response handler?")
async def _handle_voice_service_connection_state_changed(self, _msg: VivoxMessage):
    # The voice service came up; we can now stand up the connector.
    await self.create_connector()
def _handle_account_login_state_change(self, msg: VivoxMessage):
if msg.data.get('StatusString') == "OK" and msg.data['State'] == '1':
self._account_handle = msg.data['AccountHandle']
self.logged_in.set()
else:
self.logged_in.clear()
self._account_uri = None
self._account_handle = None
def _handle_session_added(self, msg: VivoxMessage):
self._session_handle = msg.data["SessionHandle"]
self._session_group_handle = msg.data["SessionGroupHandle"]
self.session_added.notify(self._session_handle)
# We still have to wait for ourselves to be added as a participant, wait on
# that to set the session_ready event.
def _handle_session_removed(self, _msg: VivoxMessage):
self._session_handle = None
# We often don't get all the `ParticipantRemoved`s before the session dies,
# clear out the participant list.
for participant in tuple(self._participants.keys()):
self._remove_participant(participant)
self.session_ready.clear()
def _handle_participant_added(self, msg: VivoxMessage):
self._participants[msg.data["ParticipantUri"]] = msg.data
self.participant_added.notify(msg.data)
if msg.data["ParticipantUri"] == self._account_uri and not self.session_ready.is_set():
self.session_ready.set()
def _handle_participant_updated(self, msg: VivoxMessage):
participant_uri = msg.data["ParticipantUri"]
if participant_uri in self._participants:
participant = self._participants[participant_uri]
participant.update(msg.data)
self.participant_updated.notify(participant)
def _handle_participant_removed(self, msg: VivoxMessage):
    # Eviction logic is shared with session teardown, see `_remove_participant()`.
    self._remove_participant(msg.data["ParticipantUri"])
def _remove_participant(self, participant_uri: str):
if participant_uri in self._participants:
participant = self._participants[participant_uri]
del self._participants[participant_uri]
self.participant_removed.notify(participant)
def _global_to_region(self, pos: Vector3):
    """Convert a global SL position into Vivox's region-local coordinates.

    Exact inverse of `_region_to_global()`.
    """
    local_x = pos.X - self._region_global_x * 256
    offset_z = pos.Z + self._region_global_y * 256
    # Vivox uses a different coordinate system than SL, Y is up!
    return Vector3(local_x, -offset_z, pos.Y)
def _region_to_global(self, pos: Vector3):
    """Convert a Vivox region-local coordinate back to a global SL position.

    Exact inverse of `_global_to_region()`.
    """
    global_x = pos.X + self._region_global_x * 256
    offset_y = pos.Y + self._region_global_y * 256
    return Vector3(global_x, pos.Z, -offset_y)
def _build_position_dict(self, pos: Vector3, vel: Vector3 = Vector3(0, 0, 0)) -> dict:
    """Build the position/orientation payload Vivox expects for 3D audio."""
    def _vec(x, y, z):
        return {"X": x, "Y": y, "Z": z}
    # Orientation constants mirror what the viewer sends; the epsilon
    # strings are reproduced verbatim.
    return {
        "Position": _vec(pos.X, pos.Y, pos.Z),
        "Velocity": _vec(vel.X, vel.Y, vel.Z),
        "AtOrientation": _vec("1.29938e-05", 0, -1),
        "UpOrientation": _vec(0, 1, 0),
        "LeftOrientation": _vec(-1, 0, "-1.29938e-05"),
    }
def _channel_info_updated(self):
pos = self.global_pos
if self._region_global_x is not None:
pos = self.region_pos
self.channel_info_updated.notify(pos)
def _make_request_id(self):
return str(uuid.uuid4())

View File

@@ -0,0 +1,156 @@
# TODO: some fancy parser that parses everything into
# dicts or objects using schemas.
from __future__ import annotations
import asyncio
import weakref
from typing import Any, Optional, Coroutine, NamedTuple
import defusedxml.lxml
import lxml.etree
class VivoxMessage(NamedTuple):
    """A single parsed message from the Vivox XML-over-TCP protocol."""
    # "Event", "Response" or "Request" (the top-level XML tag)
    type: str
    # Event type or request/response action, e.g. "Account.WebCall.1"
    name: str
    # Correlates Responses to Requests; None when absent (e.g. Events)
    request_id: Optional[str]
    # Message payload converted to (possibly nested) dicts
    data: dict
def xml_to_dict(element):
    """Recursively convert an XML element to a ``(tag, value)`` pair.

    The value is a dict of child tags when children exist, otherwise the
    element's text. Duplicate sibling tags collapse to the last one seen.
    """
    children = dict(map(xml_to_dict, element))
    return element.tag, (children or element.text)
def buildxml(r, d, list_elem_name='i'):
    """Recursively serialize a Python structure into XML under element `r`.

    dicts become child elements named by key, sequences become repeated
    `list_elem_name` children, strings/None become text content, pre-built
    lxml elements are appended as-is, and anything else is str()'d.
    Returns `r` for chaining.
    """
    if isinstance(d, dict):
        for k, v in d.items():
            s = lxml.etree.SubElement(r, k)
            buildxml(s, v, list_elem_name)
    elif isinstance(d, (list, tuple, set)):
        for v in d:
            if isinstance(v, lxml.etree._Element):  # noqa
                # Already an element: append directly to the parent rather
                # than wrapping it in a `list_elem_name` container.
                s = r
            else:
                s = lxml.etree.SubElement(r, list_elem_name)
            buildxml(s, v, list_elem_name)
    elif isinstance(d, str):
        r.text = d
    elif isinstance(d, lxml.etree._Element):  # noqa
        r.append(d)
    elif d is None:
        r.text = ""
    else:
        # Fall back to str() for numbers, bools, UUIDs, etc.
        r.text = str(d)
    return r
_VIVOX_NS = b' xmlns="http://www.vivox.com"' # noqa
def _remove_vivox_ns(data):
return data.replace(_VIVOX_NS, b"").strip()
def _clean_message(msg_action: str, parsed, dict_msg: dict):
    """Fix up a Response payload whose naive XML->dict conversion is lossy.

    `parsed` is the original lxml element tree, `dict_msg` the naive dict
    conversion; returns `dict_msg` with the ambiguous parts rebuilt.
    """
    # TODO: refactor this into some XML -> dict schema, some XML is ambiguous
    if msg_action == "Aux.GetCaptureDevices.1":
        # The device list collapses under a single key in the dict form;
        # rebuild it as a proper list of per-device dicts.
        devices = []
        for device in parsed.find('Results/CaptureDevices'):
            devices.append(xml_to_dict(device)[1])
        dict_msg["Results"]["CaptureDevices"] = devices
    if msg_action == "Account.WebCall.1":
        results = dict_msg["Results"]
        content_type = results.get("ContentType") or ""
        if content_type.startswith("text/xml"):
            # Re-parse the embedded XML payload and hand back its <body> element.
            xml_content = _remove_vivox_ns(results["Content"].encode("utf8"))
            parsed_content = defusedxml.lxml.fromstring(xml_content)
            body = parsed_content.xpath("//body")[0]
            results["Content"] = body
    if "ReturnCode" in dict_msg:
        # Numeric status arrives as text; normalize to int.
        dict_msg["ReturnCode"] = int(dict_msg["ReturnCode"])
    return dict_msg
def _build_webcall_params(params: dict) -> list:
    """Convert a params dict into the list of <Parameter> elements WebCall expects."""
    params_list = []
    # Serialize under a throwaway root, then detach and return its children.
    elem = lxml.etree.Element('base')
    for name, val in params.items():
        params_list.append({"Name": name, "Value": val})
    buildxml(elem, params_list, 'Parameter')
    return list(elem)
class VivoxConnection:
    """Stream pair speaking the Vivox SDK's XML protocol.

    Each message is an XML document terminated by three newline bytes.
    """

    def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, owned=True):
        self._reader: Optional[asyncio.StreamReader] = reader
        self._writer: Optional[asyncio.StreamWriter] = writer
        # If False we're borrowing someone else's streams and must not close them.
        self._owned = owned

    def close(self):
        if self._owned and self._writer:
            self._writer.close()
        self._writer = None
        self._reader = None

    def __del__(self):
        self.close()

    async def read_messages(self):
        """Async-iterate parsed messages until EOF or the connection is closed."""
        # TODO: handle interrupted read
        # Also check `self._writer` itself: close() nulls it out, and the old
        # bare `self._writer.is_closing()` raised AttributeError if the
        # connection was closed mid-iteration.
        while self._reader and self._writer and not self._reader.at_eof() and not self._writer.is_closing():
            yield await self.read_message()

    async def read_message(self):
        """Read one terminated XML document and parse it (terminator stripped)."""
        msg = await self._reader.readuntil(b"\n\n\n")
        return self.parse(msg[:-3])

    def parse(self, raw_msg) -> VivoxMessage:
        """Parse raw XML bytes into a VivoxMessage.

        Raises for unknown top-level tags.
        """
        parsed_msg = defusedxml.lxml.fromstring(raw_msg.decode("utf8"))
        msg_type = parsed_msg.tag
        request_id = parsed_msg.attrib.get("requestId", None)
        # There may be no params, just use an empty dict if that's the case
        dict_msg = xml_to_dict(parsed_msg)[1] or {}
        if msg_type == "Event":
            msg_action = parsed_msg.attrib.get("type")
        elif msg_type == "Response":
            msg_action = parsed_msg.attrib.get("action")
            # This is pretty useless, get rid of it because it gunks up repr()s.
            if 'InputXml' in dict_msg:
                del dict_msg['InputXml']
            dict_msg = _clean_message(msg_action, parsed_msg, dict_msg)
        elif msg_type == "Request":
            msg_action = parsed_msg.attrib.get("action")
        else:
            raise Exception("Unknown Vivox message type %r?" % msg_type)
        return VivoxMessage(msg_type, msg_action, request_id, dict_msg)

    def send_raw(self, buf: bytes) -> Coroutine[Any, Any, None]:
        """Queue raw bytes plus the message terminator; returns the drain coroutine."""
        self._writer.write(buf + b"\n\n\n")
        drain_coro = self._writer.drain()
        # Don't whine if this isn't awaited, we may not always want to flush immediately.
        weakref.finalize(drain_coro, drain_coro.close)
        return drain_coro

    def send_request(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
        if action == "Account.WebCall.1":
            # WebCall params need special-cased XML serialization; shallow-copy
            # first so the caller's dict is not mutated.
            data = dict(data)
            data["Parameters"] = _build_webcall_params(data["Parameters"])
        return self._send_request_response("Request", request_id, action, data)

    def send_response(self, request_id: str, action: str, data: Any) -> Coroutine[Any, Any, None]:
        return self._send_request_response("Response", request_id, action, data)

    def _send_request_response(self, msg_type: str, request_id: str, action: str, data: Any):
        # Shared serialization for Request and Response envelopes.
        elem = lxml.etree.Element(msg_type)
        elem.attrib["requestId"] = request_id
        elem.attrib["action"] = action
        serialized = lxml.etree.tostring(buildxml(elem, data))
        return self.send_raw(serialized)

    def send_event(self, event_type: str, data: Any) -> Coroutine[Any, Any, None]:
        elem = lxml.etree.Element("Event")
        elem.attrib["type"] = event_type
        serialized = lxml.etree.tostring(buildxml(elem, data))
        return self.send_raw(serialized)

View File

@@ -25,7 +25,7 @@ from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
version = '0.13.2'
version = '0.14.3'
with open(path.join(here, 'README.md')) as readme_fh:
readme = readme_fh.read()
@@ -42,7 +42,6 @@ setup(
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
@@ -80,32 +79,35 @@ setup(
}
},
zip_safe=False,
python_requires='>=3.8',
python_requires='>=3.9',
install_requires=[
'llsd<1.1.0',
'outleap<1.0',
'defusedxml',
'aiohttp<4.0.0',
# Newer recordclasses break!
'recordclass>0.15,<0.18.3',
'lazy-object-proxy',
'arpeggio',
# requests breaks with newer idna
'idna<3,>=2.5',
# 7.x will be a major change.
'mitmproxy>=8.0.0,<8.1',
# For REPLs
'ptpython<4.0',
# JP2 codec
'Glymur<0.9.7',
'numpy<2.0',
# These could be in extras_require if you don't want a GUI.
'pyside6-essentials',
'qasync',
# Needed for mesh format conversion tooling
'pycollada',
'transformations',
'gltflib',
# JP2 codec
'Glymur<0.9.7',
'numpy<2.0',
# Proxy-specific stuff
'outleap<1.0',
'arpeggio',
# 7.x will be a major change.
'mitmproxy>=8.0.0,<8.1',
'Werkzeug<3.0',
# For REPLs
'ptpython<4.0',
# These could be in extras_require if you don't want a GUI.
'pyside6-essentials',
'qasync',
],
tests_require=[
"pytest",

51
tests/base/test_events.py Normal file
View File

@@ -0,0 +1,51 @@
import asyncio
import unittest
from unittest.mock import MagicMock
from hippolyzer.lib.base.events import Event
class TestEvents(unittest.IsolatedAsyncioTestCase):
    """Covers Event.notify() with sync and async subscribers.

    Per these tests, a subscriber returning True is unsubscribed after firing.
    """

    async def asyncSetUp(self):
        self.event = Event()

    def _subscribed_callbacks(self):
        # Event.subscribers holds tuples whose first element is the callback.
        return [entry[0] for entry in self.event.subscribers]

    async def test_trigger_sync(self):
        listener = MagicMock(return_value=False)
        self.event.subscribe(listener)
        self.event.notify("foo")
        listener.assert_called_with("foo")
        self.assertIn(listener, self._subscribed_callbacks())

    async def test_trigger_sync_unsub(self):
        listener = MagicMock(return_value=True)
        self.event.subscribe(listener)
        self.event.notify("foo")
        listener.assert_called_with("foo")
        self.assertNotIn(listener, self._subscribed_callbacks())

    async def test_trigger_async(self):
        fired = asyncio.Event()
        recorder = MagicMock()

        async def _listener(*args, **kwargs):
            fired.set()
            recorder(*args, **kwargs)

        self.event.subscribe(_listener)
        self.event.notify("foo")
        await fired.wait()
        recorder.assert_called_with("foo")
        self.assertIn(_listener, self._subscribed_callbacks())

    async def test_trigger_async_unsub(self):
        fired = asyncio.Event()
        recorder = MagicMock()

        async def _listener(*args, **kwargs):
            fired.set()
            recorder(*args, **kwargs)
            return True

        self.event.subscribe(_listener)
        self.event.notify("foo")
        await fired.wait()
        recorder.assert_called_with("foo")
        self.assertNotIn(_listener, self._subscribed_callbacks())

View File

@@ -2,7 +2,7 @@ import copy
import unittest
from hippolyzer.lib.base.datatypes import *
from hippolyzer.lib.base.inventory import InventoryModel
from hippolyzer.lib.base.inventory import InventoryModel, SaleType
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
SIMPLE_INV = """\tinv_object\t0
@@ -11,6 +11,8 @@ SIMPLE_INV = """\tinv_object\t0
\t\tparent_id\t00000000-0000-0000-0000-000000000000
\t\ttype\tcategory
\t\tname\tContents|
\t\tmetadata\t<llsd><undef /></llsd>
|
\t}
\tinv_item\t0
\t{
@@ -39,10 +41,23 @@ SIMPLE_INV = """\tinv_object\t0
\t}
\t\tname\tNew Script|
\t\tdesc\t2020-04-20 04:20:39 lsl2 script|
\t\tmetadata\t<llsd><map><key>experience</key><uuid>a2e76fcd-9360-4f6d-a924-000000000003</uuid></map></llsd>
|
\t\tcreation_date\t1587367239
\t}
"""
INV_CATEGORY = """\tinv_category\t0
\t{
\t\tcat_id\tf4d91477-def1-487a-b4f3-6fa201c17376
\t\tparent_id\t00000000-0000-0000-0000-000000000000
\t\ttype\tlsltext
\t\tpref_type\tlsltext
\t\tname\tScripts|
\t\towner_id\ta2e76fcd-9360-4f6d-a924-000000000003
\t}
"""
class TestLegacyInv(unittest.TestCase):
def setUp(self) -> None:
@@ -52,15 +67,27 @@ class TestLegacyInv(unittest.TestCase):
self.assertTrue(UUID('f4d91477-def1-487a-b4f3-6fa201c17376') in self.model.nodes)
self.assertIsNotNone(self.model.root)
def test_parse_category(self):
model = InventoryModel.from_str(INV_CATEGORY)
self.assertEqual(UUID('f4d91477-def1-487a-b4f3-6fa201c17376'), model.root.node_id)
def test_serialize(self):
self.model = InventoryModel.from_str(SIMPLE_INV)
new_model = InventoryModel.from_str(self.model.to_str())
self.assertEqual(self.model, new_model)
def test_serialize_category(self):
model = InventoryModel.from_str(INV_CATEGORY)
new_model = InventoryModel.from_str(model.to_str())
self.assertEqual(model, new_model)
def test_category_legacy_serialization(self):
self.assertEqual(INV_CATEGORY, InventoryModel.from_str(INV_CATEGORY).to_str())
def test_item_access(self):
item = self.model.nodes[UUID('dd163122-946b-44df-99f6-a6030e2b9597')]
self.assertEqual(item.name, "New Script")
self.assertEqual(item.sale_info.sale_type, "not")
self.assertEqual(item.sale_info.sale_type, SaleType.NOT)
self.assertDictEqual(item.metadata, {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")})
self.assertEqual(item.model, self.model)
def test_access_children(self):
@@ -112,6 +139,7 @@ class TestLegacyInv(unittest.TestCase):
'inv_type': 'script',
'item_id': UUID('dd163122-946b-44df-99f6-a6030e2b9597'),
'name': 'New Script',
'metadata': {"experience": UUID("a2e76fcd-9360-4f6d-a924-000000000003")},
'parent_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'permissions': {
'base_mask': 2147483647,
@@ -123,7 +151,6 @@ class TestLegacyInv(unittest.TestCase):
'next_owner_mask': 581632,
'owner_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'owner_mask': 2147483647,
'is_owner_group': 0,
},
'sale_info': {
'sale_price': 10,
@@ -134,12 +161,31 @@ class TestLegacyInv(unittest.TestCase):
]
)
def test_llsd_serialization_ais(self):
model = InventoryModel.from_str(INV_CATEGORY)
self.assertEqual(
[
{
'agent_id': UUID('a2e76fcd-9360-4f6d-a924-000000000003'),
'category_id': UUID('f4d91477-def1-487a-b4f3-6fa201c17376'),
'name': 'Scripts',
'parent_id': UUID('00000000-0000-0000-0000-000000000000'),
'type_default': 10,
'version': -1
}
],
model.to_llsd("ais")
)
def test_llsd_legacy_equality(self):
new_model = InventoryModel.from_llsd(self.model.to_llsd())
self.assertEqual(self.model, new_model)
new_model.root.name = "foo"
self.assertNotEqual(self.model, new_model)
def test_legacy_serialization(self):
self.assertEqual(SIMPLE_INV, self.model.to_str())
def test_difference_added(self):
new_model = InventoryModel.from_llsd(self.model.to_llsd())
diff = self.model.get_differences(new_model)

View File

@@ -89,7 +89,7 @@ class _MutableMultiDictTests:
d = create_instance()
s = pickle.dumps(d, protocol)
ud = pickle.loads(s)
assert type(ud) == type(d)
assert type(ud) is type(d)
assert ud == d
alternative = pickle.dumps(create_instance("werkzeug"), protocol)
assert pickle.loads(alternative) == d

View File

@@ -23,13 +23,7 @@ import unittest
from hippolyzer.lib.base.settings import Settings
class TestEvents(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
class TestSettings(unittest.TestCase):
    """Sanity-checks default values on a fresh Settings object."""

    def test_base_settings(self):
        settings = Settings()
        self.assertEqual(True, settings.ENABLE_DEFERRED_PACKET_PARSING)

View File

@@ -27,7 +27,7 @@ from hippolyzer.lib.base.message.data import msg_tmpl
from hippolyzer.lib.base.message.template import MessageTemplate, MessageTemplateBlock, MessageTemplateVariable
from hippolyzer.lib.base.message.template_dict import TemplateDictionary
from hippolyzer.lib.base.message.template_parser import MessageTemplateParser
from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgTrust, MsgEncoding, \
from hippolyzer.lib.base.message.msgtypes import MsgFrequency, MsgEncoding, \
MsgDeprecation, MsgBlockType, MsgType
@@ -45,8 +45,8 @@ class TestDictionary(unittest.TestCase):
msg_dict = TemplateDictionary(self.template_list)
packet = msg_dict.get_template_by_name('ConfirmEnableSimulator')
assert packet is not None, "get_packet failed"
assert packet.frequency == MsgFrequency.MEDIUM_FREQUENCY_MESSAGE, "Incorrect frequency"
assert packet.msg_num == 8, "Incorrect message number for ConfirmEnableSimulator"
assert packet.frequency == MsgFrequency.MEDIUM, "Incorrect frequency"
assert packet.num == 8, "Incorrect message number for ConfirmEnableSimulator"
def test_get_packet_pair(self):
msg_dict = TemplateDictionary(self.template_list)
@@ -76,29 +76,29 @@ class TestTemplates(unittest.TestCase):
template = self.msg_dict['CompletePingCheck']
name = template.name
freq = template.frequency
num = template.msg_num
trust = template.msg_trust
enc = template.msg_encoding
num = template.num
trust = template.trusted
enc = template.encoding
assert name == 'CompletePingCheck', "Expected: CompletePingCheck Returned: " + name
assert freq == MsgFrequency.HIGH_FREQUENCY_MESSAGE, "Expected: High Returned: " + freq
assert freq == MsgFrequency.HIGH, "Expected: High Returned: " + freq
assert num == 2, "Expected: 2 Returned: " + str(num)
assert trust == MsgTrust.LL_NOTRUST, "Expected: NotTrusted Returned: " + trust
assert enc == MsgEncoding.LL_UNENCODED, "Expected: Unencoded Returned: " + enc
assert not trust, "Expected: NotTrusted Returned: " + trust
assert enc == MsgEncoding.UNENCODED, "Expected: Unencoded Returned: " + enc
def test_deprecated(self):
template = self.msg_dict['ObjectPosition']
dep = template.msg_deprecation
assert dep == MsgDeprecation.LL_DEPRECATED, "Expected: Deprecated Returned: " + str(dep)
dep = template.deprecation
assert dep == MsgDeprecation.DEPRECATED, "Expected: Deprecated Returned: " + str(dep)
def test_template_fixed(self):
template = self.msg_dict['PacketAck']
num = template.msg_num
num = template.num
assert num == 251, "Expected: 251 Returned: " + str(num)
def test_blacklisted(self):
template = self.msg_dict['TeleportFinish']
self.assertEqual(template.msg_deprecation,
MsgDeprecation.LL_UDPBLACKLISTED)
self.assertEqual(template.deprecation,
MsgDeprecation.UDPBLACKLISTED)
def test_block(self):
block = self.msg_dict['OpenCircuit'].get_block('CircuitInfo')
@@ -167,7 +167,7 @@ class TestTemplates(unittest.TestCase):
frequency_counter = {"low": 0, 'medium': 0, "high": 0, 'fixed': 0}
for template in list(self.msg_dict.message_templates.values()):
frequency_counter[template.get_frequency_as_string()] += 1
frequency_counter[template.frequency.name.lower()] += 1
self.assertEqual(low_count, frequency_counter["low"])
self.assertEqual(medium_count, frequency_counter["medium"])
self.assertEqual(high_count, frequency_counter["high"])

View File

@@ -6,7 +6,6 @@ from typing import *
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.templates import (
AssetType,
EstateAssetType,
@@ -16,26 +15,10 @@ from hippolyzer.lib.base.templates import (
TransferTargetType,
TransferStatus,
)
from hippolyzer.lib.proxy.circuit import ProxiedCircuit
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.base.transfer_manager import TransferManager, Transfer
from hippolyzer.lib.base.xfer_manager import XferManager
class MockHandlingCircuit(ProxiedCircuit):
def __init__(self, handler: MessageHandler[Message, str]):
super().__init__(("127.0.0.1", 1), ("127.0.0.1", 2), None)
self.handler = handler
def _send_prepared_message(self, message: Message, transport=None):
loop = asyncio.get_event_loop_policy().get_event_loop()
loop.call_soon(self.handler.handle, message)
class MockConnectionHolder(ConnectionHolder):
def __init__(self, circuit, message_handler):
self.circuit = circuit
self.message_handler = message_handler
from hippolyzer.lib.base.test_utils import MockHandlingCircuit, MockConnectionHolder
class BaseTransferTests(unittest.IsolatedAsyncioTestCase):

39
tests/client/__init__.py Normal file
View File

@@ -0,0 +1,39 @@
from typing import Mapping, Optional
import multidict
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.caps_client import CapsClient
from hippolyzer.lib.base.test_utils import MockHandlingCircuit
from hippolyzer.lib.client.hippo_client import ClientSettings
from hippolyzer.lib.client.object_manager import ClientWorldObjectManager
from hippolyzer.lib.client.state import BaseClientRegion, BaseClientSession, BaseClientSessionManager
class MockClientRegion(BaseClientRegion):
    """Fake client region whose circuit loops messages through a local handler."""

    def __init__(self, caps_urls: Optional[dict] = None):
        super().__init__()
        self.handle = None
        self.circuit_addr = ("127.0.0.1", 1)
        self.message_handler: MessageHandler[Message, str] = MessageHandler(take_by_default=False)
        self.circuit = MockHandlingCircuit(self.message_handler)
        self._name = "Test"
        self.cap_urls = multidict.MultiDict(caps_urls or {})
        self.caps_client = CapsClient(self.cap_urls)

    def session(self):
        # Hand back a throwaway session each call; tests don't need identity.
        return MockClientSession(UUID.ZERO, UUID.ZERO, UUID.ZERO, 0, None)

    def update_caps(self, caps: Mapping[str, str]) -> None:
        # Cap updates are irrelevant for these tests.
        pass
class MockClientSession(BaseClientSession):
    """Client session stub backed by a real ClientWorldObjectManager."""
    def __init__(self, id, secure_session_id, agent_id, circuit_code,
                 session_manager: Optional[BaseClientSessionManager]):
        super().__init__(id, secure_session_id, agent_id, circuit_code, session_manager)
        # NOTE(review): third argument is presumably an HTTP/session dependency
        # that tests don't need — confirm against ClientWorldObjectManager.
        self.objects = ClientWorldObjectManager(self, ClientSettings(), None)

View File

@@ -0,0 +1,179 @@
import asyncio
import copy
import unittest
import xmlrpc.client
from typing import Tuple, Optional
import aioresponses
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import Circuit
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.message.msgtypes import PacketFlags
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.network.transport import AbstractUDPTransport, UDPPacket, Direction
from hippolyzer.lib.base.test_utils import MockTransport, MockConnectionHolder, soon
from hippolyzer.lib.client.hippo_client import HippoClient, HippoClientProtocol
class MockServer(MockConnectionHolder):
    """Pretends to be a simulator: ACKs reliable packets and dispatches the rest."""

    def __init__(self, circuit, message_handler):
        super().__init__(circuit, message_handler)
        self.deserializer = UDPMessageDeserializer()
        self.protocol: Optional[HippoClientProtocol] = None

    def process_inbound(self, packet: UDPPacket):
        """Process a packet that the client sent to us"""
        message = self.deserializer.deserialize(packet.data)
        message.direction = Direction.IN
        if message.reliable:
            # Immediately ACK anything sent reliably, like a real sim.
            self.circuit.send_acks((message.packet_id,))
        self.circuit.collect_acks(message)
        if message.name == "PacketAck":
            # Pure bookkeeping, already consumed above; don't surface it.
            return
        self.message_handler.handle(message)
class PacketForwardingTransport(MockTransport):
    """Transport that immediately feeds sent packets back into a client protocol."""
    def __init__(self):
        super().__init__()
        # Must be wired up by the test harness before any packet is sent.
        self.protocol: Optional[HippoClientProtocol] = None

    def send_packet(self, packet: UDPPacket):
        super().send_packet(packet)
        # Short-circuit the network: deliver straight to the client's protocol.
        self.protocol.datagram_received(packet.data, packet.src_addr)
class MockServerTransport(MockTransport):
    """Used for the client to send packets out"""

    def __init__(self, server: MockServer):
        super().__init__()
        self._server = server

    def send_packet(self, packet: UDPPacket) -> None:
        super().send_packet(packet)
        # Hand a copy to the fake server as though it crossed the wire.
        inbound = copy.copy(packet)
        inbound.direction = Direction.IN
        # Delay calling so the client can do its ACK bookkeeping first
        asyncio.get_event_loop().call_soon(self._server.process_inbound, inbound)
class MockHippoClient(HippoClient):
    """HippoClient wired to a MockServer instead of a real UDP socket."""
    def __init__(self, server: MockServer):
        super().__init__()
        self.server = server

    async def _create_transport(self) -> Tuple[AbstractUDPTransport, HippoClientProtocol]:
        protocol = HippoClientProtocol(self.session)
        # TODO: This isn't great, but whatever.
        # Let the server's transport deliver sim->client packets to this protocol.
        self.server.circuit.transport.protocol = protocol
        return MockServerTransport(self.server), protocol
class TestHippoClient(unittest.IsolatedAsyncioTestCase):
    """Drives a real HippoClient against an in-process fake simulator.

    HTTP endpoints (login, seed cap, EventQueueGet) are faked with
    aioresponses; UDP traffic loops through MockServer in memory.
    """
    FAKE_LOGIN_URI = "http://127.0.0.1:1/login.cgi"
    # Minimal XML-RPC login response the client needs to proceed
    FAKE_LOGIN_RESP = {
        "session_id": str(UUID(int=1)),
        "secure_session_id": str(UUID(int=2)),
        "agent_id": str(UUID(int=3)),
        "circuit_code": 123,
        "sim_ip": "127.0.0.1",
        "sim_port": 2,
        "region_x": 0,
        "region_y": 123,
        "seed_capability": "https://127.0.0.1:4/foo",
        "inventory-skeleton": [
            {'name': 'My Inventory', 'folder_id': str(UUID(int=4)),
             'parent_id': '00000000-0000-0000-0000-000000000000', 'type_default': 8, 'version': 200}
        ]
    }
    FAKE_SEED_RESP = {
        "EventQueueGet": "https://127.0.0.1:5/",
    }
    # One templated and one non-templated event; exercised by test_eq()
    FAKE_EQ_RESP = {
        "id": 1,
        "events": [
            {"message": "ViewerFrozenMessage", "body": {"FrozenData": [{"Data": False}]}},
            {"message": "NotTemplated", "body": {"foo": {"bar": True}}},
        ],
    }

    async def asyncSetUp(self):
        self.server_handler: MessageHandler[Message, str] = MessageHandler()
        self.server_transport = PacketForwardingTransport()
        self.server_circuit = Circuit(("127.0.0.1", 2), ("127.0.0.1", 99), self.server_transport)
        self.server = MockServer(self.server_circuit, self.server_handler)
        # Intercept all aiohttp traffic for the canned responses below.
        self.aio_mock = aioresponses.aioresponses()
        self.aio_mock.start()
        self.aio_mock.post(
            self.FAKE_LOGIN_URI,
            body=xmlrpc.client.dumps((self.FAKE_LOGIN_RESP,), None, True)
        )
        self.aio_mock.post(self.FAKE_LOGIN_RESP['seed_capability'], body=llsd.format_xml(self.FAKE_SEED_RESP))
        # The EQ is long-polled, so this response must repeat indefinitely.
        self.aio_mock.post(self.FAKE_SEED_RESP['EventQueueGet'], body=llsd.format_xml(self.FAKE_EQ_RESP), repeat=True)
        self.client = MockHippoClient(self.server)

    async def asyncTearDown(self):
        try:
            await self.client.aclose()
        finally:
            self.aio_mock.stop()

    async def _log_client_in(self, client: MockHippoClient):
        """Play the sim's side of the login handshake, in strict message order."""
        login_task = asyncio.create_task(client.login("foo", "bar", login_uri=self.FAKE_LOGIN_URI))
        with self.server_handler.subscribe_async(
            ("*",),
        ) as get_msg:
            assert (await soon(get_msg())).name == "UseCircuitCode"
            assert (await soon(get_msg())).name == "CompleteAgentMovement"
            self.server.circuit.send(Message(
                'RegionHandshake',
                Block('RegionInfo', fill_missing=True),
                Block('RegionInfo2', fill_missing=True),
                Block('RegionInfo3', fill_missing=True),
                Block('RegionInfo4', fill_missing=True),
            ))
            assert (await soon(get_msg())).name == "RegionHandshakeReply"
            assert (await soon(get_msg())).name == "AgentThrottle"
        await login_task

    async def test_login(self):
        await self._log_client_in(self.client)
        with self.server_handler.subscribe_async(
            ("*",),
        ) as get_msg:
            self.client.logout()
            assert (await soon(get_msg())).name == "LogoutRequest"

    async def test_eq(self):
        # Both templated and non-templated EQ events should surface as Messages.
        await self._log_client_in(self.client)
        with self.client.session.message_handler.subscribe_async(
            ("ViewerFrozenMessage", "NotTemplated"),
        ) as get_msg:
            assert (await soon(get_msg())).name == "ViewerFrozenMessage"
            msg = await soon(get_msg())
            assert msg.name == "NotTemplated"
            assert msg["EventData"]["foo"]["bar"] == 1

    async def test_inventory_manager(self):
        # The inventory skeleton from the login response should be loaded.
        await self._log_client_in(self.client)
        self.assertEqual(self.client.session.inventory_manager.model.root.node_id, UUID(int=4))

    async def test_resend_suppression(self):
        """Make sure the client only handles the first seen copy of a reliable message"""
        await self._log_client_in(self.client)
        with self.client.session.message_handler.subscribe_async(
            ("ChatFromSimulator", "AgentDataUpdate"),
        ) as get_msg:
            msg = Message("ChatFromSimulator", Block("ChatData", fill_missing=True))
            msg.send_flags |= PacketFlags.RELIABLE
            # Fake re-sending the message
            packet = self.server_circuit.send(msg)
            self.server_transport.send_packet(packet)
            self.server_circuit.send(Message("AgentDataUpdate", Block("AgentData", fill_missing=True)))
            # The duplicate ChatFromSimulator must be dropped, so the next
            # message seen after the first copy is AgentDataUpdate.
            assert (await soon(get_msg())).name == "ChatFromSimulator"
            assert (await soon(get_msg())).name == "AgentDataUpdate"

View File

@@ -0,0 +1,69 @@
import unittest
from typing import Any
import aioresponses
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base import llsd
from hippolyzer.lib.client.object_manager import ClientObjectManager
from . import MockClientRegion
class MaterialManagerTest(unittest.IsolatedAsyncioTestCase):
    """Tests fetching legacy RenderMaterials through ClientObjectManager."""
    FAKE_CAPS = {
        "RenderMaterials": "http://127.0.0.1:8023"
    }
    # Server-side payload: three materials, IDs sent as raw UUID bytes
    GET_RENDERMATERIALS_BODY = [
        {'ID': UUID(int=1).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 1, 'EnvIntensity': 0,
                      'NormMap': UUID(int=4), 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': 10000, 'NormRotation': 0, 'SpecColor': [255, 255, 255, 255],
                      'SpecExp': 51, 'SpecMap': UUID(int=5), 'SpecOffsetX': 0,
                      'SpecOffsetY': 0, 'SpecRepeatX': 10000, 'SpecRepeatY': 10000, 'SpecRotation': 0}},
        {'ID': UUID(int=2).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 0, 'EnvIntensity': 0,
                      'NormMap': UUID(int=6), 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': -10000, 'NormRotation': 0,
                      'SpecColor': [255, 255, 255, 255], 'SpecExp': 51,
                      'SpecMap': UUID(int=7), 'SpecOffsetX': 0, 'SpecOffsetY': 0,
                      'SpecRepeatX': 10000, 'SpecRepeatY': -10000, 'SpecRotation': 0}},
        {'ID': UUID(int=3).bytes,
         'Material': {'AlphaMaskCutoff': 0, 'DiffuseAlphaMode': 1, 'EnvIntensity': 50,
                      'NormMap': UUID.ZERO, 'NormOffsetX': 0, 'NormOffsetY': 0,
                      'NormRepeatX': 10000, 'NormRepeatY': 10000, 'NormRotation': 0, 'SpecColor': [255, 255, 255, 255],
                      'SpecExp': 200, 'SpecMap': UUID(int=8), 'SpecOffsetX': 0,
                      'SpecOffsetY': 0, 'SpecRepeatX': 10000, 'SpecRepeatY': 10000, 'SpecRotation': 0}},
    ]

    def _make_rendermaterials_resp(self, resp: Any) -> bytes:
        # The cap wraps its zlib-compressed LLSD payload under a "Zipped" key.
        return llsd.format_xml({"Zipped": llsd.zip_llsd(resp)})

    async def asyncSetUp(self):
        self.aio_mock = aioresponses.aioresponses()
        self.aio_mock.start()
        # Requesting all materials
        self.aio_mock.get(
            self.FAKE_CAPS['RenderMaterials'],
            body=self._make_rendermaterials_resp(self.GET_RENDERMATERIALS_BODY)
        )
        # Specific material request
        self.aio_mock.post(
            self.FAKE_CAPS['RenderMaterials'],
            body=self._make_rendermaterials_resp([self.GET_RENDERMATERIALS_BODY[0]])
        )
        self.region = MockClientRegion(self.FAKE_CAPS)
        self.manager = ClientObjectManager(self.region)

    async def asyncTearDown(self):
        self.aio_mock.stop()

    async def test_fetch_all_materials(self):
        # GET fetches the full material list into manager state.
        await self.manager.request_all_materials()
        self.assertListEqual([UUID(int=1), UUID(int=2), UUID(int=3)], list(self.manager.state.materials.keys()))

    async def test_fetch_some_materials(self):
        # POST with specific IDs returns (and caches) only those materials.
        mats = await self.manager.request_materials((UUID(int=1),))
        self.assertListEqual([UUID(int=1)], list(mats.keys()))
        self.assertListEqual([UUID(int=1)], list(self.manager.state.materials.keys()))

View File

@@ -0,0 +1,245 @@
import asyncio
import collections
import unittest
from typing import Dict
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.templates import ParcelGridInfo, ParcelGridType, ParcelGridFlags
from hippolyzer.lib.base.test_utils import soon
from hippolyzer.lib.client.parcel_manager import ParcelManager
from . import MockClientRegion
OVERLAY_CHUNKS = (
b'\xc2\x82\x82\xc2\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82'
b'\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82'
b'\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\xc2B\x02\x02B\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x82B\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\xc2\x82\x82\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'B\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02'
b'\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02',
)
class TestParcelOverlay(unittest.IsolatedAsyncioTestCase):
    """Exercises ParcelManager's ParcelOverlay assembly and parcel property requests."""

    async def asyncSetUp(self):
        self.region = MockClientRegion()
        self.parcel_manager = ParcelManager(self.region)
        self.handler = self.region.message_handler
        # One ParcelOverlay message per fixture chunk, tagged with its sequence number.
        self.test_msgs = [
            Message('ParcelOverlay', Block('ParcelData', SequenceID=seq, Data=payload))
            for seq, payload in enumerate(OVERLAY_CHUNKS)
        ]

    def test_low_level_parse(self):
        """The first two grid cells of the raw overlay parse to the expected dataclasses."""
        spec = se.BitfieldDataclass(ParcelGridInfo)
        reader = se.BufferReader("<", OVERLAY_CHUNKS[0])
        expected_first = ParcelGridInfo(
            ParcelGridType.GROUP, ParcelGridFlags.SOUTH_LINE | ParcelGridFlags.WEST_LINE
        )
        self.assertEqual(expected_first, reader.read(spec))
        expected_second = ParcelGridInfo(ParcelGridType.GROUP, ParcelGridFlags.SOUTH_LINE)
        self.assertEqual(expected_second, reader.read(spec))

    def _get_parcel_areas(self) -> Dict[int, int]:
        """Map parcel index -> covered area, counting GRID_STEP per overlay cell."""
        step = self.parcel_manager.GRID_STEP
        cell_counts = collections.Counter(self.parcel_manager.parcel_indices.flatten())
        return {parcel_idx: cells * step for parcel_idx, cells in cell_counts.items()}

    async def test_handle_overlay(self):
        self.assertFalse(self.parcel_manager.overlay_complete.is_set())
        for overlay_msg in self.test_msgs:
            self.handler.handle(overlay_msg)
        # Once every chunk has arrived, the overlay is marked complete.
        self.assertTrue(self.parcel_manager.overlay_complete.is_set())
        self.assertDictEqual({1: 36, 2: 16344, 3: 4}, self._get_parcel_areas())

    async def test_request_parcel_properties(self):
        for overlay_msg in self.test_msgs:
            self.handler.handle(overlay_msg)
        req_task = asyncio.create_task(self.parcel_manager.request_dirty_parcels())
        # HACK: Wait for requests to be sent out
        await asyncio.sleep(0.01)
        # Answer each outstanding parcel request with a minimal ParcelProperties.
        for local_id in range(1, 4):
            self.handler.handle(Message(
                "ParcelProperties",
                Block(
                    "ParcelData",
                    LocalID=local_id,
                    SequenceID=local_id,
                    Name=str(local_id),
                    GroupID=UUID.ZERO,
                    ParcelFlags=0,
                ),
            ))
        await soon(req_task)
        self.assertEqual(3, len(self.parcel_manager.parcels))
        self.assertEqual("1", self.parcel_manager.parcels[0].name)

36
tests/client/test_rlv.py Normal file
View File

@@ -0,0 +1,36 @@
import unittest
from hippolyzer.lib.base.message.message import Message, Block
from hippolyzer.lib.base.templates import ChatType
from hippolyzer.lib.client.rlv import RLVParser, RLVCommand
class TestRLV(unittest.TestCase):
    """Tests for RLV chat-command detection, parsing and formatting."""

    def test_is_rlv_command(self):
        msg = Message(
            "ChatFromSimulator",
            Block("ChatData", Message="@foobar", ChatType=ChatType.OWNER)
        )
        self.assertTrue(RLVParser.is_rlv_message(msg))
        # The same "@" text is not RLV when it doesn't come via owner chat.
        msg["ChatData"]["ChatType"] = ChatType.NORMAL
        self.assertFalse(RLVParser.is_rlv_message(msg))

    def test_rlv_parse_single_command(self):
        first = RLVParser.parse_chat("@foo:bar;baz=quux")[0]
        self.assertEqual("foo", first.behaviour)
        self.assertListEqual(["bar", "baz"], first.options)
        self.assertEqual("quux", first.param)

    def test_rlv_parse_multiple_commands(self):
        # Comma-separated commands share one "@" prefix.
        parsed = RLVParser.parse_chat("@foo:bar;baz=quux,bazzy")
        head, tail = parsed[0], parsed[1]
        self.assertEqual("foo", head.behaviour)
        self.assertListEqual(["bar", "baz"], head.options)
        self.assertEqual("quux", head.param)
        self.assertEqual("bazzy", tail.behaviour)

    def test_rlv_format_commands(self):
        commands = [
            RLVCommand("foo", "quux", ["bar", "baz"]),
            RLVCommand("bazzy", "", []),
        ]
        self.assertEqual("@foo:bar;baz=quux,bazzy", RLVParser.format_chat(commands))

View File

@@ -667,7 +667,7 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio
async def test_handle_object_update_event(self):
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated,
) as get_events:
self._create_object(local_id=999)
@@ -676,7 +676,7 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio
async def test_handle_object_update_predicate(self):
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
) as get_events:
self._create_object(local_id=999)
evt = await asyncio.wait_for(get_events(), 1.0)
@@ -684,10 +684,10 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio
async def test_handle_object_update_events_two_subscribers(self):
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
) as get_events:
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
) as get_events2:
self._create_object(local_id=999)
evt = await asyncio.wait_for(get_events(), 1.0)
@@ -697,10 +697,10 @@ class SessionObjectManagerTests(ObjectManagerTestMixin, unittest.IsolatedAsyncio
async def test_handle_object_update_events_two_subscribers_timeout(self):
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
) as get_events:
with self.session.objects.events.subscribe_async(
message_names=(ObjectUpdateType.OBJECT_UPDATE,),
message_names=(ObjectUpdateType.UPDATE,),
) as get_events2:
self._create_object(local_id=999)
evt = asyncio.wait_for(get_events(), 0.01)

0
tests/voice/__init__.py Normal file
View File

289
tests/voice/test_voice.py Normal file
View File

@@ -0,0 +1,289 @@
from typing import *
import asyncio
import unittest
from unittest import mock
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.voice.client import VoiceClient
from hippolyzer.lib.voice.connection import VivoxConnection
def _make_transport(write_func):
transport = mock.Mock()
transport.write.side_effect = write_func
transport.is_closing.return_value = False
return transport
def _make_protocol(transport: Any):
protocol = mock.Mock(transport=transport)
protocol._drain_helper = mock.AsyncMock()
return protocol
class TestVivoxConnection(unittest.IsolatedAsyncioTestCase):
    """Tests VivoxConnection's SLVoice XML framing over mocked streams.

    No real socket is involved: canned payloads are fed straight into a
    StreamReader, and outbound writes land in ``self._writer_buf`` via the
    mock transport.
    """

    async def asyncSetUp(self):
        # Everything the connection writes is captured in this buffer.
        self._writer_buf = bytearray()
        self._transport = _make_transport(self._writer_buf.extend)
        self._protocol = _make_protocol(self._transport)
        self.reader = asyncio.StreamReader()
        self.writer = asyncio.StreamWriter(self._transport, self._protocol, self.reader, asyncio.get_event_loop())
        self.vivox_connection = VivoxConnection(self.reader, self.writer)

    async def test_read_request(self):
        # Messages on the wire are terminated by a triple newline.
        self.reader.feed_data(
            b'<Request requestId="foobar" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
        )
        self.reader.feed_eof()
        msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
        self.assertEqual("Request", msg_type)
        self.assertEqual("Aux.GetRenderDevices.1", msg_action)
        self.assertEqual("foobar", request_id)
        self.assertDictEqual({"Foo": "1"}, body)

    async def test_read_response(self):
        self.reader.feed_data(
            b'<Response requestId="foobar" action="Connector.SetLocalMicVolume.1"><ReturnCode>0</ReturnCode>'
            b'<Results><StatusCode>0</StatusCode><StatusString /></Results>'
            b'<InputXml><Request/></InputXml></Response>\n\n\n'
        )
        self.reader.feed_eof()
        msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
        self.assertEqual("Response", msg_type)
        self.assertEqual("Connector.SetLocalMicVolume.1", msg_action)
        self.assertEqual("foobar", request_id)
        # The echoed <InputXml> element is not part of the parsed body;
        # empty elements come through as None.
        self.assertDictEqual(
            {'ReturnCode': 0, 'Results': {'StatusCode': '0', 'StatusString': None}},
            body,
        )

    async def test_read_event(self):
        self.reader.feed_data(
            b'<Event type="MediaStreamUpdatedEvent"><SessionGroupHandle>4</SessionGroupHandle><SessionHandle>7'
            b'</SessionHandle><StatusCode>0</StatusCode><StatusString/>'
            b'<State>6</State><StateDescription>Connecting</StateDescription><Incoming>false</Incoming>'
            b'<DurableMediaId/></Event>\n\n\n'
        )
        self.reader.feed_eof()
        msg_type, msg_action, request_id, body = await self.vivox_connection.read_message()
        self.assertEqual("Event", msg_type)
        self.assertEqual("MediaStreamUpdatedEvent", msg_action)
        # Events carry no requestId.
        self.assertEqual(None, request_id)
        self.assertDictEqual(
            {
                'DurableMediaId': None,
                'Incoming': 'false',
                'SessionGroupHandle': '4',
                'SessionHandle': '7',
                'State': '6',
                'StateDescription': 'Connecting',
                'StatusCode': '0',
                'StatusString': None,
            },
            body,
        )

    async def test_read_messages(self):
        # Two back-to-back messages should come out of the async iterator in order.
        self.reader.feed_data(
            b'<Request requestId="foobar" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
            b'<Request requestId="quux" action="Aux.GetRenderDevices.1"><Foo>1</Foo></Request>\n\n\n'
        )
        self.reader.feed_eof()
        i = 0
        async for msg in self.vivox_connection.read_messages():
            if i == 0:
                self.assertEqual("foobar", msg.request_id)
            else:
                self.assertEqual("quux", msg.request_id)
            self.assertEqual("Request", msg.type)
            self.assertEqual("Aux.GetRenderDevices.1", msg.name)
            self.assertDictEqual({"Foo": "1"}, msg.data)
            i += 1

    async def test_send_message(self):
        await self.vivox_connection.send_request("foo", "bar", {"baz": 1})
        # Outbound requests use the same triple-newline-terminated XML framing.
        self.assertEqual(
            b'<Request requestId="foo" action="bar"><baz>1</baz></Request>\n\n\n',
            self._writer_buf
        )
class TestVoiceClient(unittest.IsolatedAsyncioTestCase):
    """Drives VoiceClient against an in-process fake SLVoice daemon.

    Two VivoxConnections are cross-wired through mock transports: bytes
    written by the client are fed into the server's reader (and vice versa)
    on the next event-loop iteration, so each test can script the daemon
    side of the conversation.
    """

    async def asyncSetUp(self):
        # Client writes are delivered to the server's reader via call_soon,
        # mimicking a socket pair without any real I/O.
        self._client_transport = _make_transport(
            lambda *args: asyncio.get_event_loop().call_soon(self.server_reader.feed_data, *args)
        )
        self._client_protocol = _make_protocol(self._client_transport)
        self.client_reader = asyncio.StreamReader()
        self.client_writer = asyncio.StreamWriter(
            self._client_transport,
            self._client_protocol,
            self.client_reader,
            asyncio.get_event_loop()
        )
        # Server writes are delivered to the client's reader the same way.
        self._server_transport = _make_transport(
            lambda *args: asyncio.get_event_loop().call_soon(self.client_reader.feed_data, *args)
        )
        self._server_protocol = _make_protocol(self._server_transport)
        self.server_reader = asyncio.StreamReader()
        self.server_writer = asyncio.StreamWriter(
            self._server_transport,
            self._server_protocol,
            self.server_reader,
            asyncio.get_event_loop()
        )
        self.client_connection = VivoxConnection(self.client_reader, self.client_writer)
        self.server_connection = VivoxConnection(self.server_reader, self.server_writer)
        self.client = VoiceClient("127.0.0.1", 0)
        self.client.vivox_conn = self.client_connection

        # Deterministic, monotonically increasing request IDs ("1", "2", ...)
        # so assertions don't depend on the client's default ID scheme.
        def _make_request_id():
            _make_request_id.i += 1
            return str(_make_request_id.i)
        _make_request_id.i = 0
        self.client._make_request_id = _make_request_id

    async def _expect_message(self, name: str):
        """Read one message on the server side and assert its action name is *name*."""
        msg = await self.server_connection.read_message()
        self.assertEqual(name, msg.name)
        return msg

    async def _handle_message(self, name: str):
        """Expect *name*, then acknowledge it with a bare success response."""
        msg = await self._expect_message(name)
        await self.server_connection.send_response(msg.request_id, msg.name, {
            "ReturnCode": 0,
            "Results": {}
        })
        return msg

    async def _do_connector_setup(self):
        """Script the daemon side of initial connector and device setup."""
        async def _serve_connector_setup():
            # The daemon announces itself before the client sends anything.
            await self.server_connection.send_event(
                "VoiceServiceConnectionStateChangedEvent",
                {
                    "Connected": 1,
                    "Platform": "Linux",
                    "Version": 1,
                    "DataDirectory": "/tmp/whatever",
                }
            )
            msg = await self.server_connection.read_message()
            self.assertEqual('Aux.GetCaptureDevices.1', msg.name)
            await self.server_connection.send_response(msg.request_id, msg.name, {
                "ReturnCode": 0,
                "Results": {
                    "StatusCode": 0,
                    "StatusString": None,
                    "CaptureDevices": []
                }
            })
            msg = await self.server_connection.read_message()
            self.assertEqual('Aux.GetRenderDevices.1', msg.name)
            await self.server_connection.send_response(msg.request_id, msg.name, {
                "ReturnCode": 0,
                "Results": {
                    "StatusCode": 0,
                    "StatusString": None,
                    "RenderDevices": []
                }
            })
            # The client then configures audio state in this fixed order.
            await self._handle_message("Connector.MuteLocalSpeaker.1")
            await self._handle_message("Connector.SetLocalSpeakerVolume.1")
            await self._handle_message("Connector.MuteLocalMic.1")
            await self._handle_message("Connector.SetLocalMicVolume.1")
            msg = await self.server_connection.read_message()
            self.assertEqual('Connector.Create.1', msg.name)
            await self.server_connection.send_response(msg.request_id, msg.name, {
                "ReturnCode": 0,
                "Results": {
                    "ConnectorHandle": 2,
                }
            })
        serve_connector_task = asyncio.create_task(_serve_connector_setup())
        await asyncio.wait_for(serve_connector_task, 0.5)
        await asyncio.wait_for(self.client.ready.wait(), 0.5)

    async def _do_login(self):
        """Script a successful Account.Login.1 exchange."""
        async def _serve_login():
            msg = await self._expect_message("Account.Login.1")
            self.assertEqual("foo", msg.data["AccountName"])
            # The state-change event is sent before the login response itself.
            await self.server_connection.send_event("AccountLoginStateChangeEvent", {
                "AccountHandle": 2,
                "StatusCode": 200,
                "StatusString": "OK",
                "State": 1,
            })
            await self.server_connection.send_response(msg.request_id, msg.name, {
                "ReturnCode": 0,
                "Results": {
                    "StatusCode": 0,
                    "StatusString": None,
                    "AccountHandle": 2,
                    "DisplayName": "foo",
                    "Uri": "uri:baz@foo",
                }
            })
        login_task = asyncio.create_task(_serve_login())
        await asyncio.wait_for(self.client.login("foo", "bar"), 0.5)
        await asyncio.wait_for(login_task, 0.5)

    async def _join_session(self):
        """Script session creation plus the local participant joining."""
        async def _serve_session():
            await self._handle_message("Session.Create.1")
            await self.server_connection.send_event("SessionAddedEvent", {
                "SessionHandle": 4,
                "SessionGroupHandle": 5,
            })
            await self.server_connection.send_event("ParticipantAddedEvent", {
                "ParticipantUri": "uri:baz@foo",
            })
        serve_session_task = asyncio.create_task(_serve_session())
        await asyncio.wait_for(self.client.join_session("uri:foo@bar", region_handle=256), 0.5)
        self.assertIn("uri:baz@foo", self.client.participants)
        await asyncio.wait_for(serve_session_task, 0.5)

    async def test_create_connector(self):
        await self._do_connector_setup()

    async def test_login(self):
        await self._do_connector_setup()
        await self._do_login()

    async def test_create_session(self):
        await self._do_connector_setup()
        await self._do_login()
        await self._join_session()

    async def test_set_position(self):
        await self._do_connector_setup()
        await self._do_login()
        await self._join_session()
        handle_3d_pos_task = asyncio.create_task(self._handle_message("Session.Set3DPosition.1"))
        await self.client.set_region_3d_pos(Vector3(1, 2, 3))
        msg = await handle_3d_pos_task
        # The wire position differs from the (1, 2, 3) region input: note the
        # swapped Y/Z and negation below — presumably Vivox's coordinate
        # space plus the region_handle offset; verify against the client impl.
        self.assertDictEqual(
            {'X': '1.0', 'Y': '3.0', 'Z': '-258.0'},
            msg.data["SpeakerPosition"]["Position"],
        )
        self.assertAlmostEqual(self.client.region_pos.X, 1.0)
        self.assertAlmostEqual(self.client.region_pos.Y, 2.0)
        self.assertAlmostEqual(self.client.region_pos.Z, 3.0)
        self.assertAlmostEqual(self.client.global_pos.X, 1.0)
        self.assertAlmostEqual(self.client.global_pos.Y, 3.0)
        self.assertAlmostEqual(self.client.global_pos.Z, -258.0)

View File

@@ -0,0 +1,36 @@
"""
Connect to a voice session at 0, 0, 0 for 20 seconds, then exit.
"""
import asyncio
from contextlib import aclosing
import os
from hippolyzer.lib.base.datatypes import Vector3
from hippolyzer.lib.voice.client import VoiceClient
VOICE_PATH = os.environ["SLVOICE_PATH"]
async def amain():
    """Start SLVoice, join the configured voice session at the origin, idle, then exit."""
    voice = await VoiceClient.simple_init(VOICE_PATH)
    async with aclosing(voice):
        print("Capture Devices:", voice.capture_devices)
        print("Render Devices:", voice.render_devices)
        await voice.set_mic_muted(True)
        await voice.set_mic_volume(60)
        print(await voice.login(os.environ["SLVOICE_USERNAME"], os.environ["SLVOICE_PASSWORD"]))
        await voice.join_session(os.environ["SLVOICE_URI"], int(os.environ["SLVOICE_HANDLE"]))
        await voice.set_region_3d_pos(Vector3(0, 0, 0))
        print(voice.region_pos)
        # leave running for 20 seconds, then exit
        await asyncio.sleep(20.0)
        print("Bye!")
# Script entry point: run the async demo to completion.
if __name__ == "__main__":
    asyncio.run(amain())