Compare commits
169 Commits
Commit SHAs:

a91bc67a43, 48180b85d1, 77d3bf2fe1, d8ec9ee77a, 0b46b95f81, 73e66c56e5, fd2a4d8dce, 2209ebdd0c,
ccfb641cc2, 220d8ddf65, 235bc8e09e, 41fd67577a, 8347b341f5, 9d5599939e, 1fd6decf91, 4ddc6aa852,
ab89f6bc14, cb8c1cfe91, 52679bf708, a21c0439e9, 216ffb3777, d4c30d998d, 003f37c3d3, d64a07c04c,
82b156813b, b71da8f5a4, 5618bcbac1, 24abc36df2, 9ceea8324a, 29653c350f, b03ef1c36b, a2d5414691,
135ce06452, 12862fcd02, 9ab5c8a907, 9652261b67, 3887e0a23c, 84733731fe, 49f7ba960f, f2ee6f789f,
9df0224fbf, 59493e021c, 7b98c0b261, a39d025a04, 908d7a24f1, 0bf1e84da4, 3d8da0af65, abf730cea5,
0a45cd3739, af17525071, 592ac4bec6, 960c8aa905, c1d795e850, 984ac257a5, 9b970f07e5, d6a6fb4a91,
fd747c9615, 69dd1ca9ce, 2c914b43b0, 0d18bc1daa, 626e59f22c, 8c614404d8, 98df182110, c856b5e7fc,
c0e91273fd, e50a00064a, ebc02f9a22, f57087bf6c, 6c6ea66989, 6cc25118b9, 3aa5215587, eb34a945bc,
ccb29f8eeb, bf377ae323, 6df2224be5, 9dbb719d52, 2608a02d5c, eb2c5b7494, a1bbfbf410, 2485831c47,
2e869e9219, c39db7f130, c58d24bd16, aef1261068, 2570269e29, f3c937bf14, 2fab1a0fae, 935e3ccc40,
f5ededcdd7, 237a409ee0, 058b9f5313, fdcb816585, d22fef149b, 9e035e98ba, c9138b4649, 0caba9da68,
b2f0de2db5, 0b0e031091, 4eeac738dc, d9416363b3, 5906140921, 58932e585e, b9f8ce0da2, 67aa5e6bcd,
2a05529ceb, a97aa88cc9, febc0793f2, 141eb3afcd, 517888b1fa, 376b100ed9, 07fbec47e1, 7836527305,
21b18b7a52, 28b09144f2, 1e13fede82, 1bfb719f08, e5b63f7550, 91328ac448, 46dbacd475, 187742c20a,
5eae956750, 37e8f8a20e, b3125f3231, 46fed98d6a, 3b5938cf5c, c7aeb03ea4, ab1bd16b5c, 0412ca5019,
4d238c8dc8, 3bcc510cfd, 0d9593e14c, 28dfe2f1b2, c8f7231eae, 00e9ecb765, 2892bbeb98, 28f57a8836,
943b8b11d5, 88915dd8d7, 60b39e27f8, 8af87befbd, 95e34bb07a, 106eb5c063, e7f88eeed9, d07f100452,
02c212e4a6, 8989843042, a217a30133, 8514d7bae8, d9084c3332, 0f35cc00d5, a6a7ce8fa3, 269a1e163b,
eb2b6ee870, 79a4f72558, 6316369e1a, 1b0272f3b3, aedc2bf48c, 5d3fd69e35, ae464f2c06, 7d303d2bca,
dda3759028, d4e1a7a070, d401842eef, 1e4060f49c, a6c7f996ba, 8fb36892cf, 16c02d8b8c, badd4dbc78,
a63418aaac
```diff
@@ -1,2 +1,10 @@
 [run]
 omit =
+concurrency = multiprocessing
+[report]
+exclude_lines =
+    pragma: no cover
+    if TYPE_CHECKING:
+    if typing.TYPE_CHECKING:
+    def __repr__
+    raise AssertionError
```
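This coverage configuration (presumably `.coveragerc`; the filename isn't shown in the hunk) turns on `concurrency = multiprocessing`, which makes each worker process write its own data file that has to be merged afterwards. A minimal sketch of that flow using coverage.py's Python API (assumed usage, not part of this change):

```python
import coverage

# Load the [run]/[report] sections from the config shown above.
cov = coverage.Coverage(config_file=".coveragerc")
cov.start()
# ... exercise code under test, possibly spawning multiprocessing workers ...
cov.stop()
cov.save()
cov.combine()  # merge the per-process .coverage.* data files into one
cov.report()   # honors the exclude_lines patterns from [report]
```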
46 .github/workflows/bundle_windows.yml (vendored, new file)

```yaml
# Have to manually unzip this (it gets double zipped) and add it
# onto the release after it gets created. Don't want actions with repo write.
name: Bundle Windows EXE

on:
  # Only trigger on release creation
  release:
    types:
      - created
  workflow_dispatch:


jobs:
  build:

    runs-on: windows-latest
    strategy:
      matrix:
        python-version: [3.9]

    steps:
    - uses: actions/checkout@v2

    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install -e .
        pip install cx_freeze

    - name: Bundle with cx_Freeze
      run: |
        python setup_cxfreeze.py build_exe
        pip install pip-licenses
        pip-licenses --format=plain-vertical --with-license-file --no-license-path --output-file=lib_licenses.txt
        python setup_cxfreeze.py finalize_cxfreeze

    - name: Upload the artifact
      uses: actions/upload-artifact@v2
      with:
        name: hippolyzer-gui-windows-${{ github.sha }}
        path: ./dist/**
```
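The build step assumes a `setup_cxfreeze.py` exists in the repo; its contents aren't part of this diff. For orientation only, a generic cx_Freeze setup script has roughly this shape (hypothetical names throughout, not the repository's actual script):

```python
# Hypothetical sketch of a cx_Freeze setup script -- the real setup_cxfreeze.py
# in the repo is not shown in this diff, and the entry-point name is invented.
from cx_Freeze import Executable, setup

setup(
    name="hippolyzer-gui",
    version="0.0.0",  # placeholder
    options={"build_exe": {"build_exe": "dist"}},  # output directory for build_exe
    executables=[
        # base="Win32GUI" suppresses the console window for GUI apps on Windows
        Executable("proxy_gui.py", base="Win32GUI"),
    ],
)
```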
2 .github/workflows/pypi_publish.yml (vendored)

```diff
@@ -6,6 +6,8 @@ on:
   release:
     types:
       - created
+  workflow_dispatch:
+
 
 # based on https://github.com/pypa/gh-action-pypi-publish
```
30 .github/workflows/pytest.yml (vendored)

```diff
@@ -1,6 +1,6 @@
 name: Run Python Tests
 
-on: [push]
+on: [push, pull_request]
 
 jobs:
   build:
@@ -12,16 +12,36 @@ jobs:
 
     steps:
     - uses: actions/checkout@v2
 
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
 
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install flake8 pytest
-        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-    - name: Test with pytest
+        pip install -r requirements.txt
+        pip install -r requirements-test.txt
+        sudo apt-get install libopenjp2-7
+    - name: Run Flake8
      run: |
-        pytest
+        flake8 .
+    - name: Test with pytest
+      # Tests are intentionally covered to detect broken tests.
+      run: |
+        pytest --cov=./hippolyzer --cov=./tests --cov-report=xml
+
+    # Keep this in a workflow without any other secrets in it.
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v1
+      with:
+        token: ${{ secrets.CODECOV_TOKEN }}
+        files: ./coverage.xml
+        directory: ./coverage/reports/
+        flags: unittests
+        env_vars: OS,PYTHON
+        name: codecov-umbrella
+        fail_ci_if_error: false
+        path_to_write_report: ./coverage/codecov_report.txt
+        verbose: false
```
1 .gitignore (vendored)

```diff
@@ -1,6 +1,7 @@
 #use glob syntax
 syntax: glob
 
 __pycache__
 *.pyc
 build/*
+*.egg-info
```
61 README.md

```diff
@@ -1,5 +1,7 @@
 # Hippolyzer
 
+ [](https://codecov.io/gh/SaladDais/Hippolyzer)
+
 [Hippolyzer](http://wiki.secondlife.com/wiki/Hippo) is a fork of Linden Lab's abandoned
 [PyOGP library](http://wiki.secondlife.com/wiki/PyOGP)
 targeting modern Python 3, with a focus on debugging issues in Second Life-compatible
@@ -22,6 +24,9 @@ with low-level SL details. See the [Local Animation addon example](https://githu
 
 
 ## Setup
 
+### From Source
+
 * Python 3.8 or above is **required**. If you're unable to upgrade your system Python package due to
   being on a stable distro, you can use [pyenv](https://github.com/pyenv/pyenv) to create
   a self-contained Python install with the appropriate version.
@@ -32,6 +37,11 @@ with low-level SL details. See the [Local Animation addon example](https://githu
 * * Under Windows it's `<virtualenv_dir>\Scripts\activate.bat`
 * Run `pip install hippolyzer`, or run `pip install -e .` in a cloned repo to install an editable version
 
+### Binary Windows Builds
+
+Binary Windows builds are available on the [Releases page](https://github.com/SaladDais/Hippolyzer/releases/).
+I don't extensively test these; building from source is recommended.
+
 ## Proxy
 
 A proxy is provided with both a CLI and Qt-based interface. The proxy application wraps a
@@ -52,16 +62,27 @@ the [Alchemy](https://github.com/AlchemyViewer/Alchemy) viewer.
   On Linux that would be `~/.firestorm_x64/` if you're using Firestorm.
 * * Certificate validation can be disabled entirely through viewer debug setting `NoVerifySSLCert`,
     but this is not recommended.
 
+#### Windows
+
+Windows viewers have broken SOCKS 5 proxy support. To work around that, you need to use a wrapper EXE that
+can make the viewer correctly talk to Hippolyzer. Follow the instructions on https://github.com/SaladDais/WinHippoAutoProxy
+to start the viewer and run it through Hippolyzer.
+
+The proxy should _not_ be configured through the viewer's own preferences panel; it won't work correctly.
+
+#### OS X & Linux
+
+SOCKS 5 works correctly on these platforms, so you can just configure it through the
+`preferences -> network -> proxy settings` panel:
+
 * Start the viewer and configure it to use `127.0.0.1:9061` as a SOCKS proxy and `127.0.0.1:9062` as
   an HTTP proxy. You **must** select the option in the viewer to use the HTTP proxy for all HTTP
   traffic, or logins will fail.
 * Optionally, if you want to reduce HTTP proxy lag you can have asset requests bypass the HTTP proxy by setting
-  the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm` or
-  `setx /m "no_proxy" "asset-cdn.glb.agni.lindenlab.com"` on Windows.
+  the `no_proxy` env var appropriately. For ex. `no_proxy="asset-cdn.glb.agni.lindenlab.com" ./firestorm`.
 * Log in!
 
 
 
 ### Filtering
 
 By default, the proxy's display filter is configured to ignore many high-frequency messages.
@@ -85,11 +106,14 @@ agent's session, you can do `(Meta.AgentID == None || Meta.AgentID == "d929385f-
 Vectors can also be compared. This will get any ObjectUpdate variant that occurs within a certain range:
 `(*ObjectUpdate*.ObjectData.*Data.Position > (110, 50, 100) && *ObjectUpdate*.ObjectData.*Data.Position < (115, 55, 105))`
 
+If you want to compare against an enum or a flag class defined in `templates.py`, you can just specify its name:
+`ViewerEffect.Effect.Type == ViewerEffectType.EFFECT_BEAM`
+
 ### Logging
 
 Decoded messages are displayed in the log pane; clicking one will show the request and
 response for HTTP messages, and a human-friendly form for UDP messages. Some messages and
-fields have [special packers defined](https://github.com/SaladDais/Hippolyzer/blob/master/hippolyzer/lib/proxy/templates.py)
+fields have [special packers defined](https://github.com/SaladDais/Hippolyzer/blob/master/hippolyzer/lib/base/templates.py)
 that will give a more human-readable form of enum or binary fields, with the original form beside or below it.
 
 For example, an `AgentUpdate` message may show up in the log pane like:
@@ -289,12 +313,8 @@ If you are a viewer developer, please put them in a viewer.
 
 ## Potential Changes
 
-* Make package-able for PyPI
-* GitHub action to build binary packages and pull together licenses bundle
 * AISv3 wrapper?
 * Higher level wrappers for common things? I don't really need these, so only if people want to write them.
-* Highlight matched portion of message in log view, if applicable
-* * Remember deep filters and return a map of them, have message formatter return text ranges?
 * Move things out of `templates.py`, right now most binary serialization stuff lives there
   because it's more convenient for me to hot-reload.
 * Ability to add menus?
@@ -303,10 +323,23 @@ If you are a viewer developer, please put them in a viewer.
 
 [LGPLv3](https://www.gnu.org/licenses/lgpl-3.0.en.html). If you have a good reason why, I might dual license.
 
-This package [includes portions of the Second Life(TM) Viewer Artwork](https://github.com/SaladDais/Hippolyzer/tree/master/hippolyzer/lib/proxy/data),
+This package [includes portions of the Second Life(TM) Viewer Artwork](https://github.com/SaladDais/Hippolyzer/tree/master/hippolyzer/lib/base/data),
 Copyright (C) 2008 Linden Research, Inc. The viewer artwork is licensed under the Creative Commons
 Attribution-Share Alike 3.0 License.
 
+## Contributing
+
+Ensure that any patches are clean with no unnecessary whitespace or formatting changes, and that you
+add new tests for any added functionality.
+
+## Philosophy
+
+With a few notable exceptions, Hippolyzer focuses mainly on decomposition of data, and doesn't
+provide many high-level abstractions for interpreting or manipulating that data. It's careful
+to only do lossless transforms on data that are just prettier representations of the data sent
+over the wire. Hippolyzer's goal is to help people understand how Second Life actually works;
+automatically employing abstractions that hide how SL works is counter to that goal.
+
 ## For Client Developers
 
 This section is mostly useful if you're developing a new SL-compatible client from scratch. Clients based
@@ -320,18 +353,20 @@ UDP proxy and an HTTP proxy.
 To have your client's traffic proxied through Hippolyzer the general flow is:
 
 * Open a TCP connection to Hippolyzer's SOCKS 5 proxy port
-* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS
+* * This should be done once per logical user session, as Hippolyzer assumes a 1:1 mapping of SOCKS TCP
     connections to SL sessions
 * Send a UDP associate command without authentication
 * The proxy will respond with a host / port pair that UDP messages may be sent through
 * At this point you will no longer need to use the TCP connection, but it must be kept
   alive until you want to break the UDP association
 * Whenever you send a UDP packet to a remote host, you'll need to instead send it to the host / port
   from the UDP associate response. A SOCKS 5 header must be prepended to the data indicating the ultimate destination
   of the packet
 * Any received UDP packets will also have a SOCKS 5 header indicating the real source IP and port
 * * When in doubt, check `socks_proxy.py`, `packets.py` and the SOCKS 5 RFC for more info on how to deal with SOCKS.
+* * <https://github.com/SaladDais/WinHippoAutoProxy/blob/master/winhippoautoproxy/socks5udphooker.cpp> is a simple
+    example that wraps around `recvfrom()` and `sendto()` and could be used as a starting point.
 * All HTTP requests must be sent through Hippolyzer's HTTP proxy port.
 * * You may not need to do any extra plumbing to get this to work if your chosen HTTP client
     respects the `HTTP_PROXY` environment variable.
 * All HTTPS connections will be encrypted with the proxy's TLS key. You'll need to either add it to whatever
```
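The UDP-associate flow in the list above maps directly onto RFC 1928. A minimal Python sketch of the handshake and the per-datagram header (an illustration of the RFC, not Hippolyzer's own `socks_proxy.py`; `9061` is the proxy's documented default SOCKS port):

```python
import socket
import struct


def socks5_udp_associate(proxy_host: str = "127.0.0.1", proxy_port: int = 9061):
    """Open the SOCKS 5 control connection and request a UDP association.

    Returns (control_sock, (relay_host, relay_port)). The control socket must
    stay open for as long as the UDP association should live.
    """
    ctrl = socket.create_connection((proxy_host, proxy_port))
    ctrl.sendall(b"\x05\x01\x00")               # ver 5, 1 method: no auth
    assert ctrl.recv(2) == b"\x05\x00"          # server accepted "no auth"
    # CMD=3 (UDP ASSOCIATE), ATYP=1 (IPv4); zero addr/port = "not known yet"
    ctrl.sendall(b"\x05\x03\x00\x01" + socket.inet_aton("0.0.0.0") + struct.pack("!H", 0))
    reply = ctrl.recv(10)                       # VER REP RSV ATYP BND.ADDR BND.PORT
    assert reply[1] == 0, "UDP associate failed"
    relay = (socket.inet_ntoa(reply[4:8]), struct.unpack("!H", reply[8:10])[0])
    return ctrl, relay


def wrap_udp(dest_host: str, dest_port: int, payload: bytes) -> bytes:
    """Prepend the RFC 1928 UDP request header naming the real destination."""
    return (b"\x00\x00"                         # RSV
            + b"\x00"                           # FRAG (no fragmentation)
            + b"\x01"                           # ATYP: IPv4
            + socket.inet_aton(dest_host)
            + struct.pack("!H", dest_port)
            + payload)
```

Received datagrams carry the same header in reverse, naming the real source; strip it before parsing the LLUDP payload.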
```diff
@@ -9,7 +9,7 @@ from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
-class PropertyHelloWorldAddon(BaseAddon):
+class AddonStateHelloWorldAddon(BaseAddon):
     # How to say hello, value shared across sessions and will be the same
     # regardless of which session is active when accessed.
     # "hello_greeting" is added to session_manager.addon_ctx's dict and will survive reloads
@@ -28,7 +28,11 @@ class PropertyHelloWorldAddon(BaseAddon):
         # Shared across sessions and will die if the addon is reloaded
         self.hello_punctuation = "!"
 
-    @handle_command(greeting=Parameter(str, sep=None))
+    @handle_command(
+        # Use the longer-form `Parameter()` for declaring this because
+        # this field should be greedy and take the rest of the message (no separator.)
+        greeting=Parameter(str, sep=None),
+    )
     async def set_hello_greeting(self, _session: Session, _region: ProxiedRegion, greeting: str):
         """Set the greeting to use when saying hello"""
         self.hello_greeting = greeting
@@ -38,7 +42,10 @@ class PropertyHelloWorldAddon(BaseAddon):
         """Set the person to say hello to"""
         self.hello_person = person
 
-    @handle_command(punctuation=Parameter(str, sep=None))
+    @handle_command(
+        # Punctuation should have no whitespace, so using a simple parameter is OK.
+        punctuation=str,
+    )
     async def set_hello_punctuation(self, _session: Session, _region: ProxiedRegion, punctuation: str):
         """Set the punctuation to use for saying hello"""
         self.hello_punctuation = punctuation
@@ -47,8 +54,8 @@ class PropertyHelloWorldAddon(BaseAddon):
     async def say_hello(self, _session: Session, _region: ProxiedRegion):
         """Say hello using the configured hello variables"""
         # These aren't instance properties, they can be accessed via the class as well.
-        hello_person = PropertyHelloWorldAddon.hello_person
+        hello_person = AddonStateHelloWorldAddon.hello_person
         send_chat(f"{self.hello_greeting} {hello_person}{self.hello_punctuation}")
 
 
-addons = [PropertyHelloWorldAddon()]
+addons = [AddonStateHelloWorldAddon()]
```
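The greedy-versus-simple parameter distinction in this hunk mirrors plain string splitting: a `Parameter(str, sep=None)` swallows the rest of the chat message, while a bare `str` takes a single whitespace-delimited token. Illustrated with ordinary Python string handling (not the Hippolyzer API itself):

```python
chat = "set_hello_greeting Good morning to"
command, _, rest = chat.partition(" ")

# Greedy (sep=None): the parameter takes everything after the command name.
greeting = rest                  # -> "Good morning to"

# Simple parameter: one whitespace-delimited token, whitespace not allowed.
punctuation = rest.split()[0]    # -> "Good"
```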
```diff
@@ -4,11 +4,11 @@ All buttons make you go backwards.
 Except for backward, which makes you go left.
 """
 
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.templates import AgentControlFlags
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import AgentControlFlags
 
 
 NUDGE_MASK = sum(x for x in AgentControlFlags if "NUDGE" in x.name)
@@ -19,7 +19,7 @@ BACK_MASK = (AgentControlFlags.AT_NEG | AgentControlFlags.NUDGE_AT_NEG)
 
 
 class BackwardsAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name == "AgentUpdate":
             agent_data_block = message["AgentData"][0]
             flags: AgentControlFlags = agent_data_block.deserialize_var("ControlFlags")
```
```diff
@@ -11,7 +11,7 @@ import secrets
 
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
@@ -41,7 +41,7 @@ class BezosifyAddon(BaseAddon):
         # random value to XOR all CRCs with
         self.bezos_crc_xor = secrets.randbits(32)
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name == "ObjectUpdateCached":
             for block in message["ObjectData"]:
                 # Cached only really has a CRC, this will force the cache miss.
```
```diff
@@ -14,18 +14,17 @@ from typing import *
 from PySide2 import QtCore, QtGui, QtWidgets
 
 from hippolyzer.lib.base.datatypes import Vector3
-from hippolyzer.lib.base.message.message import Block
+from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.base.objects import Object
 from hippolyzer.lib.base.ui_helpers import loadUi
+from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
-from hippolyzer.lib.proxy.packets import Direction
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope
-from hippolyzer.lib.proxy.templates import PCode
 
 
 def _is_color_blueish(color: bytes) -> bool:
@@ -81,7 +80,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
             raise
 
     def _highlight_object(self, session: Session, obj: Object):
-        session.main_region.circuit.send_message(ProxiedMessage(
+        session.main_region.circuit.send_message(Message(
             "ForceObjectSelect",
             Block("Header", ResetList=False),
             Block("Data", LocalID=obj.LocalID),
@@ -89,7 +88,7 @@ class BlueishObjectListGUIAddon(BaseAddon):
         ))
 
     def _teleport_to_object(self, session: Session, obj: Object):
-        session.main_region.circuit.send_message(ProxiedMessage(
+        session.main_region.circuit.send_message(Message(
             "TeleportLocationRequest",
             Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
             Block(
```
```diff
@@ -1,9 +1,9 @@
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
-def handle_lludp_message(session: Session, region: ProxiedRegion, message: ProxiedMessage):
+def handle_lludp_message(session: Session, region: ProxiedRegion, message: Message):
     # addon_ctx will persist across addon reloads, use for storing data that
     # needs to survive across calls to this function
     ctx = session.addon_ctx
```
```diff
@@ -10,13 +10,13 @@ message with a greeting.
 """
 
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
 class CustomMetaExampleAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if not message.name.startswith("ChatFrom"):
             return
```
```diff
@@ -16,8 +16,8 @@ import random
 from hippolyzer.lib.base.message.msgtypes import PacketLayout
 from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import Direction
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
@@ -28,7 +28,7 @@ class PacketMutationAddon(BaseAddon):
     def __init__(self):
         self.serializer = UDPMessageSerializer()
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         # Only inbound messages, don't fiddle with the sim.
         if message.direction != Direction.IN:
             return
```
```diff
@@ -9,23 +9,24 @@ class GreetingAddon(BaseAddon):
     @handle_command()
     async def greetings(self, session: Session, region: ProxiedRegion):
         """Greet everyone around you"""
-        agent_obj = region.objects.lookup_fullid(session.agent_id)
-        if not agent_obj:
+        our_avatar = region.objects.lookup_avatar(session.agent_id)
+        if not our_avatar:
             show_message("Don't have an agent object?")
 
-        # Note that this will only have avatars closeish to your camera. The sim sends
-        # KillObjects for avatars that get too far away.
-        other_agents = [o for o in region.objects.all_avatars if o.FullID != agent_obj.FullID]
+        # Look this up in the session object store since we may be next
+        # to a region border.
+        other_avatars = [o for o in session.objects.all_avatars if o.FullID != our_avatar.FullID]
 
-        if not other_agents:
-            show_message("No other agents?")
+        if not other_avatars:
+            show_message("No other avatars?")
 
-        for other_agent in other_agents:
-            dist = Vector3.dist(agent_obj.Position, other_agent.Position)
+        for other_avatar in other_avatars:
+            dist = Vector3.dist(our_avatar.GlobalPosition, other_avatar.GlobalPosition)
             if dist >= 19.0:
                 continue
-            nv = other_agent.NameValue.to_dict()
-            send_chat(f"Greetings, {nv['FirstName']} {nv['LastName']}!")
+            if other_avatar.PreferredName is None:
+                continue
+            send_chat(f"Greetings, {other_avatar.PreferredName}!")
 
 
 addons = [GreetingAddon()]
```
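The switch from `Position` to `GlobalPosition` matters near region borders: region-local coordinates restart at each 256 m sim boundary, so distances between avatars in different regions are only meaningful in grid-global coordinates. The underlying arithmetic, sketched under that assumed convention (not Hippolyzer's implementation):

```python
REGION_WIDTH = 256.0  # Second Life regions are 256m x 256m


def to_global(region_grid_x: int, region_grid_y: int, local_pos: tuple) -> tuple:
    """Region-local -> grid-global: add the region's corner offset."""
    x, y, z = local_pos
    return (region_grid_x * REGION_WIDTH + x, region_grid_y * REGION_WIDTH + y, z)


# Two avatars 10m apart across a region border: local X coordinates (251 vs 5)
# are incomparable, global X coordinates (251 vs 261) are not.
ours = to_global(0, 0, (251.0, 10.0, 25.0))    # -> (251.0, 10.0, 25.0)
theirs = to_global(1, 0, (5.0, 10.0, 25.0))    # -> (261.0, 10.0, 25.0)
```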
```diff
@@ -2,11 +2,11 @@
 Drop outgoing packets that might leak what you're looking at, similar to Firestorm
 """
 
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import Direction
+from hippolyzer.lib.base.templates import ViewerEffectType
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import ViewerEffectType
 
 
 BLOCKED_EFFECTS = (
@@ -17,7 +17,7 @@ BLOCKED_EFFECTS = (
 )
 
 
-def handle_lludp_message(_session: Session, region: ProxiedRegion, msg: ProxiedMessage):
+def handle_lludp_message(_session: Session, region: ProxiedRegion, msg: Message):
     if msg.name == "ViewerEffect" and msg.direction == Direction.OUT:
         new_blocks = [b for b in msg["Effect"] if b["Type"] not in BLOCKED_EFFECTS]
         if new_blocks:
```
```diff
@@ -13,10 +13,10 @@ from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.llanim import Animation
 from hippolyzer.lib.proxy.addon_utils import AssetAliasTracker, BaseAddon, GlobalProperty
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.vfs import STATIC_VFS
+from hippolyzer.lib.base.vfs import STATIC_VFS
 
 
 JOINT_REPLS = {
@@ -53,7 +53,7 @@ class HorrorAnimatorAddon(BaseAddon):
         # We've reloaded, so make sure assets get new aliases
         self.horror_anim_tracker.invalidate_aliases()
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         tracker = self.horror_anim_tracker
 
         if message.name == "AvatarAnimation":
```
```diff
@@ -16,12 +16,11 @@ import pathlib
 from typing import *
 
 from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.message.message import Block
+from hippolyzer.lib.base.message.message import Block, Message
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
-from hippolyzer.lib.proxy.message import ProxiedMessage
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
@@ -101,7 +100,7 @@ class LocalAnimAddon(BaseAddon):
                              anim_name: str, new_data: Optional[bytes] = None):
         asset_repo: HTTPAssetRepo = session.session_manager.asset_repo
         next_id: Optional[UUID] = None
-        new_msg = ProxiedMessage(
+        new_msg = Message(
             "AgentAnimation",
             Block(
                 "AgentData",
```
```diff
@@ -23,23 +23,22 @@ import ctypes
 import secrets
 from typing import *
 
-import mitmproxy
-from mitmproxy.http import HTTPFlow
+import mitmproxy.http
 
 from hippolyzer.lib.base import llsd
 from hippolyzer.lib.base.datatypes import *
 from hippolyzer.lib.base.mesh import LLMeshSerializer, MeshAsset
 from hippolyzer.lib.base import serialization as se
 from hippolyzer.lib.base.objects import Object
+from hippolyzer.lib.base.templates import ExtraParamType
 from hippolyzer.lib.proxy import addon_ctx
 from hippolyzer.lib.proxy.addon_utils import show_message, BaseAddon, GlobalProperty, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.templates import ExtraParamType
 
 
 def _modify_crc(crc_tweak, crc_val):
@@ -126,7 +125,7 @@ class MeshUploadInterceptingAddon(BaseAddon):
         region.objects.request_objects(old_locals)
         show_message(f"Cleared target {old_locals}")
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         # Replace any mesh asset IDs in tracked objects with our local assets
         if not self.local_mesh_target_locals:
             return
```
```diff
@@ -27,16 +27,32 @@ from mitmproxy.http import HTTPFlow
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.jp2_utils import BufferedJp2k
 from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher
+from hippolyzer.lib.base.templates import TextureEntry
 from hippolyzer.lib.proxy.addon_utils import AssetAliasTracker, BaseAddon, GlobalProperty, AddonProcess
 from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session, SessionManager
-from hippolyzer.lib.proxy.templates import TextureEntry
 
 
 glymur.set_option('lib.num_threads', 4)
 
+# These should never be replaced, they're only used as aliases to tell the viewer
+# it should fetch the relevant texture from the appearance service
+BAKES_ON_MESH_TEXTURE_IDS = {UUID(x) for x in (
+    "5a9f4a74-30f2-821c-b88d-70499d3e7183",
+    "ae2de45c-d252-50b8-5c6e-19f39ce79317",
+    "24daea5f-0539-cfcf-047f-fbc40b2786ba",
+    "52cc6bb6-2ee5-e632-d3ad-50197b1dcb8a",
+    "43529ce8-7faa-ad92-165a-bc4078371687",
+    "09aac1fb-6bce-0bee-7d44-caac6dbb6c63",
+    "ff62763f-d60a-9855-890b-0c96f8f8cd98",
+    "8e915e25-31d1-cc95-ae08-d58a47488251",
+    "9742065b-19b5-297c-858a-29711d539043",
+    "03642e83-2bd1-4eb9-34b4-4c47ed586d2d",
+    "edd51b77-fc10-ce7a-4b3d-011dfc349e4f",
+)}
+
 
 def _modify_crc(crc_tweak: int, crc_val: int):
     return ctypes.c_uint32(crc_val ^ crc_tweak).value
@@ -82,7 +98,7 @@ class MonochromeAddon(BaseAddon):
         # Tell queue consumers to shut down
         self.mono_addon_shutdown_signal.set()
 
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         tracker = self.mono_tracker
         if message.name == "ObjectUpdateCached":
             for block in message["ObjectData"]:
@@ -137,6 +153,8 @@ class MonochromeAddon(BaseAddon):
             # and we don't want to change the canonical view.
             parsed_te = copy.deepcopy(parsed_te)
             for k, v in parsed_te.Textures.items():
+                if v in BAKES_ON_MESH_TEXTURE_IDS:
+                    continue
                 # Replace textures with their alias to bust the viewer cache
                 parsed_te.Textures[k] = tracker.get_alias_uuid(v)
             for k, v in parsed_te.Color.items():
@@ -166,6 +184,8 @@ class MonochromeAddon(BaseAddon):
         orig_texture_id = self.mono_tracker.get_orig_uuid(UUID(texture_id))
         if not orig_texture_id:
             return
+        if orig_texture_id in BAKES_ON_MESH_TEXTURE_IDS:
+            return
 
         # The request was for a fake texture ID we created, rewrite the request to
         # request the real asset and mark the flow for modification once we receive
```
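The cache-busting pattern in this hunk (hand the viewer a fake UUID per real texture, remember the mapping, and reverse it when the viewer requests the fake ID) is easy to state in isolation. A bare-bones sketch of that idea, not Hippolyzer's actual `AssetAliasTracker`:

```python
import uuid
from typing import Dict, Optional


class AliasTracker:
    """Map real asset UUIDs to throwaway aliases and back."""

    def __init__(self) -> None:
        self._to_alias: Dict[uuid.UUID, uuid.UUID] = {}
        self._to_orig: Dict[uuid.UUID, uuid.UUID] = {}

    def get_alias_uuid(self, orig: uuid.UUID) -> uuid.UUID:
        # Hand out a stable fake ID so the viewer's cache always misses.
        if orig not in self._to_alias:
            alias = uuid.uuid4()
            self._to_alias[orig] = alias
            self._to_orig[alias] = orig
        return self._to_alias[orig]

    def get_orig_uuid(self, alias: uuid.UUID) -> Optional[uuid.UUID]:
        # Reverse lookup used when rewriting the viewer's texture requests.
        return self._to_orig.get(alias)
```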
```diff
@@ -11,11 +11,11 @@ from typing import *
 
 from hippolyzer.lib.base.datatypes import UUID
 from hippolyzer.lib.base.objects import Object
+from hippolyzer.lib.base.templates import PCode
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message, SessionProperty
 from hippolyzer.lib.proxy.commands import handle_command
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import PCode
 
 
 class ObjectUpdateBlameAddon(BaseAddon):
```
```diff
@@ -3,16 +3,15 @@ Do the money dance whenever someone in the sim pays you directly
 """
 
 from hippolyzer.lib.base.datatypes import UUID
-from hippolyzer.lib.base.message.message import Block
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Block, Message
+from hippolyzer.lib.base.templates import MoneyTransactionType, ChatType
 from hippolyzer.lib.proxy.addon_utils import send_chat, BaseAddon
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import MoneyTransactionType, PCode, ChatType
 
 
 class PaydayAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name != "MoneyBalanceReply":
             return
         transaction_block = message["TransactionInfo"][0]
@@ -28,8 +27,8 @@ class PaydayAddon(BaseAddon):
             return
 
         # Check if they're likely to be in the sim
-        sender_obj = region.objects.lookup_fullid(sender)
-        if not sender_obj or sender_obj.PCode != PCode.AVATAR:
+        sender_obj = region.objects.lookup_avatar(sender)
+        if not sender_obj:
             return
 
         amount = transaction_block['Amount']
@@ -38,7 +37,7 @@ class PaydayAddon(BaseAddon):
             chat_type=ChatType.SHOUT,
         )
         # Do the traditional money dance.
-        session.main_region.circuit.send_message(ProxiedMessage(
+        session.main_region.circuit.send_message(Message(
             "AgentAnimation",
             Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
             Block("AnimationList", AnimID=UUID("928cae18-e31d-76fd-9cc9-2f55160ff818"), StartAnim=True),
```
161 addon_examples/pixel_artist.py (new file)

```python
"""
Import a small image (like a nintendo sprite) and create it out of cube prims

Inefficient and doesn't even do line fill, expect it to take `width * height`
prims for whatever image you import!
"""

import asyncio
import struct
from typing import *

from PySide2.QtGui import QImage

from hippolyzer.lib.base.datatypes import UUID, Vector3, Quaternion
from hippolyzer.lib.base.helpers import to_chunks
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import ObjectUpdateFlags, PCode, MCode, MultipleObjectUpdateFlags, TextureEntry
from hippolyzer.lib.client.object_manager import ObjectEvent, UpdateType
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session


JUST_CREATED_FLAGS = (ObjectUpdateFlags.CREATE_SELECTED | ObjectUpdateFlags.OBJECT_YOU_OWNER)
PRIM_SCALE = 0.2


class PixelArtistAddon(BaseAddon):
    @handle_command()
    async def import_pixel_art(self, session: Session, region: ProxiedRegion):
        """
        Import a small image (like a nintendo sprite) and create it out of cube prims
        """
        filename = await AddonManager.UI.open_file(
            "Open an image",
            filter_str="Images (*.png *.jpg *.jpeg *.bmp)",
        )
        if not filename:
            return
        img = QImage()
        with open(filename, "rb") as f:
            img.loadFromData(f.read(), aformat=None)
        img = img.convertToFormat(QImage.Format_RGBA8888)
        height = img.height()
        width = img.width()
        pixels: List[Optional[bytes]] = []
        needed_prims = 0
        for y in range(height):
            for x in range(width):
                color: int = img.pixel(x, y)
                # This will be ARGB, SL wants RGBA
                alpha = (color & 0xFF000000) >> 24
                color = color & 0x00FFFFFF
                if alpha > 20:
                    # Repack RGBA to the bytes format we use for colors
                    pixels.append(struct.pack("!I", (color << 8) | alpha))
                    needed_prims += 1
                else:
                    # Pretty transparent, skip it
                    pixels.append(None)

        if not await AddonManager.UI.confirm("Confirm prim use", f"This will take {needed_prims} prims"):
            return

        agent_obj = region.objects.lookup_fullid(session.agent_id)
        agent_pos = agent_obj.RegionPosition

        created_prims = []
        # Watch for any newly created prims, this is basically what the viewer does to find
        # prims that it just created with the build tool.
        with session.objects.events.subscribe_async(
                (UpdateType.OBJECT_UPDATE,),
                predicate=lambda e: e.object.UpdateFlags & JUST_CREATED_FLAGS and "LocalID" in e.updated
        ) as get_events:
            # Create a pool of prims to use for building the pixel art
            for _ in range(needed_prims):
                # TODO: We don't track the land group or user's active group, so
                #  "anyone can build" must be on for rezzing to work.
                group_id = UUID()
                region.circuit.send_message(Message(
                    'ObjectAdd',
                    Block('AgentData', AgentID=session.agent_id, SessionID=session.id, GroupID=group_id),
                    Block(
                        'ObjectData',
                        PCode=PCode.PRIMITIVE,
                        Material=MCode.WOOD,
                        AddFlags=ObjectUpdateFlags.CREATE_SELECTED,
                        PathCurve=16,
                        ProfileCurve=1,
                        PathScaleX=100,
                        PathScaleY=100,
                        BypassRaycast=1,
                        RayStart=agent_obj.RegionPosition + Vector3(0, 0, 2),
                        RayEnd=agent_obj.RegionPosition + Vector3(0, 0, 2),
                        RayTargetID=UUID(),
                        RayEndIsIntersection=0,
                        Scale=Vector3(PRIM_SCALE, PRIM_SCALE, PRIM_SCALE),
                        Rotation=Quaternion(0.0, 0.0, 0.0, 1.0),
                        fill_missing=True,
                    ),
                ))
                # Don't spam a ton of creates at once
                await asyncio.sleep(0.02)

            # Read any creation events that queued up while we were creating the objects
            # So we can figure out the newly-created objects' IDs
            for _ in range(needed_prims):
                evt: ObjectEvent = await asyncio.wait_for(get_events(), 1.0)
                created_prims.append(evt.object)

        # Drawing origin starts at the top left, should be positioned just above the
        # avatar on Z and centered on Y.
        top_left = Vector3(0, (width * PRIM_SCALE) * -0.5, (height * PRIM_SCALE) + 2.0) + agent_pos
        positioning_blocks = []
        prim_idx = 0
        for i, pixel_color in enumerate(pixels):
            # Transparent, skip
            if pixel_color is None:
                continue
            x = i % width
            y = i // width
            obj = created_prims[prim_idx]
            # Set a blank texture on all faces
            te = TextureEntry()
            te.Textures[None] = UUID('5748decc-f629-461c-9a36-a35a221fe21f')
            # Set the prim color to the color from the pixel
            te.Color[None] = pixel_color
            # Set the prim texture and color
            region.circuit.send_message(Message(
                'ObjectImage',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                Block('ObjectData', ObjectLocalID=obj.LocalID, MediaURL=b'', TextureEntry_=te),
                direction=Direction.OUT,
            ))
            # Save the repositioning data for later since it uses a different message,
            # but it can be set in batches.
            positioning_blocks.append(Block(
                'ObjectData',
                ObjectLocalID=obj.LocalID,
                Type=MultipleObjectUpdateFlags.POSITION,
                Data_={'POSITION': top_left + Vector3(0, x * PRIM_SCALE, y * -PRIM_SCALE)},
            ))
            await asyncio.sleep(0.01)
            # We actually used a prim for this, so increment the index
            prim_idx += 1

        # Move the "pixels" to their correct position in chunks
        for chunk in to_chunks(positioning_blocks, 25):
            region.circuit.send_message(Message(
                'MultipleObjectUpdate',
                Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
                *chunk,
                direction=Direction.OUT,
            ))
            await asyncio.sleep(0.01)


addons = [PixelArtistAddon()]
```
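The ARGB-to-RGBA repacking above is the fiddliest line in the file, and it can be sanity-checked in isolation: `QImage.pixel()` returns `0xAARRGGBB`, and the pack shifts RGB up a byte and appends alpha.

```python
import struct

color = 0xFFFF0000                    # opaque red as QImage.pixel() returns it (AARRGGBB)
alpha = (color & 0xFF000000) >> 24    # 0xFF
rgb = color & 0x00FFFFFF              # 0xFF0000
packed = struct.pack("!I", (rgb << 8) | alpha)
assert packed == b"\xff\x00\x00\xff"  # big-endian R, G, B, A
```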
151 addon_examples/recapitator.py (new file)

```python
"""
Recapitator addon, merges a base head shape into body shapes.

Only works if both the base shapes and shapes you need to edit are modify.

Useful if you switch heads a lot. Most heads come with a base shape you
have to start from if you don't want the head to look like garbage. If you
have an existing shape for your body, you have to write down all the values
of the base shape's head sliders and edit them onto your body shapes.

This addon does basically the same thing by intercepting shape uploads. After
enabling recapitation, you save the base head shape once. Then the next time you
edit and save a body shape, it will be saved with the head sliders from your base
shape.
"""
import logging
from typing import *

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import AssetType, WearableType
from hippolyzer.lib.base.wearables import Wearable, VISUAL_PARAMS
from hippolyzer.lib.proxy.addon_utils import BaseAddon, SessionProperty, AssetAliasTracker, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager


# Get all VisualParam IDs that belong to head sliders
HEAD_EDIT_GROUPS = ("shape_head", "shape_eyes", "shape_ears", "shape_nose", "shape_mouth", "shape_chin")
HEAD_PARAM_IDS = [v.id for v in VISUAL_PARAMS if v.edit_group in HEAD_EDIT_GROUPS]


class RecapitatorAddon(BaseAddon):
    transaction_remappings: AssetAliasTracker = SessionProperty(AssetAliasTracker)
    recapitating: bool = SessionProperty(bool)
    recapitation_mappings: Dict[int, float] = SessionProperty(dict)

    @handle_command()
    async def enable_recapitation(self, _session: Session, _region: ProxiedRegion):
        """Apply base head shape when saving subsequent shapes"""
        self.recapitating = True
        self.recapitation_mappings.clear()
        show_message("Recapitation enabled, wear the base shape containing the head parameters and save it.")

    @handle_command()
    async def disable_recapitation(self, _session: Session, _region: ProxiedRegion):
        self.recapitating = False
        show_message("Recapitation disabled")

    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if not self.recapitating:
            return
        if message.direction != Direction.OUT:
            return
        if message.name != "AssetUploadRequest":
            return
        if message["AssetBlock"]["Type"] != AssetType.BODYPART:
            return

        # Pending asset upload for a bodypart asset. Take the message and request
        # it from the client ourself so we can see what it wants to upload
        new_message = message.take()
        self._schedule_task(self._proxy_bodypart_upload(session, region, new_message))
        return True

    async def _proxy_bodypart_upload(self, session: Session, region: ProxiedRegion, message: Message):
        asset_block = message["AssetBlock"]
        # Asset will already be in the viewer's VFS as the expected asset ID, calculate it.
        asset_id = session.transaction_to_assetid(asset_block["TransactionID"])
        success = False
        try:
            # Xfer the asset from the viewer if it wasn't small enough to fit in AssetData
            if asset_block["AssetData"]:
                asset_data = asset_block["AssetData"]
            else:
                xfer = await region.xfer_manager.request(
                    vfile_id=asset_id,
                    vfile_type=AssetType.BODYPART,
                    direction=Direction.IN,
                )
                asset_data = xfer.reassemble_chunks()

            wearable = Wearable.from_bytes(asset_data)
            # If they're uploading a shape, process it.
            if wearable.wearable_type == WearableType.SHAPE:
                if self.recapitation_mappings:
                    # Copy our previously saved head params over
                    for key, value in self.recapitation_mappings.items():
                        wearable.parameters[key] = value
                    # Upload the changed version
                    asset_data = wearable.to_bytes()
                    show_message("Recapitated shape")
                else:
                    # Don't have a recapitation mapping yet, use this shape as the base.
                    for param_id in HEAD_PARAM_IDS:
                        self.recapitation_mappings[param_id] = wearable.parameters[param_id]
                    show_message("Got base parameters for recapitation, head parameters will be copied")

            # Upload it ourselves with a new transaction ID that can be traced back to
            # the original. This is important because otherwise the viewer will use its
            # own cached version of the shape, under the assumption it wasn't modified
            # during upload.
            new_transaction_id = self.transaction_remappings.get_alias_uuid(
                asset_block["TransactionID"]
            )
            await region.xfer_manager.upload_asset(
                asset_type=AssetType.BODYPART,
                data=asset_data,
                transaction_id=new_transaction_id,
            )
            success = True
        except:
            logging.exception("Exception while recapitating")
        # Tell the viewer about the status of its original upload
        region.circuit.send_message(Message(
            "AssetUploadComplete",
            Block("AssetBlock", UUID=asset_id, Type=asset_block["Type"], Success=success),
            direction=Direction.IN,
        ))

    def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
        # Skip requests that aren't related to patching an existing item
        if flow.cap_data.cap_name != "InventoryAPIv3":
            return
        if flow.request.method != "PATCH":
            return
        if "/item/" not in flow.request.url:
            return

        parsed = llsd.parse_xml(flow.request.content)
        if parsed.get("type") != "bodypart":
            return
        # `hash_id` being present means we're updating the item to point to a newly
        # uploaded asset. It's actually a transaction ID.
        transaction_id: Optional[UUID] = parsed.get("hash_id")
        if not transaction_id:
            return
        # We have an original transaction ID, do we need to remap it to an alias ID?
        orig_id = self.transaction_remappings.get_alias_uuid(transaction_id, create=False)
        if not orig_id:
            return

        parsed["hash_id"] = orig_id
        flow.request.content = llsd.format_xml(parsed)


addons = [RecapitatorAddon()]
```
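`session.transaction_to_assetid()` leans on a Second Life convention: the asset ID of an uploaded asset is derived from the upload's transaction ID combined with the agent's secure session ID, an MD5 over both UUIDs' raw bytes. Sketched below under that assumption; the helper name and exact derivation should be checked against Hippolyzer's source:

```python
import hashlib
import uuid


def transaction_to_asset_id(transaction_id: uuid.UUID, secure_session_id: uuid.UUID) -> uuid.UUID:
    """Assumed derivation: MD5 over the concatenated raw bytes of both UUIDs.
    This is why the proxy can predict the asset ID before the sim confirms it."""
    digest = hashlib.md5(transaction_id.bytes + secure_session_id.bytes).digest()
    return uuid.UUID(bytes=digest)
```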
```diff
@@ -1,12 +1,12 @@
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
 
 class REPLExampleAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.name == "ChatFromViewer":
             chat_msg = message["ChatData"]["Message"]
             if not chat_msg:
```
```diff
@@ -15,8 +15,8 @@ from hippolyzer.lib.base import serialization as se
 from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
 from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
 from hippolyzer.lib.proxy.addon_utils import BaseAddon
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import ProxiedUDPPacket
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import UDPPacket
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import SessionManager, Session
 
@@ -28,11 +28,12 @@ class SerializationSanityChecker(BaseAddon):
         self.serializer = UDPMessageSerializer()
         self.deserializer = UDPMessageDeserializer()
 
-    def handle_proxied_packet(self, session_manager: SessionManager, packet: ProxiedUDPPacket,
-                              session: Optional[Session], region: Optional[ProxiedRegion],
-                              message: Optional[ProxiedMessage]):
-        # Well this doesn't even parse as a message, can't do anything about it.
-        if message is None:
+    def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,
+                              session: Optional[Session], region: Optional[ProxiedRegion]):
+        try:
+            message = self.deserializer.deserialize(packet.data)
+        except:
+            LOG.error(f"Received unparseable message from {packet.src_addr!r}: {packet.data!r}")
             return
         try:
@@ -63,7 +64,7 @@ class SerializationSanityChecker(BaseAddon):
         except:
             LOG.exception(f"Exception during message validation:\n{message!r}")
 
-    def _roundtrip_var_serializers(self, message: ProxiedMessage):
+    def _roundtrip_var_serializers(self, message: Message):
         for block in itertools.chain(*message.blocks.values()):
             for var_name in block.vars.keys():
                 orig_val = block[var_name]
```
```diff
@@ -1,18 +1,17 @@
 """Block potentially bad things"""
+from hippolyzer.lib.base.templates import IMDialogType, XferFilePath
 from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.packets import Direction
+from hippolyzer.lib.base.message.message import Message
+from hippolyzer.lib.base.network.transport import Direction
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import IMDialogType
 
-SUSPICIOUS_PACKETS = {"RequestXfer", "TransferRequest", "UUIDNameRequest",
-                      "UUIDGroupNameRequest", "OpenCircuit"}
+SUSPICIOUS_PACKETS = {"TransferRequest", "UUIDNameRequest", "UUIDGroupNameRequest", "OpenCircuit"}
 REGULAR_IM_DIALOGS = (IMDialogType.TYPING_STOP, IMDialogType.TYPING_STOP, IMDialogType.NOTHING_SPECIAL)
 
 
 class ShieldAddon(BaseAddon):
-    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
+    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
         if message.direction != Direction.IN:
             return
         if message.name in SUSPICIOUS_PACKETS:
@@ -29,6 +28,13 @@ class ShieldAddon(BaseAddon):
         else:
             expected_id = from_agent ^ session.agent_id
         msg_block["ID"] = expected_id
+        if message.name == "RequestXfer":
+            xfer_block = message["XferID"][0]
+            # Don't allow Xfers for files, only assets
+            if xfer_block["FilePath"] != XferFilePath.NONE or xfer_block["Filename"]:
+                show_message(f"Blocked suspicious {message.name} packet")
+                region.circuit.drop_message(message)
+                return True
 
 
 addons = [ShieldAddon()]
```
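The `from_agent ^ session.agent_id` line relies on another SL convention: a one-to-one IM session ID is the XOR of the two participants' agent UUIDs, so either side can recompute it, and the addon can detect spoofed session IDs. With standard-library UUIDs that looks like:

```python
import uuid


def im_session_id(agent_a: uuid.UUID, agent_b: uuid.UUID) -> uuid.UUID:
    """XOR of both agent IDs; symmetric, so both parties derive the same value."""
    return uuid.UUID(int=agent_a.int ^ agent_b.int)

# XORing a session ID with one participant's ID recovers the other participant,
# which is what the addon's `expected_id` check exploits.
```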
```diff
@@ -1,6 +1,6 @@
 import itertools
 
-from hippolyzer.lib.proxy.message import ProxiedMessage
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.proxy.region import ProxiedRegion
 from hippolyzer.lib.proxy.sessions import Session
 
@@ -12,7 +12,7 @@ def _to_spongecase(val):
     return "".join(itertools.chain(*spongecased))
 
 
-def handle_lludp_message(session: Session, _region: ProxiedRegion, message: ProxiedMessage):
+def handle_lludp_message(session: Session, _region: ProxiedRegion, message: Message):
     ctx = session.addon_ctx
     ctx.setdefault("spongecase", False)
     if message.name == "ChatFromViewer":
```
```diff
@@ -4,13 +4,8 @@ Example of how to request a Transfer
 from typing import *
 
 from hippolyzer.lib.base.legacy_inv import InventoryModel, InventoryItem
-from hippolyzer.lib.base.message.message import Block
-from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
-from hippolyzer.lib.proxy.commands import handle_command
-from hippolyzer.lib.proxy.message import ProxiedMessage
-from hippolyzer.lib.proxy.region import ProxiedRegion
-from hippolyzer.lib.proxy.sessions import Session
-from hippolyzer.lib.proxy.templates import (
+from hippolyzer.lib.base.message.message import Block, Message
+from hippolyzer.lib.base.templates import (
     AssetType,
     EstateAssetType,
     TransferRequestParamsSimEstate,
@@ -18,6 +13,10 @@ from hippolyzer.lib.proxy.templates import (
     TransferSourceType,
     XferFilePath,
 )
+from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
+from hippolyzer.lib.proxy.commands import handle_command
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session
 
 
 class TransferExampleAddon(BaseAddon):
@@ -36,12 +35,12 @@ class TransferExampleAddon(BaseAddon):
     async def get_first_script(self, session: Session, region: ProxiedRegion):
         """Get the contents of the first script in the selected object"""
         # Ask for the object inventory so we can find a script
-        region.circuit.send_message(ProxiedMessage(
+        region.circuit.send_message(Message(
             'RequestTaskInventory',
             Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
             Block('InventoryData', LocalID=session.selected.object_local),
         ))
-        inv_message = await region.message_handler.wait_for('ReplyTaskInventory', timeout=5.0)
+        inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)
 
         # Xfer the inventory file and look for a script
         xfer = await region.xfer_manager.request(
```
105  addon_examples/turbo_object_inventory.py  Normal file
@@ -0,0 +1,105 @@
"""
Speed up outbound object inventory listing requests
by 20x at the cost of potentially failing to request some due to
dropped packets.

Useful for builders working on objects with very large inventories that
change very often.

Object Inventory transfers use the Xfer system. Xfers have their own,
terrible reliability system that probably pre-dates LLUDP reliability.
Each packet has to be ACKed before the far end will send the next packet.
Each packet can be around 1200 bytes and will fit 1.5 inventory items worth of data.

Let's say your sim ping is 100 ms. Because each packet needs to be ACKed
before the next will be sent, it'll take around `num_items * 100 / 1.5`
milliseconds before you receive the full inventory list of an object.
That means for an object with 300 items, it'll take about 20 seconds
for you to download the full inventory, and those downloads are triggered
every time the inventory is changed.

By faking ACKs for packets we haven't received yet, we can trick the server
into sending us packets much faster than it would otherwise. The only problem
is that if an inbound SendXferPacket gets lost after we faked an ACK for it,
we have no way to re-request it. The Xfer will just fail. The viewer will also
drop any out-of-order xfer packets, so packet re-ordering is a problem.

To deal with that, the proxy attempts its own Xfers using all the chunks
from the previous attempts before sending a final, reconstructed Xfer
to the viewer.
"""

import asyncio
from typing import *

from hippolyzer.lib.base.templates import XferFilePath
from hippolyzer.lib.proxy.addon_utils import BaseAddon
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.base.xfer_manager import Xfer


class TurboObjectInventoryAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if message.direction != Direction.OUT:
            return
        if message.name != "RequestTaskInventory":
            return

        self._schedule_task(self._proxy_task_inventory_request(region, message.take()))
        return True

    async def _proxy_task_inventory_request(
            self,
            region: ProxiedRegion,
            request_msg: Message
    ):
        # Keep around a dict of chunks we saw previously in case we have to restart
        # an Xfer due to missing chunks. We don't expect chunks to change across Xfers
        # so this can be used to recover from dropped SendXferPackets in subsequent attempts
        existing_chunks: Dict[int, bytes] = {}
        for i in range(3):
            # Any previous requests will have triggered a delete of the inventory file
            # by marking it complete on the server-side. Re-send our RequestTaskInventory
            # to make sure there's a fresh copy.
            region.circuit.send_message(request_msg.take())
            inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)
            # No task inventory, send the reply as-is
            file_name = inv_message["InventoryData"]["Filename"]
            if not file_name:
                region.circuit.send_message(inv_message)
                return

            xfer = region.xfer_manager.request(
                file_name=file_name,
                file_path=XferFilePath.CACHE,
                turbo=True,
            )
            xfer.chunks.update(existing_chunks)
            try:
                await xfer
            except asyncio.TimeoutError:
                # We likely failed the request due to missing chunks, store
                # the chunks that we _did_ get for the next attempt.
                existing_chunks.update(xfer.chunks)
                continue

            # Send the original ReplyTaskInventory to the viewer so it knows the file is ready
            region.circuit.send_message(inv_message)
            proxied_xfer = Xfer(data=xfer.reassemble_chunks())

            # Wait for the viewer to request the inventory file
            await region.xfer_manager.serve_inbound_xfer_request(
                xfer=proxied_xfer,
                request_predicate=lambda x: x["XferID"]["Filename"] == file_name,
                # indra's XferManager throttles confirms, so even local transfers will be
                # slow if we wait for confirmation.
                wait_for_confirm=False,
            )
            return
        raise asyncio.TimeoutError("Failed to get inventory after 3 tries")


addons = [TurboObjectInventoryAddon()]
@@ -11,15 +11,14 @@ from typing import *
import aiohttp

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.templates import AssetType
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.addon_utils import ais_item_to_inventory_data, show_message, BaseAddon
from hippolyzer.lib.proxy.commands import handle_command, Parameter
from hippolyzer.lib.proxy.packets import Direction
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import AssetType


class UploaderAddon(BaseAddon):
@@ -92,7 +91,7 @@ class UploaderAddon(BaseAddon):
        async with region.caps_client.post('FetchInventory2', llsd=ais_req_data) as resp:
            ais_item = (await resp.read_llsd())["items"][0]

        message = ProxiedMessage(
        message = Message(
            "UpdateCreateInventoryItem",
            Block(
                "AgentData",

@@ -1,28 +1,28 @@
"""
Example of how to request an Xfer
"""
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.legacy_inv import InventoryModel
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.templates import XferFilePath, AssetType, InventoryType, WearableType
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.proxy.addon_utils import BaseAddon, show_message
from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session
from hippolyzer.lib.proxy.templates import XferFilePath


class XferExampleAddon(BaseAddon):
    @handle_command()
    async def get_mute_list(self, session: Session, region: ProxiedRegion):
        """Fetch the current user's mute list"""
        region.circuit.send_message(ProxiedMessage(
        region.circuit.send_message(Message(
            'MuteListRequest',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block("MuteData", MuteCRC=0),
        ))

        # Wait for any MuteListUpdate, dropping it before it reaches the viewer
        update_msg = await region.message_handler.wait_for('MuteListUpdate', timeout=5.0)
        update_msg = await region.message_handler.wait_for(('MuteListUpdate',), timeout=5.0)
        mute_file_name = update_msg["MuteData"]["Filename"]
        if not mute_file_name:
            show_message("Nobody muted?")
@@ -35,14 +35,14 @@ class XferExampleAddon(BaseAddon):
    @handle_command()
    async def get_task_inventory(self, session: Session, region: ProxiedRegion):
        """Get the inventory of the currently selected object"""
        region.circuit.send_message(ProxiedMessage(
        region.circuit.send_message(Message(
            'RequestTaskInventory',
            # If no session is passed in we'll use the active session when the coro was created
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block('InventoryData', LocalID=session.selected.object_local),
        ))

        inv_message = await region.message_handler.wait_for('ReplyTaskInventory', timeout=5.0)
        inv_message = await region.message_handler.wait_for(('ReplyTaskInventory',), timeout=5.0)

        # Xfer doesn't need to be immediately awaited, multiple signals can be waited on.
        xfer = region.xfer_manager.request(
@@ -60,5 +60,61 @@ class XferExampleAddon(BaseAddon):
        item_names = [item.name for item in inv_model.items.values()]
        show_message(item_names)

    @handle_command()
    async def eyes_for_you(self, session: Session, region: ProxiedRegion):
        """Upload an eye bodypart and create an item for it"""
        asset_data = f"""LLWearable version 22
New Eyes

\tpermissions 0
\t{{
\t\tbase_mask\t7fffffff
\t\towner_mask\t7fffffff
\t\tgroup_mask\t00000000
\t\teveryone_mask\t00000000
\t\tnext_owner_mask\t00082000
\t\tcreator_id\t{session.agent_id}
\t\towner_id\t{session.agent_id}
\t\tlast_owner_id\t00000000-0000-0000-0000-000000000000
\t\tgroup_id\t00000000-0000-0000-0000-000000000000
\t}}
\tsale_info\t0
\t{{
\t\tsale_type\tnot
\t\tsale_price\t10
\t}}
type 3
parameters 2
98 0
99 0
textures 1
3 89556747-24cb-43ed-920b-47caed15465f
"""
        # If we want to create an item containing the asset we need to know the transaction id
        # used to create the asset.
        transaction_id = UUID.random()
        await region.xfer_manager.upload_asset(
            AssetType.BODYPART,
            data=asset_data,
            transaction_id=transaction_id
        )
        region.circuit.send_message(Message(
            'CreateInventoryItem',
            Block('AgentData', AgentID=session.agent_id, SessionID=session.id),
            Block(
                'InventoryBlock',
                CallbackID=0,
                # Null folder ID will put it in the default folder for the type
                FolderID=UUID(),
                TransactionID=transaction_id,
                NextOwnerMask=0x7fFFffFF,
                Type=AssetType.BODYPART,
                InvType=InventoryType.WEARABLE,
                WearableType=WearableType.EYES,
                Name='Eyes For You',
                Description=b''
            ),
        ))


addons = [XferExampleAddon()]

14  codecov.yml  Normal file
@@ -0,0 +1,14 @@
coverage:
  precision: 1
  round: down
  range: "50...80"
  status:
    project:
      default:
        # Do not fail commits if the code coverage drops.
        target: 0%
        threshold: 100%
        base: auto
    patch:
      default:
        only_pulls: true
@@ -1,43 +1,15 @@
import collections
import codecs
import copy
import enum
import fnmatch
import io
import logging
import pickle
import queue
import re
import typing
import weakref

from defusedxml import minidom
from PySide2 import QtCore, QtGui

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import *
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.sessions import Session, BaseMessageLogger

from .message_filter import compile_filter, BaseFilterNode, MessageFilterNode, MetaFieldSpecifier
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.message_logger import FilteringMessageLogger

LOG = logging.getLogger(__name__)


def bytes_unescape(val: bytes) -> bytes:
    # Only in CPython. bytes -> bytes with escape decoding.
    # https://stackoverflow.com/a/23151714
    return codecs.escape_decode(val)[0]  # type: ignore


def bytes_escape(val: bytes) -> bytes:
    # Try to keep newlines as-is
    return re.sub(rb"(?<!\\)\\n", b"\n", codecs.escape_encode(val)[0])  # type: ignore


class MessageLogHeader(enum.IntEnum):
    Host = 0
    Type = enum.auto()
@@ -46,582 +18,23 @@ class MessageLogHeader(enum.IntEnum):
    Summary = enum.auto()


class AbstractMessageLogEntry:
    region: typing.Optional[ProxiedRegion]
    session: typing.Optional[Session]
    name: str
    type: str

    __slots__ = ["_region", "_session", "_region_name", "_agent_id", "_summary", "meta"]

    def __init__(self, region, session):
        if region and not isinstance(region, weakref.ReferenceType):
            region = weakref.ref(region)
        if session and not isinstance(session, weakref.ReferenceType):
            session = weakref.ref(session)

        self._region: typing.Optional[weakref.ReferenceType] = region
        self._session: typing.Optional[weakref.ReferenceType] = session
        self._region_name = None
        self._agent_id = None
        self._summary = None
        if self.region:
            self._region_name = self.region.name
        if self.session:
            self._agent_id = self.session.agent_id

        agent_obj = None
        if self.region is not None:
            agent_obj = self.region.objects.lookup_fullid(self.agent_id)
        self.meta = {
            "RegionName": self.region_name,
            "AgentID": self.agent_id,
            "SessionID": self.session.id if self.session else None,
            "AgentLocal": agent_obj.LocalID if agent_obj is not None else None,
            "Method": self.method,
            "Type": self.type,
            "SelectedLocal": self._current_selected_local(),
            "SelectedFull": self._current_selected_full(),
        }

    def freeze(self):
        pass

    def cache_summary(self):
        self._summary = self.summary

    def _current_selected_local(self):
        if self.session:
            return self.session.selected.object_local
        return None

    def _current_selected_full(self):
        selected_local = self._current_selected_local()
        if selected_local is None or self.region is None:
            return None
        obj = self.region.objects.lookup_localid(selected_local)
        return obj and obj.FullID

    def _get_meta(self, name: str):
        # Slight difference in semantics. Filters are meant to return the same
        # thing no matter when they're run, so SelectedLocal and friends resolve
        # to the selected items _at the time the message was logged_. To handle
        # the case where we want to match on the selected object at the time the
        # filter is evaluated, we resolve these here.
        if name == "CurrentSelectedLocal":
            return self._current_selected_local()
        elif name == "CurrentSelectedFull":
            return self._current_selected_full()
        return self.meta.get(name)
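
    # Illustrative filter strings (assumed syntax): "Meta.AgentID == ..." compares
    # against the AgentID captured at log time, while
    # "ObjectUpdate.ObjectData.FullID == Meta.CurrentSelectedFull" resolves the
    # current selection at evaluation time via the branch above.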

    @property
    def region(self) -> typing.Optional[ProxiedRegion]:
        if self._region:
            return self._region()
        return None

    @property
    def session(self) -> typing.Optional[Session]:
        if self._session:
            return self._session()
        return None

    @property
    def region_name(self) -> str:
        region = self.region
        if region:
            self._region_name = region.name
            return self._region_name
        # Region may die after a message is logged, need to keep this around.
        if self._region_name:
            return self._region_name

        return ""

    @property
    def agent_id(self) -> typing.Optional[UUID]:
        if self._agent_id:
            return self._agent_id

        session = self.session
        if session:
            self._agent_id = session.agent_id
            return self._agent_id
        return None

    @property
    def host(self) -> str:
        region_name = self.region_name
        if not region_name:
            return ""
        session_str = ""
        agent_id = self.agent_id
        if agent_id:
            session_str = f" ({agent_id})"
        return region_name + session_str

    def request(self, beautify=False, replacements=None):
        return None

    def response(self, beautify=False):
        return None

    def _packet_root_matches(self, pattern):
        if fnmatch.fnmatchcase(self.name, pattern):
            return True
        if fnmatch.fnmatchcase(self.type, pattern):
            return True
        return False

    def _val_matches(self, operator, val, expected):
        if isinstance(expected, MetaFieldSpecifier):
            expected = self._get_meta(str(expected))
            if not isinstance(expected, (int, float, bytes, str, type(None), tuple)):
                if callable(expected):
                    expected = expected()
                else:
                    expected = str(expected)
        elif expected is not None:
            # Unbox the expected value
            expected = expected.value
        if not isinstance(val, (int, float, bytes, str, type(None), tuple, TupleCoord)):
            val = str(val)

        if not operator:
            return bool(val)
        elif operator == "==":
            return val == expected
        elif operator == "!=":
            return val != expected
        elif operator == "^=":
            if val is None:
                return False
            return val.startswith(expected)
        elif operator == "$=":
            if val is None:
                return False
            return val.endswith(expected)
        elif operator == "~=":
            if val is None:
                return False
            return expected in val
        elif operator == "<":
            return val < expected
        elif operator == "<=":
            return val <= expected
        elif operator == ">":
            return val > expected
        elif operator == ">=":
            return val >= expected
        else:
            raise ValueError(f"Unexpected operator {operator!r}")

    def _base_matches(self, matcher: "MessageFilterNode") -> typing.Optional[bool]:
        if len(matcher.selector) == 1:
            # Comparison operators would make no sense here
            if matcher.value or matcher.operator:
                return False
            return self._packet_root_matches(matcher.selector[0])
        if len(matcher.selector) == 2 and matcher.selector[0] == "Meta":
            return self._val_matches(matcher.operator, self._get_meta(matcher.selector[1]), matcher.value)
        return None

    def matches(self, matcher: "MessageFilterNode"):
        return self._base_matches(matcher) or False

    @property
    def seq(self):
        return ""

    @property
    def method(self):
        return ""

    @property
    def summary(self):
        return ""

    @staticmethod
    def _format_llsd(parsed):
        xmlified = llsd.format_pretty_xml(parsed)
        # dedent <key> by 1 for easier visual scanning
        xmlified = re.sub(rb" <key>", b"<key>", xmlified)
        return xmlified.decode("utf8", errors="replace")


class LLUDPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["_message", "_name", "_direction", "_frozen_message", "_seq", "_deserializer"]

    def __init__(self, message: ProxiedMessage, region, session):
        self._message: ProxiedMessage = message
        self._deserializer = None
        self._name = message.name
        self._direction = message.direction
        self._frozen_message: typing.Optional[bytes] = None
        self._seq = message.packet_id
        super().__init__(region, session)

    _MESSAGE_META_ATTRS = {
        "Injected", "Dropped", "Extra", "Resent", "Zerocoded", "Acks", "Reliable",
    }

    def _get_meta(self, name: str):
        # These may change between when the message is logged and when we
        # actually filter on it, since logging happens before addons.
        msg = self.message
        if name in self._MESSAGE_META_ATTRS:
            return getattr(msg, name.lower(), None)
        msg_meta = getattr(msg, "meta", None)
        if msg_meta is not None:
            if name in msg_meta:
                return msg_meta[name]
        return super()._get_meta(name)

    @property
    def message(self):
        if self._message:
            return self._message
        elif self._frozen_message:
            message = pickle.loads(self._frozen_message)
            message.deserializer = self._deserializer
            return message
        else:
            raise ValueError("Didn't have a fresh or frozen message somehow")

    def freeze(self):
        self.message.invalidate_caches()
        # These are expensive to keep around. pickle them and un-pickle on
        # an as-needed basis.
        self._deserializer = self.message.deserializer
        self.message.deserializer = None
        self._frozen_message = pickle.dumps(self._message, protocol=pickle.HIGHEST_PROTOCOL)
        self._message = None

    @property
    def type(self):
        return "LLUDP"

    @property
    def name(self):
        if self._message:
            self._name = self._message.name
        return self._name

    @property
    def method(self):
        if self._message:
            self._direction = self._message.direction
        return self._direction.name if self._direction is not None else ""

    def request(self, beautify=False, replacements=None):
        return self.message.to_human_string(replacements, beautify)

    def matches(self, matcher):
        base_matched = self._base_matches(matcher)
        if base_matched is not None:
            return base_matched

        if not self._packet_root_matches(matcher.selector[0]):
            return False

        message = self.message

        selector_len = len(matcher.selector)
        # name, block_name, var_name(, subfield_name)?
        if selector_len not in (3, 4):
            return False
        for block_name in message.blocks:
            if not fnmatch.fnmatchcase(block_name, matcher.selector[1]):
                continue
            for block in message[block_name]:
                for var_name in block.vars.keys():
                    if not fnmatch.fnmatchcase(var_name, matcher.selector[2]):
                        continue
                    if selector_len == 3:
                        if matcher.value is None:
                            return True
                        if self._val_matches(matcher.operator, block[var_name], matcher.value):
                            return True
                    elif selector_len == 4:
                        try:
                            deserialized = block.deserialize_var(var_name)
                        except KeyError:
                            continue
                        # Discard the tag if this is a tagged union, we only want the value
                        if isinstance(deserialized, TaggedUnion):
                            deserialized = deserialized.value
                        if not isinstance(deserialized, dict):
                            return False
                        for key in deserialized.keys():
                            if fnmatch.fnmatchcase(str(key), matcher.selector[3]):
                                if matcher.value is None:
                                    return True
                                if self._val_matches(matcher.operator, deserialized[key], matcher.value):
                                    return True

        return False

    @property
    def summary(self):
        if self._summary is None:
            self._summary = self.message.to_summary()[:500]
        return self._summary

    @property
    def seq(self):
        if self._message:
            self._seq = self._message.packet_id
        return self._seq


class EQMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["event"]

    def __init__(self, event, region, session):
        super().__init__(region, session)
        self.event = event

    @property
    def type(self):
        return "EQ"

    def request(self, beautify=False, replacements=None):
        return self._format_llsd(self.event["body"])

    @property
    def name(self):
        return self.event["message"]

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        self._summary = ""
        self._summary = llsd.format_notation(self.event["body"]).decode("utf8")[:500]
        return self._summary


class HTTPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["flow"]

    def __init__(self, flow: HippoHTTPFlow):
        self.flow: HippoHTTPFlow = flow
        cap_data = self.flow.cap_data
        region = cap_data and cap_data.region
        session = cap_data and cap_data.session

        super().__init__(region, session)
        # This was a request the proxy made through itself
        self.meta["Injected"] = flow.request_injected

    @property
    def type(self):
        return "HTTP"

    @property
    def name(self):
        cap_data = self.flow.cap_data
        name = cap_data and cap_data.cap_name
        if name:
            return name
        return self.flow.request.url

    @property
    def method(self):
        return self.flow.request.method

    def _format_http_message(self, want_request, beautify):
        message = self.flow.request if want_request else self.flow.response
        method = self.flow.request.method
        buf = io.StringIO()
        cap_data = self.flow.cap_data
        cap_name = cap_data and cap_data.cap_name
        base_url = cap_name and cap_data.base_url
        temporary_cap = cap_data and cap_data.type == CapType.TEMPORARY
        beautify_url = (beautify and base_url and cap_name and
                        not temporary_cap and self.session and want_request)
        if want_request:
            buf.write(message.method)
            buf.write(" ")
            if beautify_url:
                buf.write(f"[[{cap_name}]]{message.url[len(base_url):]}")
            else:
                buf.write(message.url)
            buf.write(" ")
            buf.write(message.http_version)
        else:
            buf.write(message.http_version)
            buf.write(" ")
            buf.write(str(message.status_code))
            buf.write(" ")
            buf.write(message.reason)
        buf.write("\r\n")
        if beautify_url:
            buf.write("# ")
            buf.write(message.url)
            buf.write("\r\n")

        headers = copy.deepcopy(message.headers)
        for key in tuple(headers.keys()):
            if key.lower().startswith("x-hippo-"):
                LOG.warning(f"Internal header {key!r} leaked out?")
                # If this header actually came from somewhere untrusted, we can't
                # include it. It may change the meaning of the message when replayed.
                headers[f"X-Untrusted-{key}"] = headers[key]
                headers.pop(key)
        beautified = None
        if beautify and message.content:
            try:
                serializer = se.HTTP_SERIALIZERS.get(cap_name)
                if serializer:
                    if want_request:
                        beautified = serializer.deserialize_req_body(method, message.content)
                    else:
                        beautified = serializer.deserialize_resp_body(method, message.content)

                    if beautified is se.UNSERIALIZABLE:
                        beautified = None
                    else:
                        beautified = self._format_llsd(beautified)
                        headers["X-Hippo-Beautify"] = "1"

                if not beautified:
                    content_type = self._guess_content_type(message)
                    if content_type.startswith("application/llsd"):
                        beautified = self._format_llsd(llsd.parse(message.content))
                    elif any(content_type.startswith(x) for x in ("application/xml", "text/xml")):
                        beautified = minidom.parseString(message.content).toprettyxml(indent=" ")
                        # kill blank lines. will break cdata sections. meh.
                        beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
                        beautified = re.sub(r'<([\w]+)>\s*</\1>', r'<\1></\1>',
                                            beautified, flags=re.MULTILINE)
            except:
                LOG.exception("Failed to beautify message")

        message_body = beautified or message.content
        if isinstance(message_body, bytes):
            try:
                decoded = message.text
                # Valid in many codecs, but unprintable.
                if "\x00" in decoded:
                    raise ValueError("Embedded null")
                message_body = decoded
            except (UnicodeError, ValueError):
                # non-printable characters, return the escaped version.
                headers["X-Hippo-Escaped-Body"] = "1"
                message_body = bytes_escape(message_body).decode("utf8")

        buf.write(bytes(headers).decode("utf8", errors="replace"))
        buf.write("\r\n")

        buf.write(message_body)
        return buf.getvalue()

    def request(self, beautify=False, replacements=None):
        return self._format_http_message(want_request=True, beautify=beautify)

    def response(self, beautify=False):
        return self._format_http_message(want_request=False, beautify=beautify)

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        msg = self.flow.response
        self._summary = f"{msg.status_code}: "
        if not msg.content:
            return self._summary
        if len(msg.content) > 1000000:
            self._summary += "[too large...]"
            return self._summary
        content_type = self._guess_content_type(msg)
        if content_type.startswith("application/llsd"):
            notation = llsd.format_notation(llsd.parse(msg.content))
            self._summary += notation.decode("utf8")[:500]
        return self._summary

    def _guess_content_type(self, message):
        content_type = message.headers.get("Content-Type", "")
        if not message.content or content_type.startswith("application/llsd"):
            return content_type
        # Sometimes gets sent with `text/plain` or `text/html`. Cool.
        if message.content.startswith(rb'<?xml version="1.0" ?><llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<?xml '):
            return "application/xml"
        return content_type


class MessageLogModel(QtCore.QAbstractTableModel, BaseMessageLogger):
class MessageLogModel(QtCore.QAbstractTableModel, FilteringMessageLogger):
    def __init__(self, parent=None):
        QtCore.QAbstractTableModel.__init__(self, parent)
        BaseMessageLogger.__init__(self)
        self._raw_entries = collections.deque(maxlen=2000)
        self._queued_entries = queue.Queue()
        self._filtered_entries = []
        self._paused = False
        self.filter: typing.Optional[BaseFilterNode] = None
        FilteringMessageLogger.__init__(self)

    def setFilter(self, filter_str: str):
        self.filter = compile_filter(filter_str)
    def _begin_insert(self, insert_idx: int):
        self.beginInsertRows(QtCore.QModelIndex(), insert_idx, insert_idx)

    def _end_insert(self):
        self.endInsertRows()

    def _begin_reset(self):
        self.beginResetModel()
        # Keep any entries that've aged out of the raw entries list that
        # match the new filter
        self._filtered_entries = [
            m for m in self._filtered_entries if
            m not in self._raw_entries and self.filter.match(m)
        ]
        self._filtered_entries.extend((m for m in self._raw_entries if self.filter.match(m)))

    def _end_reset(self):
        self.endResetModel()

    def setPaused(self, paused: bool):
        self._paused = paused

    def log_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
        if self._paused:
            return
        self.queueLogEntry(LLUDPMessageLogEntry(message, region, session))

    def log_http_response(self, flow: HippoHTTPFlow):
        if self._paused:
            return
        # These are huge, let's not log them for now.
        if flow.cap_data and flow.cap_data.asset_server_cap:
            return
        self.queueLogEntry(HTTPMessageLogEntry(flow))

    def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
        if self._paused:
            return
        self.queueLogEntry(EQMessageLogEntry(event, region, session))

    def appendQueuedEntries(self):
        while not self._queued_entries.empty():
            entry: AbstractMessageLogEntry = self._queued_entries.get(block=False)
            # Paused, throw it away.
            if self._paused:
                continue
            self._raw_entries.append(entry)
            try:
                if self.filter.match(entry):
                    next_idx = len(self._filtered_entries)
                    self.beginInsertRows(QtCore.QModelIndex(), next_idx, next_idx)
                    self._filtered_entries.append(entry)
                    self.endInsertRows()

                entry.cache_summary()
                # In the common case we don't need to keep around the serialization
                # caches anymore. If the filter changes, the caches will be repopulated
                # as necessary.
                entry.freeze()
            except Exception:
                LOG.exception("Failed to filter queued message")

    def queueLogEntry(self, entry: AbstractMessageLogEntry):
        self._queued_entries.put(entry, block=False)

    def rowCount(self, parent=None, *args, **kwargs):
        return len(self._filtered_entries)

@@ -656,14 +69,6 @@ class MessageLogModel(QtCore.QAbstractTableModel, BaseMessageLogger):
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            return MessageLogHeader(col).name

    def clear(self):
        self.beginResetModel()
        self._filtered_entries.clear()
        while not self._queued_entries.empty():
            self._queued_entries.get(block=False)
        self._raw_entries.clear()
        self.endResetModel()


class RegionListModel(QtCore.QAbstractListModel):
    def __init__(self, parent, session_manager):

@@ -17,15 +17,16 @@ from hippolyzer.lib.proxy.commands import handle_command
from hippolyzer.lib.proxy.http_proxy import create_http_proxy, create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.http_event_manager import MITMProxyEventManager
from hippolyzer.lib.proxy.lludp_proxy import SLSOCKS5Server
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager, Session
from hippolyzer.lib.proxy.settings import ProxySettings

LOG = logging.getLogger(__name__)


class SelectionManagerAddon(BaseAddon):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
    def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        selected = session.selected
        if message.name == "ObjectSelect":
            # ObjectDeselect intentionally ignored to deal with messages that
@@ -88,11 +89,12 @@ def run_http_proxy_process(proxy_host, http_proxy_port, flow_context: HTTPFlowCo
    mitmproxy_master = create_http_proxy(proxy_host, http_proxy_port, flow_context)
    mitmproxy_master.start_server()
    gc.freeze()
    flow_context.mitmproxy_ready.set()
    mitm_loop.run_forever()


def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional[list] = None,
                session_manager=None, proxy_host=None):
def start_proxy(session_manager: SessionManager, extra_addons: Optional[list] = None,
                extra_addon_paths: Optional[list] = None, proxy_host=None):
    extra_addons = extra_addons or []
    extra_addon_paths = extra_addon_paths or []
    extra_addons.append(SelectionManagerAddon())
@@ -105,13 +107,13 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional

    loop = asyncio.get_event_loop()

    udp_proxy_port = int(os.environ.get("HIPPO_UDP_PORT", 9061))
    http_proxy_port = int(os.environ.get("HIPPO_HTTP_PORT", 9062))
    udp_proxy_port = session_manager.settings.SOCKS_PROXY_PORT
    http_proxy_port = session_manager.settings.HTTP_PROXY_PORT
    if proxy_host is None:
        proxy_host = os.environ.get("HIPPO_BIND_HOST", "127.0.0.1")
        proxy_host = session_manager.settings.PROXY_BIND_ADDR

    session_manager = session_manager or SessionManager()
    flow_context = session_manager.flow_context
    session_manager.name_cache.load_viewer_caches()

    # TODO: argparse
    if len(sys.argv) == 3:
@@ -136,7 +138,7 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional
    async_server = loop.run_until_complete(coro)

    event_manager = MITMProxyEventManager(session_manager, flow_context)
    loop.create_task(event_manager.pump_proxy_events())
    loop.create_task(event_manager.run())

    addon_paths = sys.argv[1:]
    addon_paths.extend(extra_addon_paths)
@@ -144,6 +146,7 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional

    # Everything in memory at this point should stay
    gc.freeze()
    gc.set_threshold(5000, 50, 10)

    # Serve requests until Ctrl+C is pressed
    print(f"SOCKS and HTTP proxies running on {proxy_host}")
@@ -178,10 +181,15 @@ def start_proxy(extra_addons: Optional[list] = None, extra_addon_paths: Optional

def _windows_timeout_killer(pid: int):
    time.sleep(2.0)
    print(f"Killing hanging event loop")
    print("Killing hanging event loop")
    os.kill(pid, 9)


def main():
    multiprocessing.set_start_method("spawn")
    start_proxy()
    start_proxy(SessionManager(ProxySettings()))


if __name__ == "__main__":
    multiprocessing.freeze_support()
    main()

@@ -1,5 +1,6 @@
import asyncio
import base64
import dataclasses
import email
import functools
import html
@@ -8,7 +9,6 @@ import json
import logging
import pathlib
import multiprocessing
import os
import re
import signal
import socket
@@ -20,41 +20,40 @@ import multidict
from qasync import QEventLoop
from PySide2 import QtCore, QtWidgets, QtGui

from hippolyzer.apps.model import (
    AbstractMessageLogEntry,
    LLUDPMessageLogEntry,
    MessageLogModel,
    MessageLogHeader,
    RegionListModel,
    bytes_unescape,
    bytes_escape,
)
from hippolyzer.apps.model import MessageLogModel, MessageLogHeader, RegionListModel
from hippolyzer.apps.proxy import start_proxy
from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import bytes_unescape, bytes_escape, get_resource_filename
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_formatting import (
    HumanMessageSerializer,
    VerbatimHumanVal,
    subfield_eval,
    SpannedString,
)
from hippolyzer.lib.base.message.msgtypes import MsgType
from hippolyzer.lib.base.message.template_dict import TemplateDictionary
from hippolyzer.lib.base.ui_helpers import loadUi
import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.network.transport import Direction, SocketUDPTransport
from hippolyzer.lib.proxy.addons import BaseInteractionManager, AddonManager
from hippolyzer.lib.proxy.ca_utils import setup_ca_everywhere
from hippolyzer.lib.proxy.caps_client import CapsClient
from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
from hippolyzer.lib.proxy.http_proxy import create_proxy_master, HTTPFlowContext
from hippolyzer.lib.proxy.packets import Direction
from hippolyzer.lib.proxy.message import ProxiedMessage, VerbatimHumanVal, proxy_eval
from hippolyzer.lib.proxy.message_logger import LLUDPMessageLogEntry, AbstractMessageLogEntry
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
from hippolyzer.lib.proxy.settings import ProxySettings
from hippolyzer.lib.proxy.templates import CAP_TEMPLATES

LOG = logging.getLogger(__name__)

BASE_PATH = os.path.dirname(os.path.abspath(__file__))
MAIN_WINDOW_UI_PATH = os.path.join(BASE_PATH, "proxy_mainwindow.ui")
MESSAGE_BUILDER_UI_PATH = os.path.join(BASE_PATH, "message_builder.ui")
ADDON_DIALOG_UI_PATH = os.path.join(BASE_PATH, "addon_dialog.ui")
FILTER_DIALOG_UI_PATH = os.path.join(BASE_PATH, "filter_dialog.ui")
MAIN_WINDOW_UI_PATH = get_resource_filename("apps/proxy_mainwindow.ui")
MESSAGE_BUILDER_UI_PATH = get_resource_filename("apps/message_builder.ui")
ADDON_DIALOG_UI_PATH = get_resource_filename("apps/addon_dialog.ui")
FILTER_DIALOG_UI_PATH = get_resource_filename("apps/filter_dialog.ui")


def show_error_message(error_msg, parent=None):
@@ -69,8 +68,8 @@ class GUISessionManager(SessionManager, QtCore.QObject):
    regionAdded = QtCore.Signal(ProxiedRegion)
    regionRemoved = QtCore.Signal(ProxiedRegion)

    def __init__(self, model):
        SessionManager.__init__(self)
    def __init__(self, settings, model):
        SessionManager.__init__(self, settings)
        QtCore.QObject.__init__(self)
        self.all_regions = []
        self.message_logger = model
@@ -143,6 +142,19 @@ class GUIInteractionManager(BaseInteractionManager, QtCore.QObject):
            return None
        return dialog.selectedFiles()[0]

    async def confirm(self, title: str, caption: str) -> bool:
        msg = QtWidgets.QMessageBox(
            QtWidgets.QMessageBox.Icon.Question,
            title,
            caption,
            QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel,
            self.parent(),
        )
        fut = asyncio.Future()
        msg.finished.connect(lambda r: fut.set_result(r))
        msg.open()
        return (await fut) == QtWidgets.QMessageBox.Ok
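        # Note: `open()` returns immediately and `finished` resolves the Future,
        # so the dialog can be awaited on the qasync event loop without blocking
        # it the way a synchronous `exec()` call would.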


def nonFatalExceptions(f):
    @functools.wraps(f)
@@ -169,13 +181,15 @@ class ProxyGUI(QtWidgets.QMainWindow):
        "ViewerAsset GetTexture SetAlwaysRun GetDisplayNames MapImageService MapItemReply".split(" ")
    DEFAULT_FILTER = f"!({' || '.join(ignored for ignored in DEFAULT_IGNORE)})"
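    # e.g. DEFAULT_FILTER == "!(ViewerAsset || GetTexture || ... || MapItemReply)" (abbreviated)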

    textRequest: QtWidgets.QTextEdit

    def __init__(self):
        super().__init__()
        loadUi(MAIN_WINDOW_UI_PATH, self)

        self.settings = QtCore.QSettings("SaladDais", "hippolyzer")
        self._selectedEntry: Optional[AbstractMessageLogEntry] = None

        self.settings = GUIProxySettings(QtCore.QSettings("SaladDais", "hippolyzer"))
        self.model = MessageLogModel(parent=self.tableView)
        self.tableView.setModel(self.model)
        self.model.rowsAboutToBeInserted.connect(self.beforeInsert)
@@ -192,15 +206,19 @@ class ProxyGUI(QtWidgets.QMainWindow):
        self.actionManageAddons.triggered.connect(self._manageAddons)
        self.actionManageFilters.triggered.connect(self._manageFilters)
        self.actionOpenMessageBuilder.triggered.connect(self._openMessageBuilder)
        self.actionProxyRemotelyAccessible.setChecked(
            self.settings.value("RemotelyAccessible", False, type=bool))

        self.actionProxyRemotelyAccessible.setChecked(self.settings.REMOTELY_ACCESSIBLE)
        self.actionUseViewerObjectCache.setChecked(self.settings.USE_VIEWER_OBJECT_CACHE)
        self.actionRequestMissingObjects.setChecked(self.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS)
        self.actionProxyRemotelyAccessible.triggered.connect(self._setProxyRemotelyAccessible)
        self.actionUseViewerObjectCache.triggered.connect(self._setUseViewerObjectCache)
        self.actionRequestMissingObjects.triggered.connect(self._setRequestMissingObjects)

        self._filterMenu = QtWidgets.QMenu()
        self._populateFilterMenu()
        self.toolButtonFilter.setMenu(self._filterMenu)

        self.sessionManager = GUISessionManager(self.model)
        self.sessionManager = GUISessionManager(self.settings, self.model)
        self.interactionManager = GUIInteractionManager(self)
        AddonManager.UI = self.interactionManager

@@ -221,15 +239,12 @@ class ProxyGUI(QtWidgets.QMainWindow):
        self._filterMenu.clear()

        _addFilterAction("Default", self.DEFAULT_FILTER)
        filters = self.getFilterDict()
        filters = self.settings.FILTERS
        for preset_name, preset_filter in filters.items():
            _addFilterAction(preset_name, preset_filter)

    def getFilterDict(self):
        return json.loads(str(self.settings.value("Filters", "{}")))

    def setFilterDict(self, val: dict):
        self.settings.setValue("Filters", json.dumps(val))
        self.settings.FILTERS = val
        self._populateFilterMenu()

    def _manageFilters(self):
@@ -242,10 +257,10 @@ class ProxyGUI(QtWidgets.QMainWindow):
            filter_str = self.lineEditFilter.text()
        else:
            self.lineEditFilter.setText(filter_str)
        self.model.setFilter(filter_str)
        self.model.set_filter(filter_str)

    def _setPaused(self, checked):
        self.model.setPaused(checked)
        self.model.set_paused(checked)

    def _messageSelected(self, selected, _deselected):
        indexes = selected.indexes()
@@ -271,8 +286,23 @@ class ProxyGUI(QtWidgets.QMainWindow):
            beautify=self.checkBeautify.isChecked(),
            replacements=self.buildReplacements(entry.session, entry.region),
        )
        resp = entry.response(beautify=self.checkBeautify.isChecked())
        highlight_range = None
        if isinstance(req, SpannedString):
            match_result = self.model.filter.match(entry)
            # Match result was a tuple indicating what matched
            if isinstance(match_result, tuple):
                highlight_range = req.spans.get(match_result)

        self.textRequest.setPlainText(req)
        if highlight_range:
            cursor = self.textRequest.textCursor()
            cursor.setPosition(highlight_range[0], QtGui.QTextCursor.MoveAnchor)
            cursor.setPosition(highlight_range[1], QtGui.QTextCursor.KeepAnchor)
            highlight_format = QtGui.QTextBlockFormat()
            highlight_format.setBackground(QtCore.Qt.yellow)
            cursor.setBlockFormat(highlight_format)

        resp = entry.response(beautify=self.checkBeautify.isChecked())
        if resp:
            self.textResponse.show()
            self.textResponse.setPlainText(resp)
@@ -359,20 +389,26 @@ class ProxyGUI(QtWidgets.QMainWindow):
        msg.exec()

    def _setProxyRemotelyAccessible(self, checked: bool):
        self.settings.setValue("RemotelyAccessible", checked)
        self.sessionManager.settings.REMOTELY_ACCESSIBLE = checked
        msg = QtWidgets.QMessageBox()
        msg.setText("Remote accessibility setting changes will take effect on next run")
        msg.exec()

    def _setUseViewerObjectCache(self, checked: bool):
        self.sessionManager.settings.USE_VIEWER_OBJECT_CACHE = checked

    def _setRequestMissingObjects(self, checked: bool):
        self.sessionManager.settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS = checked

    def _manageAddons(self):
        dialog = AddonDialog(self)
        dialog.exec_()

    def getAddonList(self) -> List[str]:
        return json.loads(str(self.settings.value("Addons", "[]")))
        return self.sessionManager.settings.ADDON_SCRIPTS

    def setAddonList(self, val: List[str]):
        self.settings.setValue("Addons", json.dumps(val))
        self.sessionManager.settings.ADDON_SCRIPTS = val


BANNED_HEADERS = ("content-length", "host")
@@ -497,7 +533,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        self.textRequest.clear()

        template = self.templateDict[message_name]
        msg = ProxiedMessage(message_name, direction=Direction.OUT)
        msg = Message(message_name, direction=Direction.OUT)

        for tmpl_block in template.blocks:
            num_blocks = tmpl_block.number or 1
@@ -508,7 +544,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
            msg_block = Block(tmpl_block.name, **fill_vars)
            msg.add_block(msg_block)
        self.textRequest.setPlainText(
            msg.to_human_string(replacements={}, beautify=True, template=template)
            HumanMessageSerializer.to_human_string(msg, replacements={}, beautify=True, template=template)
        )

    def _getVarPlaceholder(self, msg, block, var):
@@ -568,6 +604,8 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):

        if re.match(r"\A\s*(in|out)\s+", msg_text, re.I):
            sender_func = self._sendLLUDPMessage
        elif re.match(r"\A\s*(eq)\s+", msg_text, re.I):
            sender_func = self._sendEQMessage
        elif re.match(r"\A.*http/[0-9.]+\r?\n", msg_text, re.I):
            sender_func = self._sendHTTPMessage
        else:
@@ -591,7 +629,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        env = self._buildEnv(session, region)
        # We specifically want to allow `eval()` in messages since
        # messages from here are trusted.
        msg = ProxiedMessage.from_human_string(msg_text, replacements, env, safe=False)
        msg = HumanMessageSerializer.from_human_string(msg_text, replacements, env, safe=False)
        if self.checkLLUDPViaCaps.isChecked():
            if msg.direction == Direction.IN:
                region.eq_manager.queue_event(
@@ -606,9 +644,22 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
            )
        else:
            transport = None
            if self.checkOffCircuit.isChecked():
                transport = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            off_circuit = self.checkOffCircuit.isChecked()
            if off_circuit:
                transport = SocketUDPTransport(socket.socket(socket.AF_INET, socket.SOCK_DGRAM))
            region.circuit.send_message(msg, transport=transport)
            if off_circuit:
                transport.close()

    def _sendEQMessage(self, session, region: Optional[ProxiedRegion], msg_text: str, _replacements: dict):
        if not session or not region:
            raise RuntimeError("Need a valid session and region to send EQ event")
        message_line, _, body = (x.strip() for x in msg_text.partition("\n"))
        message_name = message_line.rsplit(" ", 1)[-1]
        region.eq_manager.queue_event({
            "message": message_name,
            "body": llsd.parse_xml(body.encode("utf8")),
        })

    def _sendHTTPMessage(self, session, region, msg_text: str, replacements: dict):
        env = self._buildEnv(session, region)
@@ -668,7 +719,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        elif directive == b"UNESCAPE":
            val = unescaped_contents
        elif directive == b"EVAL":
            val = proxy_eval(contents.decode("utf8").strip(), globals_={**env, **replacements})
            val = subfield_eval(contents.decode("utf8").strip(), globals_={**env, **replacements})
            val = _coerce_to_bytes(val)
        elif directive == b"REPL":
            val = _coerce_to_bytes(replacements[contents.decode("utf8").strip()])
@@ -683,7 +734,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
        return val

    def _sendHTTPRequest(self, method, uri, headers, body):
        caps_client = CapsClient()
        caps_client = ProxyCapsClient(self.sessionManager.settings)

        async def _send_request():
            req = caps_client.request(method, uri, headers=headers, data=body)
@@ -787,6 +838,22 @@ class FilterDialog(QtWidgets.QDialog):
        self.listFilters.takeItem(idx)


class GUIProxySettings(ProxySettings):
    """Persistent settings backed by QSettings"""
    def __init__(self, settings: QtCore.QSettings):
        super().__init__()
        self._settings_obj = settings

    def get_setting(self, name: str) -> Any:
        val: Any = self._settings_obj.value(name, defaultValue=dataclasses.MISSING)
        if val is dataclasses.MISSING:
            return val
        return json.loads(val)

    def set_setting(self, name: str, val: Any):
        self._settings_obj.setValue(name, json.dumps(val))
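
    # Illustrative round-trip (not from the source): set_setting("FILTERS", {"IMs": "..."})
    # stores the JSON string '{"IMs": "..."}' under the "FILTERS" key, and
    # get_setting("FILTERS") decodes it back to a dict with json.loads().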


def gui_main():
    multiprocessing.set_start_method('spawn')
    QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_ShareOpenGLContexts)
@@ -796,16 +863,19 @@ def gui_main():
    window = ProxyGUI()
    timer = QtCore.QTimer(app)
    timer.timeout.connect(window.sessionManager.checkRegions)
    timer.timeout.connect(window.model.appendQueuedEntries)
    timer.start(100)
    signal.signal(signal.SIGINT, lambda *args: QtWidgets.QApplication.quit())
    window.show()
    remote_access = window.settings.value("RemotelyAccessible", False, type=bool)
    http_host = None
    if remote_access:
    if window.sessionManager.settings.REMOTELY_ACCESSIBLE:
        http_host = "0.0.0.0"
    start_proxy(
        session_manager=window.sessionManager,
        extra_addon_paths=window.getAddonList(),
        proxy_host=http_host,
    )


if __name__ == "__main__":
    multiprocessing.freeze_support()
    gui_main()

@@ -262,6 +262,8 @@
   <addaction name="actionManageFilters"/>
   <addaction name="separator"/>
   <addaction name="actionProxyRemotelyAccessible"/>
   <addaction name="actionUseViewerObjectCache"/>
   <addaction name="actionRequestMissingObjects"/>
  </widget>
  <addaction name="menuFile"/>
 </widget>
@@ -299,6 +301,28 @@
    <string>Make the proxy accessible from other devices on the network</string>
   </property>
  </action>
  <action name="actionUseViewerObjectCache">
   <property name="checkable">
    <bool>true</bool>
   </property>
   <property name="text">
    <string>Use Viewer Object Cache</string>
   </property>
   <property name="toolTip">
    <string>Can help make the proxy aware of certain objects, but can cause slowdowns</string>
   </property>
  </action>
  <action name="actionRequestMissingObjects">
   <property name="checkable">
    <bool>true</bool>
   </property>
   <property name="text">
    <string>Automatically Request Missing Objects</string>
   </property>
   <property name="toolTip">
    <string>Force the proxy to request objects that it doesn't know about due to cache misses</string>
   </property>
  </action>
 </widget>
 <resources/>
 <connections/>

17503 hippolyzer/lib/base/data/avatar_lad.xml Normal file
File diff suppressed because it is too large
232 hippolyzer/lib/base/data/avatar_skeleton.xml Normal file
@@ -0,0 +1,232 @@
<linden_skeleton num_bones="133" num_collision_volumes="26" version="2.0">
<bone aliases="hip avatar_mPelvis" connected="false" end="0.000 0.000 0.084" group="Torso" name="mPelvis" pivot="0.000000 0.000000 1.067015" pos="0.000 0.000 1.067" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.030 0.000 0.095" group="Collision" name="PELVIS" pos="-0.01 0 -0.02" rot="0.000000 8.00000 0.000000" scale="0.12 0.16 0.17" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="BUTT" pos="-0.06 0 -0.1" rot="0.000000 0.00000 0.000000" scale="0.1 0.1 0.1" support="base"/>
<bone connected="true" end="0.000 0.000 -0.084" group="Spine" name="mSpine1" pivot="0.000000 0.000000 0.084073" pos="0.000 0.000 0.084" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.084" group="Spine" name="mSpine2" pivot="0.000000 0.000000 -0.084073" pos="0.000 0.000 -0.084" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone aliases="abdomen avatar_mTorso" connected="true" end="-0.015 0.000 0.205" group="Torso" name="mTorso" pivot="0.000000 0.000000 0.084073" pos="0.000 0.000 0.084" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.028 0.000 0.094" group="Collision" name="BELLY" pos="0.028 0 0.04" rot="0.000000 8.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<collision_volume end="0.000 0.100 0.000" group="Collision" name="LEFT_HANDLE" pos="0.0 0.10 0.058" rot="0.000000 0.00000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="0.000 -0.100 0.000" group="Collision" name="RIGHT_HANDLE" pos="0.0 -0.10 0.058" rot="0.000000 0.00000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="LOWER_BACK" pos="0.0 0.0 0.023" rot="0.000000 0.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<bone connected="true" end="0.015 0.000 -0.205" group="Spine" name="mSpine3" pivot="-0.015368 0.000000 0.204877" pos="-0.015 0.000 0.205" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 0.000 0.205" group="Spine" name="mSpine4" pivot="0.015368 0.000000 -0.204877" pos="0.015 0.000 -0.205" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone aliases="chest avatar_mChest" connected="true" end="-0.010 0.000 0.250" group="Torso" name="mChest" pivot="-0.015368 0.000000 0.204877" pos="-0.015 0.000 0.205" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="-0.096 0.000 0.152" group="Collision" name="CHEST" pos="0.028 0 0.07" rot="0.000000 -10.00000 0.000000" scale="0.11 0.15 0.2" support="base"/>
<collision_volume end="0.080 0.000 -0.006" group="Collision" name="LEFT_PEC" pos="0.119 0.082 0.042" rot="0.000000 4.29000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="0.080 0.000 -0.006" group="Collision" name="RIGHT_PEC" pos="0.119 -0.082 0.042" rot="0.000000 4.29000 0.000000" scale="0.05 0.05 0.05" support="base"/>
<collision_volume end="-0.100 0.000 0.000" group="Collision" name="UPPER_BACK" pos="0.0 0.0 0.017" rot="0.000000 0.00000 0.000000" scale="0.09 0.13 0.15" support="base"/>
<bone aliases="neck avatar_mNeck" connected="true" end="0.000 0.000 0.077" group="Torso" name="mNeck" pivot="-0.009507 0.000000 0.251108" pos="-0.010 0.000 0.251" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 0.080" group="Collision" name="NECK" pos="0.0 0 0.02" rot="0.000000 0.000000 0.000000" scale="0.05 0.06 0.08" support="base"/>
<bone aliases="head avatar_mHead" connected="true" end="0.000 0.000 0.079" group="Torso" name="mHead" pivot="0.000000 -0.000000 0.075630" pos="0.000 -0.000 0.076" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 0.100" group="Collision" name="HEAD" pos="0.02 0 0.07" rot="0.000000 0.000000 0.000000" scale="0.11 0.09 0.12" support="base"/>
<bone aliases="figureHair avatar_mSkull" connected="false" end="0.000 0.000 0.033" group="Extra" name="mSkull" pivot="0.000000 0.000000 0.079000" pos="0.000 0.000 0.079" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone aliases="avatar_mEyeRight" connected="false" end="0.025 0.000 0.000" group="Extra" name="mEyeRight" pivot="0.098466 -0.036000 0.079000" pos="0.098 -0.036 0.079" rot="0.000000 0.000000 -0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone aliases="avatar_mEyeLeft" connected="false" end="0.025 0.000 0.000" group="Extra" name="mEyeLeft" pivot="0.098461 0.036000 0.079000" pos="0.098 0.036 0.079" rot="0.000000 -0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
<bone connected="false" end="0.020 0.000 0.000" group="Face" name="mFaceRoot" pivot="0.025000 0.000000 0.045000" pos="0.025 0.000 0.045" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceEyeAltRight" pivot="0.073466 -0.036000 0.0339300" pos="0.073 -0.036 0.034" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceEyeAltLeft" pivot="0.073461 0.036000 0.0339300" pos="0.073 0.036 0.034" rot="0.000000 0.000000 0.000000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.004 0.018" group="Face" name="mFaceForeheadLeft" pivot="0.061 0.035 0.083" pos="0.061 0.035 0.083" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 -0.004 0.018" group="Face" name="mFaceForeheadRight" pivot="0.061 -0.035 0.083" pos="0.061 -0.035 0.083" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.023 0.013 0.000" group="Eyes" name="mFaceEyebrowOuterLeft" pivot="0.064 0.051 0.048" pos="0.064 0.051 0.048" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.000" group="Eyes" name="mFaceEyebrowCenterLeft" pivot="0.070 0.043 0.056" pos="0.070 0.043 0.056" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.026 0.000 0.000" group="Eyes" name="mFaceEyebrowInnerLeft" pivot="0.075 0.022 0.051" pos="0.075 0.022 0.051" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.023 -0.013 0.000" group="Eyes" name="mFaceEyebrowOuterRight" pivot="0.064 -0.051 0.048" pos="0.064 -0.051 0.048" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.000" group="Eyes" name="mFaceEyebrowCenterRight" pivot="0.070 -0.043 0.056" pos="0.070 -0.043 0.056" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.026 0.000 0.000" group="Eyes" name="mFaceEyebrowInnerRight" pivot="0.075 -0.022 0.051" pos="0.075 -0.022 0.051" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.005" group="Eyes" name="mFaceEyeLidUpperLeft" pivot="0.073 0.036 0.034" pos="0.073 0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.000 -0.007" group="Eyes" name="mFaceEyeLidLowerLeft" pivot="0.073 0.036 0.034" pos="0.073 0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.027 0.000 0.005" group="Eyes" name="mFaceEyeLidUpperRight" pivot="0.073 -0.036 0.034" pos="0.073 -0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.024 0.000 -0.007" group="Eyes" name="mFaceEyeLidLowerRight" pivot="0.073 -0.036 0.034" pos="0.073 -0.036 0.034" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="-0.019 0.018 0.025" group="Ears" name="mFaceEar1Left" pivot="0.000 0.080 0.002" pos="0.000 0.080 0.002" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.033" group="Ears" name="mFaceEar2Left" pivot="-0.019 0.018 0.025" pos="-0.019 0.018 0.025" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="-0.019 -0.018 0.025" group="Ears" name="mFaceEar1Right" pivot="0.000 -0.080 0.002" pos="0.000 -0.080 0.002" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.000 0.000 0.033" group="Ears" name="mFaceEar2Right" pivot="-0.019 -0.018 0.025" pos="-0.019 -0.018 0.025" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="0.015 0.004 0.000" group="Face" name="mFaceNoseLeft" pivot="0.086 0.015 -0.004" pos="0.086 0.015 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.025 0.000 0.000" group="Face" name="mFaceNoseCenter" pivot="0.102 0.000 0.000" pos="0.102 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.015 -0.004 0.000" group="Face" name="mFaceNoseRight" pivot="0.086 -0.015 -0.004" pos="0.086 -0.015 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.013 0.030 0.000" group="Face" name="mFaceCheekLowerLeft" pivot="0.050 0.034 -0.031" pos="0.050 0.034 -0.031" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 0.015 0.000" group="Face" name="mFaceCheekUpperLeft" pivot="0.070 0.034 -0.005" pos="0.070 0.034 -0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.013 -0.030 0.000" group="Face" name="mFaceCheekLowerRight" pivot="0.050 -0.034 -0.031" pos="0.050 -0.034 -0.031" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 -0.015 0.000" group="Face" name="mFaceCheekUpperRight" pivot="0.070 -0.034 -0.005" pos="0.070 -0.034 -0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.059 0.000 -0.039" group="Mouth" name="mFaceJaw" pivot="-0.001 0.000 -0.015" pos="-0.001 0.000 -0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.021 0.000 -0.018" group="Mouth" name="mFaceChin" pivot="0.074 0.000 -0.054" pos="0.074 0.000 -0.054" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.035 0.000 0.000" group="Mouth" name="mFaceTeethLower" pivot="0.021 0.000 -0.039" pos="0.021 0.000 -0.039" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.034 0.017 0.005" group="Lips" name="mFaceLipLowerLeft" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.034 -0.017 0.005" group="Lips" name="mFaceLipLowerRight" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.040 0.000 0.002" group="Lips" name="mFaceLipLowerCenter" pivot="0.045 0.000 0.000" pos="0.045 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.022 0.000 0.007" group="Mouth" name="mFaceTongueBase" pivot="0.039 0.000 0.005" pos="0.039 0.000 0.005" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.010 0.000 0.000" group="Mouth" name="mFaceTongueTip" pivot="0.022 0.000 0.007" pos="0.022 0.000 0.007" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.017 0.000 0.000" group="Face" name="mFaceJawShaper" pivot="0.000 0.000 0.000" pos="0.000 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.036 0.000 0.000" group="Face" name="mFaceForeheadCenter" pivot="0.069 0.000 0.065" pos="0.069 0.000 0.065" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.014 0.000 0.000" group="Nose" name="mFaceNoseBase" pivot="0.094 0.000 -0.016" pos="0.094 0.000 -0.016" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.035 0.000 0.000" group="Mouth" name="mFaceTeethUpper" pivot="0.020 0.000 -0.030" pos="0.020 0.000 -0.030" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.041 0.015 0.000" group="Lips" name="mFaceLipUpperLeft" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.041 -0.015 0.000" group="Lips" name="mFaceLipUpperRight" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.045 0.051 0.000" group="Lips" name="mFaceLipCornerLeft" pivot="0.028 -0.019 -0.010" pos="0.028 -0.019 -0.010" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.045 -0.051 0.000" group="Lips" name="mFaceLipCornerRight" pivot="0.028 0.019 -0.010" pos="0.028 0.019 -0.010" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.043 0.000 0.002" group="Lips" name="mFaceLipUpperCenter" pivot="0.045 0.000 -0.003" pos="0.045 0.000 -0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
<bone connected="false" end="0.016 0.000 0.000" group="Face" name="mFaceEyecornerInnerLeft" pivot="0.075 0.017 0.032" pos="0.075 0.017 0.032" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.016 0.000 0.000" group="Face" name="mFaceEyecornerInnerRight" pivot="0.075 -0.017 0.032" pos="0.075 -0.017 0.032" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="0.015 0.000 0.008" group="Nose" name="mFaceNoseBridge" pivot="0.091 0.000 0.020" pos="0.091 0.000 0.020" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone aliases="lCollar avatar_mCollarLeft" connected="false" end="0.000 0.079 0.000" group="Arms" name="mCollarLeft" pivot="-0.020927 0.084665 0.165396" pos="-0.021 0.085 0.165" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.100 0.000" group="Collision" name="L_CLAVICLE" pos="0.02 0 0.02" rot="0.000000 0.00000 0.000000" scale="0.07 0.14 0.05" support="base"/>
<bone aliases="lShldr avatar_mShoulderLeft" connected="true" end="0.000 0.247 0.000" group="Arms" name="mShoulderLeft" pivot="0.000000 0.079000 -0.000000" pos="0.000 0.079 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.130 -0.003" group="Collision" name="L_UPPER_ARM" pos="0.0 0.12 0.01" rot="-5.000000 0.00000 0.000000" scale="0.05 0.17 0.05" support="base"/>
<bone aliases="lForeArm avatar_mElbowLeft" connected="true" end="0.000 0.205 0.000" group="Arms" name="mElbowLeft" pivot="0.000000 0.248000 0.000000" pos="0.000 0.248 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.100 -0.001" group="Collision" name="L_LOWER_ARM" pos="0.0 0.1 0.0" rot="-3.000000 0.00000 0.000000" scale="0.04 0.14 0.04" support="base"/>
<bone aliases="lHand avatar_mWristLeft" connected="true" end="0.000 0.060 0.000" group="Arms" name="mWristLeft" pivot="-0.000000 0.204846 0.000000" pos="-0.000 0.205 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.005 0.049 -0.001" group="Collision" name="L_HAND" pos="0.01 0.05 0.0" rot="-3.000000 0.00000 -10.000000" scale="0.05 0.08 0.03" support="base"/>
<bone connected="false" end="-0.001 0.040 -0.006" group="Hand" name="mHandMiddle1Left" pivot="0.013 0.101 0.015" pos="0.013 0.101 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.001 0.049 -0.008" group="Hand" name="mHandMiddle2Left" pivot="-0.001 0.040 -0.006" pos="-0.001 0.040 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.002 0.033 -0.006" group="Hand" name="mHandMiddle3Left" pivot="-0.001 0.049 -0.008" pos="-0.001 0.049 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.017 0.036 -0.006" group="Hand" name="mHandIndex1Left" pivot="0.038 0.097 0.015" pos="0.038 0.097 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.014 0.032 -0.006" group="Hand" name="mHandIndex2Left" pivot="0.017 0.036 -0.006" pos="0.017 0.036 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.011 0.025 -0.004" group="Hand" name="mHandIndex3Left" pivot="0.014 0.032 -0.006" pos="0.014 0.032 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.013 0.038 -0.008" group="Hand" name="mHandRing1Left" pivot="-0.010 0.099 0.009" pos="-0.010 0.099 0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 0.040 -0.009" group="Hand" name="mHandRing2Left" pivot="-0.013 0.038 -0.008" pos="-0.013 0.038 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.010 0.028 -0.006" group="Hand" name="mHandRing3Left" pivot="-0.013 0.040 -0.009" pos="-0.013 0.040 -0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.024 0.025 -0.006" group="Hand" name="mHandPinky1Left" pivot="-0.031 0.095 0.003" pos="-0.031 0.095 0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 0.018 -0.004" group="Hand" name="mHandPinky2Left" pivot="-0.024 0.025 -0.006" pos="-0.024 0.025 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 0.016 -0.004" group="Hand" name="mHandPinky3Left" pivot="-0.015 0.018 -0.004" pos="-0.015 0.018 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.028 0.032 0.000" group="Hand" name="mHandThumb1Left" pivot="0.031 0.026 0.004" pos="0.031 0.026 0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.023 0.031 0.000" group="Hand" name="mHandThumb2Left" pivot="0.028 0.032 -0.001" pos="0.028 0.032 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.015 0.025 0.000" group="Hand" name="mHandThumb3Left" pivot="0.023 0.031 -0.001" pos="0.023 0.031 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone aliases="rCollar avatar_mCollarRight" connected="false" end="0.000 -0.079 0.000" group="Arms" name="mCollarRight" pivot="-0.020927 -0.085000 0.165396" pos="-0.021 -0.085 0.165" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.100 0.000" group="Collision" name="R_CLAVICLE" pos="0.02 0 0.02" rot="0.000000 0.00000 0.000000" scale="0.07 0.14 0.05" support="base"/>
<bone aliases="rShldr avatar_mShoulderRight" connected="true" end="0.000 -0.247 0.000" group="Arms" name="mShoulderRight" pivot="0.000000 -0.079418 -0.000000" pos="0.000 -0.079 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.130 -0.003" group="Collision" name="R_UPPER_ARM" pos="0.0 -0.12 0.01" rot="5.000000 0.00000 0.000000" scale="0.05 0.17 0.05" support="base"/>
<bone aliases="rForeArm avatar_mElbowRight" connected="true" end="0.000 -0.205 0.000" group="Arms" name="mElbowRight" pivot="0.000000 -0.248000 -0.000000" pos="0.000 -0.248 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 -0.100 -0.001" group="Collision" name="R_LOWER_ARM" pos="0.0 -0.1 0.0" rot="3.000000 0.00000 0.000000" scale="0.04 0.14 0.04" support="base"/>
<bone aliases="rHand avatar_mWristRight" connected="true" end="0.000 -0.060 0.000" group="Arms" name="mWristRight" pivot="-0.000000 -0.205000 -0.000000" pos="0.000 -0.205 -0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.005 -0.049 -0.001" group="Collision" name="R_HAND" pos="0.01 -0.05 0.0" rot="3.000000 0.00000 10.000000" scale="0.05 0.08 0.03" support="base"/>
<bone connected="false" end="-0.001 -0.040 -0.006" group="Hand" name="mHandMiddle1Right" pivot="0.013 -0.101 0.015" pos="0.013 -0.101 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.001 -0.049 -0.008" group="Hand" name="mHandMiddle2Right" pivot="-0.001 -0.040 -0.006" pos="-0.001 -0.040 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.002 -0.033 -0.006" group="Hand" name="mHandMiddle3Right" pivot="-0.001 -0.049 -0.008" pos="-0.001 -0.049 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.017 -0.036 -0.006" group="Hand" name="mHandIndex1Right" pivot="0.038 -0.097 0.015" pos="0.038 -0.097 0.015" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.014 -0.032 -0.006" group="Hand" name="mHandIndex2Right" pivot="0.017 -0.036 -0.006" pos="0.017 -0.036 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.011 -0.025 -0.004" group="Hand" name="mHandIndex3Right" pivot="0.014 -0.032 -0.006" pos="0.014 -0.032 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.013 -0.038 -0.008" group="Hand" name="mHandRing1Right" pivot="-0.010 -0.099 0.009" pos="-0.010 -0.099 0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 -0.040 -0.009" group="Hand" name="mHandRing2Right" pivot="-0.013 -0.038 -0.008" pos="-0.013 -0.038 -0.008" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.010 -0.028 -0.006" group="Hand" name="mHandRing3Right" pivot="-0.013 -0.040 -0.009" pos="-0.013 -0.040 -0.009" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="-0.024 -0.025 -0.006" group="Hand" name="mHandPinky1Right" pivot="-0.031 -0.095 0.003" pos="-0.031 -0.095 0.003" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.015 -0.018 -0.004" group="Hand" name="mHandPinky2Right" pivot="-0.024 -0.025 -0.006" pos="-0.024 -0.025 -0.006" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.013 -0.016 -0.004" group="Hand" name="mHandPinky3Right" pivot="-0.015 -0.018 -0.004" pos="-0.015 -0.018 -0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
<bone connected="false" end="0.028 -0.032 0.000" group="Hand" name="mHandThumb1Right" pivot="0.031 -0.026 0.004" pos="0.031 -0.026 0.004" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.023 -0.031 0.000" group="Hand" name="mHandThumb2Right" pivot="0.028 -0.032 -0.001" pos="0.028 -0.032 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.015 -0.025 0.000" group="Hand" name="mHandThumb3Right" pivot="0.023 -0.031 -0.001" pos="0.023 -0.031 -0.001" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.061 0.000 0.000" group="Wing" name="mWingsRoot" pivot="-0.014 0.000 0.000" pos="-0.014 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="-0.168 0.169 0.067" group="Wing" name="mWing1Left" pivot="-0.099 0.105 0.181" pos="-0.099 0.105 0.181" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.181 0.183 0.000" group="Wing" name="mWing2Left" pivot="-0.168 0.169 0.067" pos="-0.168 0.169 0.067" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.171 0.173 0.000" group="Wing" name="mWing3Left" pivot="-0.181 0.183 0.000" pos="-0.181 0.183 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.146 0.132 0.000" group="Wing" name="mWing4Left" pivot="-0.171 0.173 0.000" pos="-0.171 0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="true" end="-0.068 0.062 -0.159" group="Wing" name="mWing4FanLeft" pivot="-0.171 0.173 0.000" pos="-0.171 0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.168 -0.169 0.067" group="Wing" name="mWing1Right" pivot="-0.099 -0.105 0.181" pos="-0.099 -0.105 0.181" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.181 -0.183 0.000" group="Wing" name="mWing2Right" pivot="-0.168 -0.169 0.067" pos="-0.168 -0.169 0.067" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.171 -0.173 0.000" group="Wing" name="mWing3Right" pivot="-0.181 -0.183 0.000" pos="-0.181 -0.183 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.146 -0.132 0.000" group="Wing" name="mWing4Right" pivot="-0.171 -0.173 0.000" pos="-0.171 -0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="true" end="-0.068 -0.062 -0.159" group="Wing" name="mWing4FanRight" pivot="-0.171 -0.173 0.000" pos="-0.171 -0.173 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone aliases="rThigh avatar_mHipRight" connected="false" end="-0.001 0.049 -0.491" group="Legs" name="mHipRight" pivot="0.033620 -0.128806 -0.041086" pos="0.034 -0.129 -0.041" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 -0.200" group="Collision" name="R_UPPER_LEG" pos="-0.02 0.05 -0.22" rot="0.000000 0.00000 0.000000" scale="0.09 0.09 0.32" support="base"/>
<bone aliases="rShin avatar_mKneeRight" connected="true" end="-0.029 0.000 -0.469" group="Legs" name="mKneeRight" pivot="-0.000780 0.048635 -0.490922" pos="-0.001 0.049 -0.491" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="-0.010 0.000 -0.150" group="Collision" name="R_LOWER_LEG" pos="-0.02 0.0 -0.2" rot="0.000000 0.00000 0.000000" scale="0.06 0.06 0.25" support="base"/>
<bone aliases="rFoot avatar_mAnkleRight" connected="true" end="0.112 0.000 -0.061" group="Legs" name="mAnkleRight" pivot="-0.028869 0.000000 -0.468494" pos="-0.029 0.000 -0.468" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.089 0.000 -0.026" group="Collision" name="R_FOOT" pos="0.077 0.0 -0.041" rot="0.000000 10.00000 0.000000" scale="0.13 0.05 0.05" support="base"/>
<bone aliases="avatar_mFootRight" connected="true" end="0.105 -0.010 0.000" group="Extra" name="mFootRight" pivot="0.111956 -0.000000 -0.060637" pos="0.112 -0.000 -0.061" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<bone aliases="avatar_mToeRight" connected="false" end="0.020 0.000 0.000" group="Extra" name="mToeRight" pivot="0.105399 -0.010408 -0.000104" pos="0.109 0.000 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
</bone>
</bone>
</bone>
</bone>
<bone aliases="lThigh avatar_mHipLeft" connected="false" end="-0.001 -0.046 -0.491" group="Legs" name="mHipLeft" pivot="0.033757 0.126765 -0.040998" pos="0.034 0.127 -0.041" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.000 0.000 -0.200" group="Collision" name="L_UPPER_LEG" pos="-0.02 -0.05 -0.22" rot="0.000000 0.00000 0.000000" scale="0.09 0.09 0.32" support="base"/>
<bone aliases="lShin avatar_mKneeLeft" connected="true" end="-0.029 0.001 -0.469" group="Legs" name="mKneeLeft" pivot="-0.000887 -0.045568 -0.491053" pos="-0.001 -0.046 -0.491" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="-0.010 0.000 -0.150" group="Collision" name="L_LOWER_LEG" pos="-0.02 0.0 -0.2" rot="0.000000 0.00000 0.000000" scale="0.06 0.06 0.25" support="base"/>
<bone aliases="lFoot avatar_mAnkleLeft" connected="true" end="0.112 0.000 -0.061" group="Legs" name="mAnkleLeft" pivot="-0.028887 0.001378 -0.468449" pos="-0.029 0.001 -0.468" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<collision_volume end="0.089 0.000 -0.026" group="Collision" name="L_FOOT" pos="0.077 0.0 -0.041" rot="0.000000 10.00000 0.000000" scale="0.13 0.05 0.05" support="base"/>
<bone aliases="avatar_mFootLeft" connected="true" end="0.105 0.008 0.001" group="Extra" name="mFootLeft" pivot="0.111956 -0.000000 -0.060620" pos="0.112 -0.000 -0.061" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base">
<bone aliases="avatar_mToeLeft" connected="false" end="0.020 0.000 0.000" group="Extra" name="mToeLeft" pivot="0.105387 0.008270 0.000871" pos="0.109 0.000 0.000" rot="0.000000 0.000000 0.000000" scale="1.000 1.000 1.000" support="base"/>
</bone>
</bone>
</bone>
</bone>
<bone connected="false" end="-0.197 0.000 0.000" group="Tail" name="mTail1" pivot="-0.116 0.000 0.047" pos="-0.116 0.000 0.047" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.168 0.000 0.000" group="Tail" name="mTail2" pivot="-0.197 0.000 0.000" pos="-0.197 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.142 0.000 0.000" group="Tail" name="mTail3" pivot="-0.168 0.000 0.000" pos="-0.168 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.112 0.000 0.000" group="Tail" name="mTail4" pivot="-0.142 0.000 0.000" pos="-0.142 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.094 0.000 0.000" group="Tail" name="mTail5" pivot="-0.112 0.000 0.000" pos="-0.112 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.089 0.000 0.000" group="Tail" name="mTail6" pivot="-0.094 0.000 0.000" pos="-0.094 0.000 0.000" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
<bone connected="false" end="0.004 0.000 -0.066" group="Groin" name="mGroin" pivot="0.064 0.000 -0.097" pos="0.064 0.000 -0.097" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
<bone connected="false" end="-0.204 0.000 0.000" group="Limb" name="mHindLimbsRoot" pivot="-0.200 0.000 0.084" pos="-0.200 0.000 0.084" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="false" end="0.002 -0.046 -0.491" group="Limb" name="mHindLimb1Left" pivot="-0.204 0.129 -0.125" pos="-0.204 0.129 -0.125" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.030 -0.003 -0.468" group="Limb" name="mHindLimb2Left" pivot="0.002 -0.046 -0.491" pos="0.002 -0.046 -0.491" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.112 0.000 -0.061" group="Limb" name="mHindLimb3Left" pivot="-0.030 -0.003 -0.468" pos="-0.030 -0.003 -0.468" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.105 0.008 0.000" group="Limb" name="mHindLimb4Left" pivot="0.112 0.000 -0.061" pos="0.112 0.000 -0.061" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
<bone connected="false" end="0.002 0.046 -0.491" group="Limb" name="mHindLimb1Right" pivot="-0.204 -0.129 -0.125" pos="-0.204 -0.129 -0.125" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="-0.030 0.003 -0.468" group="Limb" name="mHindLimb2Right" pivot="0.002 0.046 -0.491" pos="0.002 0.046 -0.491" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.112 0.000 -0.061" group="Limb" name="mHindLimb3Right" pivot="-0.030 0.003 -0.468" pos="-0.030 0.003 -0.468" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended">
<bone connected="true" end="0.105 -0.008 0.000" group="Limb" name="mHindLimb4Right" pivot="0.112 0.000 -0.061" pos="0.112 0.000 -0.061" rot="0.000 0.000 0.000" scale="1.00 1.00 1.00" support="extended"/>
</bone>
</bone>
</bone>
</bone>
</bone>
</linden_skeleton>
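Since the new skeleton file is just nested <bone> elements, a few lines of stdlib code are enough to walk it. A sketch (file path assumed relative to a repo checkout):

import xml.etree.ElementTree as ET

tree = ET.parse("hippolyzer/lib/base/data/avatar_skeleton.xml")

def walk(bone, depth=0):
    # "pos" is the offset from the parent bone; "support" distinguishes the
    # base skeleton from the newer "extended" bones.
    print("  " * depth + bone.get("name"), bone.get("pos"), bone.get("support"))
    for child in bone.findall("bone"):
        walk(child, depth + 1)

for root_bone in tree.getroot().findall("bone"):
    walk(root_bone)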
@@ -299,6 +299,32 @@ class StringEnum(str, enum.Enum):
        return self.value


+class IntEnum(enum.IntEnum):
+    # Give a special repr() that'll eval in a REPL.
+    def __repr__(self):
+        return f"{self.__class__.__name__}.{self.name}"
+
+
+class IntFlag(enum.IntFlag):
+    def __repr__(self):
+        # Make an ORed together version of the flags based on the POD version
+        flags = flags_to_pod(type(self), self)
+        flags = " | ".join(
+            (f"{self.__class__.__name__}.{v}" if isinstance(v, str) else str(v))
+            for v in flags
+        )
+        return f"({flags})"
+
+
+def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> Tuple[Union[str, int], ...]:
+    # Shove any bits not represented in the IntFlag into an int
+    left_over = val
+    for flag in iter(flag_cls):
+        left_over &= ~flag.value
+    extra = (int(left_over),) if left_over else ()
+    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
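A quick usage sketch of the two helpers above, with an invented flag class; bits that have no named member survive as a plain int at the end of the tuple:

class ExampleFlags(IntFlag):  # hypothetical flags, not from the codebase
    PHYSICS = 0x1
    TEMPORARY = 0x2

val = ExampleFlags(0x13)  # the 0x10 bit has no named member
print(flags_to_pod(ExampleFlags, val))  # ('PHYSICS', 'TEMPORARY', 16)
print(repr(val))  # (ExampleFlags.PHYSICS | ExampleFlags.TEMPORARY | 16)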


class TaggedUnion(recordclass.datatuple):  # type: ignore
    tag: Any
    value: Any
@@ -306,5 +332,6 @@ class TaggedUnion(recordclass.datatuple):  # type: ignore

__all__ = [
    "Vector3", "Vector4", "Vector2", "Quaternion", "TupleCoord",
-    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion"
+    "UUID", "RawBytes", "StringEnum", "JankStringyBytes", "TaggedUnion",
+    "IntEnum", "IntFlag", "flags_to_pod"
]

@@ -347,7 +347,7 @@ class RegionCapNotAvailable(RegionDomainError):

class RegionMessageError(RegionDomainError):
    """ an error raised when a region does not have a connection
-    over which it can send UDP messages
+    over which it can send UDP messages

    accepts a region object as an attribute

@@ -1,6 +1,9 @@
from __future__ import annotations

+import codecs
+import functools
import pkg_resources
+import re
import weakref
from pprint import PrettyPrinter
from typing import *
@@ -121,3 +124,24 @@ def proxify(obj: Union[Callable[[], _T], weakref.ReferenceType, _T]) -> _T:
    if obj is not None and not isinstance(obj, weakref.ProxyTypes):
        return weakref.proxy(obj)
    return obj


+def bytes_unescape(val: bytes) -> bytes:
+    # Only in CPython. bytes -> bytes with escape decoding.
+    # https://stackoverflow.com/a/23151714
+    return codecs.escape_decode(val)[0]  # type: ignore
+
+
+def bytes_escape(val: bytes) -> bytes:
+    # Try to keep newlines as-is
+    return re.sub(rb"(?<!\\)\\n", b"\n", codecs.escape_encode(val)[0])  # type: ignore
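These two lean on codecs.escape_encode()/escape_decode(), which are CPython-only internals as the comment notes. The newline special-case keeps escaped output readable for multi-line payloads while still round-tripping:

raw = b"line1\nline2\x00"
escaped = bytes_escape(raw)
# Newlines stay literal, everything else gets escaped
assert escaped == b"line1\nline2\\x00"
assert bytes_unescape(escaped) == raw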


+def get_resource_filename(resource_filename: str):
+    return pkg_resources.resource_filename("hippolyzer", resource_filename)
+
+
+def to_chunks(chunkable: Sequence[_T], chunk_size: int) -> Generator[Sequence[_T], None, None]:
+    while chunkable:
+        yield chunkable[:chunk_size]
+        chunkable = chunkable[chunk_size:]
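to_chunks() slices rather than tracking indices, so it works on any sliceable sequence and each chunk keeps the input's type; for example:

assert list(to_chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]
assert list(to_chunks(b"abcdef", 4)) == [b"abcd", b"ef"]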

@@ -3,11 +3,11 @@ import tempfile
from io import BytesIO
from typing import *

-import defusedxml.cElementTree
+import defusedxml.ElementTree
from glymur import jp2box, Jp2k

# Replace glymur's ElementTree with a safe one
-jp2box.ET = defusedxml.cElementTree
+jp2box.ET = defusedxml.ElementTree


SL_DEFAULT_ENCODE = {
@@ -1,71 +1,49 @@
"""
-Parse the horrible legacy inventory format
+Parse the horrible legacy inventory-related format.

It's typically only used for object contents now.
"""
from __future__ import annotations

import abc
import dataclasses
import datetime as dt
import itertools
import logging
import re
import weakref
from io import StringIO
from typing import *

from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.legacy_schema import (
+    parse_schema_line,
+    SchemaBase,
+    SchemaDate,
+    SchemaFieldSerializer,
+    SchemaHexInt,
+    SchemaInt,
+    SchemaMultilineStr,
+    SchemaParsingError,
+    SchemaStr,
+    SchemaUUID,
+    schema_field,
+)

-LOG = logging.getLogger(__name__)
MAGIC_ID = UUID("3c115e51-04f4-523c-9fa6-98aff1034730")
+LOG = logging.getLogger(__name__)
_T = TypeVar("_T")


-def _parse_str(val: str):
-    return val.rstrip("|")
-
-
-def _int_from_hex(val: str):
-    return int(val, 16)
-
-
-def _parse_date(val: str):
-    return dt.datetime.utcfromtimestamp(int(val))
-
-
-class InventoryParsingError(Exception):
-    pass
-
-
-def _inv_field(spec: Union[Callable, Type], *, default=dataclasses.MISSING, init=True, repr=True,  # noqa
-               hash=None, compare=True) -> dataclasses.Field:  # noqa
-    """Describe a field in the inventory schema and the shape of its value"""
-    return dataclasses.field(
-        metadata={"spec": spec}, default=default, init=init,
-        repr=repr, hash=hash, compare=compare
-    )
-
-
-# The schema is meant to allow multi-line strings, but in practice
-# it does not due to scanf() shenanigans. This is fine.
-_INV_TOKEN_RE = re.compile(r'\A\s*([^\s]+)(\s+([^\t\r\n]+))?$')
-
-
-def _parse_inv_line(line: str):
-    g = _INV_TOKEN_RE.search(line)
-    if not g:
-        raise InventoryParsingError("%r doesn't match the token regex" % line)
-    return g.group(1), g.group(3)
-
-
-def _yield_inv_tokens(line_iter: Iterator[str]):
+def _yield_schema_tokens(reader: StringIO):
    in_bracket = False
-    for line in line_iter:
+    # empty str == EOF in Python
+    while line := reader.readline():
        line = line.strip()
        # Whitespace-only lines are automatically skipped
        if not line:
            continue
        try:
-            key, val = _parse_inv_line(line)
-        except InventoryParsingError:
+            key, val = parse_schema_line(line)
+        except SchemaParsingError:
            # Can happen if there's a malformed multi-line string, just
            # skip by it.
            LOG.warning(f"Found invalid inventory line {line!r}")
@@ -77,41 +55,91 @@ def _yield_inv_tokens(line_iter: Iterator[str]):
            in_bracket = True
            continue
        if key == "}":
            if not in_bracket:
                LOG.warning("Unexpected closing bracket")
            in_bracket = False
            break
        yield key, val
    if in_bracket:
-        raise LOG.warning("Reached EOF while inside a bracket")
+        LOG.warning("Reached EOF while inside a bracket")


-class InventoryModel:
+class InventoryBase(SchemaBase):
+    SCHEMA_NAME: ClassVar[str]
+
+    @classmethod
+    def from_reader(cls, reader: StringIO, read_header=False) -> InventoryBase:
+        tok_iter = _yield_schema_tokens(reader)
+        # Someone else hasn't already read the header for us
+        if read_header:
+            schema_name, _ = next(tok_iter)
+            if schema_name != cls.SCHEMA_NAME:
+                raise ValueError(f"Expected schema name {schema_name!r} to be {cls.SCHEMA_NAME!r}")
+
+        fields = cls._fields_dict()
+        obj_dict = {}
+        for key, val in tok_iter:
+            if key in fields:
+                field: dataclasses.Field = fields[key]
+                spec = field.metadata.get("spec")
+                # Not a real key, an internal var on our dataclass
+                if not spec:
+                    LOG.warning(f"Internal key {key!r}")
+                    continue
+                # some kind of nested structure like sale_info
+                if issubclass(spec, SchemaBase):
+                    obj_dict[key] = spec.from_reader(reader)
+                elif issubclass(spec, SchemaFieldSerializer):
+                    obj_dict[key] = spec.deserialize(val)
+                else:
+                    raise ValueError(f"Unsupported spec for {key!r}, {spec!r}")
+            else:
+                LOG.warning(f"Unknown key {key!r}")
+        return cls._obj_from_dict(obj_dict)
+
+    def to_writer(self, writer: StringIO):
+        writer.write(f"\t{self.SCHEMA_NAME}\t0\n")
+        writer.write("\t{\n")
+        for field_name, field in self._fields_dict().items():
+            spec = field.metadata.get("spec")
+            # Not meant to be serialized
+            if not spec:
+                continue
+
+            val = getattr(self, field_name)
+            if val is None:
+                continue
+
+            # Some kind of nested structure like sale_info
+            if isinstance(val, SchemaBase):
+                val.to_writer(writer)
+            elif issubclass(spec, SchemaFieldSerializer):
+                writer.write(f"\t\t{field_name}\t{spec.serialize(val)}\n")
+            else:
+                raise ValueError(f"Bad inventory spec {spec!r}")
+        writer.write("\t}\n")
+
+
+class InventoryModel(InventoryBase):
    def __init__(self):
        self.containers: Dict[UUID, InventoryContainerBase] = {}
        self.items: Dict[UUID, InventoryItem] = {}
        self.root: Optional[InventoryContainerBase] = None

    @classmethod
-    def from_str(cls, text: str):
-        return cls.from_iter(iter(text.splitlines()))
-
-    @classmethod
-    def from_bytes(cls, data: bytes):
-        return cls.from_str(data.decode("utf8"))
-
-    @classmethod
-    def from_iter(cls, line_iter: Iterator[str]) -> InventoryModel:
+    def from_reader(cls, reader: StringIO, read_header=False) -> InventoryModel:
        model = cls()
-        for key, value in _yield_inv_tokens(line_iter):
+        for key, value in _yield_schema_tokens(reader):
            if key == "inv_object":
-                obj = InventoryObject.from_iter(line_iter)
+                obj = InventoryObject.from_reader(reader)
                if obj is not None:
                    model.add_container(obj)
            elif key == "inv_category":
-                cat = InventoryCategory.from_iter(line_iter)
+                cat = InventoryCategory.from_reader(reader)
                if cat is not None:
                    model.add_container(cat)
            elif key == "inv_item":
-                item = InventoryItem.from_iter(line_iter)
+                item = InventoryItem.from_reader(reader)
                if item is not None:
                    model.add_item(item)
            else:
@@ -119,6 +147,12 @@ class InventoryModel:
        model.reparent_nodes()
        return model

+    def to_writer(self, writer: StringIO):
+        for container in self.containers.values():
+            container.to_writer(writer)
+        for item in self.items.values():
+            item.to_writer(writer)
+
    def add_container(self, container: InventoryContainerBase):
        self.containers[container.node_id] = container
        container.model = weakref.proxy(self)
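To make the new reader/writer pair concrete, here is a hypothetical round-trip through the small sale_info schema defined further down (field values invented; the tab-delimited layout is exactly what to_writer() above emits):

from io import StringIO

serialized = "\tsale_info\t0\n\t{\n\t\tsale_type\tnot\n\t\tsale_price\t10\n\t}\n"
info = InventorySaleInfo.from_reader(StringIO(serialized), read_header=True)
assert info.sale_type == "not" and info.sale_price == 10
# to_str() (inherited from SchemaBase) drives to_writer() and reproduces the input
assert info.to_str() == serialized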
@@ -143,63 +177,34 @@ class InventoryModel:
            parent_container.children.append(obj)


-@dataclasses.dataclass
-class InventoryBase(abc.ABC):
-    @classmethod
-    def _fields_dict(cls):
-        return {f.name: f for f in dataclasses.fields(cls)}
-
-    @classmethod
-    def from_iter(cls, line_iter: Iterator[str]):
-        fields = cls._fields_dict()
-        obj = {}
-        for key, val in _yield_inv_tokens(line_iter):
-            if key in fields:
-                field: dataclasses.Field = fields[key]
-                spec = field.metadata.get("spec")
-                # Not a real key, an internal var on our dataclass
-                if not spec:
-                    LOG.warning(f"Internal key {key!r}")
-                    continue
-                # some kind of nested structure like sale_info
-                if isinstance(spec, type) and issubclass(spec, InventoryBase):
-                    obj[key] = spec.from_iter(line_iter)
-                else:
-                    obj[key] = spec(val)
-            else:
-                LOG.warning(f"Unknown key {key!r}")
-
-        # Bad entry, ignore
-        # TODO: Check on these. might be symlinks or something.
-        if obj.get("type") == "-1":
-            LOG.warning(f"Skipping bad object with type == -1: {obj!r}")
-            return None
-        return cls(**obj)  # type: ignore
-
-
@dataclasses.dataclass
class InventoryPermissions(InventoryBase):
-    base_mask: int = _inv_field(_int_from_hex)
-    owner_mask: int = _inv_field(_int_from_hex)
-    group_mask: int = _inv_field(_int_from_hex)
-    everyone_mask: int = _inv_field(_int_from_hex)
-    next_owner_mask: int = _inv_field(_int_from_hex)
-    creator_id: UUID = _inv_field(UUID)
-    owner_id: UUID = _inv_field(UUID)
-    last_owner_id: UUID = _inv_field(UUID)
-    group_id: UUID = _inv_field(UUID)
+    SCHEMA_NAME: ClassVar[str] = "permissions"
+
+    base_mask: int = schema_field(SchemaHexInt)
+    owner_mask: int = schema_field(SchemaHexInt)
+    group_mask: int = schema_field(SchemaHexInt)
+    everyone_mask: int = schema_field(SchemaHexInt)
+    next_owner_mask: int = schema_field(SchemaHexInt)
+    creator_id: UUID = schema_field(SchemaUUID)
+    owner_id: UUID = schema_field(SchemaUUID)
+    last_owner_id: UUID = schema_field(SchemaUUID)
+    group_id: UUID = schema_field(SchemaUUID)


@dataclasses.dataclass
class InventorySaleInfo(InventoryBase):
-    sale_type: str = _inv_field(str)
-    sale_price: int = _inv_field(int)
+    SCHEMA_NAME: ClassVar[str] = "sale_info"
+
+    sale_type: str = schema_field(SchemaStr)
+    sale_price: int = schema_field(SchemaInt)


@dataclasses.dataclass
class InventoryNodeBase(InventoryBase):
    ID_ATTR: ClassVar[str]
-    parent_id: Optional[UUID] = _inv_field(UUID)
+
+    parent_id: Optional[UUID] = schema_field(SchemaUUID)
    model: Optional[InventoryModel] = dataclasses.field(default=None, init=False)

    @property
@@ -210,43 +215,58 @@ class InventoryNodeBase(InventoryBase):
    def parent(self):
        return self.model.containers.get(self.parent_id)

+    @classmethod
+    def _obj_from_dict(cls, obj_dict):
+        # Bad entry, ignore
+        # TODO: Check on these. might be symlinks or something.
+        if obj_dict.get("type") == "-1":
+            LOG.warning(f"Skipping bad object with type == -1: {obj_dict!r}")
+            return None
+        return super()._obj_from_dict(obj_dict)
+

@dataclasses.dataclass
class InventoryContainerBase(InventoryNodeBase):
-    type: str = _inv_field(str)
-    name: str = _inv_field(_parse_str)
+    type: str = schema_field(SchemaStr)
+    name: str = schema_field(SchemaMultilineStr)
    children: List[InventoryNodeBase] = dataclasses.field(default_factory=list, init=False)


@dataclasses.dataclass
class InventoryObject(InventoryContainerBase):
+    SCHEMA_NAME: ClassVar[str] = "inv_object"
    ID_ATTR: ClassVar[str] = "obj_id"
-    obj_id: UUID = _inv_field(UUID)
+
+    obj_id: UUID = schema_field(SchemaUUID)


@dataclasses.dataclass
class InventoryCategory(InventoryContainerBase):
    ID_ATTR: ClassVar[str] = "cat_id"
-    cat_id: UUID = _inv_field(UUID)
-    pref_type: str = _inv_field(str)
-    owner_id: UUID = _inv_field(UUID)
-    version: int = _inv_field(int)
+    SCHEMA_NAME: ClassVar[str] = "inv_object"
+
+    cat_id: UUID = schema_field(SchemaUUID)
+    pref_type: str = schema_field(SchemaStr)
+    owner_id: UUID = schema_field(SchemaUUID)
+    version: int = schema_field(SchemaInt)


@dataclasses.dataclass
class InventoryItem(InventoryNodeBase):
+    SCHEMA_NAME: ClassVar[str] = "inv_item"
    ID_ATTR: ClassVar[str] = "item_id"
-    item_id: UUID = _inv_field(UUID)
-    type: str = _inv_field(str)
-    inv_type: str = _inv_field(str)
-    flags: int = _inv_field(_int_from_hex)
-    name: str = _inv_field(_parse_str)
-    desc: str = _inv_field(_parse_str)
-    creation_date: dt.datetime = _inv_field(_parse_date)
-    permissions: InventoryPermissions = _inv_field(InventoryPermissions)
-    sale_info: InventorySaleInfo = _inv_field(InventorySaleInfo)
-    asset_id: Optional[UUID] = _inv_field(UUID, default=None)
-    shadow_id: Optional[UUID] = _inv_field(UUID, default=None)
+
+    item_id: UUID = schema_field(SchemaUUID)
+    type: str = schema_field(SchemaStr)
+    inv_type: str = schema_field(SchemaStr)
+    flags: int = schema_field(SchemaHexInt)
+    name: str = schema_field(SchemaMultilineStr)
+    desc: str = schema_field(SchemaMultilineStr)
+    creation_date: dt.datetime = schema_field(SchemaDate)
+    permissions: InventoryPermissions = schema_field(InventoryPermissions)
+    sale_info: InventorySaleInfo = schema_field(InventorySaleInfo)
+    asset_id: Optional[UUID] = schema_field(SchemaUUID, default=None)
+    shadow_id: Optional[UUID] = schema_field(SchemaUUID, default=None)

    @property
    def true_asset_id(self) -> UUID:
155 hippolyzer/lib/base/legacy_schema.py Normal file
@@ -0,0 +1,155 @@
|
||||
"""
|
||||
Legacy line-oriented schema parser base classes
|
||||
|
||||
Used for task inventory and wearables.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import calendar
|
||||
import dataclasses
|
||||
import datetime as dt
|
||||
import logging
|
||||
import re
|
||||
from io import StringIO
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class SchemaFieldSerializer(abc.ABC, Generic[_T]):
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def deserialize(cls, val: str) -> _T:
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
@abc.abstractmethod
|
||||
def serialize(cls, val: _T) -> str:
|
||||
pass
|
||||
|
||||
|
||||
class SchemaDate(SchemaFieldSerializer[dt.datetime]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> dt.datetime:
|
||||
return dt.datetime.utcfromtimestamp(int(val))
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: dt.datetime) -> str:
|
||||
return str(calendar.timegm(val.utctimetuple()))
|
||||
|
||||
|
||||
class SchemaHexInt(SchemaFieldSerializer[int]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> int:
|
||||
return int(val, 16)
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: int) -> str:
|
||||
return "%08x" % val
|
||||
|
||||
|
||||
class SchemaInt(SchemaFieldSerializer[int]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> int:
|
||||
return int(val)
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: int) -> str:
|
||||
return str(val)
|
||||
|
||||
|
||||
class SchemaMultilineStr(SchemaFieldSerializer[str]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> str:
|
||||
# llinventory claims that it will parse multiple lines until it finds
|
||||
# an "|" terminator. That's not true. Use llinventory's _actual_ behaviour.
|
||||
return val.partition("|")[0]
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: str) -> str:
|
||||
return val + "|"
|
||||
|
||||
|
||||
class SchemaStr(SchemaFieldSerializer[str]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> str:
|
||||
return val
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: str) -> str:
|
||||
return val
|
||||
|
||||
|
||||
class SchemaUUID(SchemaFieldSerializer[UUID]):
|
||||
@classmethod
|
||||
def deserialize(cls, val: str) -> UUID:
|
||||
return UUID(val)
|
||||
|
||||
@classmethod
|
||||
def serialize(cls, val: UUID) -> str:
|
||||
return str(val)
|
||||
|
||||
|
||||
def schema_field(spec: Type[Union[SchemaBase, SchemaFieldSerializer]], *, default=dataclasses.MISSING, init=True,
|
||||
repr=True, hash=None, compare=True) -> dataclasses.Field: # noqa
|
||||
"""Describe a field in the inventory schema and the shape of its value"""
|
||||
return dataclasses.field(
|
||||
metadata={"spec": spec}, default=default, init=init, repr=repr, hash=hash, compare=compare
|
||||
)
|
||||
|
||||
|
||||
class SchemaParsingError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
# The schema is meant to allow multi-line strings, but in practice
|
||||
# it does not due to scanf() shenanigans. This is fine.
|
||||
_SCHEMA_LINE_TOKENS_RE = re.compile(r'\A\s*([^\s]+)(\s+([^\t\r\n]+))?$')
|
||||
|
||||
|
||||
def parse_schema_line(line: str):
|
||||
g = _SCHEMA_LINE_TOKENS_RE.search(line)
|
||||
if not g:
|
||||
raise SchemaParsingError(f"{line!r} doesn't match the token regex")
|
||||
return g.group(1), g.group(3)

@dataclasses.dataclass
class SchemaBase(abc.ABC):
    @classmethod
    def _fields_dict(cls):
        return {f.name: f for f in dataclasses.fields(cls)}

    @classmethod
    def from_str(cls, text: str):
        return cls.from_reader(StringIO(text))

    @classmethod
    @abc.abstractmethod
    def from_reader(cls: Type[_T], reader: StringIO) -> _T:
        pass

    @classmethod
    def from_bytes(cls, data: bytes):
        return cls.from_str(data.decode("utf8"))

    def to_bytes(self) -> bytes:
        return self.to_str().encode("utf8")

    def to_str(self) -> str:
        writer = StringIO()
        self.to_writer(writer)
        writer.seek(0)
        return writer.read()

    @abc.abstractmethod
    def to_writer(self, writer: StringIO):
        pass

    @classmethod
    def _obj_from_dict(cls, obj_dict: Dict):
        return cls(**obj_dict)  # type: ignore
@@ -39,6 +39,7 @@ class MeshAsset:

# These TypedDicts describe the expected shape of the LLSD in the mesh
# header and various segments. They're mainly for type hinting.
class MeshHeaderDict(TypedDict, total=False):
    """Header of the mesh file, includes offsets & sizes for segments' LLSD"""
    version: int
    creator: UUID
    date: dt.datetime

@@ -54,6 +55,7 @@ class MeshHeaderDict(TypedDict, total=False):

class SegmentHeaderDict(TypedDict):
    """Standard shape for segment references within the header"""
    offset: int
    size: int

@@ -73,6 +75,7 @@ class PhysicsHavokSegmentHeaderDict(PhysicsSegmentHeaderDict, total=False):

class PhysicsCostDataHeaderDict(TypedDict, total=False):
    """Cost of physical representation, populated by server"""
    decomposition: float
    decomposition_discounted_vertices: int
    decomposition_hulls: int

@@ -85,6 +88,7 @@ class PhysicsCostDataHeaderDict(TypedDict, total=False):

class MeshSegmentDict(TypedDict, total=False):
    """Dict of segments unpacked using the MeshHeaderDict"""
    high_lod: List[LODSegmentDict]
    medium_lod: List[LODSegmentDict]
    low_lod: List[LODSegmentDict]

@@ -96,6 +100,7 @@ class MeshSegmentDict(TypedDict, total=False):

class LODSegmentDict(TypedDict, total=False):
    """Represents a single entry within the material list of a LOD segment"""
    # Only present if True and no geometry
    NoGeometry: bool
    # -1.0 - 1.0

@@ -113,17 +118,22 @@ class LODSegmentDict(TypedDict, total=False):

class DomainDict(TypedDict):
    """Description of the real range for quantized coordinates"""
    # number of elems depends on what the domain is for, Vec2 or Vec3
    Max: List[float]
    Min: List[float]

class VertexWeight(recordclass.datatuple):  # type: ignore
    """Vertex weight for a specific joint on a specific vertex"""
    # index of the joint within the joint_names list in the skin segment
    joint_idx: int
    # 0.0 - 1.0
    weight: float


class SkinSegmentDict(TypedDict, total=False):
    """Rigging information"""
    joint_names: List[str]
    # model -> world transform matrix for model
    bind_shape_matrix: List[float]

@@ -137,14 +147,17 @@ class SkinSegmentDict(TypedDict, total=False):

class PhysicsConvexSegmentDict(DomainDict, total=False):
    """Data for convex hull collisions, populated by the client"""
    # Min / Max domain vals are inline, unlike for LODs
    HullList: List[int]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
    Positions: List[Vector3]
-    # -1.0 - 1.0
+    # -1.0 - 1.0, dequantized from binary field of U16s
    BoundingVerts: List[Vector3]


class PhysicsHavokSegmentDict(TypedDict, total=False):
    """Cached data for Havok collisions, populated by sim and not used by client."""
    HullMassProps: MassPropsDict
    MOPP: MOPPDict
    MeshDecompMassProps: MassPropsDict

@@ -169,8 +182,11 @@ class MOPPDict(TypedDict, total=False):

def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):
-    # Used for turning positions into their actual positions within the mesh / domain
-    # for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
+    Used for turning positions into their actual positions within the mesh / domain
+
+    for ex: positions_from_domain(lod["Position"], lod["PositionDomain"])
+    """
    lower = domain['Min']
    upper = domain['Max']
    return [

@@ -179,7 +195,7 @@ def positions_from_domain(positions: Iterable[TupleCoord], domain: DomainDict):

def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
    """Used for turning positions into their actual positions within the mesh / domain"""
    lower = domain['Min']
    upper = domain['Max']
    return [

@@ -187,7 +203,36 @@ def positions_to_domain(positions: Iterable[TupleCoord], domain: DomainDict):
    ]
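The bodies of both functions are elided by the hunks above, but the underlying operation is plain linear interpolation between the domain bounds. A sketch of the equivalent math, per component (the helper name here is hypothetical, not the module's actual implementation):

```python
def _lerp_from_domain(quantized: float, lower: float, upper: float) -> float:
    # quantized is in [0.0, 1.0] after unpacking the U16;
    # scale it back into the real [lower, upper] range.
    return lower + quantized * (upper - lower)

assert _lerp_from_domain(0.5, -2.0, 2.0) == 0.0
```

positions_to_domain is the inverse mapping, (value - lower) / (upper - lower), back into the quantized range.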

class VertexWeights(se.SerializableBase):
    """Serializer for a list of joint weights on a single vertex"""
    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
    INFLUENCE_LIMIT = 4
    INFLUENCE_TERM = 0xFF

    @classmethod
    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
        if len(vals) > cls.INFLUENCE_LIMIT:
            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
        for val in vals:
            joint_idx, influence = val
            writer.write(se.U8, joint_idx)
            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
        if len(vals) != cls.INFLUENCE_LIMIT:
            writer.write(se.U8, cls.INFLUENCE_TERM)

    @classmethod
    def deserialize(cls, reader: se.Reader, ctx=None):
        influence_list = []
        for _ in range(cls.INFLUENCE_LIMIT):
            joint_idx = reader.read(se.U8)
            if joint_idx == cls.INFLUENCE_TERM:
                break
            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
        return influence_list
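A round trip through this serializer might look like the following sketch. The BufferWriter/BufferReader constructor arguments and the writer's buffer attribute are assumptions, based only on the se.BufferReader("!", data) usage visible elsewhere in this diff:

```python
# Sketch only; constructor signatures are assumed, not confirmed by this diff.
writer = se.BufferWriter("!")
VertexWeights.serialize([VertexWeight(0, 1.0), VertexWeight(3, 0.25)], writer)
reader = se.BufferReader("!", writer.buffer)  # "buffer" attr assumed
weights = VertexWeights.deserialize(reader)
assert [w.joint_idx for w in weights] == [0, 3]
```

Because fewer than INFLUENCE_LIMIT weights were written, a 0xFF terminator byte follows them on the wire, which is what deserialize breaks on.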

class SegmentSerializer:
    """Serializer for binary fields within an LLSD object"""
    def __init__(self, templates):
        self._templates: Dict[str, se.SerializableBase] = templates

@@ -217,33 +262,6 @@ class SegmentSerializer:
        return new_segment

-class VertexWeights(se.SerializableBase):
-    INFLUENCE_SER = se.QuantizedFloat(se.U16, 0.0, 1.0)
-    INFLUENCE_LIMIT = 4
-    INFLUENCE_TERM = 0xFF
-
-    @classmethod
-    def serialize(cls, vals, writer: se.BufferWriter, ctx=None):
-        if len(vals) > cls.INFLUENCE_LIMIT:
-            raise ValueError(f"{vals!r} is too long, can only have {cls.INFLUENCE_LIMIT} influences!")
-        for val in vals:
-            joint_idx, influence = val
-            writer.write(se.U8, joint_idx)
-            writer.write(cls.INFLUENCE_SER, influence, ctx=ctx)
-        if len(vals) != cls.INFLUENCE_LIMIT:
-            writer.write(se.U8, cls.INFLUENCE_TERM)
-
-    @classmethod
-    def deserialize(cls, reader: se.Reader, ctx=None):
-        influence_list = []
-        for _ in range(cls.INFLUENCE_LIMIT):
-            joint_idx = reader.read(se.U8)
-            if joint_idx == cls.INFLUENCE_TERM:
-                break
-            influence_list.append(VertexWeight(joint_idx, reader.read(cls.INFLUENCE_SER, ctx=ctx)))
-        return influence_list


LOD_SEGMENT_SERIALIZER = SegmentSerializer({
    # 16-bit indices to the verts making up the tri. Imposes a 16-bit
    # upper limit on verts in any given material in the mesh.

@@ -265,6 +283,7 @@ class LLMeshSerializer(se.SerializableBase):
    KNOWN_SEGMENTS = ("lowest_lod", "low_lod", "medium_lod", "high_lod",
                      "physics_mesh", "physics_convex", "skin", "physics_havok")

+    # Define unpackers for specific binary fields within the parsed LLSD segments
    SEGMENT_TEMPLATES: Dict[str, SegmentSerializer] = {
        "lowest_lod": LOD_SEGMENT_SERIALIZER,
        "low_lod": LOD_SEGMENT_SERIALIZER,
@@ -19,5 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
80	hippolyzer/lib/base/message/circuit.py	Normal file
@@ -0,0 +1,80 @@
from __future__ import annotations

import abc
import datetime as dt
import logging
from typing import *
from typing import Optional

from .message_handler import MessageHandler
from ..network.transport import AbstractUDPTransport, UDPPacket, Direction, ADDR_TUPLE
from .message import Block, Message
from .msgtypes import PacketFlags
from .udpserializer import UDPMessageSerializer

class Circuit:
    def __init__(self, near_host: Optional[ADDR_TUPLE], far_host: ADDR_TUPLE, transport):
        self.near_host: Optional[ADDR_TUPLE] = near_host
        self.host: ADDR_TUPLE = far_host
        self.is_alive = True
        self.transport: Optional[AbstractUDPTransport] = transport
        self.serializer = UDPMessageSerializer()
        self.last_packet_at = dt.datetime.now()
        self.packet_id_base = 0

    def _send_prepared_message(self, message: Message, transport=None):
        try:
            serialized = self.serializer.serialize(message)
        except:
            logging.exception(f"Failed to serialize: {message.to_dict()!r}")
            raise
        return self.send_datagram(serialized, message.direction, transport=transport)

    def send_datagram(self, data: bytes, direction: Direction, transport=None):
        self.last_packet_at = dt.datetime.now()
        src_addr, dst_addr = self.host, self.near_host
        if direction == Direction.OUT:
            src_addr, dst_addr = self.near_host, self.host

        packet = UDPPacket(src_addr, dst_addr, data, direction)
        (transport or self.transport).send_packet(packet)
        return packet

    def prepare_message(self, message: Message):
        if message.finalized:
            raise RuntimeError(f"Trying to re-send finalized {message!r}")
        message.packet_id = self.packet_id_base
        self.packet_id_base += 1
        if not message.acks:
            # Clear the ACK flag when there's nothing to ack; a bare "&=" here
            # would zero out every other flag instead of stripping this one.
            message.send_flags &= ~PacketFlags.ACK
        message.finalized = True
        # send_message() gates on this return value before actually sending.
        return True

    def send_message(self, message: Message, transport=None):
        if self.prepare_message(message):
            return self._send_prepared_message(message, transport)

    def send_acks(self, to_ack: Sequence[int], direction=Direction.OUT, packet_id=None):
        logging.debug("%r acking %r" % (direction, to_ack))
        # TODO: maybe tack this onto `.acks` for next message?
        message = Message('PacketAck', *[Block('Packets', ID=x) for x in to_ack])
        message.packet_id = packet_id
        message.direction = direction
        message.injected = True
        self.send_message(message)

    def __repr__(self):
        return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)

class ConnectionHolder(abc.ABC):
    """
    Any object that has both a circuit and a message handler

    Preferred to explicitly passing around a circuit, message handler pair
    because generally a ConnectionHolder represents a region or a client.
    The same region or client may have multiple different circuits across the
    lifetime of a session (due to region restarts, etc.)
    """
    circuit: Optional[Circuit]
    message_handler: MessageHandler[Message, str]

@@ -20,8 +20,8 @@ along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""

-import os
+from hippolyzer.lib.base.helpers import get_resource_filename

-msg_tmpl = open(os.path.join(os.path.dirname(__file__), 'message_template.msg'))
-with open(os.path.join(os.path.dirname(__file__), 'message.xml'), "rb") as _f:
+msg_tmpl = open(get_resource_filename("lib/base/message/data/message_template.msg"))
+with open(get_resource_filename("lib/base/message/data/message.xml"), "rb") as _f:
    msg_details = _f.read()
@@ -18,29 +18,52 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from __future__ import annotations

import copy
import enum
import importlib
import itertools
import logging
import os
import uuid
from typing import *

-from .. import serialization as se
-from ..datatypes import *
-from .msgtypes import PacketFlags
+from hippolyzer.lib.base.datatypes import *
+import hippolyzer.lib.base.serialization as se
+import hippolyzer.lib.base.templates as templates
+from hippolyzer.lib.base.message.msgtypes import PacketFlags
+from hippolyzer.lib.base.network.transport import Direction, ADDR_TUPLE

BLOCK_DICT = Dict[str, "MsgBlockList"]
VAR_TYPE = Union[TupleCoord, bytes, str, float, int, Tuple, UUID]

_TEMPLATES_MTIME = os.stat(templates.__file__).st_mtime


def maybe_reload_templates():
    # Templates may be modified at runtime during development, check
    # if they've changed since startup and reload if they have.
    global _TEMPLATES_MTIME
    templates_mtime = os.stat(templates.__file__).st_mtime

    if _TEMPLATES_MTIME is None or _TEMPLATES_MTIME < templates_mtime:
        print("Reloading templates")
        try:
            importlib.reload(templates)  # type: ignore
            _TEMPLATES_MTIME = templates_mtime
        except:
            logging.exception("Failed to reload templates!")

class Block:
    """
    base representation of a block

    Block expects a name, and kwargs for variables (var_name = value)
    """
    __slots__ = ('name', 'size', 'vars', 'message_name', '_ser_cache', 'fill_missing',)

-    def __init__(self, name, /, fill_missing=False, **kwargs):
+    def __init__(self, name, /, *, fill_missing=False, **kwargs):
        self.name = name
        self.size = 0
        self.message_name: Optional[str] = None

@@ -129,24 +152,7 @@ class Block:
                continue
-            # We have a serializer, include the pretty output in the repr,
-            # using the _ suffix so the builder knows it needs to be serialized.
-            deserialized = self.deserialize_var(key)
-            type_name = type(deserialized).__name__
-            # TODO: replace __repr__ for these in a context manager so nested
-            # Enums / Flags get handled correctly as well. The point of the
-            # pretty repr() is to make messages directly paste-able into code.
-            if isinstance(deserialized, enum.IntEnum):
-                deserialized = f"{type_name}.{deserialized.name}"
-            elif isinstance(deserialized, enum.IntFlag):
-                # Make an ORed together version of the flags based on the POD version
-                flags = se.flags_to_pod(type(deserialized), deserialized)
-                flags = " | ".join(
-                    (f"{type_name}.{v}" if isinstance(v, str) else str(v))
-                    for v in flags
-                )
-                deserialized = f"({flags})"
-            else:
-                deserialized = repr(deserialized)
-            block_vars[f"{key}_"] = deserialized
+            block_vars[f"{key}_"] = repr(self.deserialize_var(key))
        else:
            block_vars = self.vars
@@ -176,9 +182,13 @@ class MsgBlockList(List["Block"]):

class Message:
    __slots__ = ("name", "send_flags", "_packet_id", "acks", "body_boundaries", "queued",
-                 "offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized")
+                 "offset", "raw_extra", "raw_body", "deserializer", "_blocks", "finalized",
+                 "direction", "meta", "injected", "dropped", "sender")

-    def __init__(self, name, *args, packet_id=None, flags=0, acks=None):
+    def __init__(self, name, *args, packet_id=None, flags=0, acks=None, direction=None):
+        # TODO: Do this on a timer or something.
+        maybe_reload_templates()
        self.name = name
        self.send_flags = flags
        self._packet_id: Optional[int] = packet_id  # aka, sequence number

@@ -187,6 +197,7 @@ class Message:
        self.body_boundaries = (-1, -1)
        self.offset = 0
        self.raw_extra = b""
+        self.direction: Direction = direction if direction is not None else Direction.OUT
        # For lazy deserialization
        self.raw_body = None
        self.deserializer = None

@@ -196,6 +207,10 @@ class Message:
        # Whether message is owned by the queue or should be sent immediately
        self.queued: bool = False
        self._blocks: BLOCK_DICT = {}
+        self.meta = {}
+        self.injected = False
+        self.dropped = False
+        self.sender: Optional[ADDR_TUPLE] = None

        self.add_blocks(args)
@@ -309,6 +324,9 @@ class Message:
        return msg

    def invalidate_caches(self):
        # Don't have any caches if we haven't even parsed
        if self.raw_body:
            return
        for blocks in self.blocks.values():
            for block in blocks:
                block.invalidate_caches()

@@ -331,7 +349,7 @@ class Message:
        block_reprs = sep.join(x.repr(pretty=pretty) for x in itertools.chain(*self.blocks.values()))
        if block_reprs:
            block_reprs = sep + block_reprs
-        return f"{self.name!r}{block_reprs}"
+        return f"{self.name!r}{block_reprs}, direction=Direction.{self.direction.name}"

    def repr(self, pretty=False):
        self.ensure_parsed()

@@ -349,6 +367,18 @@ class Message:
        message_copy.packet_id = None
        return message_copy

+    def to_summary(self):
+        string = ""
+        for block_name, block_list in self.blocks.items():
+            for block in block_list:
+                for var_name, val in block.items():
+                    if block.name == "AgentData" and var_name in ("AgentID", "SessionID"):
+                        continue
+                    if string:
+                        string += ", "
+                    string += f"{var_name}={_trunc_repr(val, 10)}"
+        return string

    def __repr__(self):
        return self.repr()

@@ -356,3 +386,16 @@ class Message:
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.to_dict() == other.to_dict()


def _trunc_repr(val, max_len):
    if isinstance(val, (uuid.UUID, TupleCoord)):
        val = str(val)
    repr_val = repr(val)
    if isinstance(val, str):
        repr_val = repr_val[1:-1]
    if isinstance(val, bytes):
        repr_val = repr_val[2:-1]
    if len(repr_val) > max_len:
        return repr_val[:max_len] + "…"
    return repr_val
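For illustration, quote characters are stripped and anything past max_len is cut with an ellipsis:

```python
assert _trunc_repr("abcdef", 4) == "abcd…"
assert _trunc_repr(12345, 10) == "12345"
```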
@@ -1,53 +1,19 @@
import ast
import base64
-import importlib
import logging
import math
-import os
import re
import uuid
from typing import *

-import hippolyzer.lib.base.datatypes
-from hippolyzer.lib.base.datatypes import *
-import hippolyzer.lib.base.serialization as se
-from hippolyzer.lib.base import llsd
-from hippolyzer.lib.base.helpers import HippoPrettyPrinter
-from hippolyzer.lib.base.message.message import Message, Block, PacketFlags
-import hippolyzer.lib.proxy.templates as templates
-from hippolyzer.lib.base.message.msgtypes import MsgBlockType
-from hippolyzer.lib.base.message.template import MessageTemplate
-from hippolyzer.lib.proxy.packets import Direction
-
-_TEMPLATES_MTIME = os.stat(templates.__file__).st_mtime
-
-
-def _maybe_reload_templates():
-    # Templates may be modified at runtime during development, check
-    # if they've changed since startup and reload if they have.
-    global _TEMPLATES_MTIME
-    templates_mtime = os.stat(templates.__file__).st_mtime
-
-    if _TEMPLATES_MTIME is None or _TEMPLATES_MTIME < templates_mtime:
-        print("Reloading templates")
-        try:
-            importlib.reload(templates)  # type: ignore
-            _TEMPLATES_MTIME = templates_mtime
-        except:
-            logging.exception("Failed to reload templates!")
-
-
-def _trunc_repr(val, max_len):
-    if isinstance(val, (uuid.UUID, TupleCoord)):
-        val = str(val)
-    repr_val = repr(val)
-    if isinstance(val, str):
-        repr_val = repr_val[1:-1]
-    if isinstance(val, bytes):
-        repr_val = repr_val[2:-1]
-    if len(repr_val) > max_len:
-        return repr_val[:max_len] + "…"
-    return repr_val
+from .. import datatypes
+from .. import llsd
+from .. import serialization as se
+from ..helpers import HippoPrettyPrinter
+from ..network.transport import Direction
+from .msgtypes import PacketFlags, MsgBlockType
+from .template import MessageTemplate
+from .message import Message, Block, maybe_reload_templates


class VerbatimHumanVal(str):

@@ -58,135 +24,31 @@ def _filtered_exports(mod):
    return {k: getattr(mod, k) for k in mod.__all__}


-def proxy_eval(eval_str: str, globals_=None, locals_=None):
+def subfield_eval(eval_str: str, globals_=None, locals_=None):
    return eval(
        eval_str,
        {
            "llsd": llsd,
            "base64": base64,
            "math": math,
-            **_filtered_exports(hippolyzer.lib.base.datatypes),
+            **_filtered_exports(datatypes),
            **(globals_ or {})},
        locals_
    )

-class ProxiedMessage(Message):
-    __slots__ = ("meta", "injected", "dropped", "direction")
+TextSpan = Tuple[int, int]
+SpanDict = Dict[Tuple[Union[str, int], ...], TextSpan]

-    def __init__(self, *args, direction=None, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.direction = direction if direction is not None else Direction.OUT
-        self.meta = {}
-        self.injected = False
-        self.dropped = False
-        _maybe_reload_templates()
-
-    def to_human_string(self, replacements=None, beautify=False,
-                        template: Optional[MessageTemplate] = None):
-        replacements = replacements or {}
-        _maybe_reload_templates()
-        string = ""
-        if self.direction is not None:
-            string += f'{self.direction.name} '
-        string += self.name
-        if self.packet_id is not None:
-            string += f'\n# {self.packet_id}: {PacketFlags(self.send_flags)!r}'
-            string += f'{", DROPPED" if self.dropped else ""}{", INJECTED" if self.injected else ""}'
-        if self.extra:
-            string += f'\n# EXTRA: {self.extra!r}'
-        string += '\n\n'
+class SpannedString(str):
+    spans: SpanDict = {}

-        for block_name, block_list in self.blocks.items():
-            block_suffix = ""
-            if template and template.get_block(block_name).block_type == MsgBlockType.MBT_VARIABLE:
-                block_suffix = ' # Variable'
-            for block in block_list:
-                string += f"[{block_name}]{block_suffix}\n"
-                for var_name, val in block.items():
-                    string += self._format_var(block, var_name, val, replacements, beautify)
-        return string
-
-    def _format_var(self, block, var_name, var_val, replacements, beautify=False):
-        string = ""
-        # Check if we have a more human-readable way to present this field
-        ser_key = (self.name, block.name, var_name)
-        serializer = se.SUBFIELD_SERIALIZERS.get(ser_key)
-        field_prefix = ""
-        if isinstance(var_val, VerbatimHumanVal):
-            var_data = var_val
-        elif isinstance(var_val, (uuid.UUID, TupleCoord)):
-            var_data = str(var_val)
-        elif isinstance(var_val, (str, bytes)) and not serializer:
-            var_data = self._multi_line_pformat(var_val)
-        else:
-            var_data = repr(var_val)
-        if serializer and beautify and not isinstance(var_val, VerbatimHumanVal):
-            try:
-                pretty_data = serializer.deserialize(block, var_val, pod=True)
-                if pretty_data is not se.UNSERIALIZABLE:
-                    string += f" {var_name} =| {self._multi_line_pformat(pretty_data)}"
-                    if serializer.AS_HEX and isinstance(var_val, int):
-                        var_data = hex(var_val)
-                    if serializer.ORIG_INLINE:
-                        string += f" #{var_data}\n"
-                        return string
-                    else:
-                        string += "\n"
-                    # Human-readable version should be used, orig data is commented out
-                    field_prefix = "#"
-            except:
-                logging.exception(f"Failed in subfield serializer {ser_key!r}")
-        if beautify:
-            if block.name == "AgentData":
-                if var_name == "AgentID" and var_val == replacements.get("AGENT_ID"):
-                    var_data = "[[AGENT_ID]]"
-                elif var_name == "SessionID" and var_val == replacements.get("SESSION_ID"):
-                    var_data = "[[SESSION_ID]]"
-            if "CircuitCode" in var_name or ("Code" in var_name and "Circuit" in block.name):
-                if var_val == replacements.get("CIRCUIT_CODE"):
-                    var_data = "[[CIRCUIT_CODE]]"
-        string += f" {field_prefix}{var_name} = {var_data}\n"
-        return string
-
-    @staticmethod
-    def _multi_line_pformat(val):
-        printer = HippoPrettyPrinter(width=100)
-        val = printer.pformat(val)
-        newstr = ""
-        # Now we need to rebuild this to add in the appropriate
-        # line continuations.
-        lines = list(val.splitlines())
-        first_line = True
-        while lines:
-            line = lines.pop(0)
-            prefix = ""
-            suffix = ""
-            if first_line:
-                first_line = False
-            else:
-                prefix = "    "
-            if lines:
-                suffix = " \\\n"
-            newstr += f"{prefix}{line}{suffix}"
-        return newstr
-
-    def to_summary(self):
-        string = ""
-        for block_name, block_list in self.blocks.items():
-            for block in block_list:
-                for var_name, val in block.items():
-                    if block.name == "AgentData" and var_name in ("AgentID", "SessionID"):
-                        continue
-                    if string:
-                        string += ", "
-                    string += f"{var_name}={_trunc_repr(val, 10)}"
-        return string

class HumanMessageSerializer:
    @classmethod
    def from_human_string(cls, string, replacements=None, env=None, safe=True):
-        _maybe_reload_templates()
+        maybe_reload_templates()
        replacements = replacements or {}
        env = env or {}
        first_line = True

@@ -201,7 +63,7 @@ class ProxiedMessage(Message):
        if first_line:
            direction, message_name = line.split(" ", 1)
-            msg = ProxiedMessage(message_name)
+            msg = Message(message_name)
            msg.direction = Direction[direction.upper()]
            first_line = False
            continue

@@ -240,14 +102,14 @@ class ProxiedMessage(Message):
            var_val = tuple(float(x) for x in var_val.split(","))
        # UUID-ish
        elif re.match(r"\A\w+-\w+-.*", var_val):
-            var_val = UUID(var_val)
+            var_val = datatypes.UUID(var_val)
        else:
            var_val = ast.literal_eval(var_val)

        # Normally gross, but necessary for expressiveness in built messages
        # unless a metalanguage is added.
        if evaled:
-            var_val = proxy_eval(
+            var_val = subfield_eval(
                var_val,
                globals_={**env, **replacements},
                locals_={"block": cur_block}

@@ -265,6 +127,102 @@ class ProxiedMessage(Message):
        cur_block[var_name] = var_val
        return msg

-    def _args_repr(self, pretty=False):
-        base = super()._args_repr(pretty=pretty)
-        return f"{base}, direction=Direction.{self.direction.name}"

    @classmethod
    def to_human_string(cls, msg: Message, replacements=None, beautify=False,
                        template: Optional[MessageTemplate] = None) -> SpannedString:
        replacements = replacements or {}
        maybe_reload_templates()
        spans: SpanDict = {}
        string = ""
        if msg.direction is not None:
            string += f'{msg.direction.name} '
        string += msg.name
        if msg.packet_id is not None:
            string += f'\n# {msg.packet_id}: {PacketFlags(msg.send_flags)!r}'
            string += f'{", DROPPED" if msg.dropped else ""}{", INJECTED" if msg.injected else ""}'
        if msg.extra:
            string += f'\n# EXTRA: {msg.extra!r}'
        string += '\n\n'

        for block_name, block_list in msg.blocks.items():
            block_suffix = ""
            if template and template.get_block(block_name).block_type == MsgBlockType.MBT_VARIABLE:
                block_suffix = ' # Variable'
            for block_num, block in enumerate(block_list):
                string += f"[{block_name}]{block_suffix}\n"
                for var_name, val in block.items():
                    start_len = len(string)
                    string += cls._format_var(msg, block, var_name, val, replacements, beautify)
                    end_len = len(string)
                    # Store the spans for each var so we can highlight specific matches
                    spans[(msg.name, block_name, block_num, var_name)] = (start_len, end_len)
                    string += "\n"
        spanned = SpannedString(string)
        spanned.spans = spans
        return spanned

    @classmethod
    def _format_var(cls, msg, block, var_name, var_val, replacements, beautify=False):
        string = ""
        # Check if we have a more human-readable way to present this field
        ser_key = (msg.name, block.name, var_name)
        serializer = se.SUBFIELD_SERIALIZERS.get(ser_key)
        field_prefix = ""
        if isinstance(var_val, VerbatimHumanVal):
            var_data = var_val
        elif isinstance(var_val, (uuid.UUID, datatypes.TupleCoord)):
            var_data = str(var_val)
        elif isinstance(var_val, (str, bytes)) and not serializer:
            var_data = cls._multi_line_pformat(var_val)
        else:
            var_data = repr(var_val)
        if serializer and beautify and not isinstance(var_val, VerbatimHumanVal):
            try:
                pretty_data = serializer.deserialize(block, var_val, pod=True)
                if pretty_data is not se.UNSERIALIZABLE:
                    string += f" {var_name} =| {cls._multi_line_pformat(pretty_data)}"
                    if serializer.AS_HEX and isinstance(var_val, int):
                        var_data = hex(var_val)
                    if serializer.ORIG_INLINE:
                        string += f" #{var_data}"
                        return string
                    else:
                        string += "\n"
                    # Human-readable version should be used, orig data is commented out
                    field_prefix = "#"
            except:
                logging.exception(f"Failed in subfield serializer {ser_key!r}")
        if beautify:
            if block.name == "AgentData":
                if var_name == "AgentID" and var_val == replacements.get("AGENT_ID"):
                    var_data = "[[AGENT_ID]]"
                elif var_name == "SessionID" and var_val == replacements.get("SESSION_ID"):
                    var_data = "[[SESSION_ID]]"
            if "CircuitCode" in var_name or ("Code" in var_name and "Circuit" in block.name):
                if var_val == replacements.get("CIRCUIT_CODE"):
                    var_data = "[[CIRCUIT_CODE]]"
        string += f" {field_prefix}{var_name} = {var_data}"
        return string

    @staticmethod
    def _multi_line_pformat(val):
        printer = HippoPrettyPrinter(width=100)
        val = printer.pformat(val)
        newstr = ""
        # Now we need to rebuild this to add in the appropriate
        # line continuations.
        lines = list(val.splitlines())
        first_line = True
        while lines:
            line = lines.pop(0)
            prefix = ""
            suffix = ""
            if first_line:
                first_line = False
            else:
                prefix = "    "
            if lines:
                suffix = " \\\n"
            newstr += f"{prefix}{line}{suffix}"
        return newstr
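End to end, the serializer round-trips between Message objects and the editable text form. A sketch (the message and block names are merely illustrative, and assume the template machinery can resolve them):

```python
msg = Message('ChatFromViewer',
              Block('ChatData', Channel=0, Type=1),
              direction=Direction.OUT)
text = HumanMessageSerializer.to_human_string(msg)
msg2 = HumanMessageSerializer.from_human_string(text)
assert msg2.name == 'ChatFromViewer' and msg2.direction == Direction.OUT
```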
@@ -28,36 +28,36 @@ from hippolyzer.lib.base.events import Event

LOG = logging.getLogger(__name__)
_T = TypeVar("_T")
+_K = TypeVar("_K", bound=Hashable)
MESSAGE_HANDLER = Callable[[_T], Any]
PREDICATE = Callable[[_T], bool]
-MESSAGE_NAMES = Union[str, Iterable[str]]
+MESSAGE_NAMES = Iterable[_K]


-class MessageHandler(Generic[_T]):
-    def __init__(self):
-        self.handlers: Dict[str, Event] = {}
+class MessageHandler(Generic[_T, _K]):
+    def __init__(self, take_by_default: bool = True):
+        self.handlers: Dict[_K, Event] = {}
+        self.take_by_default = take_by_default

-    def register(self, message_name: str) -> Event:
+    def register(self, message_name: _K) -> Event:
        LOG.debug('Creating a monitor for %s' % message_name)
        return self.handlers.setdefault(message_name, Event())

-    def subscribe(self, message_name: str, handler: MESSAGE_HANDLER) -> Event:
+    def subscribe(self, message_name: _K, handler: MESSAGE_HANDLER) -> Event:
        notifier = self.register(message_name)
        notifier.subscribe(handler)
        return notifier

    def _subscribe_all(self, message_names: MESSAGE_NAMES, handler: MESSAGE_HANDLER,
                       predicate: Optional[PREDICATE] = None) -> List[Event]:
        if isinstance(message_names, str):
            message_names = (message_names,)
        notifiers = [self.register(name) for name in message_names]
        for n in notifiers:
            n.subscribe(handler, predicate=predicate)
        return notifiers

    @contextlib.contextmanager
-    def subscribe_async(self, message_names: MESSAGE_NAMES, take: bool = True,
-                        predicate: Optional[PREDICATE] = None) -> ContextManager[Callable[[], Awaitable[_T]]]:
+    def subscribe_async(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
+                        take: Optional[bool] = None) -> ContextManager[Callable[[], Awaitable[_T]]]:
        """
        Subscribe to a set of message matching predicate while within a block

@@ -69,6 +69,8 @@ class MessageHandler(Generic[_T]):
        If a subscriber is just an observer that will never drop or modify a message, take=False
        may be used and messages will be sent as usual.
        """
+        if take is None:
+            take = self.take_by_default
        msg_queue = asyncio.Queue()

        def _handler_wrapper(message: _T):

@@ -79,14 +81,20 @@ class MessageHandler(Generic[_T]):

        notifiers = self._subscribe_all(message_names, _handler_wrapper, predicate=predicate)

+        async def _get_wrapper():
+            msg = await msg_queue.get()
+            # Consumption is completion
+            msg_queue.task_done()
+            return msg

        try:
-            yield msg_queue.get
+            yield _get_wrapper
        finally:
            for n in notifiers:
                n.unsubscribe(_handler_wrapper)

-    def wait_for(self, message_names: MESSAGE_NAMES,
-                 predicate: Optional[PREDICATE] = None, timeout=None, take=True) -> Awaitable[_T]:
+    def wait_for(self, message_names: MESSAGE_NAMES, predicate: Optional[PREDICATE] = None,
+                 timeout: Optional[float] = None, take: Optional[bool] = None) -> Awaitable[_T]:
        """
        Wait for a single instance of one of message_names matching predicate

@@ -95,8 +103,8 @@ class MessageHandler(Generic[_T]):
        sequence of packets, since multiple packets may come in after the future has already
        been marked completed, causing some to be missed.
        """
-        if isinstance(message_names, str):
-            message_names = (message_names,)
+        if take is None:
+            take = self.take_by_default
        notifiers = [self.register(name) for name in message_names]

        fut = asyncio.get_event_loop().create_future()

@@ -126,7 +134,7 @@ class MessageHandler(Generic[_T]):
        notifier.subscribe(_handler, predicate=predicate)
        return fut

-    def is_handled(self, message_name: str):
+    def is_handled(self, message_name: _K):
        return message_name in self.handlers

    def handle(self, message: _T):

@@ -134,7 +142,7 @@ class MessageHandler(Generic[_T]):
        # Always try to call wildcard handlers
        self._handle_type('*', message)

-    def _handle_type(self, name: str, message: _T):
+    def _handle_type(self, name: _K, message: _T):
        handler = self.handlers.get(name)
        if not handler:
            return
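A sketch of the synchronous subscription path, assuming handle() dispatches on the message's name before the wildcard fallback shown above, and that Event invokes its subscribers with the message:

```python
# Illustration only; keyed by message name, as the proxy uses it.
handler: MessageHandler[Message, str] = MessageHandler()
seen = []
handler.subscribe("ChatFromSimulator", lambda m: seen.append(m.name))
handler.handle(Message("ChatFromSimulator"))
assert seen == ["ChatFromSimulator"]
```

The take_by_default flag only affects the async paths (subscribe_async / wait_for), where a taken message is owned by the awaiting consumer rather than being forwarded as usual.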
@@ -56,7 +56,8 @@ class MessageTemplateVariable:
            self._probably_text = False
        else:
            self._probably_text = any(x in self.name for x in (
-                "Name", "Text", "Title", "Description", "Message", "Label", "Method"))
+                "Name", "Text", "Title", "Description", "Message", "Label", "Method", "Filename",
+            ))
            self._probably_text = self._probably_text and self.name != "NameValue"
        return self._probably_text

@@ -66,7 +67,7 @@ class MessageTemplateBlock:
        self.variables: typing.List[MessageTemplateVariable] = []
        self.variable_map: typing.Dict[str, MessageTemplateVariable] = {}
        self.name = name
-        self.block_type = 0
+        self.block_type: MsgBlockType = MsgBlockType.MBT_SINGLE
        self.number = 0

    def add_variable(self, var):

@@ -64,10 +64,9 @@ def _parse_msg_num(reader: se.BufferReader):

class UDPMessageDeserializer:
    DEFAULT_TEMPLATE = TemplateDictionary()

-    def __init__(self, settings=None, message_cls: Type[Message] = Message):
+    def __init__(self, settings=None):
        self.settings = settings or Settings()
        self.template_dict = self.DEFAULT_TEMPLATE
-        self.message_cls = message_cls

    def deserialize(self, msg_buff: bytes):
        msg = self._parse_message_header(msg_buff)

@@ -85,7 +84,7 @@ class UDPMessageDeserializer:
        reader = se.BufferReader("!", data)

-        msg: Message = self.message_cls("Placeholder")
+        msg: Message = Message("Placeholder")
        msg.send_flags = reader.read(se.U8)
        msg.packet_id = reader.read(se.U32)

@@ -19,6 +19,3 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
172	hippolyzer/lib/base/network/caps_client.py	Normal file
@@ -0,0 +1,172 @@
from __future__ import annotations

import asyncio
import copy
import dataclasses
from types import TracebackType
from typing import *

import aiohttp
import multidict

from hippolyzer.lib.base import llsd as llsd_lib


class CapsClientResponse(aiohttp.ClientResponse):
    """
    Not actually instantiated, used for lying to the type system
    since we'll dynamically put this onto a ClientResponse instance

    Will fail isinstance().
    """
    async def read_llsd(self) -> Any:
        raise NotImplementedError()


class _HippoSessionRequestContextManager:
    """
    _SessionRequestContextManager but with a symmetrical API

    aiohttp.request() and aiohttp.ClientSession.request() have different APIs.
    One is sync returning a context manager, one is async returning a coro.
    aiohttp.request() also doesn't accept the arguments that we need for custom
    SSL contexts. To deal with requests that have existing sessions and those without,
    just give them both the same wrapper and don't close the session on context manager
    exit if it wasn't our session.
    """
    __slots__ = ("_coro", "_resp", "_session", "_session_owned")

    def __init__(
            self,
            coro: Coroutine[asyncio.Future[Any], None, aiohttp.ClientResponse],
            session: aiohttp.ClientSession,
            session_owned: bool = True,
    ) -> None:
        self._coro = coro
        self._resp: Optional[aiohttp.ClientResponse] = None
        self._session = session
        self._session_owned = session_owned

    async def __aenter__(self) -> CapsClientResponse:
        try:
            self._resp = await self._coro

            # We don't control creation of the ClientResponse, so tack on
            # a convenience method for reading LLSD.
            async def _read_llsd():
                return llsd_lib.parse_xml(await self._resp.read())
            self._resp.read_llsd = _read_llsd
        except BaseException:
            if self._session_owned:
                await self._session.close()
            raise
        else:
            # intentionally fooling the type system
            return self._resp  # type: ignore

    async def __aexit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc: Optional[BaseException],
            tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        if self._session_owned:
            await self._session.close()


CAPS_DICT = Union[
    Mapping[str, str],
    multidict.MultiDict[str],
]


class CapsClient:
    def __init__(self, caps: Optional[CAPS_DICT] = None):
        self._caps = caps

    def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
        return cap_or_url, headers, proxy, ssl

    def _get_caps(self) -> Optional[CAPS_DICT]:
        return self._caps

    def request(self, method: str, cap_or_url: str, *, path: str = "", data: Any = None,
                headers: Optional[Dict] = None, session: Optional[aiohttp.ClientSession] = None,
                llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
                proxy: Optional[str] = None, skip_auto_headers: Optional[Sequence[str]] = None,
                **kwargs) -> _HippoSessionRequestContextManager:
        if cap_or_url.startswith("http"):
            if path:
                raise ValueError("Specifying both path and a full URL not supported")
        else:
            caps = self._get_caps()
            if caps is None:
                raise RuntimeError(f"Need a caps dict to request a Cap like {cap_or_url}")
            if cap_or_url not in caps:
                raise KeyError(f"{cap_or_url} is not a full URL and not a Cap")
            cap_or_url = caps[cap_or_url]
            if path:
                cap_or_url += path

        if params is not None:
            for pname, pval in params.items():
                if not isinstance(pval, str):
                    params[pname] = str(pval)

        session_owned = False
        # Use an existing session if we have one to take advantage of connection pooling
        # otherwise create one
        if session is None:
            session_owned = True
            session = aiohttp.ClientSession(
                connector=aiohttp.TCPConnector(force_close=True),
                connector_owner=True
            )

        if headers is None:
            headers = {}
        else:
            headers = copy.copy(headers)

        # Use sentinel val so explicit `None` can be passed
        if llsd is not dataclasses.MISSING:
            data = llsd_lib.format_xml(llsd)
            # Sometimes needed even on GETs.
            if "Content-Type" not in headers:
                headers["Content-Type"] = "application/llsd+xml"
        # Always present, usually ignored by the server.
        if "Accept" not in headers:
            headers["Accept"] = "application/llsd+xml"
        # Ask to keep the connection open if we're sharing a session
        if not session_owned:
            headers["Connection"] = "keep-alive"
            headers["Keep-alive"] = "300"

        ssl = kwargs.pop('ssl', None)
        cap_or_url, headers, proxy, ssl = self._request_fixups(cap_or_url, headers, proxy, ssl)

        resp = session._request(method, cap_or_url, data=data, headers=headers,  # noqa: need internal call
                                params=params, ssl=ssl, proxy=proxy,
                                skip_auto_headers=skip_auto_headers or ("User-Agent",), **kwargs)
        return _HippoSessionRequestContextManager(resp, session, session_owned=session_owned)

    def get(self, cap_or_url: str, *, path: str = "", headers: Optional[dict] = None,
            session: Optional[aiohttp.ClientSession] = None, params: Optional[Dict[str, Any]] = None,
            proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("GET", cap_or_url=cap_or_url, path=path, headers=headers,
                            session=session, params=params, proxy=proxy, **kwargs)

    def post(self, cap_or_url: str, *, path: str = "", data: Any = None,
             headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
             llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
             proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("POST", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
                            llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)

    def put(self, cap_or_url: str, *, path: str = "", data: Any = None,
            headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
            llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
            proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
        return self.request("PUT", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
                            llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)
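A sketch of the intended call pattern (the cap name and URL are invented); read_llsd() is the convenience method patched onto the response in __aenter__ above:

```python
async def fetch_seed_info():
    client = CapsClient(caps={"Seed": "https://example.invalid/cap/1234"})
    async with client.get("Seed", path="/info") as resp:
        resp.raise_for_status()
        return await resp.read_llsd()
```

Because no session was passed, the client creates and owns a throwaway ClientSession and closes it on context-manager exit.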
72	hippolyzer/lib/base/network/transport.py	Normal file
@@ -0,0 +1,72 @@
import abc
import asyncio
import enum
import socket
from typing import *


ADDR_TUPLE = Tuple[str, int]


class Direction(enum.Enum):
    OUT = enum.auto()
    IN = enum.auto()

    def __invert__(self):
        if self == self.OUT:
            return self.IN
        return self.OUT


class UDPPacket:
    def __init__(
            self,
            src_addr: Optional[ADDR_TUPLE],
            dst_addr: ADDR_TUPLE,
            data: bytes,
            direction: Direction
    ):
        self.src_addr = src_addr
        self.dst_addr = dst_addr
        self.data = data
        self.direction = direction

    @property
    def outgoing(self):
        return self.direction == Direction.OUT

    @property
    def incoming(self):
        return self.direction == Direction.IN

    @property
    def far_addr(self):
        if self.outgoing:
            return self.dst_addr
        return self.src_addr


class AbstractUDPTransport(abc.ABC):
    __slots__ = ()

    @abc.abstractmethod
    def send_packet(self, packet: UDPPacket) -> None:
        pass

    @abc.abstractmethod
    def close(self) -> None:
        pass


class SocketUDPTransport(AbstractUDPTransport):
    def __init__(self, transport: Union[asyncio.DatagramTransport, socket.socket]):
        super().__init__()
        self.transport = transport

    def send_packet(self, packet: UDPPacket) -> None:
        if not packet.outgoing:
            raise ValueError(f"{self.__class__.__name__} can only send outbound packets")
        self.transport.sendto(packet.data, packet.dst_addr)

    def close(self) -> None:
        self.transport.close()
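Since both asyncio.DatagramTransport and plain sockets expose sendto(), either can back a SocketUDPTransport. A sketch of wiring one to an asyncio datagram endpoint (addresses are placeholders):

```python
async def open_transport() -> SocketUDPTransport:
    loop = asyncio.get_event_loop()
    transport, _protocol = await loop.create_datagram_endpoint(
        asyncio.DatagramProtocol, local_addr=("0.0.0.0", 0))
    return SocketUDPTransport(transport)

packet = UDPPacket(("10.0.0.1", 9000), ("10.0.0.2", 9001), b"ping", Direction.OUT)
assert packet.far_addr == ("10.0.0.2", 9001)
assert ~Direction.OUT == Direction.IN  # __invert__ flips the direction
```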
@@ -18,208 +18,130 @@ You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from __future__ import annotations

import dataclasses
import logging
import struct
from typing import *

import lazy_object_proxy
import recordclass

-from hippolyzer.lib.base.datatypes import Vector3, Quaternion, Vector4
+from hippolyzer.lib.base.datatypes import Vector3, Quaternion, Vector4, UUID, TaggedUnion
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.namevalue import NameValueCollection
import hippolyzer.lib.base.serialization as se
import hippolyzer.lib.base.templates as tmpls


-class Object:
-    """ represents an Object
-
-    Initialize the Object class instance
-    >>> obj = Object()
-    """
+class Object(recordclass.datatuple):  # type: ignore
+    __options__ = {
+        "use_weakref": True,
+    }
+    __weakref__: Any

    LocalID: Optional[int] = None
    State: Optional[int] = None
    FullID: Optional[UUID] = None
    CRC: Optional[int] = None
    PCode: Optional[tmpls.PCode] = None
    Material: Optional[tmpls.MCode] = None
    ClickAction: Optional[int] = None
    Scale: Optional[Vector3] = None
    ParentID: Optional[int] = None
    # Actually contains a weakref proxy
    Parent: Optional[Object] = None
    UpdateFlags: Optional[int] = None
    PathCurve: Optional[int] = None
    ProfileCurve: Optional[int] = None
    PathBegin: Optional[int] = None
    PathEnd: Optional[int] = None
    PathScaleX: Optional[int] = None
    PathScaleY: Optional[int] = None
    PathShearX: Optional[int] = None
    PathShearY: Optional[int] = None
    PathTwist: Optional[int] = None
    PathTwistBegin: Optional[int] = None
    PathRadiusOffset: Optional[int] = None
    PathTaperX: Optional[int] = None
    PathTaperY: Optional[int] = None
    PathRevolutions: Optional[int] = None
    PathSkew: Optional[int] = None
    ProfileBegin: Optional[int] = None
    ProfileEnd: Optional[int] = None
    ProfileHollow: Optional[int] = None
    TextureEntry: Optional[tmpls.TextureEntry] = None
    TextureAnim: Optional[tmpls.TextureAnim] = None
    NameValue: Optional[Any] = None
    Data: Optional[Any] = None
    Text: Optional[str] = None
    TextColor: Optional[bytes] = None
    MediaURL: Optional[str] = None
    PSBlock: Optional[Dict] = None
    ExtraParams: Optional[Dict[tmpls.ExtraParamType, Any]] = None
    Sound: Optional[UUID] = None
    OwnerID: Optional[UUID] = None
    SoundGain: Optional[float] = None
    SoundFlags: Optional[int] = None
    SoundRadius: Optional[float] = None
    JointType: Optional[int] = None
    JointPivot: Optional[int] = None
    JointAxisOrAnchor: Optional[int] = None
    TreeSpecies: Optional[int] = None
    ScratchPad: Optional[bytes] = None
    ObjectCosts: Optional[Dict] = None
    ChildIDs: Optional[List[int]] = None
    # Same as parent, contains weakref proxies.
    Children: Optional[List[Object]] = None
-    __slots__ = (
-        "LocalID", "State", "FullID", "CRC", "PCode", "Material", "ClickAction", "Scale",
-        "ParentID", "UpdateFlags", "PathCurve", "ProfileCurve", "PathBegin", "PathEnd",
-        "PathScaleX", "PathScaleY", "PathShearX", "PathShearY", "PathTwist", "PathTwistBegin",
-        "PathRadiusOffset", "PathTaperX", "PathTaperY", "PathRevolutions", "PathSkew",
-        "ProfileBegin", "ProfileEnd", "ProfileHollow", "TextureEntry", "TextureAnim",
-        "NameValue", "Data", "Text", "TextColor", "MediaURL", "PSBlock", "ExtraParams",
-        "Sound", "OwnerID", "SoundGain", "SoundFlags", "SoundRadius", "JointType",
-        "JointPivot", "JointAxisOrAnchor", "TreeSpecies", "ObjectCosts", "FootCollisionPlane",
-        "Position", "Velocity", "Acceleration", "Rotation", "AngularVelocity", "CreatorID",
-        "GroupID", "CreationDate", "BaseMask", "OwnerMask", "GroupMask", "EveryoneMask",
-        "NextOwnerMask", "OwnershipCost", "SaleType", "SalePrice", "AggregatePerms",
-        "AggregatePermTextures", "AggregatePermTexturesOwner", "Category", "InventorySerial",
-        "ItemID", "FolderID", "FromTaskID", "LastOwnerID", "Name", "Description", "TouchName",
-        "SitName", "TextureID", "ChildIDs", "Children", "Parent", "ScratchPad", "__weakref__",
-    )
    FootCollisionPlane: Optional[Vector4] = None
    Position: Optional[Vector3] = None
    Velocity: Optional[Vector3] = None
    Acceleration: Optional[Vector3] = None
    Rotation: Optional[Quaternion] = None
    AngularVelocity: Optional[Vector3] = None

-    def __init__(self, *, ID=None, LocalID=None, State=None, FullID=None, CRC=None, PCode=None, Material=None,
-                 ClickAction=None, Scale=None, ParentID=None, UpdateFlags=None, PathCurve=None, ProfileCurve=None,
-                 PathBegin=None, PathEnd=None, PathScaleX=None, PathScaleY=None, PathShearX=None, PathShearY=None,
-                 PathTwist=None, PathTwistBegin=None, PathRadiusOffset=None, PathTaperX=None, PathTaperY=None,
-                 PathRevolutions=None, PathSkew=None, ProfileBegin=None, ProfileEnd=None, ProfileHollow=None,
-                 TextureEntry=None, TextureAnim=None, NameValue=None, Data=None, Text=None, TextColor=None,
-                 MediaURL=None, PSBlock=None, ExtraParams=None, Sound=None, OwnerID=None, SoundGain=None,
-                 SoundFlags=None, SoundRadius=None, JointType=None, JointPivot=None, JointAxisOrAnchor=None,
-                 FootCollisionPlane=None, Position=None, Velocity=None, Acceleration=None, Rotation=None,
-                 AngularVelocity=None, TreeSpecies=None, ObjectCosts=None, ScratchPad=None):
    # from ObjectProperties
    CreatorID: Optional[UUID] = None
    GroupID: Optional[UUID] = None
    CreationDate: Optional[int] = None
    BaseMask: Optional[int] = None
    OwnerMask: Optional[int] = None
    GroupMask: Optional[int] = None
    EveryoneMask: Optional[int] = None
    NextOwnerMask: Optional[int] = None
    OwnershipCost: Optional[int] = None
    # TaxRate
    SaleType: Optional[int] = None
    SalePrice: Optional[int] = None
    AggregatePerms: Optional[int] = None
    AggregatePermTextures: Optional[int] = None
    AggregatePermTexturesOwner: Optional[int] = None
    Category: Optional[int] = None
    InventorySerial: Optional[int] = None
    ItemID: Optional[UUID] = None
    FolderID: Optional[UUID] = None
    FromTaskID: Optional[UUID] = None
    LastOwnerID: Optional[UUID] = None
    Name: Optional[str] = None
    Description: Optional[str] = None
    TouchName: Optional[str] = None
    SitName: Optional[str] = None
    TextureID: Optional[List[UUID]] = None
    RegionHandle: Optional[int] = None

    def __init__(self, **_kwargs):
        """ set up the object attributes """
-        self.LocalID = LocalID or ID  # U32
-        self.State = State  # U8
-        self.FullID = FullID  # LLUUID
-        self.CRC = CRC  # U32 // TEMPORARY HACK FOR JAMES
-        self.PCode = PCode  # U8
-        self.Material = Material  # U8
-        self.ClickAction = ClickAction  # U8
-        self.Scale = Scale  # LLVector3
-        self.ParentID = ParentID  # U32
-        # Actually contains a weakref proxy
-        self.Parent: Optional[Object] = None
-        self.UpdateFlags = UpdateFlags  # U32 // U32, see object_flags.h
-        self.PathCurve = PathCurve  # U8
-        self.ProfileCurve = ProfileCurve  # U8
-        self.PathBegin = PathBegin  # U16 // 0 to 1, quanta = 0.01
-        self.PathEnd = PathEnd  # U16 // 0 to 1, quanta = 0.01
-        self.PathScaleX = PathScaleX  # U8 // 0 to 1, quanta = 0.01
-        self.PathScaleY = PathScaleY  # U8 // 0 to 1, quanta = 0.01
-        self.PathShearX = PathShearX  # U8 // -.5 to .5, quanta = 0.01
-        self.PathShearY = PathShearY  # U8 // -.5 to .5, quanta = 0.01
-        self.PathTwist = PathTwist  # S8 // -1 to 1, quanta = 0.01
-        self.PathTwistBegin = PathTwistBegin  # S8 // -1 to 1, quanta = 0.01
-        self.PathRadiusOffset = PathRadiusOffset  # S8 // -1 to 1, quanta = 0.01
-        self.PathTaperX = PathTaperX  # S8 // -1 to 1, quanta = 0.01
-        self.PathTaperY = PathTaperY  # S8 // -1 to 1, quanta = 0.01
-        self.PathRevolutions = PathRevolutions  # U8 // 0 to 3, quanta = 0.015
-        self.PathSkew = PathSkew  # S8 // -1 to 1, quanta = 0.01
-        self.ProfileBegin = ProfileBegin  # U16 // 0 to 1, quanta = 0.01
-        self.ProfileEnd = ProfileEnd  # U16 // 0 to 1, quanta = 0.01
-        self.ProfileHollow = ProfileHollow  # U16 // 0 to 1, quanta = 0.01
-        self.TextureEntry = TextureEntry  # Variable 2
-        self.TextureAnim = TextureAnim  # Variable 1
-        self.NameValue = NameValue  # Variable 2
-        self.Data = Data  # Variable 2
-        self.Text = Text  # Variable 1 // llSetText() hovering text
-        self.TextColor = TextColor  # Fixed 4 // actually, a LLColor4U
-        self.MediaURL = MediaURL  # Variable 1 // URL for web page, movie, etc.
-        self.PSBlock = PSBlock  # Variable 1
-        self.ExtraParams = ExtraParams or {}  # Variable 1
-        self.Sound = Sound  # LLUUID
-        self.OwnerID = OwnerID  # LLUUID // HACK object's owner id, only set if non-null sound, for muting
-        self.SoundGain = SoundGain  # F32
-        self.SoundFlags = SoundFlags  # U8
-        self.SoundRadius = SoundRadius  # F32 // cutoff radius
-        self.JointType = JointType  # U8
-        self.JointPivot = JointPivot  # LLVector3
-        self.JointAxisOrAnchor = JointAxisOrAnchor  # LLVector3
-        self.TreeSpecies = TreeSpecies
-        self.ScratchPad = ScratchPad
-        self.ObjectCosts = ObjectCosts or {}
+        self.ExtraParams = self.ExtraParams or {}  # Variable 1
+        self.ObjectCosts = self.ObjectCosts or {}
        self.ChildIDs = []
        # Same as parent, contains weakref proxies.
        self.Children: List[Object] = []

-        # from ObjectUpdateCompressed
-        self.FootCollisionPlane: Optional[Vector4] = FootCollisionPlane
-        self.Position: Optional[Vector3] = Position
-        self.Velocity: Optional[Vector3] = Velocity
-        self.Acceleration: Optional[Vector3] = Acceleration
-        self.Rotation: Optional[Quaternion] = Rotation
-        self.AngularVelocity: Optional[Vector3] = AngularVelocity
-
-        # from ObjectProperties
-        self.CreatorID = None
-        self.GroupID = None
-        self.CreationDate = None
-        self.BaseMask = None
-        self.OwnerMask = None
-        self.GroupMask = None
-        self.EveryoneMask = None
-        self.NextOwnerMask = None
-        self.OwnershipCost = None
-        # TaxRate
-        self.SaleType = None
-        self.SalePrice = None
-        self.AggregatePerms = None
-        self.AggregatePermTextures = None
-        self.AggregatePermTexturesOwner = None
-        self.Category = None
-        self.InventorySerial = None
-        self.ItemID = None
-        self.FolderID = None
-        self.FromTaskID = None
-        self.LastOwnerID = None
-        self.Name = None
-        self.Description = None
-        self.TouchName = None
-        self.SitName = None
-        self.TextureID = None
+    @property
+    def GlobalPosition(self) -> Vector3:
+        return handle_to_global_pos(self.RegionHandle) + self.RegionPosition

    @property
    def RegionPosition(self) -> Vector3:

@@ -243,23 +165,280 @@ class Object:
        # TODO: Cache this and dirty cache if ancestor updates rot?
        return self.Rotation * self.Parent.RegionRotation

    @property
    def AncestorsKnown(self) -> bool:
        obj = self
        while obj.ParentID:
            if not obj.Parent:
                return False
            obj = obj.Parent
        return True

    def update_properties(self, properties: Dict[str, Any]) -> Set[str]:
        """ takes a dictionary of attribute:value and makes it so """
        updated_properties = set()
        for key, val in properties.items():
            if hasattr(self, key):
-                old_val = getattr(self, key, val)
+                old_val = getattr(self, key, dataclasses.MISSING)
                # Don't check equality if we're using a lazy proxy,
                # parsing is deferred until we actually use it.
-                is_proxy = isinstance(val, lazy_object_proxy.Proxy)
-                if is_proxy or old_val != val:
+                if any(isinstance(x, lazy_object_proxy.Proxy) for x in (old_val, val)):
+                    # TODO: be smarter about this. Can we store the raw bytes and
+                    # compare those if it's an unparsed object?
+                    is_updated = old_val is not val
+                else:
+                    is_updated = old_val != val
+                if is_updated:
                    updated_properties.add(key)
|
||||
setattr(self, key, val)
|
||||
return updated_properties
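
Note on the lazy-proxy special case above: `lazy_object_proxy.Proxy` defers calling its factory until the wrapped value is actually used, so an `==` comparison would force the deferred parse while an identity check stays lazy. A minimal standalone sketch of that behavior (not hippolyzer code):

    import lazy_object_proxy

    def expensive_parse():
        print("parsing...")
        return {"a": 1}

    val = lazy_object_proxy.Proxy(expensive_parse)
    print(val is val)       # True; identity check, factory still not called
    print(val == {"a": 1})  # forces the parse, printing "parsing...", then True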

    def to_dict(self):
        val = recordclass.asdict(self)
        del val["Children"]
        del val["Parent"]
        return val


def handle_to_gridxy(handle: int) -> Tuple[int, int]:
    return (handle >> 32) // 256, (handle & 0xFFffFFff) // 256


def gridxy_to_handle(x: int, y: int):
    return ((x * 256) << 32) | (y * 256)


def handle_to_global_pos(handle: int) -> Vector3:
    return Vector3(handle >> 32, handle & 0xFFffFFff)
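
These helpers all exploit the same packing: a region handle is a 64-bit int with the region's global X position in meters (always a multiple of 256) in the high 32 bits and global Y in the low 32 bits. A quick worked example in plain Python:

    # A region at grid (1000, 2000) sits at global meters (256000, 512000)
    handle = ((1000 * 256) << 32) | (2000 * 256)
    assert handle >> 32 == 256000           # global X in meters
    assert handle & 0xFFFFFFFF == 512000    # global Y in meters
    assert ((handle >> 32) // 256, (handle & 0xFFFFFFFF) // 256) == (1000, 2000)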


def normalize_object_update(block: Block, handle: int):
    object_data = {
        "RegionHandle": handle,
        "FootCollisionPlane": None,
        "SoundFlags": block["Flags"],
        "SoundGain": block["Gain"],
        "SoundRadius": block["Radius"],
        **dict(block.items()),
        "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
        "NameValue": block.deserialize_var("NameValue", make_copy=False),
        "TextureAnim": block.deserialize_var("TextureAnim", make_copy=False),
        "ExtraParams": block.deserialize_var("ExtraParams", make_copy=False) or {},
        "PSBlock": block.deserialize_var("PSBlock", make_copy=False).value,
        "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
        "State": block.deserialize_var("State", make_copy=False),
        **block.deserialize_var("ObjectData", make_copy=False).value,
    }
    object_data["LocalID"] = object_data.pop("ID")
    # Empty == not updated
    if not object_data["TextureEntry"]:
        object_data.pop("TextureEntry")
    # OwnerID is only set in this packet if a sound is playing. Don't allow
    # ObjectUpdates to clobber _real_ OwnerIDs we had from ObjectProperties
    # with a null UUID.
    if object_data["OwnerID"] == UUID():
        del object_data["OwnerID"]
    del object_data["Flags"]
    del object_data["Gain"]
    del object_data["Radius"]
    del object_data["ObjectData"]
    return object_data
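
The key order in the dict literal above is load-bearing: keys listed after `**dict(block.items())` override the raw block values, and the final `**ObjectData` splat wins over everything before it. A plain-Python illustration of that override rule:

    raw = {"Flags": 1, "State": 7}
    merged = {
        "SoundFlags": raw["Flags"],  # derived key, listed before the splat
        **raw,                       # raw block values come in here
        "State": 99,                 # later key replaces raw["State"]
    }
    assert merged == {"SoundFlags": 1, "Flags": 1, "State": 99}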


def normalize_terse_object_update(block: Block, handle: int):
    object_data = {
        **block.deserialize_var("Data", make_copy=False),
        **dict(block.items()),
        "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
        "RegionHandle": handle,
    }
    object_data["LocalID"] = object_data.pop("ID")
    object_data.pop("Data")
    # Empty == not updated
    if object_data["TextureEntry"] is None:
        object_data.pop("TextureEntry")
    return object_data


def normalize_object_update_compressed_data(data: bytes):
    # Shared by ObjectUpdateCompressed and VOCache case
    compressed = FastObjectUpdateCompressedDataDeserializer.read(data)
    # TODO: ObjectUpdateCompressed doesn't provide a default value for unused
    #  fields, whereas ObjectUpdate and friends do (TextColor, etc.)
    #  need some way to normalize ObjectUpdates so they won't appear to have
    #  changed just because an ObjectUpdate got sent with a default value
    # Only used for determining which sections are present
    del compressed["Flags"]

    ps_block = compressed.pop("PSBlockNew", None)
    if ps_block is None:
        ps_block = compressed.pop("PSBlock", None)
    if ps_block is None:
        ps_block = TaggedUnion(0, None)
    compressed.pop("PSBlock", None)
    if compressed["NameValue"] is None:
        compressed["NameValue"] = NameValueCollection()

    object_data = {
        "PSBlock": ps_block.value,
        # Parent flag not set means explicitly un-parented
        "ParentID": compressed.pop("ParentID", None) or 0,
        "LocalID": compressed.pop("ID"),
        **compressed,
    }
    if object_data["TextureEntry"] is None:
        object_data.pop("TextureEntry")
    # Don't clobber OwnerID in case the object has a proper one.
    if object_data["OwnerID"] == UUID():
        del object_data["OwnerID"]
    return object_data


def normalize_object_update_compressed(block: Block, handle: int):
    compressed = normalize_object_update_compressed_data(block["Data"])
    compressed["UpdateFlags"] = block.deserialize_var("UpdateFlags", make_copy=False)
    compressed["RegionHandle"] = handle
    return compressed


class SimpleStructReader(se.BufferReader):
    def read_struct(self, spec: struct.Struct, peek=False) -> Tuple[Any, ...]:
        val = spec.unpack_from(self._buffer, self._pos)
        if not peek:
            self._pos += spec.size
        return val

    def read_bytes_null_term(self) -> bytes:
        old_offset = self._pos
        while self._buffer[self._pos] != 0:
            self._pos += 1
        val = self._buffer[old_offset:self._pos]
        self._pos += 1
        return val
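
`SimpleStructReader` is just a cursor over a byte buffer: `read_struct()` advances by `spec.size` unless peeking, and `read_bytes_null_term()` scans to the next NUL and consumes it. A hedged usage sketch (assuming the `("<", data)` constructor arguments seen in `read()` below):

    import struct

    spec = struct.Struct("<HI")  # little-endian u16 + u32
    reader = SimpleStructReader("<", struct.pack("<HI", 5, 7) + b"hi\x00")
    assert reader.read_struct(spec, peek=True) == (5, 7)  # cursor unmoved
    assert reader.read_struct(spec) == (5, 7)             # cursor now past the ints
    assert reader.read_bytes_null_term() == b"hi"         # terminator consumed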


class FastObjectUpdateCompressedDataDeserializer:
    HEADER_STRUCT = struct.Struct("<16sIBBIBB3f3f3fI16s")
    ANGULAR_VELOCITY_STRUCT = struct.Struct("<3f")
    PARENT_ID_STRUCT = struct.Struct("<I")
    TREE_SPECIES_STRUCT = struct.Struct("<B")
    DATAPACKER_LEN = struct.Struct("<I")
    COLOR_ADAPTER = tmpls.Color4()
    PARTICLES_OLD = se.TypedBytesFixed(86, tmpls.PSBLOCK_TEMPLATE)
    SOUND_STRUCT = struct.Struct("<16sfBf")
    PRIM_PARAMS_STRUCT = struct.Struct("<BBHHBBBBbbbbbBbHHH")
    ATTACHMENT_STATE_ADAPTER = tmpls.AttachmentStateAdapter(None)

    @classmethod
    def read(cls, data: bytes) -> Dict:
        reader = SimpleStructReader("<", data)
        foo = reader.read_struct(cls.HEADER_STRUCT)
        full_id, local_id, pcode, state, crc, material, click_action, \
            scalex, scaley, scalez, posx, posy, posz, rotx, roty, rotz, \
            flags, owner_id = foo
        scale = Vector3(scalex, scaley, scalez)
        full_id = UUID(bytes=full_id)
        pcode = tmpls.PCode(pcode)
        if pcode == tmpls.PCode.AVATAR:
            state = tmpls.AgentState(state)
        elif pcode == tmpls.PCode.PRIMITIVE:
            state = cls.ATTACHMENT_STATE_ADAPTER.decode(state, None)
        pos = Vector3(posx, posy, posz)
        rot = Quaternion(rotx, roty, rotz)
        owner_id = UUID(bytes=owner_id)
        ang_vel = None
        if flags & tmpls.CompressedFlags.ANGULAR_VELOCITY.value:
            ang_vel = Vector3(*reader.read_struct(cls.ANGULAR_VELOCITY_STRUCT))
        parent_id = None
        if flags & tmpls.CompressedFlags.PARENT_ID.value:
            parent_id = reader.read_struct(cls.PARENT_ID_STRUCT)[0]
        tree_species = None
        if flags & tmpls.CompressedFlags.TREE.value:
            tree_species = reader.read_struct(cls.TREE_SPECIES_STRUCT)[0]
        scratchpad = None
        if flags & tmpls.CompressedFlags.SCRATCHPAD.value:
            scratchpad = reader.read_bytes(reader.read_struct(cls.DATAPACKER_LEN)[0])
        text = None
        text_color = None
        if flags & tmpls.CompressedFlags.TEXT.value:
            text = reader.read_bytes_null_term().decode("utf8")
            text_color = cls.COLOR_ADAPTER.decode(reader.read_bytes(4), ctx=None)
        media_url = None
        if flags & tmpls.CompressedFlags.MEDIA_URL.value:
            media_url = reader.read_bytes_null_term().decode("utf8")
        psblock = None
        if flags & tmpls.CompressedFlags.PARTICLES.value:
            psblock = reader.read(cls.PARTICLES_OLD)
        extra_params = reader.read(tmpls.EXTRA_PARAM_COLLECTION)
        sound, sound_gain, sound_flags, sound_radius = None, None, None, None
        if flags & tmpls.CompressedFlags.SOUND.value:
            sound, sound_gain, sound_flags, sound_radius = reader.read_struct(cls.SOUND_STRUCT)
            sound = UUID(bytes=sound)
            sound_flags = tmpls.SoundFlags(sound_flags)
        name_value = None
        if flags & tmpls.CompressedFlags.NAME_VALUES.value:
            name_value = reader.read(tmpls.NAMEVALUES_TERMINATED_TEMPLATE)
        path_curve, profile_curve, path_begin, path_end, path_scale_x, path_scale_y, \
            path_shear_x, path_shear_y, path_twist, path_twist_begin, path_radius_offset, \
            path_taper_x, path_taper_y, path_revolutions, path_skew, profile_begin, \
            profile_end, profile_hollow = reader.read_struct(cls.PRIM_PARAMS_STRUCT)
        texture_entry = reader.read(tmpls.DATA_PACKER_TE_TEMPLATE)
        texture_anim = None
        if flags & tmpls.CompressedFlags.TEXTURE_ANIM.value:
            texture_anim = reader.read(se.TypedByteArray(se.U32, tmpls.TA_TEMPLATE))
        psblock_new = None
        if flags & tmpls.CompressedFlags.PARTICLES_NEW.value:
            psblock_new = reader.read(tmpls.PSBLOCK_TEMPLATE)

        if len(reader):
            logging.warning(f"{len(reader)} bytes left at end of buffer for compressed {data!r}")

        return {
            "FullID": full_id,
            "ID": local_id,
            "PCode": pcode,
            "State": state,
            "CRC": crc,
            "Material": material,
            "ClickAction": click_action,
            "Scale": scale,
            "Position": pos,
            "Rotation": rot,
            "Flags": flags,
            "OwnerID": owner_id,
            "AngularVelocity": ang_vel,
            "ParentID": parent_id,
            "TreeSpecies": tree_species,
            "ScratchPad": scratchpad,
            "Text": text,
            "TextColor": text_color,
            "MediaURL": media_url,
            "PSBlock": psblock,
            "ExtraParams": extra_params,
            "Sound": sound,
            "SoundGain": sound_gain,
            "SoundFlags": sound_flags,
            "SoundRadius": sound_radius,
            "NameValue": name_value,
            "PathCurve": path_curve,
            "ProfileCurve": profile_curve,
            "PathBegin": path_begin,  # 0 to 1, quanta = 0.01
            "PathEnd": path_end,  # 0 to 1, quanta = 0.01
            "PathScaleX": path_scale_x,  # 0 to 1, quanta = 0.01
            "PathScaleY": path_scale_y,  # 0 to 1, quanta = 0.01
            "PathShearX": path_shear_x,  # -.5 to .5, quanta = 0.01
            "PathShearY": path_shear_y,  # -.5 to .5, quanta = 0.01
            "PathTwist": path_twist,  # -1 to 1, quanta = 0.01
            "PathTwistBegin": path_twist_begin,  # -1 to 1, quanta = 0.01
            "PathRadiusOffset": path_radius_offset,  # -1 to 1, quanta = 0.01
            "PathTaperX": path_taper_x,  # -1 to 1, quanta = 0.01
            "PathTaperY": path_taper_y,  # -1 to 1, quanta = 0.01
            "PathRevolutions": path_revolutions,  # 0 to 3, quanta = 0.015
            "PathSkew": path_skew,  # -1 to 1, quanta = 0.01
            "ProfileBegin": profile_begin,  # 0 to 1, quanta = 0.01
            "ProfileEnd": profile_end,  # 0 to 1, quanta = 0.01
            "ProfileHollow": profile_hollow,  # 0 to 1, quanta = 0.01
            "TextureEntry": texture_entry,
            "TextureAnim": texture_anim,
            "PSBlockNew": psblock_new,
        }
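
For reference, the fixed header consumed at the top of `read()` is 84 bytes; the format string can be sanity-checked against the unpacked field names:

    import struct

    fmt = "<16sIBBIBB3f3f3fI16s"
    # 16s FullID, I LocalID(ID), B PCode, B State, I CRC, B Material,
    # B ClickAction, 3f Scale, 3f Position, 3f Rotation(x, y, z), I Flags,
    # 16s OwnerID
    assert struct.Struct(fmt).size == 16 + 4 + 1 + 1 + 4 + 1 + 1 + 12 + 12 + 12 + 4 + 16 == 84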

@@ -5,7 +5,6 @@ import enum
import math
import struct
import types
import typing
import weakref
from io import SEEK_CUR, SEEK_SET, SEEK_END, RawIOBase, BufferedIOBase
from typing import *
@@ -891,7 +890,23 @@ class TupleCoord(SerializableBase):
        return cls.COORD_CLS


class QuantizedTupleCoord(TupleCoord):
class EncodedTupleCoord(TupleCoord, abc.ABC):
    _elem_specs: Sequence[SERIALIZABLE_TYPE]

    def serialize(self, vals, writer: BufferWriter, ctx):
        vals = self._vals_to_tuple(vals)
        for spec, val in zip(self._elem_specs, vals):
            writer.write(spec, val, ctx=ctx)

    def deserialize(self, reader: Reader, ctx):
        vals = (reader.read(spec, ctx=ctx) for spec in self._elem_specs)
        val = self.COORD_CLS(*vals)
        if self.need_pod(reader):
            return tuple(val)
        return val


class QuantizedTupleCoord(EncodedTupleCoord):
    def __init__(self, lower=None, upper=None, component_scales=None):
        super().__init__()
        if component_scales:
@@ -907,17 +922,14 @@ class QuantizedTupleCoord(TupleCoord):
        )
        assert len(self._elem_specs) == self.NUM_ELEMS

    def serialize(self, vals, writer: BufferWriter, ctx):
        vals = self._vals_to_tuple(vals)
        for spec, val in zip(self._elem_specs, vals):
            writer.write(spec, val, ctx=ctx)

    def deserialize(self, reader: Reader, ctx):
        vals = (reader.read(spec, ctx=ctx) for spec in self._elem_specs)
        val = self.COORD_CLS(*vals)
        if self.need_pod(reader):
            return tuple(val)
        return val
class FixedPointTupleCoord(EncodedTupleCoord):
    def __init__(self, int_bits: int, frac_bits: int, signed: bool):
        super().__init__()
        self._elem_specs = tuple(
            FixedPoint(self.ELEM_SPEC, int_bits, frac_bits, signed)
            for _ in range(self.NUM_ELEMS)
        )
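
`FixedPoint` itself isn't shown in this diff, so the exact scheme is an assumption, but the usual fixed-point encoding stores a float as an integer shifted left by `frac_bits`, biasing signed values by `2 ** int_bits` so the wire integer stays non-negative. A hedged sketch of that assumed scheme:

    def fixed_point_encode(val: float, int_bits: int, frac_bits: int, signed: bool) -> int:
        # Assumed scheme: bias signed values, then shift out the fraction
        if signed:
            val += 1 << int_bits
        return int(round(val * (1 << frac_bits)))

    def fixed_point_decode(raw: int, int_bits: int, frac_bits: int, signed: bool) -> float:
        val = raw / (1 << frac_bits)
        if signed:
            val -= 1 << int_bits
        return val

    assert fixed_point_decode(fixed_point_encode(-3.25, 8, 8, True), 8, 8, True) == -3.25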


class Vector3(TupleCoord):
@@ -993,6 +1005,12 @@ class Vector4U8(QuantizedTupleCoord):
    COORD_CLS = dtypes.Vector4


class FixedPointVector3U16(FixedPointTupleCoord):
    ELEM_SPEC = U16
    NUM_ELEMS = 3
    COORD_CLS = dtypes.Vector3


class OptionalPrefixed(SerializableBase):
    """Field prefixed by a U8 indicating whether or not it's present"""
    OPTIONAL = True
@@ -1092,15 +1110,6 @@ class IntEnum(Adapter):
        return lambda: self.enum_cls(0)


def flags_to_pod(flag_cls: Type[enum.IntFlag], val: int) -> typing.Tuple[Union[str, int], ...]:
    # Shove any bits not represented in the IntFlag into an int
    left_over = val
    for flag in iter(flag_cls):
        left_over &= ~flag.value
    extra = (int(left_over),) if left_over else ()
    return tuple(flag.name for flag in iter(flag_cls) if val & flag.value) + extra
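
For example, with a hypothetical two-member flag class, any bits the enum doesn't know about come back as a trailing int:

    import enum

    class DemoFlags(enum.IntFlag):
        LOOP = 1
        SYNC = 2

    assert flags_to_pod(DemoFlags, 0b0111) == ("LOOP", "SYNC", 4)  # bit 4 is unknown
    assert flags_to_pod(DemoFlags, 2) == ("SYNC",)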


class IntFlag(Adapter):
    def __init__(self, flag_cls: Type[enum.IntFlag],
                 flag_spec: Optional[SerializablePrimitive] = None):
@@ -1121,7 +1130,7 @@ class IntFlag(Adapter):

    def decode(self, val: Any, ctx: Optional[ParseContext], pod: bool = False) -> Any:
        if pod:
            return flags_to_pod(self.flag_cls, val)
            return dtypes.flags_to_pod(self.flag_cls, val)
        return self.flag_cls(val)

    def default_value(self) -> Any:
@@ -1501,6 +1510,9 @@ class DataclassAdapter(Adapter):
        self._data_cls = data_cls

    def encode(self, val: Any, ctx: Optional[ParseContext]) -> Any:
        if isinstance(val, lazy_object_proxy.Proxy):
            # Have to unwrap these or the dataclass check will fail
            val = val.__wrapped__
        if dataclasses.is_dataclass(val):
            val = dataclasses.asdict(val)
        return val
@@ -1613,7 +1625,7 @@ class BufferedLLSDBinaryParser(llsd.HippoLLSDBinaryParser):
            byte = self._getc()[0]
        except IndexError:
            byte = None
        raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index+offset, byte))
        raise llsd.LLSDParseError("%s at byte %d: %s" % (message, self._index + offset, byte))

    def _getc(self, num=1):
        return self._buffer.read_bytes(num)
@@ -1641,8 +1653,14 @@ def subfield_serializer(msg_name, block_name, var_name):
    return f


_ENUM_TYPE = TypeVar("_ENUM_TYPE", bound=Type[dtypes.IntEnum])
_FLAG_TYPE = TypeVar("_FLAG_TYPE", bound=Type[dtypes.IntFlag])


def enum_field_serializer(msg_name, block_name, var_name):
    def f(orig_cls):
    def f(orig_cls: _ENUM_TYPE) -> _ENUM_TYPE:
        if not issubclass(orig_cls, dtypes.IntEnum):
            raise ValueError(f"{orig_cls} must be a subclass of Hippolyzer's IntEnum class")
        wrapper = subfield_serializer(msg_name, block_name, var_name)
        wrapper(IntEnumSubfieldSerializer(orig_cls))
        return orig_cls
@@ -1650,7 +1668,9 @@ def enum_field_serializer(msg_name, block_name, var_name):


def flag_field_serializer(msg_name, block_name, var_name):
    def f(orig_cls):
    def f(orig_cls: _FLAG_TYPE) -> _FLAG_TYPE:
        if not issubclass(orig_cls, dtypes.IntFlag):
            raise ValueError(f"{orig_cls!r} must be a subclass of Hippolyzer's IntFlag class")
        wrapper = subfield_serializer(msg_name, block_name, var_name)
        wrapper(IntFlagSubfieldSerializer(orig_cls))
        return orig_cls
@@ -1703,7 +1723,7 @@ class BaseSubfieldSerializer(abc.ABC):
        """Guess at which template a val might correspond to"""
        if dataclasses.is_dataclass(val):
            val = dataclasses.asdict(val)  # noqa
        if isinstance(val, bytes):
        if isinstance(val, (bytes, bytearray)):
            template_checker = cls._template_sizes_match
        elif isinstance(val, dict):
            template_checker = cls._template_keys_match

@@ -19,81 +19,48 @@ along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""

from __future__ import annotations

import dataclasses
from typing import *


_T = TypeVar("_T")


class SettingDescriptor(Generic[_T]):
    __slots__ = ("name", "default")

    def __init__(self, default: Union[Callable[[], _T], _T]):
        self.default = default
        self.name: Optional[str] = None

    def __set_name__(self, owner: Settings, name: str):
        self.name = name

    def _make_default(self) -> _T:
        if callable(self.default):
            return self.default()
        return self.default

    def __get__(self, obj: Settings, owner: Optional[Type] = None) -> _T:
        val: Union[_T, dataclasses.MISSING] = obj.get_setting(self.name)
        if val is dataclasses.MISSING:
            val = self._make_default()
        return val

    def __set__(self, obj: Settings, value: _T) -> None:
        obj.set_setting(self.name, value)
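
`SettingDescriptor` is a standard data descriptor: `__set_name__` captures the attribute name at class-creation time, reads fall back to the default (called first if it's callable, so mutable defaults stay fresh per read), and writes are routed through the owner's `set_setting()`. A standalone illustration with a hypothetical owner class (relies on the module's `dataclasses` import):

    class DemoSettings:
        DEBUG: bool = SettingDescriptor(False)
        SPAMMERS: list = SettingDescriptor(list)  # callable default, new list per read

        def __init__(self):
            self._settings = {}

        def get_setting(self, name):
            return self._settings.get(name, dataclasses.MISSING)

        def set_setting(self, name, val):
            self._settings[name] = val

    s = DemoSettings()
    assert s.DEBUG is False             # nothing stored, default used
    s.DEBUG = True                      # goes through set_setting()
    assert s._settings == {"DEBUG": True}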


class Settings:
    def __init__(self, quiet_logging=False, spammy_logging=False, log_tests=True):
        """ some lovely configurable settings
    ENABLE_DEFERRED_PACKET_PARSING: bool = SettingDescriptor(True)

        These are applied application wide, and can be
        overridden at any time in a specific instance

        quiet_logging overrides spammy_logging
        """
    def __init__(self):
        self._settings: Dict[str, Any] = {}

        self.quiet_logging = quiet_logging
        self.spammy_logging = spammy_logging
    def get_setting(self, name: str) -> Any:
        return self._settings.get(name, dataclasses.MISSING)

        # toggle handling udp packets
        self.HANDLE_PACKETS = True
        self.HANDLE_OUTGOING_PACKETS = False

        # toggle parsing all/handled packets
        self.ENABLE_DEFERRED_PACKET_PARSING = True

        # ~~~~~~~~~~~~~~~~~~
        # Logging behaviors
        # ~~~~~~~~~~~~~~~~~~
        # being a test tool, and an immature one at that,
        # enable fine granularity in the logging, but
        # make sure we can tone it down as well

        self.LOG_VERBOSE = True
        self.ENABLE_BYTES_TO_HEX_LOGGING = False
        self.ENABLE_CAPS_LOGGING = True
        self.ENABLE_CAPS_LLSD_LOGGING = False
        self.ENABLE_EQ_LOGGING = True
        self.ENABLE_UDP_LOGGING = True
        self.ENABLE_OBJECT_LOGGING = True
        self.LOG_SKIPPED_PACKETS = True
        self.ENABLE_HOST_LOGGING = True
        self.LOG_COROUTINE_SPAWNS = True
        self.PROXY_LOGGING = False

        # allow disabling logging of certain packets
        self.DISABLE_SPAMMERS = True
        self.UDP_SPAMMERS = ['PacketAck', 'AgentUpdate']

        # toggle handling a region's event queue
        self.ENABLE_REGION_EVENT_QUEUE = True

        # how many seconds to wait between polling
        # a region's event queue
        self.REGION_EVENT_QUEUE_POLL_INTERVAL = 1

        if self.spammy_logging:
            self.ENABLE_BYTES_TO_HEX_LOGGING = True
            self.ENABLE_CAPS_LLSD_LOGGING = True
            self.DISABLE_SPAMMERS = False

        # override the defaults
        if self.quiet_logging:
            self.LOG_VERBOSE = False
            self.ENABLE_BYTES_TO_HEX_LOGGING = False
            self.ENABLE_CAPS_LOGGING = False
            self.ENABLE_CAPS_LLSD_LOGGING = False
            self.ENABLE_EQ_LOGGING = False
            self.ENABLE_UDP_LOGGING = False
            self.LOG_SKIPPED_PACKETS = False
            self.ENABLE_OBJECT_LOGGING = False
            self.ENABLE_HOST_LOGGING = False
            self.LOG_COROUTINE_SPAWNS = False
            self.DISABLE_SPAMMERS = True

        # ~~~~~~~~~~~~~~~~~~~~~~
        # Test related settings
        # ~~~~~~~~~~~~~~~~~~~~~~

        if log_tests:
            self.ENABLE_LOGGING_IN_TESTS = True
        else:
            self.ENABLE_LOGGING_IN_TESTS = False
    def set_setting(self, name: str, val: Any):
        self._settings[name] = val
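
After this rewrite, a settings override is just an attribute write, and anything never written falls back to its descriptor default:

    settings = Settings()
    assert settings.ENABLE_DEFERRED_PACKET_PARSING is True  # descriptor default
    settings.ENABLE_DEFERRED_PACKET_PARSING = False         # stored in _settings
    assert settings.get_setting("ENABLE_DEFERRED_PACKET_PARSING") is False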

hippolyzer/lib/base/templates.py (new file, 1652 lines): file diff suppressed because it is too large.
@@ -8,18 +8,15 @@ import dataclasses
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.templates import (
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.templates import (
    TransferRequestParamsBase,
    TransferChannelType,
    TransferSourceType,
    TransferStatus,
)

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion

_TRANSFER_MESSAGES = {"TransferInfo", "TransferPacket", "TransferAbort"}

@@ -49,7 +46,7 @@ class Transfer:
    def cancelled(self) -> bool:
        return self._future.cancelled()

    def is_our_message(self, message: ProxiedMessage):
    def is_our_message(self, message: Message):
        if "TransferData" in message.blocks:
            transfer_block = message["TransferData"][0]
        else:
@@ -71,8 +68,15 @@ class Transfer:


class TransferManager:
    def __init__(self, region: ProxiedRegion):
        self._region: ProxiedRegion = proxify(region)
    def __init__(
            self,
            connection_holder: ConnectionHolder,
            agent_id: Optional[UUID] = None,
            session_id: Optional[UUID] = None,
    ):
        self._connection_holder = connection_holder
        self._agent_id = agent_id
        self._session_id = session_id

    def request(
            self, *,
@@ -86,11 +90,11 @@ class TransferManager:
        params_dict = dataclasses.asdict(params)
        # Fill in any missing AgentID or SessionID attrs if the params type has them
        if params_dict.get("AgentID", dataclasses.MISSING) is None:
            params.AgentID = self._region.session().agent_id
            params.AgentID = self._agent_id
        if params_dict.get("SessionID", dataclasses.MISSING) is None:
            params.SessionID = self._region.session().id
            params.SessionID = self._session_id

        self._region.circuit.send_message(ProxiedMessage(
        self._connection_holder.circuit.send_message(Message(
            'TransferRequest',
            Block(
                'TransferInfo',
@@ -107,13 +111,13 @@ class TransferManager:

    async def _pump_transfer_replies(self, transfer: Transfer):
        # Subscribe to messages related to our transfer while we're in this block
        with self._region.message_handler.subscribe_async(
            _TRANSFER_MESSAGES,
            predicate=transfer.is_our_message
        with self._connection_holder.message_handler.subscribe_async(
            _TRANSFER_MESSAGES,
            predicate=transfer.is_our_message,
        ) as get_msg:
            while not transfer.done():
                try:
                    msg: ProxiedMessage = await asyncio.wait_for(get_msg(), 5.0)
                    msg: Message = await asyncio.wait_for(get_msg(), 5.0)
                except TimeoutError as e:
                    transfer.set_exception(e)
                    return
@@ -128,18 +132,18 @@ class TransferManager:
                elif msg.name == "TransferAbort":
                    transfer.error_code = msg["TransferID"][0].deserialize_var("Result")
                    transfer.set_exception(
                        ConnectionAbortedError(f"Unknown failure")
                        ConnectionAbortedError("Unknown failure")
                    )

    def _handle_transfer_packet(self, msg: ProxiedMessage, transfer: Transfer):
    def _handle_transfer_packet(self, msg: Message, transfer: Transfer):
        transfer_block = msg["TransferData"][0]
        packet_id: int = transfer_block["Packet"]
        packet_data = transfer_block["Data"]
        transfer.chunks[packet_id] = packet_data
        if transfer_block["Status"] == TransferStatus.DONE:
        if transfer_block["Status"] == TransferStatus.DONE and not transfer.done():
            transfer.mark_done()

    def _handle_transfer_info(self, msg: ProxiedMessage, transfer: Transfer):
    def _handle_transfer_info(self, msg: Message, transfer: Transfer):
        transfer_block = msg["TransferInfo"][0]
        transfer.expected_size = transfer_block["Size"]
        # Don't re-set if we get a resend of packet 0
@@ -1,11 +1,10 @@
import dataclasses
from typing import *

import pkg_resources

import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.proxy.templates import AssetType
from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.templates import AssetType


@dataclasses.dataclass
@@ -21,7 +20,7 @@ class VFSBlock:
class VFS:
    def __init__(self, index_path):
        self._data_fh = None
        self.blocks = []
        self.blocks: List[VFSBlock] = []
        self._uuid_lookup: Dict[UUID, VFSBlock] = {}

        assert "index.db2" in index_path
@@ -45,10 +44,10 @@ class VFS:
            self.blocks.append(block)
            self._uuid_lookup[block.file_id] = block

    def __iter__(self):
    def __iter__(self) -> Iterator[VFSBlock]:
        return iter(self.blocks)

    def __getitem__(self, item: UUID):
    def __getitem__(self, item: UUID) -> VFSBlock:
        return self._uuid_lookup[item]

    def __contains__(self, item: UUID):
@@ -59,10 +58,10 @@ class VFS:
        self._data_fh.close()
        self._data_fh = None

    def read_block(self, block: VFSBlock):
    def read_block(self, block: VFSBlock) -> bytes:
        self._data_fh.seek(block.location)
        return self._data_fh.read(block.size)


_static_path = pkg_resources.resource_filename("hippolyzer.lib.proxy", "data/static_index.db2")
_static_path = get_resource_filename("lib/base/data/static_index.db2")
STATIC_VFS = VFS(_static_path)

hippolyzer/lib/base/wearables.py (new file, 148 lines)
@@ -0,0 +1,148 @@
"""
|
||||
Body parts and linden clothing layers
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import logging
|
||||
from io import StringIO
|
||||
from typing import *
|
||||
|
||||
from xml.etree.ElementTree import parse as parse_etree
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.helpers import get_resource_filename
|
||||
from hippolyzer.lib.base.legacy_inv import InventorySaleInfo, InventoryPermissions
|
||||
from hippolyzer.lib.base.legacy_schema import SchemaBase, parse_schema_line, SchemaParsingError
|
||||
from hippolyzer.lib.base.templates import WearableType
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
_T = TypeVar("_T")
|
||||
|
||||
WEARABLE_VERSION = "LLWearable version 22"
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class VisualParam:
|
||||
id: int
|
||||
name: str
|
||||
value_min: float
|
||||
value_max: float
|
||||
# These might be `None` if the param isn't meant to be directly edited
|
||||
edit_group: Optional[str]
|
||||
wearable: Optional[str]
|
||||
|
||||
|
||||
class VisualParams(List[VisualParam]):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
lad_path = get_resource_filename("lib/base/data/avatar_lad.xml")
|
||||
with open(lad_path, "rb") as f:
|
||||
doc = parse_etree(f)
|
||||
for param in doc.findall(".//param"):
|
||||
self.append(VisualParam(
|
||||
id=int(param.attrib["id"]),
|
||||
name=param.attrib["name"],
|
||||
edit_group=param.get("edit_group"),
|
||||
wearable=param.get("wearable"),
|
||||
value_min=float(param.attrib["value_min"]),
|
||||
value_max=float(param.attrib["value_max"]),
|
||||
))
|
||||
|
||||
def by_name(self, name: str) -> VisualParam:
|
||||
return [x for x in self if x.name == name][0]
|
||||
|
||||
def by_edit_group(self, edit_group: str) -> List[VisualParam]:
|
||||
return [x for x in self if x.edit_group == edit_group]
|
||||
|
||||
def by_wearable(self, wearable: str) -> List[VisualParam]:
|
||||
return [x for x in self if x.wearable == wearable]
|
||||
|
||||
|
||||
VISUAL_PARAMS = VisualParams()
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Wearable(SchemaBase):
|
||||
name: str
|
||||
wearable_type: WearableType
|
||||
permissions: InventoryPermissions
|
||||
sale_info: InventorySaleInfo
|
||||
# VisualParam ID -> val
|
||||
parameters: Dict[int, float]
|
||||
# TextureEntry ID -> texture ID
|
||||
textures: Dict[int, UUID]
|
||||
|
||||
@classmethod
|
||||
def _skip_to_next_populated_line(cls, reader: StringIO):
|
||||
old_pos = reader.tell()
|
||||
while peeked_data := reader.readline():
|
||||
# Read until we find a non-blank line
|
||||
if peeked_data.lstrip("\n"):
|
||||
break
|
||||
old_pos = reader.tell()
|
||||
# Reading an empty string means EOF
|
||||
if not peeked_data:
|
||||
raise SchemaParsingError("Premature EOF")
|
||||
reader.seek(old_pos)
|
||||
|
||||
@classmethod
|
||||
def _read_and_parse_line(cls, reader: StringIO):
|
||||
cls._skip_to_next_populated_line(reader)
|
||||
return parse_schema_line(reader.readline())
|
||||
|
||||
@classmethod
|
||||
def _read_expected_key(cls, reader: StringIO, expected_key: str) -> str:
|
||||
key, val = cls._read_and_parse_line(reader)
|
||||
if key != expected_key:
|
||||
raise ValueError(f"Expected {expected_key} not found, {(key, val)!r}")
|
||||
return val
|
||||
|
||||
@classmethod
|
||||
def from_reader(cls, reader: StringIO) -> Wearable:
|
||||
cls._skip_to_next_populated_line(reader)
|
||||
version_str = reader.readline().rstrip()
|
||||
if version_str != WEARABLE_VERSION:
|
||||
raise ValueError(f"Bad wearable version {version_str!r}")
|
||||
cls._skip_to_next_populated_line(reader)
|
||||
name = reader.readline().rstrip()
|
||||
|
||||
permissions = InventoryPermissions.from_reader(reader, read_header=True)
|
||||
sale_info = InventorySaleInfo.from_reader(reader, read_header=True)
|
||||
|
||||
wearable_type = WearableType(int(cls._read_expected_key(reader, "type")))
|
||||
num_params = int(cls._read_expected_key(reader, "parameters"))
|
||||
params = {}
|
||||
for _ in range(num_params):
|
||||
param_id, param_val = cls._read_and_parse_line(reader)
|
||||
if param_val == ".":
|
||||
param_val = "0.0"
|
||||
params[int(param_id)] = float(param_val)
|
||||
|
||||
num_textures = int(cls._read_expected_key(reader, "textures"))
|
||||
textures = {}
|
||||
for _ in range(num_textures):
|
||||
te_id, texture_id = cls._read_and_parse_line(reader)
|
||||
textures[int(te_id)] = UUID(texture_id)
|
||||
return Wearable(
|
||||
name=name,
|
||||
wearable_type=wearable_type,
|
||||
permissions=permissions,
|
||||
sale_info=sale_info,
|
||||
parameters=params,
|
||||
textures=textures
|
||||
)
|
||||
|
||||
def to_writer(self, writer: StringIO):
|
||||
writer.write(f"{WEARABLE_VERSION}\n")
|
||||
writer.write(f"{self.name}\n\n")
|
||||
self.permissions.to_writer(writer)
|
||||
self.sale_info.to_writer(writer)
|
||||
writer.write(f"type {int(self.wearable_type)}\n")
|
||||
writer.write(f"parameters {len(self.parameters)}\n")
|
||||
for param_id, param_val in self.parameters.items():
|
||||
writer.write(f"{param_id} {param_val}\n")
|
||||
writer.write(f"textures {len(self.textures)}\n")
|
||||
for te_id, texture_id in self.textures.items():
|
||||
writer.write(f"{te_id} {texture_id}\n")
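
`from_reader()` and `to_writer()` round-trip the legacy line-oriented LLWearable text format. A hedged sketch, assuming `blob` already holds the text of an existing wearable asset (constructing `InventoryPermissions`/`InventorySaleInfo` by hand isn't shown in this diff):

    from io import StringIO

    wearable = Wearable.from_reader(StringIO(blob))
    out = StringIO()
    wearable.to_writer(out)
    reparsed = Wearable.from_reader(StringIO(out.getvalue()))
    assert reparsed.parameters == wearable.parameters  # params survive the trip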

hippolyzer/lib/base/xfer_manager.py (new file, 286 lines)
@@ -0,0 +1,286 @@
"""
Managers for inbound and outbound xfer as well as the AssetUploadRequest flow
"""
from __future__ import annotations

import asyncio
import enum
import random
from typing import *

from hippolyzer.lib.base.datatypes import UUID, RawBytes
from hippolyzer.lib.base.message.data_packer import TemplateDataPacker
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.msgtypes import MsgType
from hippolyzer.lib.base.network.transport import Direction
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.templates import XferPacket, XferFilePath, AssetType, XferError

_XFER_MESSAGES = {"AbortXfer", "ConfirmXferPacket", "RequestXfer", "SendXferPacket"}


MAX_CHUNK_SIZE = 1150
ACK_AHEAD_MAX = 10


class Xfer:
    def __init__(
            self,
            xfer_id: Optional[int] = None,
            direction: Direction = Direction.OUT,
            data: Optional[bytes] = None,
            turbo: bool = False,
    ):
        self.xfer_id: Optional[int] = xfer_id
        self.chunks: Dict[int, bytes] = {}
        self.expected_size: Optional[int] = None
        self.size_known = asyncio.Future()
        self.error_code: Union[int, XferError] = 0
        self.next_ackable = 0
        self.turbo = turbo
        self.direction: Direction = direction
        self.expected_chunks: Optional[int] = None
        self._future: asyncio.Future[Xfer] = asyncio.Future()

        if data is not None:
            # Prepend the expected length field to the first chunk
            if not isinstance(data, RawBytes):
                data = TemplateDataPacker.pack(len(data), MsgType.MVT_S32) + data
            chunk_num = 0
            while data:
                self.chunks[chunk_num] = data[:MAX_CHUNK_SIZE]
                data = data[MAX_CHUNK_SIZE:]
                chunk_num += 1

    def reassemble_chunks(self) -> bytes:
        assembled = bytearray()
        for _, data in sorted(self.chunks.items()):
            assembled.extend(data)
        return assembled

    def mark_done(self):
        self._future.set_result(self)

    def done(self) -> bool:
        return self._future.done()

    def cancelled(self) -> bool:
        return self._future.cancelled()

    def is_our_message(self, message):
        return message["XferID"]["ID"] == self.xfer_id

    def cancel(self) -> bool:
        if not self.size_known.done():
            self.size_known.cancel()
        return self._future.cancel()

    def set_exception(self, exc: Union[type, BaseException]) -> None:
        if not self.size_known.done():
            self.size_known.set_exception(exc)
        return self._future.set_exception(exc)

    def __await__(self) -> Generator[Any, None, Xfer]:
        return self._future.__await__()
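
Since `Xfer` is awaitable, typical client code fires off a request and awaits completion before reassembling. A hedged sketch (the function and file name are made up; the kwargs are those of `XferManager.request()` below):

    async def fetch_named_file(xfer_manager: XferManager, name: bytes) -> bytes:
        xfer = xfer_manager.request(file_name=name, delete_on_completion=False)
        completed = await xfer  # resolves once the EOF packet has landed
        return bytes(completed.reassemble_chunks())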


class UploadStrategy(enum.IntEnum):
    XFER = enum.auto()
    ASSET_UPLOAD_REQUEST = enum.auto()


class XferManager:
    def __init__(
            self,
            connection_holder: ConnectionHolder,
            secure_session_id: Optional[UUID] = None,
    ):
        self._connection_holder = connection_holder
        self._secure_session_id = secure_session_id

    def request(
            self, xfer_id: Optional[int] = None,
            file_name: Union[bytes, str, None] = None,
            file_path: Optional[Union[XferFilePath, int]] = None,
            vfile_id: Optional[UUID] = None,
            vfile_type: Optional[Union[AssetType, int]] = None,
            use_big_packets: bool = False,
            delete_on_completion: bool = True,
            turbo: bool = False,
            direction: Direction = Direction.OUT,
    ) -> Xfer:
        xfer_id = xfer_id if xfer_id is not None else random.getrandbits(64)
        self._connection_holder.circuit.send_message(Message(
            'RequestXfer',
            Block(
                'XferID',
                ID=xfer_id,
                Filename=file_name or b'',
                FilePath=file_path or XferFilePath.NONE,
                DeleteOnCompletion=delete_on_completion,
                UseBigPackets=use_big_packets,
                VFileID=vfile_id or UUID(),
                VFileType=vfile_type or AssetType.NONE,
            ),
            direction=direction,
        ))
        xfer = Xfer(xfer_id, direction=direction, turbo=turbo)
        asyncio.create_task(self._pump_xfer_replies(xfer))
        return xfer

    async def _pump_xfer_replies(self, xfer: Xfer):
        with self._connection_holder.message_handler.subscribe_async(
            _XFER_MESSAGES,
            predicate=xfer.is_our_message,
        ) as get_msg:
            while not xfer.done():
                try:
                    msg: Message = await asyncio.wait_for(get_msg(), 5.0)
                except asyncio.exceptions.TimeoutError as e:
                    xfer.set_exception(e)
                    return

                if xfer.cancelled():
                    # AbortXfer doesn't seem to work on in-progress Xfers.
                    # Just let any new packets drop on the floor.
                    return

                if msg.name == "SendXferPacket":
                    self._handle_send_xfer_packet(msg, xfer)
                elif msg.name == "AbortXfer":
                    xfer.error_code = msg["XferID"][0].deserialize_var("Result")
                    xfer.set_exception(
                        ConnectionAbortedError(f"Xfer failed with {xfer.error_code!r}")
                    )

    def _handle_send_xfer_packet(self, msg: Message, xfer: Xfer):
        # Received a SendXfer for an Xfer we sent ourselves
        packet_id: XferPacket = msg["XferID"][0].deserialize_var("Packet")
        packet_data = msg["DataPacket"]["Data"]
        # First 4 bytes are expected total data length
        if packet_id.PacketID == 0:
            # Yes, S32. Only used as a hint so buffers can be pre-allocated,
            # EOF bit determines when the data actually ends.
            xfer.expected_size = TemplateDataPacker.unpack(packet_data[:4], MsgType.MVT_S32)
            # Don't re-set if we get a resend of packet 0
            if not xfer.size_known.done():
                xfer.size_known.set_result(xfer.expected_size)
            packet_data = packet_data[4:]

        to_ack = (packet_id.PacketID,)
        if xfer.turbo:
            # ACK the next few packets we expect to be sent, if we haven't already
            ack_max = packet_id.PacketID + ACK_AHEAD_MAX
            to_ack = range(xfer.next_ackable, ack_max)
            xfer.next_ackable = ack_max
        for ack_id in to_ack:
            self._connection_holder.circuit.send_message(Message(
                "ConfirmXferPacket",
                Block("XferID", ID=xfer.xfer_id, Packet=ack_id),
                direction=xfer.direction,
            ))

        xfer.chunks[packet_id.PacketID] = packet_data
        # We may be waiting on other packets so we can't end immediately.
        if packet_id.IsEOF:
            xfer.expected_chunks = packet_id.PacketID + 1
        if not xfer.done() and len(xfer.chunks) == xfer.expected_chunks:
            xfer.mark_done()

    def upload_asset(
            self,
            asset_type: AssetType,
            data: Union[bytes, str],
            store_local: bool = False,
            temp_file: bool = False,
            transaction_id: Optional[UUID] = None,
            upload_strategy: Optional[UploadStrategy] = None,
    ) -> asyncio.Future[UUID]:
        """Upload an asset through the Xfer upload path"""
        if not transaction_id:
            transaction_id = UUID.random()
        if isinstance(data, str):
            data = data.encode("utf8")

        # Small amounts of data can be sent inline, decide based on size
        if upload_strategy is None:
            if len(data) >= MAX_CHUNK_SIZE:
                upload_strategy = UploadStrategy.XFER
            else:
                upload_strategy = UploadStrategy.ASSET_UPLOAD_REQUEST

        xfer = None
        inline_data = b''
        if upload_strategy == UploadStrategy.XFER:
            xfer = Xfer(data=data)
        else:
            inline_data = data

        self._connection_holder.circuit.send_message(Message(
            "AssetUploadRequest",
            Block(
                "AssetBlock",
                TransactionID=transaction_id,
                Type=asset_type,
                Tempfile=temp_file,
                StoreLocal=store_local,
                AssetData=inline_data,
            )
        ))
        fut = asyncio.Future()
        asyncio.create_task(self._pump_asset_upload(xfer, transaction_id, fut))
        return fut

    async def _pump_asset_upload(self, xfer: Optional[Xfer], transaction_id: UUID, fut: asyncio.Future):
        message_handler = self._connection_holder.message_handler
        # We'll receive an Xfer request for the asset we're uploading.
        # asset ID is determined by hashing secure session ID with chosen transaction ID.
        asset_id: UUID = UUID.combine(transaction_id, self._secure_session_id)
        try:
            # Only need to do this if we're using the xfer upload strategy, otherwise all the
            # data was already sent in the AssetUploadRequest and we don't expect a RequestXfer.
            def request_predicate(request_msg: Message):
                return request_msg["XferID"]["VFileID"] == asset_id
            if xfer is not None:
                await self.serve_inbound_xfer_request(xfer, request_predicate)

            def complete_predicate(complete_msg: Message):
                return complete_msg["AssetBlock"]["UUID"] == asset_id
            msg = await message_handler.wait_for(('AssetUploadComplete',), predicate=complete_predicate)
            if msg["AssetBlock"]["Success"] == 1:
                fut.set_result(asset_id)
            else:
                fut.set_exception(RuntimeError(f"Xfer for transaction {transaction_id} failed"))

        except asyncio.TimeoutError as e:
            fut.set_exception(e)

    async def serve_inbound_xfer_request(
            self,
            xfer: Xfer,
            request_predicate: Callable[[Message], bool],
            wait_for_confirm: bool = True
    ):
        message_handler = self._connection_holder.message_handler
        request_msg = await message_handler.wait_for(
            ('RequestXfer',), predicate=request_predicate, timeout=5.0)
        xfer.xfer_id = request_msg["XferID"]["ID"]

        packet_id = 0
        # TODO: No resend yet. If it's lost, it's lost.
        while xfer.chunks:
            chunk = xfer.chunks.pop(packet_id)
            # EOF if there are no chunks left
            packet_val = XferPacket(PacketID=packet_id, IsEOF=not bool(xfer.chunks))
            self._connection_holder.circuit.send_message(Message(
                "SendXferPacket",
                Block("XferID", ID=xfer.xfer_id, Packet_=packet_val),
                Block("DataPacket", Data=chunk),
                # Send this towards the sender of the RequestXfer
                direction=~request_msg.direction,
            ))
            # Don't care about the value, just want to know it was confirmed.
            if wait_for_confirm:
                await message_handler.wait_for(
                    ("ConfirmXferPacket",), predicate=xfer.is_our_message, timeout=5.0)
            packet_id += 1

hippolyzer/lib/client/namecache.py (new file, 82 lines)
@@ -0,0 +1,82 @@
import dataclasses
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler


@dataclasses.dataclass
class NameCacheEntry:
    full_id: UUID
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    display_name: Optional[str] = None

    def __str__(self):
        if self.display_name:
            return f"{self.display_name} ({self.legacy_name})"
        if self.legacy_name:
            return self.legacy_name
        return f"(???) ({self.full_id})"

    @property
    def legacy_name(self) -> Optional[str]:
        if self.first_name is None:
            return None
        return f"{self.first_name} {self.last_name}"

    @property
    def preferred_name(self) -> Optional[str]:
        if self.display_name:
            return self.display_name
        return self.legacy_name
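
The helpers fall back gracefully as name knowledge improves, e.g.:

    entry = NameCacheEntry(full_id=UUID.random(), first_name="Test", last_name="Resident")
    assert entry.legacy_name == "Test Resident"
    assert entry.preferred_name == "Test Resident"  # no display name yet
    entry.display_name = "Testy"
    assert str(entry) == "Testy (Test Resident)"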


class NameCache:
    def __init__(self):
        self._cache: Dict[UUID, NameCacheEntry] = {}

    def create_subscriptions(
            self,
            message_handler: MessageHandler[Message, str],
    ):
        message_handler.subscribe("UUIDNameReply", self._handle_uuid_name_reply)

    def lookup(self, uuid: UUID, create_if_none: bool = False) -> Optional[NameCacheEntry]:
        val = self._cache.get(uuid)
        if create_if_none and val is None:
            val = NameCacheEntry(full_id=uuid)
            self._cache[uuid] = val
        return val

    def update(self, full_id: UUID, vals: dict):
        # upsert the cache entry
        entry = self._cache.get(full_id) or NameCacheEntry(full_id=full_id)
        if "FirstName" in vals:
            entry.first_name = vals["FirstName"]
        if "LastName" in vals:
            entry.last_name = vals["LastName"]
        if "DisplayName" in vals:
            entry.display_name = vals["DisplayName"] if vals["DisplayName"] else None
        self._cache[full_id] = entry

    def _handle_uuid_name_reply(self, msg: Message):
        for block in msg.blocks["UUIDNameBlock"]:
            self.update(block["ID"], {
                "FirstName": block["FirstName"],
                "LastName": block["LastName"],
            })

    def _process_display_names_response(self, parsed: dict):
        """Handle the response from the GetDisplayNames cap"""
        for agent in parsed["agents"]:
            # Don't set display name if they just have the default
            display_name = None
            if not agent["is_display_name_default"]:
                display_name = agent["display_name"]
            self.update(agent["id"], {
                "FirstName": agent["legacy_first_name"],
                "LastName": agent["legacy_last_name"],
                "DisplayName": display_name,
            })

hippolyzer/lib/client/object_manager.py (new file, 881 lines)
@@ -0,0 +1,881 @@
"""
Manager for a client's view of objects in the region and world.
"""

from __future__ import annotations

import asyncio
import collections
import enum
import itertools
import logging
import math
import weakref
from typing import *

from hippolyzer.lib.base.datatypes import UUID, Vector3
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Block, Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.objects import (
    normalize_object_update,
    normalize_terse_object_update,
    normalize_object_update_compressed_data,
    normalize_object_update_compressed,
    Object, handle_to_global_pos,
)
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.client.namecache import NameCache, NameCacheEntry
from hippolyzer.lib.client.state import BaseClientSession, BaseClientRegion
from hippolyzer.lib.base.templates import PCode, ObjectStateSerializer


LOG = logging.getLogger(__name__)
OBJECT_OR_LOCAL = Union[Object, int]


class UpdateType(enum.IntEnum):
    OBJECT_UPDATE = enum.auto()
    PROPERTIES = enum.auto()
    FAMILY = enum.auto()
    COSTS = enum.auto()
    KILL = enum.auto()


class ClientObjectManager:
    """
    Object manager for a specific region
    """

    __slots__ = ("_region", "_world_objects", "state")

    def __init__(self, region: BaseClientRegion):
        self._region: BaseClientRegion = proxify(region)
        self._world_objects: ClientWorldObjectManager = proxify(region.session().objects)
        self.state: RegionObjectsState = RegionObjectsState()

    def __len__(self):
        return len(self.state.localid_lookup)

    @property
    def all_objects(self) -> Iterable[Object]:
        return self.state.localid_lookup.values()

    @property
    def missing_locals(self) -> Set[int]:
        return self.state.missing_locals

    def clear(self):
        self.state.clear()
        if self._region.handle is not None:
            # We're tracked by the world object manager, tell it to untrack
            # any objects that we owned
            self._world_objects.clear_region_objects(self._region.handle)

    def lookup_localid(self, localid: int) -> Optional[Object]:
        return self.state.lookup_localid(localid)

    def lookup_fullid(self, fullid: UUID) -> Optional[Object]:
        obj = self._world_objects.lookup_fullid(fullid)
        if obj is None or obj.RegionHandle != self._region.handle:
            return None
        return obj

    @property
    def all_avatars(self) -> Iterable[Avatar]:
        return tuple(a for a in self._world_objects.all_avatars
                     if a.RegionHandle == self._region.handle)

    def lookup_avatar(self, fullid: UUID) -> Optional[Avatar]:
        for avatar in self.all_avatars:
            if avatar.FullID == fullid:
                return avatar
        return None

    # noinspection PyUnusedLocal
    def _is_localid_selected(self, local_id: int):
        return False

    def request_object_properties(self, objects: Union[OBJECT_OR_LOCAL, Sequence[OBJECT_OR_LOCAL]]) \
            -> List[asyncio.Future[Object]]:
        if isinstance(objects, (Object, int)):
            objects = (objects,)
        if not objects:
            return []

        local_ids = tuple((o.LocalID if isinstance(o, Object) else o) for o in objects)

        # Don't mess with already selected objects
        unselected_ids = tuple(local for local in local_ids if not self._is_localid_selected(local))
        ids_to_req = unselected_ids

        session = self._region.session()
        while ids_to_req:
            blocks = [
                Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
                *[Block("ObjectData", ObjectLocalID=x) for x in ids_to_req[:255]],
            ]
            # Selecting causes ObjectProperties to be sent
            self._region.circuit.send_message(Message("ObjectSelect", blocks))
            self._region.circuit.send_message(Message("ObjectDeselect", blocks))
            ids_to_req = ids_to_req[255:]

        futures = []
        for local_id in local_ids:
            if local_id in unselected_ids:
                # Need to wait until we get our reply
                fut = self.state.register_future(local_id, UpdateType.PROPERTIES)
            else:
                # This was selected so we should already have up to date info
                fut = asyncio.Future()
                fut.set_result(self.lookup_localid(local_id))
            futures.append(fut)
        return futures

    def request_missing_objects(self) -> List[asyncio.Future[Object]]:
        return self.request_objects(self.state.missing_locals)

    def request_objects(self, local_ids: Union[int, Iterable[int]]) -> List[asyncio.Future[Object]]:
        """
        Request object local IDs, returning a list of awaitable handles for the objects

        Some may never be resolved, so use `asyncio.wait()` or `asyncio.wait_for()`.
        """
        if isinstance(local_ids, int):
            local_ids = (local_ids,)
        elif isinstance(local_ids, set):
            local_ids = tuple(local_ids)

        session = self._region.session()

        ids_to_req = local_ids
        while ids_to_req:
            self._region.circuit.send_message(Message(
                "RequestMultipleObjects",
                Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
                *[Block("ObjectData", CacheMissType=0, ID=x) for x in ids_to_req[:255]],
            ))
            ids_to_req = ids_to_req[255:]

        futures = []
        for local_id in local_ids:
            futures.append(self.state.register_future(local_id, UpdateType.OBJECT_UPDATE))
        return futures
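
As the docstring warns, some of the returned futures may never resolve (the sim simply doesn't answer for stale locals), so callers should bound the wait. A hedged usage sketch:

    async def fetch_objects(manager: ClientObjectManager, local_ids: Sequence[int]) -> List[Object]:
        futs = manager.request_objects(local_ids)
        done, pending = await asyncio.wait(futs, timeout=5.0)
        for fut in pending:  # drop requests the sim never answered
            fut.cancel()
        return [f.result() for f in done]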
|
||||
|
||||
|
||||
class ObjectEvent:
|
||||
__slots__ = ("object", "updated", "update_type")
|
||||
|
||||
object: Object
|
||||
updated: Set[str]
|
||||
update_type: UpdateType
|
||||
|
||||
def __init__(self, obj: Object, updated: Set[str], update_type: UpdateType):
|
||||
self.object = obj
|
||||
self.updated = updated
|
||||
self.update_type = update_type
|
||||
|
||||
@property
|
||||
def name(self) -> UpdateType:
|
||||
return self.update_type
|
||||
|
||||
|
||||
class ClientWorldObjectManager:
|
||||
"""Manages Objects for a session's whole world"""
|
||||
def __init__(self, session: BaseClientSession, settings: Settings, name_cache: Optional[NameCache]):
|
||||
self._session: BaseClientSession = session
|
||||
self._settings = settings
|
||||
self.name_cache = name_cache or NameCache()
|
||||
self.events: MessageHandler[ObjectEvent, UpdateType] = MessageHandler(take_by_default=False)
|
||||
self._fullid_lookup: Dict[UUID, Object] = {}
|
||||
self._avatars: Dict[UUID, Avatar] = {}
|
||||
self._avatar_objects: Dict[UUID, Object] = {}
|
||||
self._region_managers: Dict[int, ClientObjectManager] = {}
|
||||
message_handler = self._session.message_handler
|
||||
message_handler.subscribe("ObjectUpdate", self._handle_object_update)
|
||||
message_handler.subscribe("ImprovedTerseObjectUpdate",
|
||||
self._handle_terse_object_update)
|
||||
message_handler.subscribe("ObjectUpdateCompressed",
|
||||
self._handle_object_update_compressed)
|
||||
message_handler.subscribe("ObjectUpdateCached",
|
||||
self._handle_object_update_cached)
|
||||
message_handler.subscribe("CoarseLocationUpdate",
|
||||
self._handle_coarse_location_update)
|
||||
message_handler.subscribe("KillObject",
|
||||
self._handle_kill_object)
|
||||
message_handler.subscribe("ObjectProperties",
|
||||
self._handle_object_properties_generic)
|
||||
message_handler.subscribe("ObjectPropertiesFamily",
|
||||
self._handle_object_properties_generic)
|
||||
|
||||
def lookup_fullid(self, full_id: UUID) -> Optional[Object]:
|
||||
return self._fullid_lookup.get(full_id, None)
|
||||
|
||||
@property
|
||||
def all_objects(self) -> Iterable[Object]:
|
||||
return self._fullid_lookup.values()
|
||||
|
||||
def lookup_avatar(self, full_id: UUID) -> Optional[Avatar]:
|
||||
return {a.FullID: a for a in self.all_avatars}.get(full_id, None)
|
||||
|
||||
@property
|
||||
def all_avatars(self) -> Iterable[Avatar]:
|
||||
return tuple(self._avatars.values())
|
||||
|
||||
def __len__(self):
|
||||
return len(self._fullid_lookup)
|
||||
|
||||
def _get_region_state(self, handle: int) -> Optional[RegionObjectsState]:
|
||||
val = self._get_region_manager(handle)
|
||||
if val is None:
|
||||
return None
|
||||
return val.state
|
||||
|
||||
def track_region_objects(self, handle: int):
|
||||
"""Start tracking objects for a region"""
|
||||
if self._get_region_manager(handle) is None:
|
||||
self._region_managers[handle] = proxify(self._session.region_by_handle(handle).objects)
|
||||
|
||||
def clear_region_objects(self, handle: int):
|
||||
"""Handle signal that a region object manager was just cleared"""
|
||||
# Make sure they're gone from our lookup table
|
||||
for obj in tuple(self._fullid_lookup.values()):
|
||||
if obj.RegionHandle == handle:
|
||||
del self._fullid_lookup[obj.FullID]
|
||||
self._rebuild_avatar_objects()
|
||||
|
||||
def _get_region_manager(self, handle: int) -> Optional[ClientObjectManager]:
|
||||
return self._region_managers.get(handle)
|
||||
|
||||
def request_missing_objects(self) -> List[asyncio.Future[Object]]:
|
||||
futs = []
|
||||
for region in self._session.regions:
|
||||
futs.extend(region.objects.request_missing_objects())
|
||||
return futs
|
||||
|
||||
def request_object_properties(self, objects: Union[Object, Sequence[Object]]) \
|
||||
-> List[asyncio.Future[Object]]:
|
||||
# Unlike ClientObjectManager, this doesn't accept bare local IDs, since those are ambiguous across regions.
|
||||
if isinstance(objects, Object):
|
||||
objects = (objects,)
|
||||
if not objects:
|
||||
return []
|
||||
|
||||
# Has to be sent to the region they belong to, so split the objects out by region handle.
|
||||
objs_by_region = collections.defaultdict(list)
|
||||
for obj in objects:
|
||||
objs_by_region[obj.RegionHandle].append(obj)
|
||||
|
||||
futs = []
|
||||
for region_handle, region_objs in objs_by_region.items():
|
||||
region_mgr = self._get_region_manager(region_handle)
|
||||
futs.extend(region_mgr.request_object_properties(region_objs))
|
||||
return futs
|
||||
|
||||
async def load_ancestors(self, obj: Object, wait_time: float = 1.0):
|
||||
"""
|
||||
Ensure that the entire chain of parents above this object is loaded
|
||||
|
||||
Use this to make sure the object you're dealing with isn't orphaned and
|
||||
its RegionPosition can be determined.
|
||||
"""
|
||||
region_mgr = self._get_region_manager(obj.RegionHandle)
|
||||
while obj.ParentID:
|
||||
if obj.Parent is None:
|
||||
await asyncio.wait_for(region_mgr.request_objects(obj.ParentID)[0], wait_time)
|
||||
obj = obj.Parent
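# A hedged usage sketch: load_ancestors() propagates asyncio.TimeoutError from
# asyncio.wait_for() when a parent never arrives, so callers that need a
# position should guard for that (the helper name is illustrative).

async def safe_region_position(world_mgr: "ClientWorldObjectManager", obj: Object) -> Optional[Vector3]:
    try:
        await world_mgr.load_ancestors(obj, wait_time=2.0)
    except asyncio.TimeoutError:
        return None  # Parent chain never fully loaded
    return obj.RegionPosition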
|
||||
|
||||
def clear(self):
|
||||
self._avatars.clear()
|
||||
for region_mgr in self._region_managers.values():
|
||||
region_mgr.clear()
|
||||
if self._fullid_lookup:
|
||||
LOG.warning(f"Had {len(self._fullid_lookup)} objects not tied to a region manager!")
|
||||
self._fullid_lookup.clear()
|
||||
self._rebuild_avatar_objects()
|
||||
self._region_managers.clear()
|
||||
|
||||
def _update_existing_object(self, obj: Object, new_properties: dict, update_type: UpdateType):
|
||||
old_parent_id = obj.ParentID
|
||||
new_parent_id = new_properties.get("ParentID", obj.ParentID)
|
||||
old_local_id = obj.LocalID
|
||||
new_local_id = new_properties.get("LocalID", obj.LocalID)
|
||||
old_region_handle = obj.RegionHandle
|
||||
new_region_handle = new_properties.get("RegionHandle", obj.RegionHandle)
|
||||
old_region_state = self._get_region_state(old_region_handle)
|
||||
new_region_state = self._get_region_state(new_region_handle)
|
||||
|
||||
actually_updated_props = set()
|
||||
|
||||
if old_region_handle != new_region_handle:
|
||||
# The object just changed regions, we have to remove it from the old one.
|
||||
# Our LocalID will most likely change because, well, our locale changed.
|
||||
old_region_state.untrack_object(obj)
|
||||
elif old_local_id != new_local_id:
|
||||
# Our LocalID changed, and we deal with linkages to other prims by
|
||||
# LocalID association. Break any links since our LocalID is changing.
|
||||
# Could happen if we didn't mark an attachment prim dead and the parent agent
|
||||
# came back into the sim. Attachment FullIDs do not change across TPs,
|
||||
# LocalIDs do. This at least lets us partially recover from the bad state.
|
||||
new_localid = new_properties["LocalID"]
|
||||
LOG.warning(f"Got an update with new LocalID for {obj.FullID}, {obj.LocalID} != {new_localid}. "
|
||||
f"May have mishandled a KillObject for a prim that left and re-entered region.")
|
||||
old_region_state.untrack_object(obj)
|
||||
obj.LocalID = new_localid
|
||||
old_region_state.track_object(obj)
|
||||
actually_updated_props |= {"LocalID"}
|
||||
|
||||
actually_updated_props |= obj.update_properties(new_properties)
|
||||
|
||||
if new_region_handle != old_region_handle:
|
||||
# The object just moved into this region; we untracked it from the old
# one above, so mark it tracked here. This implicitly picks up any
# orphans and handles parent ID changes.
|
||||
if new_region_state is not None:
|
||||
new_region_state.track_object(obj)
|
||||
else:
|
||||
# This will leave a regionless object in the global lookup dict, same as indra.
|
||||
LOG.warning(f"Tried to move object {obj!r} to unknown region {new_region_handle}")
|
||||
|
||||
if obj.PCode == PCode.AVATAR:
|
||||
# `Avatar` instances are handled separately. Update all Avatar objects so
|
||||
# we can deal with the RegionHandle change.
|
||||
self._rebuild_avatar_objects()
|
||||
elif new_parent_id != old_parent_id:
|
||||
# Parent ID changed, but we're in the same region
|
||||
new_region_state.handle_object_reparented(obj, old_parent_id=old_parent_id)
|
||||
|
||||
if actually_updated_props and new_region_state is not None:
|
||||
self._run_object_update_hooks(obj, actually_updated_props, update_type)
|
||||
|
||||
def _track_new_object(self, region: RegionObjectsState, obj: Object):
|
||||
region.track_object(obj)
|
||||
self._fullid_lookup[obj.FullID] = obj
|
||||
if obj.PCode == PCode.AVATAR:
|
||||
self._avatar_objects[obj.FullID] = obj
|
||||
self._rebuild_avatar_objects()
|
||||
self._run_object_update_hooks(obj, set(obj.to_dict().keys()), UpdateType.OBJECT_UPDATE)
|
||||
|
||||
def _kill_object_by_local_id(self, region_state: RegionObjectsState, local_id: int):
|
||||
obj = region_state.lookup_localid(local_id)
|
||||
region_state.missing_locals -= {local_id}
|
||||
child_ids: Sequence[int]
|
||||
|
||||
if obj:
|
||||
self._run_kill_object_hooks(obj)
|
||||
child_ids = obj.ChildIDs
|
||||
else:
|
||||
LOG.debug(f"Tried to kill unknown object {local_id}")
|
||||
# Kill any pending futures it might have had since untrack_object()
|
||||
# won't be called.
|
||||
region_state.cancel_futures(local_id)
|
||||
# If it had any orphans, they need to die.
|
||||
child_ids = region_state.collect_orphans(local_id)
|
||||
|
||||
# KillObject implicitly kills descendants
|
||||
# This may mutate child_ids; iterate in reverse so removals
# don't invalidate our position in the list.
|
||||
for child_id in reversed(child_ids):
|
||||
# indra special-cases avatar PCodes and doesn't mark them dead
|
||||
# due to cascading kill. Is this correct? Do avatars require
|
||||
# explicit kill? Does this imply ParentID = 0 or do we need
|
||||
# an explicit follow-up update?
|
||||
child_obj = region_state.lookup_localid(child_id)
|
||||
if child_obj and child_obj.PCode == PCode.AVATAR:
|
||||
continue
|
||||
self._kill_object_by_local_id(region_state, child_id)
|
||||
|
||||
# Have to do this last, since untracking will clear child IDs
|
||||
if obj:
|
||||
region_state.untrack_object(obj)
|
||||
self._fullid_lookup.pop(obj.FullID, None)
|
||||
if obj.PCode == PCode.AVATAR:
|
||||
self._avatar_objects.pop(obj.FullID, None)
|
||||
self._rebuild_avatar_objects()
|
||||
|
||||
def _handle_object_update(self, msg: Message):
|
||||
seen_locals = []
|
||||
handle = msg["RegionData"]["RegionHandle"]
|
||||
region_state = self._get_region_state(handle)
|
||||
for block in msg['ObjectData']:
|
||||
object_data = normalize_object_update(block, handle)
|
||||
seen_locals.append(object_data["LocalID"])
|
||||
if region_state is None:
|
||||
LOG.warning(f"Got ObjectUpdate for unknown region {handle}: {object_data!r}")
|
||||
# Do a lookup by FullID; if an object with this FullID already exists anywhere in
# our view of the world, we want to move it to this region.
|
||||
obj = self.lookup_fullid(object_data["FullID"])
|
||||
if obj:
|
||||
self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE)
|
||||
else:
|
||||
if region_state is None:
|
||||
continue
|
||||
self._track_new_object(region_state, Object(**object_data))
|
||||
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_terse_object_update(self, msg: Message):
|
||||
seen_locals = []
|
||||
handle = msg["RegionData"]["RegionHandle"]
|
||||
region_state = self._get_region_state(handle)
|
||||
for block in msg['ObjectData']:
|
||||
object_data = normalize_terse_object_update(block, handle)
|
||||
|
||||
if region_state is None:
|
||||
LOG.warning(f"Got ImprovedTerseObjectUpdate for unknown region {handle}: {object_data!r}")
|
||||
continue
|
||||
|
||||
obj = region_state.lookup_localid(object_data["LocalID"])
|
||||
# Can only update existing object with this message
|
||||
if obj:
|
||||
# Need the Object as context because decoding state requires PCode.
|
||||
state_deserializer = ObjectStateSerializer.deserialize
|
||||
object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])
|
||||
self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE)
|
||||
else:
|
||||
if region_state:
|
||||
region_state.missing_locals.add(object_data["LocalID"])
|
||||
LOG.debug(f"Received terse update for unknown object {object_data['LocalID']}")
|
||||
seen_locals.append(object_data["LocalID"])
|
||||
|
||||
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_object_update_cached(self, msg: Message):
|
||||
seen_locals = []
|
||||
missing_locals = set()
|
||||
handle = msg["RegionData"]["RegionHandle"]
|
||||
region_state = self._get_region_state(handle)
|
||||
for block in msg['ObjectData']:
|
||||
seen_locals.append(block["ID"])
|
||||
update_flags = block.deserialize_var("UpdateFlags", make_copy=False)
|
||||
|
||||
if region_state is None:
|
||||
LOG.warning(f"Got ObjectUpdateCached for unknown region {handle}: {block!r}")
|
||||
continue
|
||||
|
||||
# Check if we already know about the object
|
||||
obj = region_state.lookup_localid(block["ID"])
|
||||
if obj is not None and obj.CRC == block["CRC"]:
|
||||
self._update_existing_object(obj, {
|
||||
"UpdateFlags": update_flags,
|
||||
"RegionHandle": handle,
|
||||
}, UpdateType.OBJECT_UPDATE)
|
||||
continue
|
||||
|
||||
cached_obj_data = self._lookup_cache_entry(handle, block["ID"], block["CRC"])
|
||||
if cached_obj_data is not None:
|
||||
cached_obj = normalize_object_update_compressed_data(cached_obj_data)
|
||||
cached_obj["UpdateFlags"] = update_flags
|
||||
cached_obj["RegionHandle"] = handle
|
||||
self._track_new_object(region_state, Object(**cached_obj))
|
||||
continue
|
||||
|
||||
# Don't know about it and wasn't cached.
|
||||
missing_locals.add(block["ID"])
|
||||
if region_state:
|
||||
region_state.missing_locals.update(missing_locals)
|
||||
if missing_locals:
|
||||
self._handle_object_update_cached_misses(handle, missing_locals)
|
||||
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_object_update_cached_misses(self, region_handle: int, missing_locals: Set[int]):
|
||||
"""Handle an ObjectUpdateCached that referenced some un-cached local IDs"""
|
||||
region_mgr = self._get_region_manager(region_handle)
|
||||
region_mgr.request_objects(missing_locals)
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
def _lookup_cache_entry(self, region_handle: int, local_id: int, crc: int) -> Optional[bytes]:
|
||||
return None
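# A minimal sketch of overriding the stub above with an in-memory cache,
# assuming entries are the compressed-update payloads keyed by
# (region handle, local ID, CRC); the subclass name is illustrative.

class CachingWorldObjectManager(ClientWorldObjectManager):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._object_cache: Dict[Tuple[int, int, int], bytes] = {}

    def _lookup_cache_entry(self, region_handle: int, local_id: int, crc: int) -> Optional[bytes]:
        return self._object_cache.get((region_handle, local_id, crc))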
|
||||
|
||||
def _handle_object_update_compressed(self, msg: Message):
|
||||
seen_locals = []
|
||||
handle = msg["RegionData"]["RegionHandle"]
|
||||
region_state = self._get_region_state(handle)
|
||||
for block in msg['ObjectData']:
|
||||
object_data = normalize_object_update_compressed(block, handle)
|
||||
seen_locals.append(object_data["LocalID"])
|
||||
if region_state is None:
|
||||
LOG.warning(f"Got ObjectUpdateCompressed for unknown region {handle}: {object_data!r}")
|
||||
obj = self.lookup_fullid(object_data["FullID"])
|
||||
if obj:
|
||||
self._update_existing_object(obj, object_data, UpdateType.OBJECT_UPDATE)
|
||||
else:
|
||||
if region_state is None:
|
||||
continue
|
||||
self._track_new_object(region_state, Object(**object_data))
|
||||
msg.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_object_properties_generic(self, packet: Message):
|
||||
seen_locals = []
|
||||
for block in packet["ObjectData"]:
|
||||
object_properties = dict(block.items())
|
||||
if packet.name == "ObjectProperties":
|
||||
object_properties["TextureID"] = block.deserialize_var("TextureID")
|
||||
|
||||
obj = self.lookup_fullid(block["ObjectID"])
|
||||
if obj:
|
||||
seen_locals.append(obj.LocalID)
|
||||
self._update_existing_object(obj, object_properties, UpdateType.PROPERTIES)
|
||||
else:
|
||||
LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}")
|
||||
packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_kill_object(self, message: Message):
|
||||
seen_locals = []
|
||||
|
||||
# Have to look up region based on sender, handle not sent in this message
|
||||
region = self._session.region_by_circuit_addr(message.sender)
|
||||
region_state = region.objects.state
|
||||
for block in message["ObjectData"]:
|
||||
self._kill_object_by_local_id(region_state, block["ID"])
|
||||
seen_locals.append(block["ID"])
|
||||
message.meta["ObjectUpdateIDs"] = tuple(seen_locals)
|
||||
|
||||
def _handle_coarse_location_update(self, message: Message):
|
||||
# Have to look up region based on sender, handle not sent in this message
|
||||
region = self._session.region_by_circuit_addr(message.sender)
|
||||
region_state = region.objects.state
|
||||
region_state.coarse_locations.clear()
|
||||
|
||||
coarse_locations: Dict[UUID, Vector3] = {}
|
||||
for agent_block, location_block in zip(message["AgentData"], message["Location"]):
|
||||
x, y, z = location_block["X"], location_block["Y"], location_block["Z"]
|
||||
coarse_locations[agent_block["AgentID"]] = Vector3(
|
||||
X=x,
|
||||
Y=y,
|
||||
# The Z byte is quantized to 4m steps, so multiply by 4 to recover
# the true Z location; representable height is thus capped at 1020m.
# If z == 255 then the true Z is unknown.
# http://wiki.secondlife.com/wiki/CoarseLocationUpdate
|
||||
Z=z * 4 if z != 255 else math.inf,
|
||||
)
|
||||
|
||||
region_state.coarse_locations.update(coarse_locations)
|
||||
self._rebuild_avatar_objects()
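# To make the Z packing above concrete, the decode rule in isolation
# (sample values illustrative):

def decode_coarse_z(z_byte: int) -> float:
    # Z is quantized to 4m steps; 255 is the "unknown / too high" sentinel
    return math.inf if z_byte == 255 else z_byte * 4.0

assert decode_coarse_z(60) == 240.0
assert decode_coarse_z(255) == math.inf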
|
||||
|
||||
def _process_get_object_cost_response(self, parsed: dict):
|
||||
if "error" in parsed:
|
||||
return
|
||||
for object_id, object_costs in parsed.items():
|
||||
obj = self.lookup_fullid(UUID(object_id))
|
||||
if not obj:
|
||||
LOG.debug(f"Received ObjectCost for unknown {object_id}")
|
||||
continue
|
||||
obj.ObjectCosts.update(object_costs)
|
||||
self._run_object_update_hooks(obj, {"ObjectCosts"}, UpdateType.COSTS)
|
||||
|
||||
def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: UpdateType):
|
||||
region_state = self._get_region_state(obj.RegionHandle)
|
||||
region_state.resolve_futures(obj, update_type)
|
||||
if obj.PCode == PCode.AVATAR and "NameValue" in updated_props:
|
||||
if obj.NameValue:
|
||||
self.name_cache.update(obj.FullID, obj.NameValue.to_dict())
|
||||
self.events.handle(ObjectEvent(obj, updated_props, update_type))
|
||||
|
||||
def _run_kill_object_hooks(self, obj: Object):
|
||||
self.events.handle(ObjectEvent(obj, set(), UpdateType.KILL))
|
||||
|
||||
def _rebuild_avatar_objects(self):
|
||||
# Get all avatars known through coarse locations and which region the location was in
|
||||
coarse_locations: Dict[UUID, Tuple[int, Vector3]] = {}
|
||||
for region_handle, region in self._region_managers.items():
|
||||
for av_key, location in region.state.coarse_locations.items():
|
||||
coarse_locations[av_key] = (region_handle, location)
|
||||
|
||||
# Merge together avatars known through coarse locations or objects, with details for both
|
||||
current_av_details: Dict[UUID, Tuple[Optional[Tuple[int, Vector3]], Optional[Object]]] = {}
|
||||
for av_key in set(coarse_locations.keys()) | set(self._avatar_objects.keys()):
|
||||
details = (coarse_locations.get(av_key), self._avatar_objects.get(av_key))
|
||||
current_av_details[av_key] = details
|
||||
|
||||
# Look for changes in avatars we're already tracking
|
||||
for existing_key in tuple(self._avatars.keys()):
|
||||
av = self._avatars[existing_key]
|
||||
if existing_key in current_av_details:
|
||||
# This avatar still exists, update it.
|
||||
coarse_pair, av_obj = current_av_details[existing_key]
|
||||
av.Object = av_obj
|
||||
if coarse_pair:
|
||||
coarse_handle, coarse_location = coarse_pair
|
||||
av.CoarseLocation = coarse_location
|
||||
av.RegionHandle = coarse_handle
|
||||
# If we have a real value for Z then throw away any stale guesses
|
||||
if av.CoarseLocation.Z != math.inf:
|
||||
av.GuessedZ = None
|
||||
if av_obj:
|
||||
av.Object = av_obj
|
||||
av.RegionHandle = av_obj.RegionHandle
|
||||
else:
|
||||
# Avatar isn't in coarse locations or objects, it's gone.
|
||||
self._avatars.pop(existing_key, None)
|
||||
av.Object = None
|
||||
av.CoarseLocation = None
|
||||
av.Valid = False
|
||||
|
||||
# Check for any new avatars
|
||||
for av_key, (coarse_pair, av_obj) in current_av_details.items():
|
||||
if av_key in self._avatars:
|
||||
# Already handled in the update step above
|
||||
continue
|
||||
region_handle = None
|
||||
coarse_location = None
|
||||
if coarse_pair:
|
||||
region_handle, coarse_location = coarse_pair
|
||||
if av_obj:
|
||||
# Prefer the region handle from the Object if we have one
|
||||
region_handle = av_obj.RegionHandle
|
||||
assert region_handle is not None
|
||||
self._avatars[av_key] = Avatar(
|
||||
full_id=av_key,
|
||||
region_handle=region_handle,
|
||||
resolved_name=self.name_cache.lookup(av_key, create_if_none=True),
|
||||
coarse_location=coarse_location,
|
||||
obj=av_obj,
|
||||
)
|
||||
|
||||
|
||||
class RegionObjectsState:
|
||||
"""
|
||||
Internal class for tracking Object state within a specific region
|
||||
|
||||
Should only be directly used by the world and region ObjectManagers.
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
"handle", "missing_locals", "_orphans", "localid_lookup", "coarse_locations",
|
||||
"_object_futures"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
self.missing_locals = set()
|
||||
self.localid_lookup: Dict[int, Object] = {}
|
||||
self.coarse_locations: Dict[UUID, Vector3] = {}
|
||||
self._object_futures: Dict[Tuple[int, int], List[asyncio.Future]] = {}
|
||||
self._orphans: Dict[int, List[int]] = collections.defaultdict(list)
|
||||
|
||||
def clear(self):
|
||||
"""Called by the owning ObjectManager when it knows the region is going away"""
|
||||
for fut in tuple(itertools.chain(*self._object_futures.values())):
|
||||
fut.cancel()
|
||||
self._object_futures.clear()
|
||||
self._orphans.clear()
|
||||
self.coarse_locations.clear()
|
||||
self.missing_locals.clear()
|
||||
self.localid_lookup.clear()
|
||||
|
||||
def lookup_localid(self, localid: int) -> Optional[Object]:
|
||||
return self.localid_lookup.get(localid)
|
||||
|
||||
def track_object(self, obj: Object):
|
||||
"""Assign ownership of Object to this region"""
|
||||
obj_same_localid = self.localid_lookup.get(obj.LocalID)
|
||||
if obj_same_localid:
|
||||
LOG.error(f"Clobbering existing object with LocalID {obj.LocalID}! "
|
||||
f"{obj.to_dict()} clobbered {obj_same_localid.to_dict()}")
|
||||
self.localid_lookup[obj.LocalID] = obj
|
||||
# If it was missing, it's not missing anymore.
|
||||
self.missing_locals -= {obj.LocalID}
|
||||
|
||||
self._parent_object(obj)
|
||||
|
||||
# Adopt any of our orphaned child objects.
|
||||
for orphan_local in self.collect_orphans(obj.LocalID):
|
||||
child_obj = self.localid_lookup.get(orphan_local)
|
||||
# Shouldn't be any dead children in the orphanage
|
||||
assert child_obj is not None
|
||||
self._parent_object(child_obj)
|
||||
|
||||
def untrack_object(self, obj: Object):
|
||||
"""
|
||||
Remove an Object from this region's ownership
|
||||
|
||||
Can happen due to the object being killed, or due to it moving to another region
|
||||
"""
|
||||
former_child_ids = obj.ChildIDs[:]
|
||||
for child_id in former_child_ids:
|
||||
child_obj = self.localid_lookup.get(child_id)
|
||||
assert child_obj is not None
|
||||
self._unparent_object(child_obj, child_obj.ParentID)
|
||||
|
||||
# Place any remaining unkilled children in the orphanage
|
||||
for child_id in former_child_ids:
|
||||
self._track_orphan(child_id, obj.LocalID)
|
||||
|
||||
assert not obj.ChildIDs
|
||||
|
||||
# Make sure the parent knows we went away
|
||||
self._unparent_object(obj, obj.ParentID)
|
||||
# Object doesn't belong to this region anymore and won't receive
|
||||
# any updates, cancel any pending futures
|
||||
self.cancel_futures(obj.LocalID)
|
||||
|
||||
del self.localid_lookup[obj.LocalID]
|
||||
|
||||
def _parent_object(self, obj: Object, insert_at_head=False):
|
||||
"""Create any links to ancestor Objects for obj"""
|
||||
if obj.ParentID:
|
||||
parent = self.localid_lookup.get(obj.ParentID)
|
||||
if parent is not None:
|
||||
assert obj.LocalID not in parent.ChildIDs
|
||||
# Link order is never explicitly passed to clients, so we have to do
|
||||
# some nasty guesswork based on order of received initial ObjectUpdates
|
||||
# Note that this is broken in the viewer as well, and there doesn't seem
|
||||
# to be a foolproof way to get this.
|
||||
idx = 0 if insert_at_head else len(parent.ChildIDs)
|
||||
parent.ChildIDs.insert(idx, obj.LocalID)
|
||||
parent.Children.insert(idx, obj)
|
||||
obj.Parent = weakref.proxy(parent)
|
||||
else:
|
||||
# We have a parent, but we don't have an Object for it yet
|
||||
self.missing_locals.add(obj.ParentID)
|
||||
self._track_orphan(obj.LocalID, parent_id=obj.ParentID)
|
||||
obj.Parent = None
|
||||
LOG.debug(f"{obj.LocalID} updated with parent {obj.ParentID}, but parent wasn't found!")
|
||||
|
||||
def _unparent_object(self, obj: Object, old_parent_id: int):
|
||||
"""Break any links to ancestor Objects for obj"""
|
||||
obj.Parent = None
|
||||
if old_parent_id:
|
||||
# Had a parent, remove this from the child and orphan lists.
|
||||
removed = self._untrack_orphan(obj, old_parent_id)
|
||||
|
||||
old_parent = self.localid_lookup.get(old_parent_id)
|
||||
if old_parent:
|
||||
if obj.LocalID in old_parent.ChildIDs:
|
||||
idx = old_parent.ChildIDs.index(obj.LocalID)
|
||||
del old_parent.ChildIDs[idx]
|
||||
del old_parent.Children[idx]
|
||||
else:
|
||||
# Something is very broken if this happens
|
||||
LOG.warning(f"Changing parent of {obj.LocalID}, but old parent didn't correctly adopt, "
|
||||
f"was {'' if removed else 'not '}in orphan list")
|
||||
else:
|
||||
LOG.debug(f"Changing parent of {obj.LocalID}, but couldn't find old parent")
|
||||
|
||||
def handle_object_reparented(self, obj: Object, old_parent_id: int):
|
||||
"""Recreate any links to ancestor Objects for obj due to parent changes"""
|
||||
self._unparent_object(obj, old_parent_id)
|
||||
self._parent_object(obj, insert_at_head=True)
|
||||
|
||||
def collect_orphans(self, parent_localid: int) -> Sequence[int]:
|
||||
"""Take ownership of any orphan IDs belonging to parent_localid"""
|
||||
return self._orphans.pop(parent_localid, [])
|
||||
|
||||
def _track_orphan(self, local_id: int, parent_id: int):
|
||||
if len(self._orphans) > 100:
|
||||
LOG.warning(f"Orphaned object dict is getting large: {len(self._orphans)}")
|
||||
self._orphans[parent_id].append(local_id)
|
||||
|
||||
def _untrack_orphan(self, obj: Object, parent_id: int) -> bool:
|
||||
"""Remove obj from parent_id's list of orphans if present"""
|
||||
if parent_id not in self._orphans:
|
||||
return False
|
||||
orphan_list = self._orphans[parent_id]
|
||||
removed = False
|
||||
if obj.LocalID in orphan_list:
|
||||
orphan_list.remove(obj.LocalID)
|
||||
removed = True
|
||||
# List is empty now, get rid of it.
|
||||
if not orphan_list:
|
||||
del self._orphans[parent_id]
|
||||
return removed
|
||||
|
||||
def register_future(self, local_id: int, future_type: UpdateType) -> asyncio.Future[Object]:
|
||||
fut = asyncio.Future()
|
||||
fut_key = (local_id, future_type)
|
||||
local_futs = self._object_futures.get(fut_key, [])
|
||||
local_futs.append(fut)
|
||||
self._object_futures[fut_key] = local_futs
|
||||
fut.add_done_callback(local_futs.remove)
|
||||
return fut
|
||||
|
||||
def resolve_futures(self, obj: Object, update_type: UpdateType):
|
||||
futures = self._object_futures.get((obj.LocalID, update_type), [])
|
||||
for fut in futures[:]:
|
||||
fut.set_result(obj)
|
||||
|
||||
def cancel_futures(self, local_id: int):
|
||||
# Object went away, so need to kill any pending futures.
|
||||
for fut_key, futs in self._object_futures.items():
|
||||
if fut_key[0] == local_id:
|
||||
for fut in futs:
|
||||
fut.cancel()
|
||||
# No break here: futures may be pending under several update types
# for this local ID, and all of them should be cancelled.
|
||||
|
||||
|
||||
class LocationType(enum.IntEnum):
|
||||
NONE = enum.auto()
|
||||
COARSE = enum.auto()
|
||||
EXACT = enum.auto()
|
||||
|
||||
|
||||
class Avatar:
|
||||
"""Wrapper for an avatar known through ObjectUpdate or CoarseLocationUpdate"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
full_id: UUID,
|
||||
region_handle: int,
|
||||
obj: Optional["Object"] = None,
|
||||
coarse_location: Optional[Vector3] = None,
|
||||
resolved_name: Optional[NameCacheEntry] = None,
|
||||
):
|
||||
self.FullID: UUID = full_id
|
||||
self.Object: Optional["Object"] = obj
|
||||
self.RegionHandle: int = region_handle
|
||||
# TODO: Allow hooking into getZOffsets FS bridge response
|
||||
# to fill in the Z axis if it's infinite
|
||||
self.CoarseLocation = coarse_location
|
||||
self.Valid = True
|
||||
self.GuessedZ: Optional[float] = None
|
||||
self._resolved_name = resolved_name
|
||||
|
||||
@property
|
||||
def LocationType(self) -> "LocationType":
|
||||
if self.Object and self.Object.AncestorsKnown:
|
||||
return LocationType.EXACT
|
||||
if self.CoarseLocation is not None:
|
||||
return LocationType.COARSE
|
||||
return LocationType.NONE
|
||||
|
||||
@property
|
||||
def RegionPosition(self) -> Vector3:
|
||||
if self.Object and self.Object.AncestorsKnown:
|
||||
return self.Object.RegionPosition
|
||||
if self.CoarseLocation is not None:
|
||||
if self.CoarseLocation.Z == math.inf and self.GuessedZ is not None:
|
||||
coarse = self.CoarseLocation
|
||||
return Vector3(coarse.X, coarse.Y, self.GuessedZ)
|
||||
return self.CoarseLocation
|
||||
raise ValueError(f"Avatar {self.FullID} has no known position")
|
||||
|
||||
@property
|
||||
def GlobalPosition(self) -> Vector3:
|
||||
return self.RegionPosition + handle_to_global_pos(self.RegionHandle)
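# For context: region handles pack the region corner's global coordinates
# (multiples of 256m) into a single 64-bit int, so handle_to_global_pos() is
# presumably equivalent to this sketch (not the library's actual code):

def _handle_to_global_pos_sketch(handle: int) -> Vector3:
    # High 32 bits: global X of the region corner; low 32 bits: global Y
    return Vector3(handle >> 32, handle & 0xFFFFFFFF, 0)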
|
||||
|
||||
@property
|
||||
def Name(self) -> Optional[str]:
|
||||
if not self._resolved_name:
|
||||
return None
|
||||
return str(self._resolved_name)
|
||||
|
||||
@property
|
||||
def PreferredName(self) -> Optional[str]:
|
||||
if not self._resolved_name:
|
||||
return None
|
||||
return self._resolved_name.preferred_name
|
||||
|
||||
@property
|
||||
def DisplayName(self) -> Optional[str]:
|
||||
if not self._resolved_name:
|
||||
return None
|
||||
return self._resolved_name.display_name
|
||||
|
||||
@property
|
||||
def LegacyName(self) -> Optional[str]:
|
||||
if not self._resolved_name:
|
||||
return None
|
||||
return self._resolved_name.legacy_name
|
||||
|
||||
def __repr__(self):
|
||||
loc_str = str(self.RegionPosition) if self.LocationType != LocationType.NONE else "?"
|
||||
return f"<{self.__class__.__name__} {self.FullID} {self.Name!r} @ {loc_str}>"
|
||||
hippolyzer/lib/client/state.py (new file, 36 lines)
@@ -0,0 +1,36 @@
"""
Base classes for common session-related state shared between clients and proxies
"""
from __future__ import annotations

import abc
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.circuit import ConnectionHolder
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.base.network.transport import ADDR_TUPLE

if TYPE_CHECKING:
    from hippolyzer.lib.client.object_manager import ClientObjectManager, ClientWorldObjectManager


class BaseClientRegion(ConnectionHolder, abc.ABC):
    """Represents a client's view of a remote region"""
    handle: Optional[int]
    # Actually a weakref
    session: Callable[[], BaseClientSession]
    objects: ClientObjectManager


class BaseClientSession(abc.ABC):
    """Represents a client's view of a remote session"""
    id: UUID
    agent_id: UUID
    secure_session_id: UUID
    message_handler: MessageHandler[Message, str]
    regions: Sequence[BaseClientRegion]
    region_by_handle: Callable[[int], Optional[BaseClientRegion]]
    region_by_circuit_addr: Callable[[ADDR_TUPLE], Optional[BaseClientRegion]]
    objects: ClientWorldObjectManager
||||
@@ -8,17 +8,16 @@ import warnings
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID, Vector3
|
||||
from hippolyzer.lib.base.message.message import Block
|
||||
from hippolyzer.lib.base.message.message import Block, Message
|
||||
from hippolyzer.lib.base.objects import Object
|
||||
from hippolyzer.lib.proxy import addon_ctx
|
||||
from hippolyzer.lib.proxy.addons import AddonManager
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.packets import Direction, ProxiedUDPPacket
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.base.network.transport import UDPPacket, Direction
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import SessionManager, Session
|
||||
from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope
|
||||
from hippolyzer.lib.proxy.templates import ChatSourceType, ChatType
|
||||
from hippolyzer.lib.base.templates import ChatSourceType, ChatType
|
||||
|
||||
|
||||
class AssetAliasTracker:
|
||||
@@ -40,8 +39,13 @@ class AssetAliasTracker:
|
||||
def get_orig_uuid(self, val: UUID) -> Optional[UUID]:
|
||||
return self.rev_mapping.get(val)
|
||||
|
||||
def get_alias_uuid(self, val: UUID):
|
||||
alias_id = self.alias_mapping.setdefault(val, UUID.random())
|
||||
def get_alias_uuid(self, val: UUID, create: bool = True) -> Optional[UUID]:
|
||||
if create:
|
||||
alias_id = self.alias_mapping.setdefault(val, UUID.random())
|
||||
else:
|
||||
alias_id = self.alias_mapping.get(val)
|
||||
if alias_id is None:
|
||||
return None
|
||||
self.rev_mapping.setdefault(alias_id, val)
|
||||
return alias_id
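# A short sketch of the round-trip enabled by the new `create` flag
# (sample values illustrative):

tracker = AssetAliasTracker()
orig_id = UUID.random()
alias = tracker.get_alias_uuid(orig_id)  # Allocates a stable alias on first use
assert tracker.get_orig_uuid(alias) == orig_id  # Reverse mapping holds
# With create=False, unknown values no longer allocate a new alias:
assert tracker.get_alias_uuid(UUID.random(), create=False) is None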
|
||||
|
||||
@@ -53,7 +57,7 @@ def show_message(text, session=None) -> None:
|
||||
|
||||
# `or None` so we don't use a dead weakref Proxy, which is falsy
|
||||
session = session or addon_ctx.session.get(None) or None
|
||||
message = ProxiedMessage(
|
||||
message = Message(
|
||||
"ChatFromSimulator",
|
||||
Block(
|
||||
"ChatData",
|
||||
@@ -79,7 +83,7 @@ def send_chat(message: Union[bytes, str], channel=0, chat_type=ChatType.NORMAL,
|
||||
session = session or addon_ctx.session.get(None) or None
|
||||
if not session:
|
||||
raise RuntimeError("Tried to send chat without session")
|
||||
session.main_region.circuit.send_message(ProxiedMessage(
|
||||
session.main_region.circuit.send_message(Message(
|
||||
"ChatFromViewer",
|
||||
Block(
|
||||
"AgentData",
|
||||
@@ -155,7 +159,7 @@ class BaseAddon(abc.ABC):
|
||||
def handle_unload(self, session_manager: SessionManager):
|
||||
pass
|
||||
|
||||
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
|
||||
def handle_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
|
||||
pass
|
||||
|
||||
def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
|
||||
@@ -177,13 +181,15 @@ class BaseAddon(abc.ABC):
|
||||
def handle_region_changed(self, session: Session, region: ProxiedRegion):
|
||||
pass
|
||||
|
||||
def handle_circuit_created(self, session: Session, region: ProxiedRegion):
|
||||
pass
|
||||
|
||||
def handle_rlv_command(self, session: Session, region: ProxiedRegion, source: UUID,
|
||||
cmd: str, options: List[str], param: str):
|
||||
pass
|
||||
|
||||
def handle_proxied_packet(self, session_manager: SessionManager, packet: ProxiedUDPPacket,
|
||||
session: Optional[Session], region: Optional[ProxiedRegion],
|
||||
message: Optional[ProxiedMessage]):
|
||||
def handle_proxied_packet(self, session_manager: SessionManager, packet: UDPPacket,
|
||||
session: Optional[Session], region: Optional[ProxiedRegion]):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
@@ -16,15 +16,15 @@ from types import ModuleType
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.datatypes import UUID
|
||||
from hippolyzer.lib.base.message.message import Message
|
||||
from hippolyzer.lib.base.network.transport import UDPPacket
|
||||
from hippolyzer.lib.proxy import addon_ctx
|
||||
from hippolyzer.lib.proxy.task_scheduler import TaskLifeScope, TaskScheduler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from hippolyzer.lib.proxy.commands import CommandDetails, WrappedCommandCallable
|
||||
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.proxy.objects import Object
|
||||
from hippolyzer.lib.proxy.packets import ProxiedUDPPacket
|
||||
from hippolyzer.lib.proxy.object_manager import Object
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import Session, SessionManager
|
||||
|
||||
@@ -55,6 +55,10 @@ class BaseInteractionManager:
|
||||
async def save_file(self, caption: str = '', directory: str = '', filter_str: str = '') -> Optional[str]:
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
async def confirm(self, title: str, caption: str) -> bool:
|
||||
pass
|
||||
|
||||
def main_window_handle(self) -> Any:
|
||||
return None
|
||||
|
||||
@@ -97,9 +101,14 @@ class AddonManager:
|
||||
|
||||
@classmethod
|
||||
def shutdown(cls):
|
||||
to_pop = []
|
||||
for mod in cls.FRESH_ADDON_MODULES.values():
|
||||
to_pop.append(mod)
|
||||
cls._call_module_hooks(mod, "handle_unload", cls.SESSION_MANAGER)
|
||||
cls.SCHEDULER.shutdown()
|
||||
for mod in to_pop:
|
||||
if isinstance(mod, ModuleType):
|
||||
sys.modules.pop(mod.__name__, None)
|
||||
|
||||
@classmethod
|
||||
def have_active_repl(cls):
|
||||
@@ -169,6 +178,7 @@ class AddonManager:
|
||||
old_mod = cls.FRESH_ADDON_MODULES.pop(specs[0].name, None)
|
||||
if old_mod:
|
||||
cls._unload_module(old_mod)
|
||||
sys.modules.pop(old_mod.__name__, None)
|
||||
if reload:
|
||||
cls._reload_addons()
|
||||
|
||||
@@ -277,13 +287,8 @@ class AddonManager:
|
||||
|
||||
# Make sure module initialization happens after any pending task cancellations
|
||||
# due to module unloading.
|
||||
def _init_soon():
|
||||
cls._call_module_hooks(mod, "handle_init", cls.SESSION_MANAGER)
|
||||
if not cls._SUBPROCESS:
|
||||
for session in cls.SESSION_MANAGER.sessions:
|
||||
with addon_ctx.push(new_session=session):
|
||||
cls._call_module_hooks(mod, "handle_session_init", session)
|
||||
asyncio.get_event_loop().call_soon(_init_soon)
|
||||
|
||||
asyncio.get_event_loop().call_soon(cls._init_module, mod)
|
||||
except Exception as e:
|
||||
if had_mod:
|
||||
logging.exception("Exploded trying to reload addon %s" % spec.name)
|
||||
@@ -299,6 +304,14 @@ class AddonManager:
|
||||
if raise_exceptions and load_exception is not None:
|
||||
raise load_exception
|
||||
|
||||
@classmethod
|
||||
def _init_module(cls, mod: ModuleType):
|
||||
cls._call_module_hooks(mod, "handle_init", cls.SESSION_MANAGER)
|
||||
if not cls._SUBPROCESS:
|
||||
for session in cls.SESSION_MANAGER.sessions:
|
||||
with addon_ctx.push(new_session=session):
|
||||
cls._call_module_hooks(mod, "handle_session_init", session)
|
||||
|
||||
@classmethod
|
||||
def _unload_module(cls, old_mod: ModuleType):
|
||||
cls._call_module_hooks(old_mod, "handle_unload", cls.SESSION_MANAGER)
|
||||
@@ -382,7 +395,7 @@ class AddonManager:
|
||||
LOG.error(text)
|
||||
|
||||
@classmethod
|
||||
def handle_lludp_message(cls, session: Session, region: ProxiedRegion, message: ProxiedMessage):
|
||||
def handle_lludp_message(cls, session: Session, region: ProxiedRegion, message: Message):
|
||||
cls._reload_addons()
|
||||
if message.name == "ChatFromViewer" and "ChatData" in message:
|
||||
if message["ChatData"]["Channel"] == cls.COMMAND_CHANNEL:
|
||||
@@ -514,8 +527,13 @@ class AddonManager:
|
||||
return cls._call_all_addon_hooks("handle_region_changed", session, region)
|
||||
|
||||
@classmethod
|
||||
def handle_proxied_packet(cls, session_manager: SessionManager, packet: ProxiedUDPPacket,
|
||||
session: Optional[Session], region: Optional[ProxiedRegion],
|
||||
message: Optional[ProxiedMessage]):
|
||||
return cls._call_all_addon_hooks("handle_proxied_packet", session_manager,
|
||||
packet, session, region, message)
|
||||
def handle_circuit_created(cls, session: Session, region: ProxiedRegion):
|
||||
with addon_ctx.push(session, region):
|
||||
return cls._call_all_addon_hooks("handle_circuit_created", session, region)
|
||||
|
||||
@classmethod
|
||||
def handle_proxied_packet(cls, session_manager: SessionManager, packet: UDPPacket,
|
||||
session: Optional[Session], region: Optional[ProxiedRegion]):
|
||||
with addon_ctx.push(session, region):
|
||||
return cls._call_all_addon_hooks("handle_proxied_packet", session_manager,
|
||||
packet, session, region)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_config_dirs, has_settings_file
|
||||
|
||||
|
||||
class InvalidConfigDir(Exception):
|
||||
@@ -11,39 +12,22 @@ def setup_ca(config_path, mitmproxy_master):
|
||||
p = Path(config_path)
|
||||
if not p.exists():
|
||||
raise InvalidConfigDir("Config path does not exist!")
|
||||
settings_path = p / "user_settings"
|
||||
if not (settings_path / "settings.xml").exists():
|
||||
if not has_settings_file(p):
|
||||
raise InvalidConfigDir("Path is not a second life config dir!")
|
||||
|
||||
mitmproxy_conf_dir = Path(mitmproxy_master.options.confdir)
|
||||
mitmproxy_ca_path = (mitmproxy_conf_dir.expanduser() / "mitmproxy-ca-cert.pem")
|
||||
|
||||
shutil.copy(mitmproxy_ca_path, settings_path / "CA.pem")
|
||||
shutil.copy(mitmproxy_ca_path, p / "user_settings" / "CA.pem")
|
||||
|
||||
|
||||
def setup_ca_everywhere(mitmproxy_master):
|
||||
valid_paths = set()
|
||||
paths = _viewer_config_dir_iter()
|
||||
paths = iter_viewer_config_dirs()
|
||||
for path in paths:
|
||||
try:
|
||||
setup_ca(path, mitmproxy_master)
|
||||
valid_paths.add(path)
|
||||
except InvalidConfigDir:
|
||||
pass
|
||||
except PermissionError:
|
||||
pass
|
||||
|
||||
return valid_paths
|
||||
|
||||
|
||||
def _viewer_config_dir_iter():
|
||||
if sys.platform.startswith("linux"):
|
||||
paths = (x for x in Path.home().iterdir() if x.name.startswith("."))
|
||||
elif sys.platform == "darwin":
|
||||
paths = (Path.home() / "Library" / "Application Support").iterdir()
|
||||
elif sys.platform in ("win32", "msys", "cygwin"):
|
||||
paths = (Path.home() / "AppData" / "Local").iterdir()
|
||||
else:
|
||||
raise Exception("Unknown OS, can't locate viewer config dirs!")
|
||||
|
||||
return (path for path in paths if path.is_dir())
|
||||
|
||||
@@ -1,150 +1,35 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
import dataclasses
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from types import TracebackType
|
||||
from typing import *
|
||||
|
||||
import aiohttp
|
||||
|
||||
from hippolyzer.lib.base import llsd as llsd_lib
|
||||
from hippolyzer.lib.base.helpers import proxify
|
||||
from hippolyzer.lib.base.network.caps_client import CapsClient, CAPS_DICT
|
||||
from hippolyzer.lib.proxy.settings import ProxySettings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
|
||||
|
||||
class CapsClientResponse(aiohttp.ClientResponse):
|
||||
"""
|
||||
Not actually instantiated; used to lie to the type system,
since we'll dynamically put this onto a ClientResponse instance.
Will fail isinstance().
|
||||
"""
|
||||
async def read_llsd(self) -> Any:
|
||||
raise NotImplementedError()
|
||||
class ProxyCapsClient(CapsClient):
|
||||
def __init__(self, settings: ProxySettings, region: Optional[ProxiedRegion] = None):
|
||||
super().__init__(None)
|
||||
self._region = region
|
||||
self._settings = settings
|
||||
|
||||
def _get_caps(self) -> Optional[CAPS_DICT]:
|
||||
if not self._region:
|
||||
return None
|
||||
return self._region.caps
|
||||
|
||||
class _HippoSessionRequestContextManager:
|
||||
"""
|
||||
_SessionRequestContextManager but with a symmetrical API
|
||||
|
||||
aiohttp.request() and aiohttp.ClientSession.request() have different APIs.
|
||||
One is sync and returns a context manager; the other is async and returns a coro.
|
||||
aiohttp.request() also doesn't accept the arguments that we need for custom
|
||||
SSL contexts. To deal with requests that have existing sessions and those without,
|
||||
just give them both the same wrapper and don't close the session on context manager
|
||||
exit if it wasn't our session.
|
||||
"""
|
||||
__slots__ = ("_coro", "_resp", "_session", "_session_owned")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coro: Coroutine[asyncio.Future[Any], None, aiohttp.ClientResponse],
|
||||
session: aiohttp.ClientSession,
|
||||
session_owned: bool = True,
|
||||
) -> None:
|
||||
self._coro = coro
|
||||
self._resp: Optional[aiohttp.ClientResponse] = None
|
||||
self._session = session
|
||||
self._session_owned = session_owned
|
||||
|
||||
async def __aenter__(self) -> CapsClientResponse:
|
||||
try:
|
||||
self._resp = await self._coro
|
||||
|
||||
# We don't control creation of the ClientResponse, so tack on
|
||||
# a convenience method for reading LLSD.
|
||||
async def _read_llsd():
|
||||
return llsd_lib.parse_xml(await self._resp.read())
|
||||
self._resp.read_llsd = _read_llsd
|
||||
except BaseException:
|
||||
if self._session_owned:
|
||||
await self._session.close()
|
||||
raise
|
||||
else:
|
||||
# intentionally fooling the type system
|
||||
return self._resp # type: ignore
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: Optional[Type[BaseException]],
|
||||
exc: Optional[BaseException],
|
||||
tb: Optional[TracebackType],
|
||||
) -> None:
|
||||
assert self._resp is not None
|
||||
self._resp.close()
|
||||
if self._session_owned:
|
||||
await self._session.close()
|
||||
|
||||
|
||||
class CapsClient:
|
||||
def __init__(self, region: Optional[ProxiedRegion] = None):
|
||||
self._region: Optional[ProxiedRegion] = proxify(region)
|
||||
|
||||
def request(self, method: str, cap_or_url: str, *, path: str = "", data: Any = None,
|
||||
headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
|
||||
llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
|
||||
proxy: Optional[str] = None, skip_auto_headers: Optional[Sequence[str]] = None,
|
||||
**kwargs) -> _HippoSessionRequestContextManager:
|
||||
if cap_or_url.startswith("http"):
|
||||
if path:
|
||||
raise ValueError("Specifying both path and a full URL not supported")
|
||||
else:
|
||||
if self._region is None:
|
||||
raise RuntimeError(f"Need a region to request a Cap like {cap_or_url}")
|
||||
if cap_or_url not in self._region.caps:
|
||||
raise KeyError(f"{cap_or_url} is not a full URL and not a Cap")
|
||||
cap_or_url = self._region.caps[cap_or_url]
|
||||
if path:
|
||||
cap_or_url += path
|
||||
|
||||
if params is not None:
|
||||
for pname, pval in params.items():
|
||||
if not isinstance(pval, str):
|
||||
params[pname] = str(pval)
|
||||
|
||||
session_owned = False
|
||||
# Use an existing session if we have one to take advantage of connection pooling
|
||||
# otherwise create one
|
||||
if session is None:
|
||||
session_owned = True
|
||||
session = aiohttp.ClientSession(
|
||||
connector=aiohttp.TCPConnector(force_close=True),
|
||||
connector_owner=True
|
||||
)
|
||||
|
||||
if headers is None:
|
||||
headers = {}
|
||||
else:
|
||||
headers = copy.copy(headers)
|
||||
|
||||
# Use sentinel val so explicit `None` can be passed
|
||||
if llsd is not dataclasses.MISSING:
|
||||
data = llsd_lib.format_xml(llsd)
|
||||
# Sometimes needed even on GETs.
|
||||
if "Content-Type" not in headers:
|
||||
headers["Content-Type"] = "application/llsd+xml"
|
||||
# Always present, usually ignored by the server.
|
||||
if "Accept" not in headers:
|
||||
headers["Accept"] = "application/llsd+xml"
|
||||
# Ask to keep the connection open if we're sharing a session
|
||||
if not session_owned:
|
||||
headers["Connection"] = "keep-alive"
|
||||
headers["Keep-alive"] = "300"
|
||||
# We go through the proxy by default, tack on a header letting mitmproxy know the
|
||||
# request came from us so we can tag the request as injected. The header will be popped
|
||||
# off before passing through to the server.
|
||||
ssl = kwargs.pop('ssl', None)
|
||||
def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
|
||||
# We want to proxy this through Hippolyzer
|
||||
if proxy is None:
|
||||
# Always set this so we know this request was from the proxy
|
||||
# We go through the proxy by default, tack on a header letting mitmproxy know the
|
||||
# request came from us so we can tag the request as injected. The header will be popped
|
||||
# off before passing through to the server.
|
||||
headers["X-Hippo-Injected"] = "1"
|
||||
# TODO: Have a setting for this
|
||||
proxy_port = int(os.environ.get("HIPPO_HTTP_PORT", 9062))
|
||||
proxy_port = self._settings.HTTP_PROXY_PORT
|
||||
proxy = f"http://127.0.0.1:{proxy_port}"
|
||||
# TODO: set up the SSLContext to validate mitmproxy's cert
|
||||
ssl = ssl or False
|
||||
@@ -160,28 +45,4 @@ class CapsClient:
|
||||
if sys.platform == "win32" and cap_or_url.startswith("https:"):
|
||||
headers["X-Hippo-Windows-SSL-Hack"] = "1"
|
||||
cap_or_url = re.sub(r"^https:", "http:", cap_or_url)
|
||||
|
||||
resp = session._request(method, cap_or_url, data=data, headers=headers, # noqa: need internal call
|
||||
params=params, ssl=ssl, proxy=proxy,
|
||||
skip_auto_headers=skip_auto_headers or ("User-Agent",), **kwargs)
|
||||
return _HippoSessionRequestContextManager(resp, session, session_owned=session_owned)
|
||||
|
||||
def get(self, cap_or_url: str, *, path: str = "", headers: Optional[dict] = None,
|
||||
session: Optional[aiohttp.ClientSession] = None, params: Optional[Dict[str, Any]] = None,
|
||||
proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
|
||||
return self.request("GET", cap_or_url=cap_or_url, path=path, headers=headers,
|
||||
session=session, params=params, proxy=proxy, **kwargs)
|
||||
|
||||
def post(self, cap_or_url: str, *, path: str = "", data: Any = None,
|
||||
headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
|
||||
llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
|
||||
proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
|
||||
return self.request("POST", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
|
||||
llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)
|
||||
|
||||
def put(self, cap_or_url: str, *, path: str = "", data: Any = None,
|
||||
headers: Optional[dict] = None, session: Optional[aiohttp.ClientSession] = None,
|
||||
llsd: Any = dataclasses.MISSING, params: Optional[Dict[str, Any]] = None,
|
||||
proxy: Optional[str] = None, **kwargs) -> _HippoSessionRequestContextManager:
|
||||
return self.request("PUT", cap_or_url=cap_or_url, path=path, headers=headers, data=data,
|
||||
llsd=llsd, session=session, params=params, proxy=proxy, **kwargs)
|
||||
return cap_or_url, headers, proxy, ssl
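# A hedged usage sketch of the caps client (the cap name is illustrative;
# read_llsd() is the convenience method attached in __aenter__ above):

async def fetch_simulator_features(caps_client: CapsClient) -> Any:
    # "SimulatorFeatures" must be a cap the region actually granted
    async with caps_client.get("SimulatorFeatures") as resp:
        return await resp.read_llsd()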
|
||||
|
||||
@@ -1,67 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import logging
|
||||
from collections import deque
|
||||
from typing import *
|
||||
|
||||
from hippolyzer.lib.base.message.message import Block
|
||||
from hippolyzer.lib.base.message.circuit import Circuit
|
||||
from hippolyzer.lib.base.message.message import Message
|
||||
from hippolyzer.lib.base.message.msgtypes import PacketFlags
|
||||
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
|
||||
from hippolyzer.lib.proxy.packets import Direction, ProxiedUDPPacket
|
||||
from hippolyzer.lib.proxy.message import ProxiedMessage
|
||||
from hippolyzer.lib.base.network.transport import Direction
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from hippolyzer.lib.proxy.region import ProxiedRegion
|
||||
from hippolyzer.lib.proxy.sessions import BaseMessageLogger
|
||||
LLUDP_LOGGING_HOOK = Optional[Callable[[Message], Any]]
|
||||
|
||||
|
||||
class ProxiedCircuit:
|
||||
def __init__(self, near_host, far_host, transport, region: Optional[ProxiedRegion] = None,
|
||||
socks_transport: Optional[bool] = None):
|
||||
self.near_host = near_host
|
||||
self.host = far_host
|
||||
self.is_alive = True
|
||||
self.socks_transport = socks_transport
|
||||
self.transport: Optional[asyncio.DatagramTransport] = transport
|
||||
class ProxiedCircuit(Circuit):
|
||||
def __init__(self, near_host, far_host, transport, logging_hook: LLUDP_LOGGING_HOOK = None):
|
||||
super().__init__(near_host, far_host, transport)
|
||||
self.in_injections = InjectionTracker(0)
|
||||
self.out_injections = InjectionTracker(0)
|
||||
self.serializer = UDPMessageSerializer()
|
||||
self.last_packet_at = dt.datetime.now()
|
||||
self.region: Optional[ProxiedRegion] = region
|
||||
message_logger = None
|
||||
if region:
|
||||
message_logger = region.session().session_manager.message_logger
|
||||
self.message_logger: Optional[BaseMessageLogger] = message_logger
|
||||
self.logging_hook: LLUDP_LOGGING_HOOK = logging_hook
|
||||
|
||||
def _send_prepared_message(self, message: ProxiedMessage, direction, transport=None):
|
||||
def _send_prepared_message(self, message: Message, transport=None):
|
||||
try:
|
||||
serialized = self.serializer.serialize(message)
|
||||
except:
|
||||
logging.exception(f"Failed to serialize: {message.to_dict()!r}")
|
||||
raise
|
||||
if self.message_logger and message.injected:
|
||||
self.message_logger.log_lludp_message(self.region.session(), self.region, message)
|
||||
return self.send_datagram(serialized, direction, transport=transport)
|
||||
|
||||
def send_datagram(self, data: bytes, direction: Direction, transport=None):
|
||||
self.last_packet_at = dt.datetime.now()
|
||||
src_addr, dst_addr = self.host, self.near_host
|
||||
if direction == Direction.OUT:
|
||||
src_addr, dst_addr = self.near_host, self.host
|
||||
|
||||
packet = ProxiedUDPPacket(src_addr, dst_addr, data, direction)
|
||||
packet_data = packet.serialize(socks_header=self.socks_transport)
|
||||
(transport or self.transport).sendto(packet_data, dst_addr)
|
||||
return packet
|
||||
if self.logging_hook and message.injected:
|
||||
self.logging_hook(message)
|
||||
return self.send_datagram(serialized, message.direction, transport=transport)
|
||||
|
||||
def _get_injections(self, direction: Direction):
|
||||
if direction == Direction.OUT:
|
||||
return self.out_injections, self.in_injections
|
||||
return self.in_injections, self.out_injections

    def prepare_message(self, message: ProxiedMessage, direction=None):
    def prepare_message(self, message: Message, direction=None):
        if message.finalized:
            raise RuntimeError(f"Trying to re-send finalized {message!r}")
        direction = direction or getattr(message, 'direction')
@@ -102,12 +75,7 @@ class ProxiedCircuit:
        message.send_flags &= ~PacketFlags.ACK
        return True

    def send_message(self, message: ProxiedMessage, direction=None, transport=None):
        direction = direction or getattr(message, 'direction')
        if self.prepare_message(message, direction):
            return self._send_prepared_message(message, direction, transport)

    def _rewrite_packet_ack(self, message: ProxiedMessage, reverse_injections):
    def _rewrite_packet_ack(self, message: Message, reverse_injections):
        new_blocks = []
        for block in message["Packets"]:
            packet_id = block["ID"]
@@ -124,14 +92,14 @@ class ProxiedCircuit:
        message["Packets"] = new_blocks
        return True

    def _rewrite_start_ping_check(self, message: ProxiedMessage, fwd_injections):
    def _rewrite_start_ping_check(self, message: Message, fwd_injections):
        orig_id = message["PingID"]["OldestUnacked"]
        new_id = fwd_injections.get_effective_id(orig_id)
        if orig_id != new_id:
            logging.debug("Rewrote oldest unacked %s -> %s" % (orig_id, new_id))
            message["PingID"]["OldestUnacked"] = new_id

    def drop_message(self, message: ProxiedMessage, orig_direction=None):
    def drop_message(self, message: Message, orig_direction=None):
        if message.finalized:
            raise RuntimeError(f"Trying to drop finalized {message!r}")
        if message.packet_id is None:
@@ -140,13 +108,12 @@ class ProxiedCircuit:
        fwd_injections, reverse_injections = self._get_injections(orig_direction)

        fwd_injections.mark_dropped(message.packet_id)
        if hasattr(message, 'dropped'):
            message.dropped = True
        message.dropped = True
        message.finalized = True

        # Was sent reliably, tell the other end that we saw it and to shut up.
        if message.reliable:
            self._send_acks([message.packet_id], ~orig_direction)
            self.send_acks([message.packet_id], ~orig_direction)

        # This packet had acks for the other end, send them in a separate PacketAck
        effective_acks = tuple(
@@ -154,20 +121,7 @@ class ProxiedCircuit:
            if not reverse_injections.was_injected(x)
        )
        if effective_acks:
            self._send_acks(effective_acks, orig_direction, packet_id=message.packet_id)

    def _send_acks(self, to_ack, direction, packet_id=None):
        logging.debug("%r acking %r" % (direction, to_ack))
        # TODO: maybe tack this onto `.acks` for next message?
        packet = ProxiedMessage('PacketAck',
                                *[Block('Packets', ID=x) for x in to_ack])
        packet.packet_id = packet_id
        packet.injected = True
        packet.direction = direction
        self.send_message(packet)

    def __repr__(self):
        return "<%s %r : %r>" % (self.__class__.__name__, self.near_host, self.host)
            self.send_acks(effective_acks, orig_direction, packet_id=message.packet_id)


class InjectionTracker:

@@ -48,7 +48,11 @@ class HTTPAssetRepo(collections.UserDict):
        asset_id = None
        for name, val in flow.request.query.items():
            if name.endswith("_id"):
                asset_id = UUID(val)
                try:
                    asset_id = UUID(val)
                    break
                except ValueError:
                    pass

        if not asset_id or asset_id not in self.data:
            return False

@@ -9,11 +9,12 @@ import urllib.parse
import weakref
import xmlrpc.client

import defusedxml.cElementTree
import defusedxml.ElementTree
import defusedxml.xmlrpc
import mitmproxy.http

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.llsd_msg_serializer import LLSDMessageSerializer
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
@@ -24,7 +25,7 @@ from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext

def apply_security_monkeypatches():
    defusedxml.xmlrpc.monkey_patch()
    llsd.fromstring = defusedxml.cElementTree.fromstring
    llsd.fromstring = defusedxml.ElementTree.fromstring


apply_security_monkeypatches()
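
# The monkeypatches above swap in defusedxml's parsers, which refuse entity
# expansion and external-entity (XXE) tricks, so XML arriving from untrusted
# grid peers can't exploit the stock xml.etree / xmlrpc parsers.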
@@ -51,36 +52,39 @@ class MITMProxyEventManager:
        self.llsd_message_serializer = LLSDMessageSerializer()
        self._asset_server_proxied = False

    async def pump_proxy_events(self):
    async def run(self):
        while not self.shutdown_signal.is_set():
            try:
                try:
                    event_type, flow_state = self.from_proxy_queue.get(False)
                except queue.Empty:
                    await asyncio.sleep(0.001)
                    continue

                flow = HippoHTTPFlow.from_state(flow_state, self.session_manager)
                try:
                    if event_type == "request":
                        self._handle_request(flow)
                        # A response was injected early in the cycle, we won't get a response
                        # callback from mitmproxy so just log it now.
                        message_logger = self.session_manager.message_logger
                        if message_logger and flow.response_injected:
                            message_logger.log_http_response(flow)
                    elif event_type == "response":
                        self._handle_response(flow)
                    else:
                        raise Exception(f"Unknown mitmproxy event type {event_type}")
                finally:
                    # If someone has taken this request out of the regular callback flow,
                    # they'll manually send a callback at some later time.
                    if not flow.taken:
                        self.to_proxy_queue.put(("callback", flow.id, flow.get_state()))
                await self.pump_proxy_event()
            except:
                logging.exception("Exploded when handling parsed packets")

    async def pump_proxy_event(self):
        try:
            event_type, flow_state = self.from_proxy_queue.get(False)
        except queue.Empty:
            await asyncio.sleep(0.001)
            return

        flow = HippoHTTPFlow.from_state(flow_state, self.session_manager)
        try:
            if event_type == "request":
                self._handle_request(flow)
                # A response was injected early in the cycle, we won't get a response
                # callback from mitmproxy so just log it now.
                message_logger = self.session_manager.message_logger
                if message_logger and flow.response_injected:
                    message_logger.log_http_response(flow)
            elif event_type == "response":
                self._handle_response(flow)
            else:
                raise Exception(f"Unknown mitmproxy event type {event_type}")
        finally:
            # If someone has taken this request out of the regular callback flow,
            # they'll manually send a callback at some later time.
            if not flow.taken:
                self.to_proxy_queue.put(("callback", flow.id, flow.get_state()))
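
    # Note the pattern above: the flow queue is a multiprocessing.Queue shared
    # with the mitmproxy process, so it's polled with get(False) plus a short
    # asyncio.sleep() rather than a blocking get() that would stall the event loop.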

    def _handle_request(self, flow: HippoHTTPFlow):
        url = flow.request.url
        cap_data = self.session_manager.resolve_cap(url)
@@ -118,11 +122,14 @@ class MITMProxyEventManager:
        else:
            flow.response = mitmproxy.http.HTTPResponse.make(
                307,
                b"Redirecting...",
                # Can't provide explanation in the body because this results in failing Range requests under
                # mitmproxy that return garbage data. Chances are there are weird interactions
                # between HTTP/1.x pipelining and range requests under mitmproxy that no other
                # applications have hit. If that's a concern then Connection: close should be used.
                b"",
                {
                    "Content-Type": "text/plain",
                    "Connection": "keep-alive",
                    "Location": redir_url,
                    "Connection": "close",
                }
            )
        elif cap_data and cap_data.asset_server_cap:
@@ -175,64 +182,89 @@ class MITMProxyEventManager:
        if flow.request_injected:
            return

        if AddonManager.handle_http_response(flow):
            return

        status = flow.response.status_code
        cap_data: Optional[CapData] = flow.metadata["cap_data"]

        if cap_data:
            if status != 200:
        if status == 200 and cap_data and cap_data.cap_name == "FirestormBridge":
            # Fake FirestormBridge cap based on a bridge-like response coming from
            # a non-browser HTTP request. Figure out what session it belongs to
            # so it can be handled in the session and region HTTP MessageHandlers
            agent_id_str = flow.response.headers.get("X-SecondLife-Owner-Key", "")
            if not agent_id_str:
                return
            agent_id = UUID(agent_id_str)
            for session in self.session_manager.sessions:
                if session.pending:
                    continue
                if session.agent_id == agent_id:
                    # Enrich the flow with the session and region info
                    cap_data = CapData(
                        cap_name="FirestormBridge",
                        region=weakref.ref(session.main_region),
                        session=weakref.ref(session),
                    )
                    flow.cap_data = cap_data
                    break

            if cap_data.cap_name == "LoginRequest":
                self._handle_login_flow(flow)
        if AddonManager.handle_http_response(flow):
            return

        if status != 200 or not cap_data:
            return

        if cap_data.cap_name == "LoginRequest":
            self._handle_login_flow(flow)
            return

        try:
            session = cap_data.session and cap_data.session()
            if not session:
                return
            try:
                region = cap_data.region and cap_data.region()
            session.http_message_handler.handle(flow)

            region = cap_data.region and cap_data.region()
            if not region:
                return
            region.http_message_handler.handle(flow)

            if cap_data.cap_name == "Seed":
                parsed = llsd.parse_xml(flow.response.content)
                logging.debug("Got seed cap for %r : %r" % (cap_data, parsed))
                region.update_caps(parsed)

                # On LL's grid these URIs aren't unique across sessions or regions,
                # so we get request attribution by replacing them with a unique
                # alias URI.
                logging.debug("Replacing GetMesh caps with wrapped versions")
                wrappable_caps = {"GetMesh2", "GetMesh", "GetTexture", "ViewerAsset"}
                for cap_name in wrappable_caps:
                    if cap_name in parsed:
                        parsed[cap_name] = region.register_wrapper_cap(cap_name)
                flow.response.content = llsd.format_pretty_xml(parsed)
            elif cap_data.cap_name == "EventQueueGet":
                parsed_eq_resp = llsd.parse_xml(flow.response.content)
                if parsed_eq_resp:
                    old_events = parsed_eq_resp["events"]
                    new_events = []
                    for event in old_events:
                        if not self._handle_eq_event(cap_data.session(), region, event):
                            new_events.append(event)
                    # Add on any fake events that've been queued by addons
                    eq_manager = cap_data.region().eq_manager
                    new_events.extend(eq_manager.take_events())
                    parsed_eq_resp["events"] = new_events
                    if old_events and not new_events:
                        # Need at least one event or the viewer will refuse to ack!
                        new_events.append({"message": "NOP", "body": {}})
                    flow.response.content = llsd.format_pretty_xml(parsed_eq_resp)
            elif cap_data.cap_name in self.UPLOAD_CREATING_CAPS:
                if not region:
                    return

                region.http_message_handler.handle(flow)

                if cap_data.cap_name == "Seed":
                    parsed = llsd.parse_xml(flow.response.content)
                    logging.debug("Got seed cap for %r : %r" % (cap_data, parsed))
                    region.update_caps(parsed)

                    # On LL's grid these URIs aren't unique across sessions or regions,
                    # so we get request attribution by replacing them with a unique
                    # alias URI.
                    logging.debug("Replacing GetMesh caps with wrapped versions")
                    wrappable_caps = {"GetMesh2", "GetMesh", "GetTexture", "ViewerAsset"}
                    for cap_name in wrappable_caps:
                        if cap_name in parsed:
                            parsed[cap_name] = region.register_wrapper_cap(cap_name)
                    flow.response.content = llsd.format_pretty_xml(parsed)
                elif cap_data.cap_name == "EventQueueGet":
                    parsed_eq_resp = llsd.parse_xml(flow.response.content)
                    if parsed_eq_resp:
                        old_events = parsed_eq_resp["events"]
                        new_events = []
                        for event in old_events:
                            if not self._handle_eq_event(cap_data.session(), region, event):
                                new_events.append(event)
                        # Add on any fake events that've been queued by addons
                        eq_manager = cap_data.region().eq_manager
                        new_events.extend(eq_manager.take_events())
                        parsed_eq_resp["events"] = new_events
                        if old_events and not new_events:
                            # Need at least one event or the viewer will refuse to ack!
                            new_events.append({"message": "NOP", "body": {}})
                        flow.response.content = llsd.format_pretty_xml(parsed_eq_resp)
                elif cap_data.cap_name in self.UPLOAD_CREATING_CAPS:
                    if not region:
                        return
                    parsed = llsd.parse_xml(flow.response.content)
                    if "uploader" in parsed:
                        region.register_temporary_cap(cap_data.cap_name + "Uploader", parsed["uploader"])
            except:
                logging.exception("OOPS, blew up in HTTP proxy!")
            parsed = llsd.parse_xml(flow.response.content)
            if "uploader" in parsed:
                region.register_temporary_cap(cap_data.cap_name + "Uploader", parsed["uploader"])
        except:
            logging.exception("OOPS, blew up in HTTP proxy!")

    def _handle_login_flow(self, flow: HippoHTTPFlow):
        resp = xmlrpc.client.loads(flow.response.content)[0][0]  # type: ignore

@@ -5,7 +5,6 @@ import multiprocessing
import os
import re
import sys
import pkg_resources
import queue
import typing
import uuid
@@ -20,6 +19,7 @@ from mitmproxy.addons import core, clientplayback
from mitmproxy.http import HTTPFlow
import OpenSSL

from hippolyzer.lib.base.helpers import get_resource_filename
from hippolyzer.lib.base.multiprocessing_utils import ParentProcessWatcher

orig_sethostflags = OpenSSL.SSL._lib.X509_VERIFY_PARAM_set_hostflags  # noqa
@@ -82,6 +82,7 @@ class HTTPFlowContext:
        self.from_proxy_queue = multiprocessing.Queue()
        self.to_proxy_queue = multiprocessing.Queue()
        self.shutdown_signal = multiprocessing.Event()
        self.mitmproxy_ready = multiprocessing.Event()
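        # The queues carry flow states between the mitmproxy child process and
        # the main process; the Events signal shutdown and readiness across the
        # same process boundary.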


class IPCInterceptionAddon:
@@ -230,7 +231,7 @@ def create_proxy_master(host, port, flow_context: HTTPFlowContext):  # pragma: n
        os.path.join(opts.confdir, "config.yml"),
    )
    # Use SL's CA bundle so LL's CA certs won't cause verification errors
    ca_bundle = pkg_resources.resource_filename("hippolyzer.lib.base", "network/data/ca-bundle.crt")
    ca_bundle = get_resource_filename("lib/base/network/data/ca-bundle.crt")
    opts.update(
        ssl_verify_upstream_trusted_ca=ca_bundle,
        listen_host=host,
@@ -249,9 +250,9 @@ def create_http_proxy(bind_host, port, flow_context: HTTPFlowContext):  # pragma

def is_asset_server_cap_name(cap_name):
    return cap_name and (
        cap_name.startswith("GetMesh") or
        cap_name.startswith("GetTexture") or
        cap_name.startswith("ViewerAsset")
        cap_name.startswith("GetMesh")
        or cap_name.startswith("GetTexture")
        or cap_name.startswith("ViewerAsset")
    )


@@ -5,10 +5,9 @@ from typing import Optional, Tuple
from hippolyzer.lib.base.message.message_dot_xml import MessageDotXML
from hippolyzer.lib.base.message.udpdeserializer import UDPMessageDeserializer
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
from hippolyzer.lib.base.settings import Settings
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.packets import ProxiedUDPPacket
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.base.network.transport import UDPPacket
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import Session, SessionManager
from hippolyzer.lib.proxy.socks_proxy import SOCKS5Server, UDPProxyProtocol
@@ -26,56 +25,39 @@ class SLSOCKS5Server(SOCKS5Server):
        return lambda: InterceptingLLUDPProxyProtocol(source_addr, self.session_manager)


class BaseLLUDPProxyProtocol(UDPProxyProtocol):
    def __init__(self, source_addr: Tuple[str, int]):
class InterceptingLLUDPProxyProtocol(UDPProxyProtocol):
    def __init__(self, source_addr: Tuple[str, int], session_manager: SessionManager):
        super().__init__(source_addr)
        self.settings = Settings()
        self.settings.ENABLE_DEFERRED_PACKET_PARSING = True
        self.settings.HANDLE_PACKETS = False
        self.session_manager: SessionManager = session_manager
        self.serializer = UDPMessageSerializer()
        self.deserializer = UDPMessageDeserializer(
            settings=self.settings,
            message_cls=ProxiedMessage,
            settings=self.session_manager.settings,
        )
        self.message_xml = MessageDotXML()
        self.session: Optional[Session] = None

    def _ensure_message_allowed(self, msg: ProxiedMessage):
    def _ensure_message_allowed(self, msg: Message):
        if not self.message_xml.validate_udp_msg(msg.name):
            LOG.warning(
                f"Received {msg.name!r} over UDP, when it should come over the event queue. Discarding."
            )
            raise PermissionError(f"UDPBanned message {msg.name}")


class InterceptingLLUDPProxyProtocol(BaseLLUDPProxyProtocol):
    def __init__(self, source_addr: Tuple[str, int], session_manager: SessionManager):
        super().__init__(source_addr)
        self.session_manager: SessionManager = session_manager
        self.session: Optional[Session] = None

    def _handle_proxied_packet(self, packet: ProxiedUDPPacket):
        message: Optional[ProxiedMessage] = None
    def handle_proxied_packet(self, packet: UDPPacket):
        region: Optional[ProxiedRegion] = None
        # Try to do an initial region lookup so we have it for handle_proxied_packet()
        if self.session:
            region = self.session.region_by_circuit_addr(packet.far_addr)
        deserialize_exc = None
        try:
            message = self.deserializer.deserialize(packet.data)
            message.direction = packet.direction
        except Exception as e:
            # Hang onto this since handle_proxied_packet doesn't need a parseable
            # message. If that hook doesn't handle the packet then re-raise.
            deserialize_exc = e

        # the proxied packet handler is allowed to mutate `packet.data` before
        # the message gets parsed.
        if AddonManager.handle_proxied_packet(self.session_manager, packet,
                                              self.session, region, message):
            # Swallow any error raised by above message deserialization, it was handled.
                                              self.session, region):
            return

        if deserialize_exc is not None:
            # handle_proxied_packet() didn't deal with the error, so it's fatal.
            raise deserialize_exc
        message = self.deserializer.deserialize(packet.data)
        message.direction = packet.direction
        message.sender = packet.src_addr

        assert message is not None
        # Check for UDP bans on inbound messages
@@ -122,20 +104,39 @@ class InterceptingLLUDPProxyProtocol(BaseLLUDPProxyProtocol):
                region.handle = message["Data"]["RegionHandle"]
                LOG.info(f"Setting main region to {region!r}, had circuit addr {packet.far_addr!r}")
                AddonManager.handle_region_changed(self.session, region)
            if message.name == "RegionHandshake":
                region.cache_id = message["RegionInfo"]["CacheID"]
                self.session.objects.track_region_objects(region.handle)
                if self.session_manager.settings.USE_VIEWER_OBJECT_CACHE:
                    try:
                        region.objects.load_cache()
                    except:
                        LOG.exception("Failed to load region cache, skipping")

        try:
            self.session.message_handler.handle(message)
        except:
            LOG.exception("Failed in session message handler")
        try:
            region.message_handler.handle(message)
        except:
            LOG.exception("Failed in region message handler")

        message_logger = self.session_manager.message_logger
        if message_logger:
            message_logger.log_lludp_message(self.session, region, message)

        handled = AddonManager.handle_lludp_message(
            self.session, region, message
        )

        # This message is owned by an async handler, drop it so it doesn't get
        # sent with the normal flow.
        if message.queued and not message.dropped:
            region.circuit.drop_message(message)

        # Shouldn't mutate the message past this point, so log it now.
        if message_logger:
            message_logger.log_lludp_message(self.session, region, message)

        if handled:
            return

@@ -144,11 +145,6 @@ class InterceptingLLUDPProxyProtocol(BaseLLUDPProxyProtocol):
        elif message.name == "RegionHandshake":
            region.name = str(message["RegionInfo"][0]["SimName"])

        # This message is owned by an async handler, drop it so it doesn't get
        # sent with the normal flow.
        if message.queued and not message.dropped:
            region.circuit.drop_message(message)

        if not message.dropped:
            region.circuit.send_message(message)


@@ -3,28 +3,30 @@ import ast
import typing

from arpeggio import Optional, ZeroOrMore, EOF, \
    ParserPython, PTNodeVisitor, visit_parse_tree
from arpeggio import RegExMatch as _
    ParserPython, PTNodeVisitor, visit_parse_tree, RegExMatch


def literal():
    return [
        # Nightmare. str or bytes literal.
        # https://stackoverflow.com/questions/14366401/#comment79795017_14366904
        _(r'''b?(\"\"\"|\'\'\'|\"|\')((?<!\\)(\\\\)*\\\1|.)*?\1'''),
        _(r'\d+(\.\d+)?'),
        RegExMatch(r'''b?(\"\"\"|\'\'\'|\"|\')((?<!\\)(\\\\)*\\\1|.)*?\1'''),
        # base16
        RegExMatch(r'0x\d+'),
        # base10 int or float.
        RegExMatch(r'\d+(\.\d+)?'),
        "None",
        "True",
        "False",
        # vector3 (tuple)
        _(r'\(\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*\)'),
        RegExMatch(r'\(\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*\)'),
        # vector4 (tuple)
        _(r'\(\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*\)'),
        RegExMatch(r'\(\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*,\s*\d+(\.\d+)?\s*\)'),
    ]


def identifier():
    return _(r'[a-zA-Z*]([a-zA-Z0-9*]+)?')
    return RegExMatch(r'[a-zA-Z*]([a-zA-Z0-9_*]+)?')
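
# `*` is a legal identifier character so selectors can be fnmatch-style
# patterns (e.g. matching GetMesh and GetMesh2 alike); the new regex also
# permits `_` inside identifiers.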


def field_specifier():
@@ -43,12 +45,16 @@ def meta_field_specifier():
    return "Meta", ".", identifier


def enum_field_specifier():
    return identifier, ".", identifier


def compare_val():
    return [literal, meta_field_specifier]
    return [literal, meta_field_specifier, enum_field_specifier]


def binary_expression():
    return field_specifier, ["==", "!=", "^=", "$=", "~=", ">", ">=", "<", "<="], compare_val
    return field_specifier, ["==", "!=", "^=", "$=", "~=", ">", ">=", "<", "<=", "&"], compare_val


def term():
@@ -63,9 +69,12 @@ def message_filter():
    return expression, EOF
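
# Illustrative filter strings this grammar accepts (the field and enum names
# below are examples only, not confirmed template names):
#   ChatFromViewer.ChatData.Message ~= "hello"
#   AgentUpdate.AgentData.ControlFlags & AgentControlFlags.AGENT_CONTROL_FLY
#   ObjectUpdate*            (bare specifier, matches on message name alone)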


MATCH_RESULT = typing.Union[bool, typing.Tuple]
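# match() may return a plain bool or a truthy tuple pinpointing where the
# match occurred (message name, block, block number, var); either form works
# with the boolean combinators below.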


class BaseFilterNode(abc.ABC):
    @abc.abstractmethod
    def match(self, msg) -> bool:
    def match(self, msg) -> MATCH_RESULT:
        raise NotImplementedError()

    @property
@@ -95,17 +104,17 @@ class BinaryFilterNode(BaseFilterNode, abc.ABC):


class UnaryNotFilterNode(UnaryFilterNode):
    def match(self, msg) -> bool:
    def match(self, msg) -> MATCH_RESULT:
        return not self.node.match(msg)


class OrFilterNode(BinaryFilterNode):
    def match(self, msg) -> bool:
    def match(self, msg) -> MATCH_RESULT:
        return self.left_node.match(msg) or self.right_node.match(msg)


class AndFilterNode(BinaryFilterNode):
    def match(self, msg) -> bool:
    def match(self, msg) -> MATCH_RESULT:
        return self.left_node.match(msg) and self.right_node.match(msg)


@@ -115,7 +124,7 @@ class MessageFilterNode(BaseFilterNode):
        self.operator = operator
        self.value = value

    def match(self, msg) -> bool:
    def match(self, msg) -> MATCH_RESULT:
        return msg.matches(self)

    @property
@@ -127,6 +136,11 @@ class MetaFieldSpecifier(str):
    pass


class EnumFieldSpecifier(typing.NamedTuple):
    enum_name: str
    field_name: str


class LiteralValue:
    """Only exists because we can't return `None` in a visitor, need to box it"""
    def __init__(self, value):
@@ -134,23 +148,26 @@ class LiteralValue:


class MessageFilterVisitor(PTNodeVisitor):
    def visit_identifier(self, node, children):
    def visit_identifier(self, node, _children):
        return str(node.value)

    def visit_field_specifier(self, node, children):
    def visit_field_specifier(self, _node, children):
        return children

    def visit_literal(self, node, children):
    def visit_literal(self, node, _children):
        return LiteralValue(ast.literal_eval(node.value))

    def visit_meta_field_specifier(self, node, children):
    def visit_meta_field_specifier(self, _node, children):
        return MetaFieldSpecifier(children[0])

    def visit_unary_field_specifier(self, node, children):
    def visit_enum_field_specifier(self, _node, children):
        return EnumFieldSpecifier(*children)

    def visit_unary_field_specifier(self, _node, children):
        # Looks like a bare field specifier with no operator
        return MessageFilterNode(tuple(children), None, None)

    def visit_unary_expression(self, node, children):
    def visit_unary_expression(self, _node, children):
        if len(children) == 1:
            if isinstance(children[0], BaseFilterNode):
                return children[0]
@@ -162,10 +179,10 @@ class MessageFilterVisitor(PTNodeVisitor):
        else:
            raise ValueError(f"Unrecognized unary prefix {children[0]}")

    def visit_binary_expression(self, node, children):
    def visit_binary_expression(self, _node, children):
        return MessageFilterNode(tuple(children[0]), children[1], children[2])

    def visit_expression(self, node, children):
    def visit_expression(self, _node, children):
        if self.debug:
            print("Expression {}".format(children))
        if len(children) > 1:
hippolyzer/lib/proxy/message_logger.py (new file)
@@ -0,0 +1,644 @@
from __future__ import annotations

import collections
import copy
import fnmatch
import io
import logging
import pickle
import re
import typing
import weakref

from defusedxml import minidom

from hippolyzer.lib.base import serialization as se, llsd
from hippolyzer.lib.base.datatypes import TaggedUnion, UUID, TupleCoord
from hippolyzer.lib.base.helpers import bytes_escape
from hippolyzer.lib.base.message.message_formatting import HumanMessageSerializer
from hippolyzer.lib.proxy.message_filter import MetaFieldSpecifier, compile_filter, BaseFilterNode, MessageFilterNode, \
    EnumFieldSpecifier
from hippolyzer.lib.proxy.region import CapType

if typing.TYPE_CHECKING:
    from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
    from hippolyzer.lib.base.message.message import Message
    from hippolyzer.lib.proxy.region import ProxiedRegion
    from hippolyzer.lib.proxy.sessions import Session

LOG = logging.getLogger(__name__)


class BaseMessageLogger:
    def log_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        pass

    def log_http_response(self, flow: HippoHTTPFlow):
        pass

    def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
        pass


class FilteringMessageLogger(BaseMessageLogger):
    def __init__(self):
        BaseMessageLogger.__init__(self)
        self._raw_entries = collections.deque(maxlen=2000)
        self._filtered_entries: typing.List[AbstractMessageLogEntry] = []
        self._paused = False
        self.filter: BaseFilterNode = compile_filter("")

    def set_filter(self, filter_str: str):
        self.filter = compile_filter(filter_str)
        self._begin_reset()
        # Keep any entries that've aged out of the raw entries list that
        # match the new filter
        self._filtered_entries = [
            m for m in self._filtered_entries if
            m not in self._raw_entries and self.filter.match(m)
        ]
        self._filtered_entries.extend((m for m in self._raw_entries if self.filter.match(m)))
        self._end_reset()

    def set_paused(self, paused: bool):
        self._paused = paused

    def log_lludp_message(self, session: Session, region: ProxiedRegion, message: Message):
        if self._paused:
            return
        self._add_log_entry(LLUDPMessageLogEntry(message, region, session))

    def log_http_response(self, flow: HippoHTTPFlow):
        if self._paused:
            return
        # These are huge, let's not log them for now.
        if flow.cap_data and flow.cap_data.asset_server_cap:
            return
        self._add_log_entry(HTTPMessageLogEntry(flow))

    def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
        if self._paused:
            return
        self._add_log_entry(EQMessageLogEntry(event, region, session))

    # Hooks that Qt models will want to implement
    def _begin_insert(self, insert_idx: int):
        pass

    def _end_insert(self):
        pass

    def _begin_reset(self):
        pass

    def _end_reset(self):
        pass

    def _add_log_entry(self, entry: AbstractMessageLogEntry):
        try:
            # Paused, throw it away.
            if self._paused:
                return
            self._raw_entries.append(entry)
            if self.filter.match(entry):
                next_idx = len(self._filtered_entries)
                self._begin_insert(next_idx)
                self._filtered_entries.append(entry)
                self._end_insert()

            entry.cache_summary()
            # In the common case we don't need to keep around the serialization
            # caches anymore. If the filter changes, the caches will be repopulated
            # as necessary.
            entry.freeze()
        except Exception:
            LOG.exception("Failed to filter queued message")

    def clear(self):
        self._begin_reset()
        self._filtered_entries.clear()
        self._raw_entries.clear()
        self._end_reset()


class AbstractMessageLogEntry:
    region: typing.Optional[ProxiedRegion]
    session: typing.Optional[Session]
    name: str
    type: str

    __slots__ = ["_region", "_session", "_region_name", "_agent_id", "_summary", "meta"]

    def __init__(self, region, session):
        if region and not isinstance(region, weakref.ReferenceType):
            region = weakref.ref(region)
        if session and not isinstance(session, weakref.ReferenceType):
            session = weakref.ref(session)

        self._region: typing.Optional[weakref.ReferenceType] = region
        self._session: typing.Optional[weakref.ReferenceType] = session
        self._region_name = None
        self._agent_id = None
        self._summary = None
        if self.region:
            self._region_name = self.region.name
        if self.session:
            self._agent_id = self.session.agent_id

        agent_obj = None
        if self.region is not None:
            agent_obj = self.region.objects.lookup_fullid(self.agent_id)
        self.meta = {
            "RegionName": self.region_name,
            "AgentID": self.agent_id,
            "SessionID": self.session.id if self.session else None,
            "AgentLocal": agent_obj.LocalID if agent_obj is not None else None,
            "Method": self.method,
            "Type": self.type,
            "SelectedLocal": self._current_selected_local(),
            "SelectedFull": self._current_selected_full(),
        }

    def freeze(self):
        pass

    def cache_summary(self):
        self._summary = self.summary

    def _current_selected_local(self):
        if self.session:
            return self.session.selected.object_local
        return None

    def _current_selected_full(self):
        selected_local = self._current_selected_local()
        if selected_local is None or self.region is None:
            return None
        obj = self.region.objects.lookup_localid(selected_local)
        return obj and obj.FullID

    def _get_meta(self, name: str):
        # Slight difference in semantics. Filters are meant to return the same
        # thing no matter when they're run, so SelectedLocal and friends resolve
        # to the selected items _at the time the message was logged_. To handle
        # the case where we want to match on the selected object at the time the
        # filter is evaluated, we resolve these here.
        if name == "CurrentSelectedLocal":
            return self._current_selected_local()
        elif name == "CurrentSelectedFull":
            return self._current_selected_full()
        return self.meta.get(name)

    @property
    def region(self) -> typing.Optional[ProxiedRegion]:
        if self._region:
            return self._region()
        return None

    @property
    def session(self) -> typing.Optional[Session]:
        if self._session:
            return self._session()
        return None

    @property
    def region_name(self) -> str:
        region = self.region
        if region:
            self._region_name = region.name
            return self._region_name
        # Region may die after a message is logged, need to keep this around.
        if self._region_name:
            return self._region_name

        return ""

    @property
    def agent_id(self) -> typing.Optional[UUID]:
        if self._agent_id:
            return self._agent_id

        session = self.session
        if session:
            self._agent_id = session.agent_id
            return self._agent_id
        return None

    @property
    def host(self) -> str:
        region_name = self.region_name
        if not region_name:
            return ""
        session_str = ""
        agent_id = self.agent_id
        if agent_id:
            session_str = f" ({agent_id})"
        return region_name + session_str

    def request(self, beautify=False, replacements=None):
        return None

    def response(self, beautify=False):
        return None

    def _packet_root_matches(self, pattern):
        if fnmatch.fnmatchcase(self.name, pattern):
            return True
        if fnmatch.fnmatchcase(self.type, pattern):
            return True
        return False

    def _val_matches(self, operator, val, expected):
        if isinstance(expected, MetaFieldSpecifier):
            expected = self._get_meta(str(expected))
            if not isinstance(expected, (int, float, bytes, str, type(None), tuple)):
                if callable(expected):
                    expected = expected()
                else:
                    expected = str(expected)
        elif isinstance(expected, EnumFieldSpecifier):
            # Local import so we get a fresh copy of the templates module
            from hippolyzer.lib.proxy import templates
            enum_cls = getattr(templates, expected.enum_name)
            expected = enum_cls[expected.field_name]
        elif expected is not None:
            # Unbox the expected value
            expected = expected.value
        if not isinstance(val, (int, float, bytes, str, type(None), tuple, TupleCoord)):
            val = str(val)

        if not operator:
            return bool(val)
        elif operator == "==":
            return val == expected
        elif operator == "!=":
            return val != expected
        elif operator == "^=":
            if val is None:
                return False
            return val.startswith(expected)
        elif operator == "$=":
            if val is None:
                return False
            return val.endswith(expected)
        elif operator == "~=":
            if val is None:
                return False
            return expected in val
        elif operator == "<":
            return val < expected
        elif operator == "<=":
            return val <= expected
        elif operator == ">":
            return val > expected
        elif operator == ">=":
            return val >= expected
        elif operator == "&":
            return val & expected
        else:
            raise ValueError(f"Unexpected operator {operator!r}")

    def _base_matches(self, matcher: "MessageFilterNode") -> typing.Optional[bool]:
        if len(matcher.selector) == 1:
            # Comparison operators would make no sense here
            if matcher.value or matcher.operator:
                return False
            return self._packet_root_matches(matcher.selector[0])
        if len(matcher.selector) == 2 and matcher.selector[0] == "Meta":
            return self._val_matches(matcher.operator, self._get_meta(matcher.selector[1]), matcher.value)
        return None

    def matches(self, matcher: "MessageFilterNode"):
        return self._base_matches(matcher) or False

    @property
    def seq(self):
        return ""

    @property
    def method(self):
        return ""

    @property
    def summary(self):
        return ""

    @staticmethod
    def _format_llsd(parsed):
        xmlified = llsd.format_pretty_xml(parsed)
        # dedent <key> by 1 for easier visual scanning
        xmlified = re.sub(rb" <key>", b"<key>", xmlified)
        return xmlified.decode("utf8", errors="replace")


class HTTPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["flow"]

    def __init__(self, flow: HippoHTTPFlow):
        self.flow: HippoHTTPFlow = flow
        cap_data = self.flow.cap_data
        region = cap_data and cap_data.region
        session = cap_data and cap_data.session

        super().__init__(region, session)
        # This was a request the proxy made through itself
        self.meta["Injected"] = flow.request_injected

    @property
    def type(self):
        return "HTTP"

    @property
    def name(self):
        cap_data = self.flow.cap_data
        name = cap_data and cap_data.cap_name
        if name:
            return name
        return self.flow.request.url

    @property
    def method(self):
        return self.flow.request.method

    def _format_http_message(self, want_request, beautify):
        message = self.flow.request if want_request else self.flow.response
        method = self.flow.request.method
        buf = io.StringIO()
        cap_data = self.flow.cap_data
        cap_name = cap_data and cap_data.cap_name
        base_url = cap_name and cap_data.base_url
        temporary_cap = cap_data and cap_data.type == CapType.TEMPORARY
        beautify_url = (beautify and base_url and cap_name
                        and not temporary_cap and self.session and want_request)
        if want_request:
            buf.write(message.method)
            buf.write(" ")
            if beautify_url:
                buf.write(f"[[{cap_name}]]{message.url[len(base_url):]}")
            else:
                buf.write(message.url)
            buf.write(" ")
            buf.write(message.http_version)
        else:
            buf.write(message.http_version)
            buf.write(" ")
            buf.write(str(message.status_code))
            buf.write(" ")
            buf.write(message.reason)
        buf.write("\r\n")
        if beautify_url:
            buf.write("# ")
            buf.write(message.url)
            buf.write("\r\n")

        headers = copy.deepcopy(message.headers)
        for key in tuple(headers.keys()):
            if key.lower().startswith("x-hippo-"):
                LOG.warning(f"Internal header {key!r} leaked out?")
                # If this header actually came from somewhere untrusted, we can't
                # include it. It may change the meaning of the message when replayed.
                headers[f"X-Untrusted-{key}"] = headers[key]
                headers.pop(key)
        beautified = None
        if beautify and message.content:
            try:
                serializer = se.HTTP_SERIALIZERS.get(cap_name)
                if serializer:
                    if want_request:
                        beautified = serializer.deserialize_req_body(method, message.content)
                    else:
                        beautified = serializer.deserialize_resp_body(method, message.content)

                    if beautified is se.UNSERIALIZABLE:
                        beautified = None
                    else:
                        beautified = self._format_llsd(beautified)
                        headers["X-Hippo-Beautify"] = "1"

                if not beautified:
                    content_type = self._guess_content_type(message)
                    if content_type.startswith("application/llsd"):
                        beautified = self._format_llsd(llsd.parse(message.content))
                    elif any(content_type.startswith(x) for x in ("application/xml", "text/xml")):
                        beautified = minidom.parseString(message.content).toprettyxml(indent=" ")
                        # kill blank lines. will break cdata sections. meh.
                        beautified = re.sub(r'\n\s*\n', '\n', beautified, flags=re.MULTILINE)
                        beautified = re.sub(r'<([\w]+)>\s*</\1>', r'<\1></\1>',
                                            beautified, flags=re.MULTILINE)
            except:
                LOG.exception("Failed to beautify message")

        message_body = beautified or message.content
        if isinstance(message_body, bytes):
            try:
                decoded = message.text
                # Valid in many codecs, but unprintable.
                if "\x00" in decoded:
                    raise ValueError("Embedded null")
                message_body = decoded
            except (UnicodeError, ValueError):
                # non-printable characters, return the escaped version.
                headers["X-Hippo-Escaped-Body"] = "1"
                message_body = bytes_escape(message_body).decode("utf8")

        buf.write(bytes(headers).decode("utf8", errors="replace"))
        buf.write("\r\n")

        buf.write(message_body)
        return buf.getvalue()

    def request(self, beautify=False, replacements=None):
        return self._format_http_message(want_request=True, beautify=beautify)

    def response(self, beautify=False):
        return self._format_http_message(want_request=False, beautify=beautify)

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        msg = self.flow.response
        self._summary = f"{msg.status_code}: "
        if not msg.content:
            return self._summary
        if len(msg.content) > 1000000:
            self._summary += "[too large...]"
            return self._summary
        content_type = self._guess_content_type(msg)
        if content_type.startswith("application/llsd"):
            notation = llsd.format_notation(llsd.parse(msg.content))
            self._summary += notation.decode("utf8")[:500]
        return self._summary

    def _guess_content_type(self, message):
        content_type = message.headers.get("Content-Type", "")
        if not message.content or content_type.startswith("application/llsd"):
            return content_type
        # Sometimes gets sent with `text/plain` or `text/html`. Cool.
        if message.content.startswith(rb'<?xml version="1.0" ?><llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<llsd>'):
            return "application/llsd+xml"
        if message.content.startswith(rb'<?xml '):
            return "application/xml"
        return content_type


class EQMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["event"]

    def __init__(self, event, region, session):
        super().__init__(region, session)
        self.event = event

    @property
    def type(self):
        return "EQ"

    def request(self, beautify=False, replacements=None):
        return f'EQ {self.event["message"]}\n\n{self._format_llsd(self.event["body"])}'

    @property
    def name(self):
        return self.event["message"]

    @property
    def summary(self):
        if self._summary is not None:
            return self._summary
        self._summary = ""
        self._summary = llsd.format_notation(self.event["body"]).decode("utf8")[:500]
        return self._summary


class LLUDPMessageLogEntry(AbstractMessageLogEntry):
    __slots__ = ["_message", "_name", "_direction", "_frozen_message", "_seq", "_deserializer"]

    def __init__(self, message: Message, region, session):
        self._message: Message = message
        self._deserializer = None
        self._name = message.name
        self._direction = message.direction
        self._frozen_message: typing.Optional[bytes] = None
        self._seq = message.packet_id
        super().__init__(region, session)

    _MESSAGE_META_ATTRS = {
        "Injected", "Dropped", "Extra", "Resent", "Zerocoded", "Acks", "Reliable",
    }

    def _get_meta(self, name: str):
        # These may change between when the message is logged and when we
        # actually filter on it, since logging happens before addons.
        msg = self.message
        if name in self._MESSAGE_META_ATTRS:
            return getattr(msg, name.lower(), None)
        msg_meta = getattr(msg, "meta", None)
        if msg_meta is not None:
            if name in msg_meta:
                return msg_meta[name]
        return super()._get_meta(name)

    @property
    def message(self) -> Message:
        if self._message:
            return self._message
        elif self._frozen_message:
            message = pickle.loads(self._frozen_message)
            message.deserializer = self._deserializer
            return message
        else:
            raise ValueError("Didn't have a fresh or frozen message somehow")

    def freeze(self):
        message = self.message
        message.invalidate_caches()
        # These are expensive to keep around. pickle them and un-pickle on
        # an as-needed basis.
        self._deserializer = self.message.deserializer
        message.deserializer = None
        try:
            self._frozen_message = pickle.dumps(self._message, protocol=pickle.HIGHEST_PROTOCOL)
        finally:
            message.deserializer = self._deserializer
        self._message = None
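
    # The shared deserializer is detached before pickling so it isn't baked
    # into every frozen blob, then re-attached on thaw in the `message`
    # property above.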

    @property
    def type(self):
        return "LLUDP"

    @property
    def name(self):
        if self._message:
            self._name = self._message.name
        return self._name

    @property
    def method(self):
        if self._message:
            self._direction = self._message.direction
        return self._direction.name if self._direction is not None else ""

    def request(self, beautify=False, replacements=None):
        return HumanMessageSerializer.to_human_string(self.message, replacements, beautify)

    def matches(self, matcher):
        base_matched = self._base_matches(matcher)
        if base_matched is not None:
            return base_matched

        if not self._packet_root_matches(matcher.selector[0]):
            return False

        message = self.message

        selector_len = len(matcher.selector)
        # name, block_name, var_name(, subfield_name)?
        if selector_len not in (3, 4):
            return False
        for block_name in message.blocks:
            if not fnmatch.fnmatchcase(block_name, matcher.selector[1]):
                continue
            for block_num, block in enumerate(message[block_name]):
                for var_name in block.vars.keys():
                    if not fnmatch.fnmatchcase(var_name, matcher.selector[2]):
                        continue
                    # So we know where the match happened
                    span_key = (message.name, block_name, block_num, var_name)
                    if selector_len == 3:
                        # We're just matching on the var existing, not having any particular value
                        if matcher.value is None:
                            return span_key
                        if self._val_matches(matcher.operator, block[var_name], matcher.value):
                            return span_key
                    # Need to invoke a special unpacker
                    elif selector_len == 4:
                        try:
                            deserialized = block.deserialize_var(var_name)
                        except KeyError:
                            continue
                        # Discard the tag if this is a tagged union, we only want the value
                        if isinstance(deserialized, TaggedUnion):
                            deserialized = deserialized.value
                        if not isinstance(deserialized, dict):
                            return False
                        for key in deserialized.keys():
                            if fnmatch.fnmatchcase(str(key), matcher.selector[3]):
                                if matcher.value is None:
                                    return span_key
                                if self._val_matches(matcher.operator, deserialized[key], matcher.value):
                                    return span_key

        return False

    @property
    def summary(self):
        if self._summary is None:
            self._summary = self.message.to_summary()[:500]
        return self._summary

    @property
    def seq(self):
        if self._message:
            self._seq = self._message.packet_id
        return self._seq
hippolyzer/lib/proxy/namecache.py (new file)
@@ -0,0 +1,51 @@
from __future__ import annotations

import logging
from typing import *

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.message_handler import MessageHandler
from hippolyzer.lib.client.namecache import NameCache
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow


class ProxyNameCache(NameCache):
    def create_subscriptions(
            self,
            message_handler: MessageHandler[Message, str],
            http_message_handler: Optional[MessageHandler[HippoHTTPFlow, str]] = None,
    ):
        super().create_subscriptions(message_handler)
        if http_message_handler is not None:
            http_message_handler.subscribe("GetDisplayNames", self._handle_get_display_names)

    def load_viewer_caches(self):
        for cache_dir in iter_viewer_cache_dirs():
            try:
                namecache_file = cache_dir / "avatar_name_cache.xml"
                if namecache_file.exists():
                    with open(namecache_file, "rb") as f:
                        namecache_bytes = f.read()
                    agents = llsd.parse_xml(namecache_bytes)["agents"]
                    for agent_id, agent_data in agents.items():
                        # Don't set display name if they just have the default
                        display_name = None
                        if not agent_data["is_display_name_default"]:
                            display_name = agent_data["display_name"]
                        self.update(UUID(agent_id), {
                            "FirstName": agent_data["legacy_first_name"],
                            "LastName": agent_data["legacy_last_name"],
                            "DisplayName": display_name,
                        })
            except:
                logging.exception(f"Failed to load namecache from {cache_dir}")

    def _handle_get_display_names(self, flow: HippoHTTPFlow):
        if flow.response.status_code != 200:
            return
        self._process_display_names_response(llsd.parse_xml(flow.response.content))
hippolyzer/lib/proxy/object_manager.py (new file)
@@ -0,0 +1,168 @@
from __future__ import annotations

import asyncio
import logging
from typing import *

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.templates import PCode
from hippolyzer.lib.client.namecache import NameCache
from hippolyzer.lib.client.object_manager import (
    ClientObjectManager,
    UpdateType, ClientWorldObjectManager,
)

from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.settings import ProxySettings
from hippolyzer.lib.proxy.vocache import RegionViewerObjectCacheChain

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion
    from hippolyzer.lib.proxy.sessions import Session

LOG = logging.getLogger(__name__)


class ProxyObjectManager(ClientObjectManager):
    """
    Object manager for a specific region
    """
    _region: ProxiedRegion

    def __init__(
            self,
            region: ProxiedRegion,
            may_use_vo_cache: bool = False
    ):
        super().__init__(region)
        self.may_use_vo_cache = may_use_vo_cache
        self.cache_loaded = False
        self.object_cache = RegionViewerObjectCacheChain([])
        self._cache_miss_timer: Optional[asyncio.TimerHandle] = None
        self.queued_cache_misses: Set[int] = set()
        region.message_handler.subscribe(
            "RequestMultipleObjects",
            self._handle_request_multiple_objects,
        )

    def load_cache(self):
        if not self.may_use_vo_cache or self.cache_loaded:
            return
        handle = self._region.handle
        if not handle:
            LOG.warning(f"Tried to load cache for {self._region} without a handle")
            return
        self.cache_loaded = True
        self.object_cache = RegionViewerObjectCacheChain.for_region(handle, self._region.cache_id)

    def request_missed_cached_objects_soon(self):
        if self._cache_miss_timer:
            self._cache_miss_timer.cancel()
        # Basically debounce. Will only trigger 0.2 seconds after the last time it's invoked to
        # deal with the initial flood of ObjectUpdateCached and the natural lag time between that
        # and the viewers' RequestMultipleObjects messages
        self._cache_miss_timer = asyncio.get_event_loop().call_later(
            0.2, self._request_missed_cached_objects)

    def _request_missed_cached_objects(self):
        self._cache_miss_timer = None
        self.request_objects(self.queued_cache_misses)
        self.queued_cache_misses.clear()

    def clear(self):
        super().clear()
        self.object_cache = RegionViewerObjectCacheChain([])
        self.cache_loaded = False
        self.queued_cache_misses.clear()
        if self._cache_miss_timer:
            self._cache_miss_timer.cancel()
            self._cache_miss_timer = None

    def _is_localid_selected(self, localid: int):
        return localid in self._region.session().selected.object_locals

    def _handle_request_multiple_objects(self, msg: Message):
        # Remove any queued cache misses that the viewer just requested for itself
        self.queued_cache_misses -= {b["ID"] for b in msg["ObjectData"]}


class ProxyWorldObjectManager(ClientWorldObjectManager):
    _session: Session
    _settings: ProxySettings

    def __init__(self, session: Session, settings: ProxySettings, name_cache: Optional[NameCache]):
        super().__init__(session, settings, name_cache)
        session.http_message_handler.subscribe(
            "GetObjectCost",
            self._handle_get_object_cost
        )
        session.http_message_handler.subscribe(
            "FirestormBridge",
            self._handle_firestorm_bridge_request,
        )

    def _handle_object_update_cached_misses(self, region_handle: int, missing_locals: Set[int]):
        if self._settings.AUTOMATICALLY_REQUEST_MISSING_OBJECTS:
            # Schedule these local IDs to be requested soon if the viewer doesn't request
            # them itself. Ideally we could just mutate the CRC of the ObjectUpdateCached
            # to force a CRC cache miss in the viewer, but that appears to cause the viewer
            # to drop the resulting ObjectUpdateCompressed when the CRC doesn't match?
            # It was causing all objects to go missing even though the ObjectUpdateCompressed
            # was received.
            region_mgr: Optional[ProxyObjectManager] = self._get_region_manager(region_handle)
            region_mgr.queued_cache_misses |= missing_locals
            region_mgr.request_missed_cached_objects_soon()

    def _run_object_update_hooks(self, obj: Object, updated_props: Set[str], update_type: UpdateType):
        super()._run_object_update_hooks(obj, updated_props, update_type)
        region = self._session.region_by_handle(obj.RegionHandle)
        if obj.PCode == PCode.AVATAR and "ParentID" in updated_props:
            if obj.ParentID and not region.objects.lookup_localid(obj.ParentID):
                # If an avatar just sat on an object we don't know about, add it to the queued
                # cache misses and request it if the viewer doesn't. This should happen
                # regardless of the auto-request object setting because otherwise we have no way
                # to get a sitting agent's true region location, even if it's our own avatar.
                region.objects.queued_cache_misses.add(obj.ParentID)
                region.objects.request_missed_cached_objects_soon()
        AddonManager.handle_object_updated(self._session, region, obj, updated_props)

def _run_kill_object_hooks(self, obj: Object):
|
||||
super()._run_kill_object_hooks(obj)
|
||||
region = self._session.region_by_handle(obj.RegionHandle)
|
||||
AddonManager.handle_object_killed(self._session, region, obj)
|
||||
|
||||
def _lookup_cache_entry(self, region_handle: int, local_id: int, crc: int) -> Optional[bytes]:
|
||||
region_mgr: Optional[ProxyObjectManager] = self._get_region_manager(region_handle)
|
||||
return region_mgr.object_cache.lookup_object_data(local_id, crc)
|
||||
|
||||
def _handle_get_object_cost(self, flow: HippoHTTPFlow):
|
||||
parsed = llsd.parse_xml(flow.response.content)
|
||||
self._process_get_object_cost_response(parsed)
|
||||
|
||||
def _handle_firestorm_bridge_request(self, flow: HippoHTTPFlow):
|
||||
"""
|
||||
Pull guessed avatar Z offsets from Firestorm Bridge requests
|
||||
|
||||
CoarseLocationUpdate packets can only represent heights up to 1024, so
|
||||
viewers typically use an LSL bridge to get avatar heights beyond that range
|
||||
and combine it with their X and Y coords from CoarseLocationUpdate packets.
|
||||
"""
|
||||
if not flow.request.content.startswith(b'<llsd><string>getZOffsets|'):
|
||||
return
|
||||
parsed: str = llsd.parse_xml(flow.response.content)
|
||||
if not parsed:
|
||||
return
|
||||
|
||||
# av_1_id, 1025.001, av_2_id, 3000.0, ...
|
||||
split = parsed.split(", ")
|
||||
for av_id, z_offset in zip(split[0::2], split[1::2]):
|
||||
av_id = UUID(av_id)
|
||||
z_offset = float(z_offset)
|
||||
av = self.lookup_avatar(av_id)
|
||||
if not av:
|
||||
continue
|
||||
av.GuessedZ = z_offset
|
||||
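As a quick illustration of the bridge exchange handled above, here is a minimal standalone sketch of the same response parsing (the avatar key is a hypothetical example; only the getZOffsets comma-separated format shown in the handler is assumed):

    # Parsing a Firestorm Bridge getZOffsets response, mirroring
    # _handle_firestorm_bridge_request above.
    from uuid import UUID  # stand-in for hippolyzer.lib.base.datatypes.UUID

    def parse_z_offsets(response: str) -> dict:
        # Response body looks like: "av_1_id, 1025.001, av_2_id, 3000.0, ..."
        split = response.split(", ")
        return {UUID(av_id): float(z) for av_id, z in zip(split[0::2], split[1::2])}

    offsets = parse_z_offsets("a2e76fcd-9360-4f6d-a924-000000000003, 1025.001")
    assert list(offsets.values()) == [1025.001]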
@@ -1,421 +0,0 @@
from __future__ import annotations

import collections
import copy
import logging
import typing
import weakref
from typing import *

from hippolyzer.lib.base import llsd
from hippolyzer.lib.base.datatypes import UUID, TaggedUnion
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.namevalue import NameValueCollection
from hippolyzer.lib.base.objects import Object
from hippolyzer.lib.proxy.addons import AddonManager
from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.templates import PCode, ObjectStateSerializer

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion

LOG = logging.getLogger(__name__)


class OrphanManager:
    def __init__(self):
        self._orphans: typing.Dict[int, typing.List[int]] = collections.defaultdict(list)

    def clear(self):
        return self._orphans.clear()

    def untrack_orphan(self, obj: Object, parent_id: int):
        if parent_id not in self._orphans:
            return False
        orphan_list = self._orphans[parent_id]
        removed = False
        if obj.LocalID in orphan_list:
            orphan_list.remove(obj.LocalID)
            removed = True
        # List is empty now, get rid of it.
        if not orphan_list:
            del self._orphans[parent_id]
        return removed

    def collect_orphans(self, parent: Object) -> typing.Sequence[int]:
        return self._orphans.pop(parent.LocalID, [])

    def track_orphan(self, obj: Object):
        self.track_orphan_by_id(obj.LocalID, obj.ParentID)

    def track_orphan_by_id(self, local_id, parent_id):
        if len(self._orphans) > 100:
            LOG.warning(f"Orphaned object dict is getting large: {len(self._orphans)}")
        self._orphans[parent_id].append(local_id)


OBJECT_OR_LOCAL = typing.Union[Object, int]


class ObjectManager:
    """Object manager for a specific region"""

    def __init__(self, region: ProxiedRegion):
        self._localid_lookup: typing.Dict[int, Object] = {}
        self._fullid_lookup: typing.Dict[UUID, int] = {}
        # Objects that we've seen references to but don't have data for
        self.missing_locals = set()
        self._region: ProxiedRegion = proxify(region)
        self._orphan_manager = OrphanManager()

        message_handler = region.message_handler
        message_handler.subscribe("ObjectUpdate", self._handle_object_update)
        message_handler.subscribe("ImprovedTerseObjectUpdate",
                                  self._handle_terse_object_update)
        message_handler.subscribe("ObjectUpdateCompressed",
                                  self._handle_object_update_compressed)
        message_handler.subscribe("ObjectUpdateCached",
                                  self._handle_object_update_cached)
        message_handler.subscribe("ObjectProperties",
                                  self._handle_object_properties_generic)
        message_handler.subscribe("ObjectPropertiesFamily",
                                  self._handle_object_properties_generic)
        region.http_message_handler.subscribe("GetObjectCost",
                                              self._handle_get_object_cost)
        message_handler.subscribe("KillObject",
                                  self._handle_kill_object)

    @property
    def all_objects(self) -> typing.Iterable[Object]:
        return self._localid_lookup.values()

    @property
    def all_avatars(self) -> typing.Iterable[Object]:
        # This is only avatars within draw distance. Might be useful to have another
        # accessor for UUID + pos that's based on CoarseLocationUpdate.
        return (o for o in self.all_objects if o.PCode == PCode.AVATAR)

    def lookup_localid(self, localid) -> typing.Optional[Object]:
        return self._localid_lookup.get(localid, None)

    def lookup_fullid(self, fullid: UUID) -> typing.Optional[Object]:
        local_id = self._fullid_lookup.get(fullid, None)
        if local_id is None:
            return None
        return self.lookup_localid(local_id)

    def _track_object(self, obj: Object):
        self._localid_lookup[obj.LocalID] = obj
        self._fullid_lookup[obj.FullID] = obj.LocalID
        # If it was missing, it's not missing anymore.
        self.missing_locals -= {obj.LocalID}

        self._parent_object(obj)

        # Adopt any of our orphaned child objects.
        for orphan_local in self._orphan_manager.collect_orphans(obj):
            child_obj = self.lookup_localid(orphan_local)
            # Shouldn't be any dead children in the orphanage
            assert child_obj is not None
            self._parent_object(child_obj)

        self._notify_object_updated(obj, set(obj.to_dict().keys()))

    def _parent_object(self, obj: Object, insert_at_head=False):
        if obj.ParentID:
            parent = self.lookup_localid(obj.ParentID)
            if parent is not None:
                assert obj.LocalID not in parent.ChildIDs
                # Link order is never explicitly passed to clients, so we have to do
                # some nasty guesswork based on order of received initial ObjectUpdates.
                # Note that this is broken in the viewer as well, and there doesn't seem
                # to be a foolproof way to get this.
                idx = 0 if insert_at_head else len(parent.ChildIDs)
                parent.ChildIDs.insert(idx, obj.LocalID)
                parent.Children.insert(idx, obj)
                obj.Parent = weakref.proxy(parent)
            else:
                self.missing_locals.add(obj.ParentID)
                self._orphan_manager.track_orphan(obj)
                obj.Parent = None
                LOG.debug(f"{obj.LocalID} updated with parent {obj.ParentID}, but parent wasn't found!")

    def _unparent_object(self, obj: Object, old_parent_id: int):
        obj.Parent = None
        if old_parent_id:
            # Had a parent, remove this from the child list.
            removed = self._orphan_manager.untrack_orphan(obj, old_parent_id)

            old_parent = self.lookup_localid(old_parent_id)
            if old_parent:
                if obj.LocalID in old_parent.ChildIDs:
                    idx = old_parent.ChildIDs.index(obj.LocalID)
                    del old_parent.ChildIDs[idx]
                    del old_parent.Children[idx]
                else:
                    # Something is very broken if this happens
                    LOG.warning(f"Changing parent of {obj.LocalID}, but old parent didn't correctly adopt, "
                                f"was {'' if removed else 'not '}in orphan list")
            else:
                LOG.debug(f"Changing parent of {obj.LocalID}, but couldn't find old parent")

    def _update_existing_object(self, obj: Object, new_properties):
        new_parent_id = new_properties.get("ParentID", obj.ParentID)
        old_parent_id = obj.ParentID

        actually_updated_props = obj.update_properties(new_properties)

        if new_parent_id != old_parent_id:
            self._unparent_object(obj, old_parent_id)
            self._parent_object(obj, insert_at_head=True)

        # Common case where this may be falsy is if we get an ObjectUpdateCached
        # that didn't have a changed UpdateFlags field.
        if actually_updated_props:
            self._notify_object_updated(obj, actually_updated_props)

    def _normalize_object_update(self, block: Block):
        object_data = {
            "FootCollisionPlane": None,
            "SoundFlags": block["Flags"],
            "SoundGain": block["Gain"],
            "SoundRadius": block["Radius"],
            **dict(block.items()),
            "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
            "NameValue": block.deserialize_var("NameValue", make_copy=False),
            "TextureAnim": block.deserialize_var("TextureAnim", make_copy=False),
            "ExtraParams": block.deserialize_var("ExtraParams", make_copy=False) or {},
            "PSBlock": block.deserialize_var("PSBlock", make_copy=False).value,
            "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
            "State": block.deserialize_var("State", make_copy=False),
            **block.deserialize_var("ObjectData", make_copy=False).value,
        }
        # Empty == not updated
        if not object_data["TextureEntry"]:
            object_data.pop("TextureEntry")
        # OwnerID is only set in this packet if a sound is playing. Don't allow
        # ObjectUpdates to clobber _real_ OwnerIDs we had from ObjectProperties
        # with a null UUID.
        if object_data["OwnerID"] == UUID():
            del object_data["OwnerID"]
        del object_data["Flags"]
        del object_data["Gain"]
        del object_data["Radius"]
        del object_data["ObjectData"]
        return object_data

    def _handle_object_update(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet['ObjectData']:
            object_data = self._normalize_object_update(block)

            seen_locals.append(object_data["ID"])
            obj = self.lookup_fullid(object_data["FullID"])
            if obj:
                self._update_existing_object(obj, object_data)
            else:
                obj = Object(**object_data)
                self._track_object(obj)
        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _normalize_terse_object_update(self, block: Block):
        object_data = {
            **block.deserialize_var("Data", make_copy=False),
            **dict(block.items()),
            "TextureEntry": block.deserialize_var("TextureEntry", make_copy=False),
        }
        object_data.pop("Data")
        # Empty == not updated
        if object_data["TextureEntry"] is None:
            object_data.pop("TextureEntry")
        return object_data

    def _handle_terse_object_update(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet['ObjectData']:
            object_data = self._normalize_terse_object_update(block)
            obj = self.lookup_localid(object_data["ID"])
            # Can only update existing object with this message
            if obj:
                # Need the Object as context because decoding state requires PCode.
                state_deserializer = ObjectStateSerializer.deserialize
                object_data["State"] = state_deserializer(ctx_obj=obj, val=object_data["State"])

            seen_locals.append(object_data["ID"])
            if obj:
                self._update_existing_object(obj, object_data)
            else:
                self.missing_locals.add(object_data["ID"])
                LOG.debug(f"Received terse update for unknown object {object_data['ID']}")

        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _handle_object_update_cached(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet['ObjectData']:
            seen_locals.append(block["ID"])
            obj = self.lookup_localid(block["ID"])
            if obj is not None:
                self._update_existing_object(obj, {
                    "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
                })
            else:
                self.missing_locals.add(block["ID"])
        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _normalize_object_update_compressed(self, block: Block):
        # TODO: ObjectUpdateCompressed doesn't provide a default value for unused
        #  fields, whereas ObjectUpdate and friends do (TextColor, etc.)
        #  Need some way to normalize ObjectUpdates so they won't appear to have
        #  changed just because an ObjectUpdate got sent with a default value.
        # Only do a shallow copy
        compressed = copy.copy(block.deserialize_var("Data", make_copy=False))
        # Only used for determining which sections are present
        del compressed["Flags"]

        ps_block = compressed.pop("PSBlockNew", None)
        if ps_block is None:
            ps_block = compressed.pop("PSBlock", None)
        if ps_block is None:
            ps_block = TaggedUnion(0, None)
        compressed.pop("PSBlock", None)
        if compressed["NameValue"] is None:
            compressed["NameValue"] = NameValueCollection()

        object_data = {
            "PSBlock": ps_block.value,
            # Parent flag not set means explicitly un-parented
            "ParentID": compressed.pop("ParentID", None) or 0,
            **compressed,
            **dict(block.items()),
            "UpdateFlags": block.deserialize_var("UpdateFlags", make_copy=False),
        }
        if object_data["TextureEntry"] is None:
            object_data.pop("TextureEntry")
        # Don't clobber OwnerID in case the object has a proper one.
        if object_data["OwnerID"] == UUID():
            del object_data["OwnerID"]
        object_data.pop("Data")
        return object_data

    def _handle_object_update_compressed(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet['ObjectData']:
            object_data = self._normalize_object_update_compressed(block)
            obj = self.lookup_localid(object_data["ID"])
            seen_locals.append(object_data["ID"])
            if obj:
                self._update_existing_object(obj, object_data)
            else:
                obj = Object(**object_data)
                self._track_object(obj)
        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _handle_object_properties_generic(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet["ObjectData"]:
            object_properties = dict(block.items())
            if packet.name == "ObjectProperties":
                object_properties["TextureID"] = block.deserialize_var("TextureID")

            obj = self.lookup_fullid(block["ObjectID"])
            if obj:
                seen_locals.append(obj.LocalID)
                self._update_existing_object(obj, object_properties)
            else:
                LOG.debug(f"Received {packet.name} for unknown {block['ObjectID']}")
        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _handle_kill_object(self, packet: ProxiedMessage):
        seen_locals = []
        for block in packet["ObjectData"]:
            obj = self.lookup_localid(block["ID"])
            seen_locals.append(block["ID"])
            self.missing_locals -= {block["ID"]}
            if obj:
                AddonManager.handle_object_killed(self._region.session(), self._region, obj)

                former_child_ids = obj.ChildIDs[:]
                for child_id in former_child_ids:
                    child_obj = self.lookup_localid(child_id)
                    assert child_obj is not None
                    self._unparent_object(child_obj, child_obj.ParentID)

                del self._localid_lookup[obj.LocalID]
                del self._fullid_lookup[obj.FullID]

                # Place any remaining unkilled children in the orphanage
                for child_id in former_child_ids:
                    self._orphan_manager.track_orphan_by_id(child_id, obj.LocalID)

                assert not obj.ChildIDs

                # Make sure the parent knows we went away
                self._unparent_object(obj, obj.ParentID)
            else:
                logging.debug(f"Received {packet.name} for unknown {block['ID']}")
        packet.meta["ObjectUpdateIDs"] = tuple(seen_locals)

    def _handle_get_object_cost(self, flow: HippoHTTPFlow):
        parsed = llsd.parse_xml(flow.response.content)
        if "error" in parsed:
            return
        for object_id, object_costs in parsed.items():
            obj = self.lookup_fullid(UUID(object_id))
            if not obj:
                LOG.debug(f"Received ObjectCost for unknown {object_id}")
                continue
            obj.ObjectCosts.update(object_costs)
            self._notify_object_updated(obj, {"ObjectCosts"})

    def _notify_object_updated(self, obj: Object, updated_props: Set[str]):
        AddonManager.handle_object_updated(self._region.session(), self._region, obj, updated_props)

    def clear(self):
        self._localid_lookup.clear()
        self._fullid_lookup.clear()
        self._orphan_manager.clear()
        self.missing_locals.clear()

    def request_object_properties(self, objects: typing.Union[OBJECT_OR_LOCAL, typing.Sequence[OBJECT_OR_LOCAL]]):
        if isinstance(objects, (Object, int)):
            objects = (objects,)
        if not objects:
            return

        session = self._region.session()

        local_ids = tuple((o.LocalID if isinstance(o, Object) else o) for o in objects)

        # Don't mess with already selected objects
        local_ids = tuple(local for local in local_ids if local not in session.selected.object_locals)

        while local_ids:
            blocks = [
                Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
                *[Block("ObjectData", ObjectLocalID=x) for x in local_ids[:100]],
            ]
            # Selecting causes ObjectProperties to be sent
            self._region.circuit.send_message(ProxiedMessage("ObjectSelect", blocks))
            self._region.circuit.send_message(ProxiedMessage("ObjectDeselect", blocks))
            local_ids = local_ids[100:]

    def request_missing_objects(self):
        self.request_objects(self.missing_locals)

    def request_objects(self, local_ids):
        if isinstance(local_ids, int):
            local_ids = (local_ids,)
        if isinstance(local_ids, set):
            local_ids = tuple(local_ids)

        session = self._region.session()
        while local_ids:
            self._region.circuit.send_message(ProxiedMessage(
                "RequestMultipleObjects",
                Block("AgentData", AgentID=session.agent_id, SessionID=session.id),
                *[Block("ObjectData", CacheMissType=0, ID=x) for x in local_ids[:100]],
            ))
            local_ids = local_ids[100:]
@@ -1,53 +0,0 @@
import enum
import socket
import struct
import typing


class Direction(enum.Enum):
    OUT = enum.auto()
    IN = enum.auto()

    def __invert__(self):
        if self == self.OUT:
            return self.IN
        return self.OUT


ADDR_TUPLE = typing.Tuple[str, int]


class ProxiedUDPPacket:
    HEADER_STRUCT = struct.Struct("!HBB4sH")

    def __init__(self, src_addr: ADDR_TUPLE, dst_addr: ADDR_TUPLE, data: bytes, direction: Direction):
        self.src_addr = src_addr
        self.dst_addr = dst_addr
        self.data = data
        self.direction = direction

    @property
    def outgoing(self):
        return self.direction == Direction.OUT

    @property
    def incoming(self):
        return self.direction == Direction.IN

    @property
    def far_addr(self):
        if self.outgoing:
            return self.dst_addr
        return self.src_addr

    def _make_socks_header(self):
        return self.HEADER_STRUCT.pack(
            0, 0, 1, socket.inet_aton(self.far_addr[0]), self.far_addr[1])

    def serialize(self, socks_header=None):
        # Decide whether we need a header based on packet direction
        if socks_header is None:
            socks_header = self.incoming
        if not socks_header:
            return self.data
        return self._make_socks_header() + self.data
@@ -10,18 +10,21 @@ import urllib.parse

 import multidict

-from hippolyzer.lib.base.datatypes import Vector3
+from hippolyzer.lib.base.datatypes import Vector3, UUID
 from hippolyzer.lib.base.helpers import proxify
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.message.message_handler import MessageHandler
-from hippolyzer.lib.proxy.caps_client import CapsClient
+from hippolyzer.lib.base.objects import handle_to_global_pos
+from hippolyzer.lib.client.state import BaseClientRegion
+from hippolyzer.lib.proxy.caps_client import ProxyCapsClient
 from hippolyzer.lib.proxy.circuit import ProxiedCircuit
-from hippolyzer.lib.proxy.objects import ObjectManager
-from hippolyzer.lib.proxy.transfer_manager import TransferManager
-from hippolyzer.lib.proxy.xfer_manager import XferManager
+from hippolyzer.lib.proxy.object_manager import ProxyObjectManager
+from hippolyzer.lib.base.transfer_manager import TransferManager
+from hippolyzer.lib.base.xfer_manager import XferManager

 if TYPE_CHECKING:
     from hippolyzer.lib.proxy.sessions import Session
     from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-    from hippolyzer.lib.proxy.message import ProxiedMessage
@@ -32,6 +35,10 @@ class CapType(enum.Enum):


 class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):
+    # TODO: Make a view object for this that's just name -> URL
+    #  deriving from MultiMapping[_T] so we don't have to do
+    #  so many copies for consumers that aren't expecting the
+    #  CapType tag.
     def add(self, key, value) -> None:
         # Prepend rather than append when adding caps.
         # Necessary so the most recent for a region URI is returned
@@ -41,25 +48,30 @@ class CapsMultiDict(multidict.MultiDict[Tuple[CapType, str]]):
         super().add(key, val)


-class ProxiedRegion:
-    def __init__(self, circuit_addr, seed_cap: str, session, handle=None):
+class ProxiedRegion(BaseClientRegion):
+    def __init__(self, circuit_addr, seed_cap: str, session: Session, handle=None):
         # A client may make a Seed request twice, and may get back two (valid!) sets of
         # Cap URIs. We need to be able to look up both, so MultiDict is necessary.
         self.handle: Optional[int] = handle
         self._name: Optional[str] = None
+        # TODO: when does this change?
+        self.cache_id: Optional[UUID] = None
         self.circuit: Optional[ProxiedCircuit] = None
         self.circuit_addr = circuit_addr
         self._caps = CapsMultiDict()
+        self._caps_url_lookup: Dict[str, Tuple[CapType, str]] = {}
         if seed_cap:
             self._caps["Seed"] = (CapType.NORMAL, seed_cap)
-        self.session: Optional[Callable[[], Session]] = weakref.ref(session)
-        self.message_handler: MessageHandler[ProxiedMessage] = MessageHandler()
-        self.http_message_handler: MessageHandler[HippoHTTPFlow] = MessageHandler()
+        self.session: Callable[[], Session] = weakref.ref(session)
+        self.message_handler: MessageHandler[Message, str] = MessageHandler()
+        self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
         self.eq_manager = EventQueueManager(self)
-        self.xfer_manager = XferManager(self)
-        self.transfer_manager = TransferManager(self)
-        self.caps_client = CapsClient(self)
-        self.objects = ObjectManager(self)
+        settings = session.session_manager.settings
+        self.caps_client = ProxyCapsClient(settings, proxify(self))
+        self.objects: ProxyObjectManager = ProxyObjectManager(self, may_use_vo_cache=True)
+        self.xfer_manager = XferManager(proxify(self), self.session().secure_session_id)
+        self.transfer_manager = TransferManager(proxify(self), session.agent_id, session.id)
+        self._recalc_caps()

     @property
     def name(self):
@@ -76,10 +88,10 @@ class ProxiedRegion:
         return multidict.MultiDict((x, y[1]) for x, y in self._caps.items())

     @property
-    def global_pos(self):
+    def global_pos(self) -> Vector3:
         if self.handle is None:
             raise ValueError("Can't determine global region position without handle")
-        return Vector3(self.handle >> 32, self.handle & 0xFFffFFff)
+        return handle_to_global_pos(self.handle)

     @property
     def is_alive(self):
@@ -91,6 +103,13 @@ class ProxiedRegion:
         for cap_name, cap_url in caps.items():
             if isinstance(cap_url, str) and cap_url.startswith('http'):
                 self._caps.add(cap_name, (CapType.NORMAL, cap_url))
+        self._recalc_caps()
+
+    def _recalc_caps(self):
+        self._caps_url_lookup.clear()
+        for name, cap_info in self._caps.items():
+            cap_type, cap_url = cap_info
+            self._caps_url_lookup[cap_url] = (cap_type, name)

     def register_wrapper_cap(self, name: str):
         """
@@ -102,9 +121,13 @@ class ProxiedRegion:
         parsed = list(urllib.parse.urlsplit(self._caps[name][1]))
         seed_id = self._caps["Seed"][1].split("/")[-1].encode("utf8")
         # Give it a unique domain tied to the current Seed URI
-        parsed[1] = f"{name}-{hashlib.sha256(seed_id).hexdigest()[:16]}.hippo-proxy.localhost"
+        parsed[1] = f"{name.lower()}-{hashlib.sha256(seed_id).hexdigest()[:16]}.hippo-proxy.localhost"
+        # Force the URL to HTTP, we're going to handle the request ourselves so it doesn't need
+        # to be secure. This should save on expensive TLS context setup for each req.
+        parsed[0] = "http"
         wrapper_url = urllib.parse.urlunsplit(parsed)
         self._caps.add(name + "ProxyWrapper", (CapType.WRAPPER, wrapper_url))
+        self._recalc_caps()
         return wrapper_url

     def register_proxy_cap(self, name: str):
@@ -113,21 +136,24 @@ class ProxiedRegion:
         """
         cap_url = f"https://caps.hippo-proxy.localhost/cap/{uuid.uuid4()!s}"
         self._caps.add(name, (CapType.PROXY_ONLY, cap_url))
+        self._recalc_caps()
         return cap_url

     def register_temporary_cap(self, name: str, cap_url: str):
         """Register a Cap that only has meaning the first time it's used"""
         self._caps.add(name, (CapType.TEMPORARY, cap_url))
+        self._recalc_caps()

     def resolve_cap(self, url: str, consume=True) -> Optional[Tuple[str, str, CapType]]:
-        for name, cap_info in self._caps.items():
-            cap_type, cap_url = cap_info
+        for cap_url in self._caps_url_lookup.keys():
             if url.startswith(cap_url):
+                cap_type, name = self._caps_url_lookup[cap_url]
                 if cap_type == CapType.TEMPORARY and consume:
                     # Resolving a temporary cap pops it out of the dict
                     temporary_caps = self._caps.popall(name)
-                    temporary_caps.remove(cap_info)
+                    temporary_caps.remove((cap_type, cap_url))
                     self._caps.extend((name, x) for x in temporary_caps)
+                    self._recalc_caps()
                 return name, cap_url, cap_type
         return None
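A small sketch of why CapsMultiDict prepends on add, per the comment above: MultiDict lookups return the first value for a key, so the most recent cap grant has to come first (the URLs are hypothetical, and plain multidict is used for illustration):

    import multidict

    caps = multidict.MultiDict()
    caps.add("Seed", "https://sim.example/cap/old")  # hypothetical URLs
    caps.add("Seed", "https://sim.example/cap/new")  # plain add() appends
    # getone() returns the first value for a key, so the stale cap still wins:
    assert caps.getone("Seed") == "https://sim.example/cap/old"

    # CapsMultiDict.add instead rebuilds with the newest entry first:
    caps = multidict.MultiDict([("Seed", "https://sim.example/cap/new"),
                                ("Seed", "https://sim.example/cap/old")])
    assert caps.getone("Seed") == "https://sim.example/cap/new"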
@@ -2,6 +2,7 @@ from __future__ import annotations

 import dataclasses
 import datetime
+import functools
 import logging
 import multiprocessing
 import weakref
@@ -9,19 +10,26 @@ from typing import *
 from weakref import ref

 from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.base.message.message import Message
 from hippolyzer.lib.base.message.message_handler import MessageHandler
+from hippolyzer.lib.client.state import BaseClientSession
 from hippolyzer.lib.proxy.addons import AddonManager
 from hippolyzer.lib.proxy.circuit import ProxiedCircuit
 from hippolyzer.lib.proxy.http_asset_repo import HTTPAssetRepo
 from hippolyzer.lib.proxy.http_proxy import HTTPFlowContext, is_asset_server_cap_name, SerializedCapData
+from hippolyzer.lib.proxy.namecache import ProxyNameCache
+from hippolyzer.lib.proxy.object_manager import ProxyWorldObjectManager
 from hippolyzer.lib.proxy.region import ProxiedRegion, CapType
+from hippolyzer.lib.proxy.settings import ProxySettings

 if TYPE_CHECKING:
+    from hippolyzer.lib.proxy.message_logger import BaseMessageLogger
     from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
-    from hippolyzer.lib.proxy.message import ProxiedMessage


-class Session:
+class Session(BaseClientSession):
     def __init__(self, session_id, secure_session_id, agent_id, circuit_code,
-                 login_data=None, session_manager=None):
+                 session_manager: Optional[SessionManager], login_data=None):
         self.login_data = login_data or {}
         self.pending = True
         self.id: UUID = session_id
@@ -35,6 +43,9 @@ class Session:
         self.selected: SelectionModel = SelectionModel()
         self.regions: List[ProxiedRegion] = []
         self.started_at = datetime.datetime.now()
+        self.message_handler: MessageHandler[Message, str] = MessageHandler()
+        self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
+        self.objects = ProxyWorldObjectManager(self, session_manager.settings, session_manager.name_cache)
         self._main_region = None

     @property
@@ -50,8 +61,8 @@ class Session:
             secure_session_id=UUID(login_data["secure_session_id"]),
             agent_id=UUID(login_data["agent_id"]),
             circuit_code=int(login_data["circuit_code"]),
-            login_data=login_data,
             session_manager=session_manager,
+            login_data=login_data,
         )
         appearance_service = login_data.get("agent_appearance_service")
         map_image_service = login_data.get("map-server-url")
@@ -62,6 +73,7 @@ class Session:
         # Login data also has details about the initial sim
         sess.register_region(
             circuit_addr=(login_data["sim_ip"], login_data["sim_port"]),
+            handle=(login_data["region_x"] << 32) | login_data["region_y"],
             seed_url=login_data["seed_capability"],
         )
         return sess
@@ -106,12 +118,26 @@ class Session:
                 return region
         return None

+    def region_by_handle(self, handle: int) -> Optional[ProxiedRegion]:
+        for region in self.regions:
+            if region.handle == handle:
+                return region
+        return None
+
     def open_circuit(self, near_addr, circuit_addr, transport):
         for region in self.regions:
             if region.circuit_addr == circuit_addr:
                 if not region.circuit or not region.circuit.is_alive:
+                    logging_hook = None
+                    if self.session_manager.message_logger:
+                        logging_hook = functools.partial(
+                            self.session_manager.message_logger.log_lludp_message,
+                            self,
+                            region,
+                        )
                     region.circuit = ProxiedCircuit(
-                        near_addr, circuit_addr, transport, region=region)
+                        near_addr, circuit_addr, transport, logging_hook=logging_hook)
                     AddonManager.handle_circuit_created(self, region)
                     return True
                 if region.circuit and region.circuit.is_alive:
                     # Whatever, already open
@@ -137,35 +163,30 @@
             return CapData(cap_name, ref(region), ref(self), base_url, cap_type)
         return None

-    def tid_to_assetid(self, transaction_id: UUID):
+    def transaction_to_assetid(self, transaction_id: UUID):
         return UUID.combine(transaction_id, self.secure_session_id)

     def __repr__(self):
         return "<%s %s>" % (self.__class__.__name__, self.id)


-class BaseMessageLogger:
-    def log_lludp_message(self, session: Session, region: ProxiedRegion, message: ProxiedMessage):
-        pass
-
-    def log_http_response(self, flow: HippoHTTPFlow):
-        pass
-
-    def log_eq_event(self, session: Session, region: ProxiedRegion, event: dict):
-        pass
-
-
 class SessionManager:
-    def __init__(self):
+    def __init__(self, settings: ProxySettings):
+        self.settings: ProxySettings = settings
         self.sessions: List[Session] = []
         self.shutdown_signal = multiprocessing.Event()
         self.flow_context = HTTPFlowContext()
         self.asset_repo = HTTPAssetRepo()
         self.message_logger: Optional[BaseMessageLogger] = None
         self.addon_ctx: Dict[str, Any] = {}
+        self.name_cache = ProxyNameCache()

     def create_session(self, login_data) -> Session:
         session = Session.from_login_data(login_data, self)
+        self.name_cache.create_subscriptions(
+            session.message_handler,
+            session.http_message_handler,
+        )
         self.sessions.append(session)
         logging.info("Created %r" % session)
         return session
@@ -180,6 +201,7 @@ class SessionManager:

     def close_session(self, session: Session):
         logging.info("Closed %r" % session)
+        session.objects.clear()
         self.sessions.remove(session)

     def resolve_cap(self, url: str) -> Optional["CapData"]:
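For context, a sketch of what transaction_to_assetid computes above, assuming UUID.combine follows the usual Second Life recipe of an MD5 over the two UUIDs' bytes (the IDs below are randomly generated examples):

    import hashlib
    import uuid

    def combine(a: uuid.UUID, b: uuid.UUID) -> uuid.UUID:
        # Assumed recipe: MD5 over the concatenated UUID bytes
        return uuid.UUID(bytes=hashlib.md5(a.bytes + b.bytes).digest())

    transaction_id = uuid.uuid4()
    secure_session_id = uuid.uuid4()
    asset_id = combine(transaction_id, secure_session_id)  # the ID the upload ends up under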
hippolyzer/lib/proxy/settings.py (Normal file, 33 lines)
@@ -0,0 +1,33 @@
import os
from typing import *

from hippolyzer.lib.base.settings import Settings, SettingDescriptor

_T = TypeVar("_T")


class EnvSettingDescriptor(SettingDescriptor):
    """A setting that prefers to pull its value from the environment"""
    __slots__ = ("_env_name", "_env_callable")

    def __init__(self, default: Union[Callable[[], _T], _T], env_name: str, spec: Callable[[str], _T]):
        super().__init__(default)
        self._env_name = env_name
        self._env_callable = spec

    def __get__(self, obj, owner=None) -> _T:
        val = os.getenv(self._env_name)
        if val is not None:
            return self._env_callable(val)
        return super().__get__(obj, owner)


class ProxySettings(Settings):
    SOCKS_PROXY_PORT: int = EnvSettingDescriptor(9061, "HIPPO_UDP_PORT", int)
    HTTP_PROXY_PORT: int = EnvSettingDescriptor(9062, "HIPPO_HTTP_PORT", int)
    PROXY_BIND_ADDR: str = EnvSettingDescriptor("127.0.0.1", "HIPPO_BIND_HOST", str)
    REMOTELY_ACCESSIBLE: bool = SettingDescriptor(False)
    USE_VIEWER_OBJECT_CACHE: bool = SettingDescriptor(False)
    AUTOMATICALLY_REQUEST_MISSING_OBJECTS: bool = SettingDescriptor(False)
    ADDON_SCRIPTS: List[str] = SettingDescriptor(list)
    FILTERS: Dict[str, str] = SettingDescriptor(dict)
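A quick usage sketch of the environment override behavior above, using only the names defined in this file:

    import os

    os.environ["HIPPO_UDP_PORT"] = "9071"  # env var named by the descriptor above
    settings = ProxySettings()
    # EnvSettingDescriptor.__get__ sees the env var and parses it with the `int` spec
    assert settings.SOCKS_PROXY_PORT == 9071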
@@ -6,7 +6,8 @@ import socket
 import struct
 from typing import Optional, List, Tuple

-from hippolyzer.lib.proxy.packets import ProxiedUDPPacket, Direction
+from hippolyzer.lib.base.network.transport import UDPPacket, Direction
+from hippolyzer.lib.proxy.transport import SOCKS5UDPTransport


 class SOCKS5Server:
@@ -145,10 +146,10 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
     def __init__(self, source_addr: Tuple[str, int]):
         self.socks_client_addr: Tuple[str, int] = source_addr
         self.far_to_near_map = {}
-        self.transport: Optional[asyncio.DatagramTransport] = None
+        self.transport: Optional[SOCKS5UDPTransport] = None

-    def connection_made(self, transport):
-        self.transport = transport
+    def connection_made(self, transport: asyncio.DatagramTransport):
+        self.transport = SOCKS5UDPTransport(transport)

     def _parse_socks_datagram(self, data):
         rsv, frag, address_type = struct.unpack("!HBB", data[:4])
@@ -162,8 +163,8 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
             data = data[4:]
         elif address_type == 3:  # Domain name
             domain_length = data[0]
-            address = data[1:1+domain_length]
-            data = data[1+domain_length:]
+            address = data[1:1 + domain_length]
+            data = data[1 + domain_length:]
         else:
             logging.error("Don't understand addr type %d" % address_type)
             return None
@@ -183,7 +184,7 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
             # this allows us to have source and dest addr on the same IP
             # since we expect a send from client->far to happen first
             self.far_to_near_map[remote_addr] = source_addr
-        src_packet = ProxiedUDPPacket(
+        src_packet = UDPPacket(
             src_addr=source_addr,
             dst_addr=remote_addr,
             data=data,
@@ -198,7 +199,7 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
             logging.warning("Got datagram from unknown host %s:%s" % source_addr)
             return

-        src_packet = ProxiedUDPPacket(
+        src_packet = UDPPacket(
             src_addr=source_addr,
             dst_addr=near_addr,
             data=data,
@@ -206,13 +207,13 @@ class UDPProxyProtocol(asyncio.DatagramProtocol):
         )

         try:
-            self._handle_proxied_packet(src_packet)
+            self.handle_proxied_packet(src_packet)
         except:
             logging.exception("Barfed while handling UDP packet!")
             raise

-    def _handle_proxied_packet(self, packet):
-        self.transport.sendto(packet.serialize(), packet.dst_addr)
+    def handle_proxied_packet(self, packet):
+        self.transport.send_packet(packet)

     def close(self):
         logging.info("Closing UDP transport")
File diff suppressed because it is too large.

hippolyzer/lib/proxy/test_utils.py (Normal file, 80 lines)
@@ -0,0 +1,80 @@
import asyncio
import unittest
from typing import Any, Optional, List, Tuple

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.message.message import Message
from hippolyzer.lib.base.message.udpserializer import UDPMessageSerializer
from hippolyzer.lib.base.network.transport import UDPPacket, AbstractUDPTransport, ADDR_TUPLE
from hippolyzer.lib.proxy.lludp_proxy import InterceptingLLUDPProxyProtocol
from hippolyzer.lib.proxy.region import ProxiedRegion
from hippolyzer.lib.proxy.sessions import SessionManager
from hippolyzer.lib.proxy.settings import ProxySettings
from hippolyzer.lib.proxy.transport import SOCKS5UDPTransport


class BaseProxyTest(unittest.IsolatedAsyncioTestCase):
    def setUp(self) -> None:
        self.client_addr = ("127.0.0.1", 1)
        self.region_addr = ("127.0.0.1", 3)
        self.circuit_code = 1234
        self.session_manager = SessionManager(ProxySettings())
        self.session = self.session_manager.create_session({
            "session_id": UUID.random(),
            "secure_session_id": UUID.random(),
            "agent_id": UUID.random(),
            "circuit_code": self.circuit_code,
            "sim_ip": self.region_addr[0],
            "sim_port": self.region_addr[1],
            "region_x": 0,
            "region_y": 123,
            "seed_capability": "https://test.localhost:4/foo",
        })
        self.transport = MockTransport()
        self.protocol = InterceptingLLUDPProxyProtocol(
            self.client_addr, self.session_manager)
        self.protocol.transport = self.transport
        self.serializer = UDPMessageSerializer()
        self.session.objects.track_region_objects(123)

    async def _wait_drained(self):
        await asyncio.sleep(0.001)

    def _setup_default_circuit(self):
        self._setup_region_circuit(self.session.regions[-1])
        self.session.main_region = self.session.regions[-1]

    def _setup_region_circuit(self, region: ProxiedRegion):
        # Not going to send a UseCircuitCode, so have to pretend we already did the
        # client -> region NAT hole-punching
        self.protocol.session = self.session
        self.protocol.far_to_near_map[region.circuit_addr] = self.client_addr
        self.session_manager.claim_session(self.session.id)
        self.session.open_circuit(self.client_addr, region.circuit_addr,
                                  self.protocol.transport)

    def _msg_to_packet(self, msg: Message, src, dst) -> UDPPacket:
        return UDPPacket(src_addr=src, dst_addr=dst, data=self.serializer.serialize(msg),
                         direction=msg.direction)

    def _msg_to_datagram(self, msg: Message, src, dst, socks_header=True):
        packet = self._msg_to_packet(msg, src, dst)
        return SOCKS5UDPTransport.serialize(packet, force_socks_header=socks_header)


class MockTransport(AbstractUDPTransport):
    def sendto(self, data: Any, addr: Optional[ADDR_TUPLE] = ...) -> None:
        pass

    def abort(self) -> None:
        pass

    def close(self) -> None:
        pass

    def __init__(self):
        super().__init__()
        self.packets: List[Tuple[bytes, Tuple[str, int]]] = []

    def send_packet(self, packet: UDPPacket) -> None:
        self.packets.append((packet.data, packet.dst_addr))
hippolyzer/lib/proxy/transport.py (Normal file, 20 lines)
@@ -0,0 +1,20 @@
import socket
import struct

from hippolyzer.lib.base.network.transport import SocketUDPTransport, UDPPacket


class SOCKS5UDPTransport(SocketUDPTransport):
    HEADER_STRUCT = struct.Struct("!HBB4sH")

    @classmethod
    def serialize(cls, packet: UDPPacket, force_socks_header: bool = False) -> bytes:
        # Decide whether we need a header based on packet direction
        if packet.outgoing and not force_socks_header:
            return packet.data
        header = cls.HEADER_STRUCT.pack(
            0, 0, 1, socket.inet_aton(packet.far_addr[0]), packet.far_addr[1])
        return header + packet.data

    def send_packet(self, packet: UDPPacket) -> None:
        self.transport.sendto(self.serialize(packet), packet.dst_addr)
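For reference, a standalone sketch of the SOCKS5 UDP header that HEADER_STRUCT packs above ("!HBB4sH" is RSV, FRAG, ATYP, IPv4 address, port; the address and port values are hypothetical):

    import socket
    import struct

    HEADER_STRUCT = struct.Struct("!HBB4sH")  # RSV(2), FRAG(1), ATYP(1), IPv4(4), port(2)

    header = HEADER_STRUCT.pack(0, 0, 1, socket.inet_aton("203.0.113.7"), 13000)
    rsv, frag, atyp, raw_ip, port = HEADER_STRUCT.unpack(header)
    assert (atyp, socket.inet_ntoa(raw_ip), port) == (1, "203.0.113.7", 13000)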
hippolyzer/lib/proxy/viewer_settings.py (Normal file, 70 lines)
@@ -0,0 +1,70 @@
import itertools
import sys
from pathlib import Path

from hippolyzer.lib.base import llsd


def iter_viewer_data_dirs():
    if sys.platform.startswith("linux"):
        paths = (x for x in Path.home().iterdir() if x.name.startswith("."))
    elif sys.platform == "darwin":
        paths = (Path.home() / "Library" / "Application Support").iterdir()
    elif sys.platform in ("win32", "msys", "cygwin"):
        app_data = Path.home() / "AppData"
        # On Windows the cache directory is in Local, the settings are in Roaming. I think.
        paths = itertools.chain((app_data / "Local").iterdir(), (app_data / "Roaming").iterdir())
    else:
        raise Exception("Unknown OS, can't locate viewer config dirs!")

    for path in paths:
        if not path.is_dir():
            continue
        if not has_settings_file(path) and not has_cache_file(path):
            continue
        yield path


def has_cache_file(path: Path):
    try:
        return (path / "avatar_name_cache.xml").exists()
    except PermissionError:
        return False


def has_settings_file(path: Path):
    try:
        return (path / "user_settings" / "settings.xml").exists()
    except PermissionError:
        return False


def iter_viewer_config_dirs():
    for viewer_dir in iter_viewer_data_dirs():
        if has_settings_file(viewer_dir):
            yield viewer_dir


def iter_viewer_cache_dirs():
    for viewer_dir in iter_viewer_data_dirs():
        # Is this a settings dir?
        if has_settings_file(viewer_dir):
            # Users can choose custom locations for the cache directory, we need to parse
            # their settings to see if they've done so.
            with open(viewer_dir / "user_settings" / "settings.xml", "rb") as fh:
                config: dict = llsd.parse_xml(fh.read())
            # TODO: is this the case on all platforms?
            cache_location = None
            cache_elem = config.get("CacheLocation")
            if cache_elem:
                cache_location = cache_elem.get("Value")
            if cache_location:
                cache_location = Path(cache_location)
                if has_cache_file(cache_location):
                    yield cache_location
        # Cache may be in the base dir on Windows
        if has_cache_file(viewer_dir):
            yield viewer_dir
        # but it might also be in a subfolder
        if has_cache_file(viewer_dir / "cache"):
            yield viewer_dir / "cache"
hippolyzer/lib/proxy/vocache.py (Normal file, 226 lines)
@@ -0,0 +1,226 @@
"""
Viewer object cache implementation

Important to have because if we're debugging potential state management issues
in the viewer's scene graph, we need an idea of what its scene graph _should_
look like at the current point in time. We can get that by hooking into its
VOCache so we know about its cache hits, and then compare what's in the proxy's
ObjectManager vs the viewer's (through GDB or something.)

Everything is little-endian unless otherwise specified.
These use native struct alignment and padding, which is the reason for the
native address size being stored in the header. They should have just packed
the structs properly instead.

object.cache index file:
    IndexMetaHeader:
        U32 version = 15;
        U32 address_size = 32 or 64;
    CacheIndex entries[128];

Exactly 128 region entries are allowed; if any are missing they will have
`time == 0` and should be skipped.

    CacheIndex:
        S32 index = i;  // redundant, but helpful
        U64 handle;  // ORed together global X and Y
        U32 time;


objects_<grid_x>_<grid_y>.slc:
    ObjectsMetaHeader:
        // must match ID sent in RegionHandshake. Filenames do not include grid ID so this may be
        // a file for a region at the same coords on a completely different grid!
        UUID cache_id;
        S32 num_entries;

    VOCacheEntry:
        U32 local_id;
        U32 crc;
        S32 hit_count;
        S32 dupe_count;
        S32 crc_change_count;
        // must be <= 10000 and > 0. Failing this continues parsing without reading data.
        S32 size;
        if (size <= 10000 && size > 0)
            U8 data[size];  // same representation as "data" in ObjectUpdateCompressed
        else
            U8 data[0];


    ObjectsMetaHeader header;
    for i in range(header.num_entries) {
        VOCacheEntry entry;
    }
"""

from __future__ import annotations

import io
import logging
from pathlib import Path
from typing import *

import recordclass

import hippolyzer.lib.base.serialization as se
from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.objects import handle_to_gridxy
from hippolyzer.lib.proxy.viewer_settings import iter_viewer_cache_dirs

LOG = logging.getLogger(__name__)


class ViewerObjectCache:
    VERSION = 15
    MAX_REGIONS = 128

    def __init__(self, base_path: Union[str, Path]):
        self.base_path = Path(base_path)
        # handle -> updated
        self.regions: Dict[int, int] = {}

    @classmethod
    def from_path(cls, base_path: Union[str, Path]):
        cache = cls(base_path)
        with open(cache.base_path / "object.cache", "rb") as fh:
            reader = se.BufferReader("<", fh.read())
            version = reader.read(se.U32)
            if version != cls.VERSION:
                LOG.error(f"Unsupported vocache version {version} in {cache.base_path}")
                return
            address_size = reader.read(se.U32)
            if address_size not in (32, 64):
                LOG.error(f"Unsupported address size {address_size}")
                return

            # HACK: VOCache writes structs directly to disk from memory. It doesn't specify
            # any packing rules, so the struct gets written with whatever the platform
            # defaults are. In my case, everything is 8 byte aligned because there's a
            # U64 member for the handle. I'm not an expert in this sort of thing, so we
            # try to guess the arrangement of the struct by scanning ahead.
            int_spec = se.U32
            for i in range(cls.MAX_REGIONS):
                entry_index = reader.read(int_spec) & 0xFFffFFff
                if entry_index != i:
                    LOG.warning(f"Expected region entry index to be {i}, got {entry_index}")
                # Sniff padding alignment on the first cache entry
                if i == 0:
                    # Seek to where the next index would be if everything was 8 byte aligned
                    with reader.scoped_seek(20, io.SEEK_CUR):
                        next_i = reader.read(se.U32)

                    # If it's 1 then we're using 8 byte alignment. Just read 8 bytes for all ints.
                    # If there was no padding then this would read into the region handle, but
                    # that could never have 4 bytes == 1 because both x and y will be multiples of 256.
                    if next_i == 1:
                        # Trash the extra few bits and switch to reading U64s
                        _ = reader.read(se.U32)
                        int_spec = se.U64

                handle = reader.read(se.U64)
                # Mask off any junk bits that might have been written in the padding
                time = reader.read(int_spec) & 0xFFffFFff
                # If there's no time then this is an empty slot.
                if not time:
                    continue
                cache.regions[handle] = time
        return cache

    def read_region(self, handle: int) -> Optional[RegionViewerObjectCache]:
        if handle not in self.regions:
            return None
        grid_x, grid_y = handle_to_gridxy(handle)
        objects_file = self.base_path / f"objects_{grid_x}_{grid_y}.slc"
        if not objects_file.exists():
            return None
        return RegionViewerObjectCache.from_file(objects_file)


class ViewerObjectCacheEntry(recordclass.datatuple):  # type: ignore
    local_id: int
    crc: int
    data: bytes


class RegionViewerObjectCache:
    """Parser and container for .slc files"""

    def __init__(self, cache_id: UUID, entries: List[ViewerObjectCacheEntry]):
        self.cache_id: UUID = cache_id
        self.entries: Dict[int, ViewerObjectCacheEntry] = {
            e.local_id: e for e in entries
        }

    @classmethod
    def from_file(cls, objects_path: Union[str, Path]):
        # These files are only a few megabytes max so fine to slurp in
        with open(objects_path, "rb") as fh:
            reader = se.BufferReader("<", fh.read())
            cache_id: UUID = reader.read(se.UUID)

            num_entries = reader.read(se.S32)
            entries = []
            for _ in range(num_entries):
                # EOF, the viewer specifically allows for this.
                if not len(reader):
                    break
                local_id = reader.read(se.U32)
                crc = reader.read(se.U32)
                # Not important to us
                _ = reader.read(se.U32)
                _ = reader.read(se.U32)
                _ = reader.read(se.U32)
                size = reader.read(se.U32)
                if not size or size > 10_000:
                    continue
                data = reader.read_bytes(size, to_bytes=True)
                entries.append(ViewerObjectCacheEntry(
                    local_id=local_id,
                    crc=crc,
                    data=data,
                ))
            return RegionViewerObjectCache(cache_id, entries)

    def lookup_object_data(self, local_id: int, crc: int) -> Optional[bytes]:
        entry = self.entries.get(local_id)
        if entry and entry.crc == crc:
            return entry.data
        return None


class RegionViewerObjectCacheChain:
    """Wrapper for checking the same region in multiple cache locations"""

    def __init__(self, region_caches: List[RegionViewerObjectCache]):
        self.region_caches = region_caches

    def lookup_object_data(self, local_id: int, crc: int) -> Optional[bytes]:
        for cache in self.region_caches:
            data = cache.lookup_object_data(local_id, crc)
            if data:
                return data
        return None

    @classmethod
    def for_region(cls, handle: int, cache_id: UUID):
        """
        Get a cache chain for a specific region, called on region connection

        We don't know what viewer the user is currently using, or where its cache lives,
        so we have to try every region object cache file for every viewer installed.
        """
        caches = []
        for cache_dir in iter_viewer_cache_dirs():
            if not (cache_dir / "objectcache" / "object.cache").exists():
                continue
            cache = ViewerObjectCache.from_path(cache_dir / "objectcache")
            if cache:
                caches.append(cache)
        regions = []
        for cache in caches:
            region = cache.read_region(handle)
            if not region:
                continue
            if region.cache_id != cache_id:
                continue
            regions.append(region)
        return RegionViewerObjectCacheChain(regions)
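A minimal usage sketch for the cache chain above (the handle and cache_id values are hypothetical; cache_id would normally come from the region's RegionHandshake):

    from hippolyzer.lib.base.datatypes import UUID

    # Global X and Y (multiples of 256) packed into a U64 handle
    handle = (256_000 << 32) | 256_128
    cache_id = UUID("11111111-2222-3333-4444-555555555555")  # hypothetical

    chain = RegionViewerObjectCacheChain.for_region(handle, cache_id)
    # Returns the cached ObjectUpdateCompressed-style blob on a local ID + CRC hit
    data = chain.lookup_object_data(local_id=12345, crc=0xDEADBEEF)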
@@ -1,145 +0,0 @@
"""
Outbound Xfer only.

sim->viewer Xfer is only legitimately used for terrain, so it's not worth implementing.
"""
from __future__ import annotations

import asyncio
import random
from typing import *

from hippolyzer.lib.base.datatypes import UUID
from hippolyzer.lib.base.helpers import proxify
from hippolyzer.lib.base.message.data_packer import TemplateDataPacker
from hippolyzer.lib.base.message.message import Block
from hippolyzer.lib.base.message.msgtypes import MsgType
from hippolyzer.lib.proxy.message import ProxiedMessage
from hippolyzer.lib.proxy.templates import XferPacket, XferFilePath, AssetType, XferError

if TYPE_CHECKING:
    from hippolyzer.lib.proxy.region import ProxiedRegion

_XFER_MESSAGES = {"AbortXfer", "ConfirmXferPacket", "RequestXfer", "SendXferPacket"}


class Xfer:
    def __init__(self, xfer_id: int):
        super().__init__()
        self.xfer_id: Optional[int] = xfer_id
        self.chunks: Dict[int, bytes] = {}
        self.expected_size: Optional[int] = None
        self.size_known = asyncio.Future()
        self.error_code: Union[int, XferError] = 0
        self._future: asyncio.Future[Xfer] = asyncio.Future()

    def reassemble_chunks(self) -> bytes:
        assembled = bytearray()
        for _, data in sorted(self.chunks.items()):
            assembled.extend(data)
        return assembled

    def mark_done(self):
        self._future.set_result(self)

    def done(self) -> bool:
        return self._future.done()

    def cancelled(self) -> bool:
        return self._future.cancelled()

    def is_our_message(self, message):
        return message["XferID"]["ID"] == self.xfer_id

    def cancel(self) -> bool:
        if not self.size_known.done():
            self.size_known.cancel()
        return self._future.cancel()

    def set_exception(self, exc: Union[type, BaseException]) -> None:
        if not self.size_known.done():
            self.size_known.set_exception(exc)
        return self._future.set_exception(exc)

    def __await__(self) -> Generator[Any, None, Xfer]:
        return self._future.__await__()


class XferManager:
    def __init__(self, region: ProxiedRegion):
        self._region: ProxiedRegion = proxify(region)

    def request(
            self, xfer_id: Optional[int] = None,
            file_name: Union[bytes, str, None] = None,
            file_path: Optional[Union[XferFilePath, int]] = None,
            vfile_id: Optional[UUID] = None,
            vfile_type: Optional[Union[AssetType, int]] = None,
            use_big_packets: bool = False,
            delete_on_completion: bool = False,
    ) -> Xfer:
        xfer_id = xfer_id if xfer_id is not None else random.getrandbits(64)
        self._region.circuit.send_message(ProxiedMessage(
            'RequestXfer',
            Block(
                'XferID',
                ID=xfer_id,
                Filename=file_name or b'',
                FilePath=file_path or XferFilePath.NONE,
                DeleteOnCompletion=delete_on_completion,
                UseBigPackets=use_big_packets,
                VFileID=vfile_id or UUID(),
                VFileType=vfile_type or AssetType.NONE,
            ),
        ))
        xfer = Xfer(xfer_id)
        asyncio.create_task(self._pump_xfer_replies(xfer))
        return xfer

    async def _pump_xfer_replies(self, xfer: Xfer):
        with self._region.message_handler.subscribe_async(
                _XFER_MESSAGES,
                predicate=xfer.is_our_message
        ) as get_msg:
            while not xfer.done():
                try:
                    msg: ProxiedMessage = await asyncio.wait_for(get_msg(), 5.0)
                except asyncio.exceptions.TimeoutError as e:
                    xfer.set_exception(e)
                    return

                if xfer.cancelled():
                    # AbortXfer doesn't seem to work on in-progress Xfers.
                    # Just let any new packets drop on the floor.
                    return

                if msg.name == "SendXferPacket":
                    self._handle_send_xfer_packet(msg, xfer)
                elif msg.name == "AbortXfer":
                    xfer.error_code = msg["XferID"][0].deserialize_var("Result")
                    xfer.set_exception(
                        ConnectionAbortedError(f"Xfer failed with {xfer.error_code!r}")
                    )

    def _handle_send_xfer_packet(self, msg: ProxiedMessage, xfer: Xfer):
        # Received a SendXferPacket for an Xfer we requested ourselves
        packet_id: XferPacket = msg["XferID"][0].deserialize_var("Packet")
        packet_data = msg["DataPacket"]["Data"]
        # First 4 bytes are the expected total data length
        if packet_id.PacketID == 0:
            # Yes, S32. Only used as a hint so buffers can be pre-allocated;
            # the EOF bit determines when the data actually ends.
            xfer.expected_size = TemplateDataPacker.unpack(packet_data[:4], MsgType.MVT_S32)
            # Don't re-set if we get a resend of packet 0
            if not xfer.size_known.done():
                xfer.size_known.set_result(xfer.expected_size)
            packet_data = packet_data[4:]

        self._region.circuit.send_message(ProxiedMessage(
            "ConfirmXferPacket",
            Block("XferID", ID=xfer.xfer_id, Packet=packet_id.PacketID),
        ))

        xfer.chunks[packet_id.PacketID] = packet_data
        if packet_id.IsEOF:
            xfer.mark_done()
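A tiny sketch of the packet-0 size prefix handling described in the comments above (the payloads are made up):

    import struct

    chunks = {}
    # Packet 0 carries a little-endian S32 size hint ahead of its payload
    packet0 = struct.pack("<i", 11) + b"hello "
    chunks[0] = packet0[4:]
    chunks[1] = b"world"  # the EOF bit would be set on this SendXferPacket

    assembled = b"".join(data for _, data in sorted(chunks.items()))
    assert assembled == b"hello world"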
requirements-test.txt (Normal file, 4 lines)
@@ -0,0 +1,4 @@
aioresponses
pytest
pytest-cov
flake8
@@ -60,7 +60,7 @@ sortedcontainers==2.3.0
 toml==0.10.2
 tornado==6.1
 typing-extensions==3.7.4.3
-urllib3==1.26.4
+urllib3==1.26.5
 urwid==2.1.2
 wcwidth==0.2.5
 Werkzeug==1.0.1
@@ -5,3 +5,8 @@ license_files =

 [bdist_wheel]
 universal = 1
+
+[flake8]
+max-line-length = 160
+exclude = build/*, .eggs/*
+ignore = F405, F403, E501, F841, E722, W503, E741, E731
setup.py (15 lines changed)
@@ -25,7 +25,7 @@ from setuptools import setup, find_packages

 here = path.abspath(path.dirname(__file__))

-version = '0.2'
+version = '0.6.1'

 with open(path.join(here, 'README.md')) as readme_fh:
     readme = readme_fh.read()
@@ -50,7 +50,7 @@ setup(
         "Topic :: Software Development :: Testing",
     ],
     author='Salad Dais',
-    author_email='SaladDais@users.noreply.github.com',
+    author_email='83434023+SaladDais@users.noreply.github.com',
     url='https://github.com/SaladDais/Hippolyzer/',
     license='LGPLv3',
     packages=find_packages(include=["hippolyzer", "hippolyzer.*"]),
@@ -63,9 +63,11 @@ setup(
             'lib/base/message/data/message_template.msg',
             'lib/base/message/data/message.xml',
             'lib/base/network/data/ca-bundle.crt',
-            'lib/proxy/data/static_data.db2',
-            'lib/proxy/data/static_index.db2',
-            'lib/proxy/data/LICENSE-artwork.txt',
+            'lib/base/data/static_data.db2',
+            'lib/base/data/static_index.db2',
+            'lib/base/data/avatar_lad.xml',
+            'lib/base/data/avatar_skeleton.xml',
+            'lib/base/data/LICENSE-artwork.txt',
         ],
     },
     entry_points={
@@ -93,10 +95,11 @@ setup(
         'Glymur<1.0',
         'numpy<2.0',
         # These could be in extras_require if you don't want a GUI.
-        'pyside2',
+        'pyside2<6.0',
         'qasync',
     ],
     tests_require=[
         "pytest",
+        "aioresponses",
     ],
 )
Some files were not shown because too many files have changed in this diff.