Initial implementation of proxy-provided caps
Useful for mocking out a cap while developing the viewer-side pieces of it.
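As a rough sketch of the viewer-side half (not part of the commit itself): once the proxied Seed response hands the client the "GetObjectNameExample" URL, exercising the mock cap is just a plain GET with an `obj_id` query parameter. The helper below and its use of `requests` are illustrative assumptions:

# Illustrative only: any HTTP client works. Assumes `cap_url` is the
# "GetObjectNameExample" URL taken from the proxied Seed response.
import requests

def get_object_name(cap_url: str, obj_id: str) -> str:
    resp = requests.get(cap_url, params={"obj_id": obj_id}, timeout=5)
    resp.raise_for_status()  # the addon answers 404 if the object isn't found in time
    return resp.text  # plain-text object name, per the example addon below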
@@ -206,7 +206,7 @@ class MessageMirrorAddon(BaseAddon):
                 return
             caps_source = target_region
 
-        new_base_url = caps_source.caps.get(cap_data.cap_name)
+        new_base_url = caps_source.cap_urls.get(cap_data.cap_name)
         if not new_base_url:
             print("No equiv cap?")
             return
addon_examples/mock_proxy_cap.py (new file, 78 lines added)
@@ -0,0 +1,78 @@
+"""
+Example of proxy-provided caps
+
+Useful for mocking out a cap that isn't actually implemented by the server
+while developing the viewer-side pieces of it.
+
+Implements a cap that accepts an `obj_id` UUID query parameter and returns
+the name of the object.
+"""
+
+import asyncio
+
+from mitmproxy import http
+
+from hippolyzer.lib.base.datatypes import UUID
+from hippolyzer.lib.proxy.addon_utils import BaseAddon
+from hippolyzer.lib.proxy.http_flow import HippoHTTPFlow
+from hippolyzer.lib.proxy.region import ProxiedRegion
+from hippolyzer.lib.proxy.sessions import Session, SessionManager
+
+
+class MockProxyCapExampleAddon(BaseAddon):
+    def handle_region_registered(self, session: Session, region: ProxiedRegion):
+        # Register a fake URL for our cap. This will add the cap URL to the Seed
+        # response that gets sent back to the client if that cap name was requested.
+        if "GetObjectNameExample" not in region.cap_urls:
+            region.register_proxy_cap("GetObjectNameExample")
+
+    def handle_http_request(self, session_manager: SessionManager, flow: HippoHTTPFlow):
+        if flow.cap_data.cap_name != "GetObjectNameExample":
+            return
+        if flow.request.method != "GET":
+            return
+        # This request may take a while to generate a response for, take it out of the normal
+        # HTTP handling flow and handle it in an async task.
+        # TODO: Make all HTTP handling hooks async so this isn't necessary
+        self._schedule_task(self._handle_request(flow.take()))
+
+    async def _handle_request(self, flow: HippoHTTPFlow):
+        try:
+            obj_id = UUID(flow.request.query['obj_id'])
+            obj_mgr = flow.cap_data.region().objects
+            obj = obj_mgr.lookup_fullid(obj_id)
+            if not obj:
+                flow.response = http.Response.make(
+                    status_code=404,
+                    content=f"Couldn't find {obj_id!r}".encode("utf8"),
+                    headers={"Content-Type": "text/plain"},
+                )
+                flow.release()
+                return
+
+            try:
+                await asyncio.wait_for(obj_mgr.request_object_properties(obj)[0], 1.0)
+            except asyncio.TimeoutError:
+                flow.response = http.Response.make(
+                    status_code=404,
+                    content=f"Timed out requesting {obj_id!r}".encode("utf8"),
+                    headers={"Content-Type": "text/plain"},
+                )
+                flow.release()
+                return
+
+            flow.response = http.Response.make(
+                content=obj.Name.encode("utf8"),
+                headers={"Content-Type": "text/plain"},
+            )
+            flow.release()
+        except:
+            flow.response = http.Response.make(
+                status_code=500,
+                content=b"The server is on fire",
+                headers={"Content-Type": "text/plain"},
+            )
+            flow.release()
+
+
+addons = [MockProxyCapExampleAddon()]
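For context, with the example addon loaded the Seed exchange the viewer sees looks roughly like this (an illustrative sketch; the proxy-only URL is generated per region by register_proxy_cap() and the sim hostnames are made up):

# Viewer's Seed POST body: an LLSD list of the cap names it wants.
seed_request = ["EventQueueGet", "ViewerAsset", "GetObjectNameExample"]
# The proxy strips "GetObjectNameExample" before forwarding (the sim doesn't know it)
# and remembers it in flow.metadata["needed_proxy_caps"].
# Seed response after the proxy splices its own URL back in: an LLSD map of name -> URL.
seed_response = {
    "EventQueueGet": "https://sim.example.net/cap/11111111-2222-3333-4444-555555555555",
    "ViewerAsset": "https://asset-cdn.example.net/",
    "GetObjectNameExample": "http://66666666-7777-8888-9999-000000000000.caps.hippo-proxy.localhost",
}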
@@ -568,7 +568,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
             else:
                 self.comboUntrusted.addItem(message_name)
 
-        cap_names = sorted(set(itertools.chain(*[r.caps.keys() for r in self.regionModel.regions])))
+        cap_names = sorted(set(itertools.chain(*[r.cap_urls.keys() for r in self.regionModel.regions])))
         for cap_name in cap_names:
             if cap_name.endswith("ProxyWrapper"):
                 continue
@@ -599,7 +599,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
                 break
         self.textRequest.setPlainText(
             f"""{method} [[{cap_name}]]{path}{params} HTTP/1.1
-# {region.caps.get(cap_name, "<unknown URI>")}
+# {region.cap_urls.get(cap_name, "<unknown URI>")}
 {headers}
 {body}"""
         )
@@ -700,7 +700,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         else:
             self._sendHTTPRequest(
                 "POST",
-                region.caps["UntrustedSimulatorMessage"],
+                region.cap_urls["UntrustedSimulatorMessage"],
                 {"Content-Type": "application/llsd+xml", "Accept": "application/llsd+xml"},
                 self.llsdSerializer.serialize(msg),
             )
@@ -744,7 +744,7 @@ class MessageBuilderWindow(QtWidgets.QMainWindow):
         cap_name = match.group(1)
         cap_url = session.global_caps.get(cap_name)
         if not cap_url:
-            cap_url = region.caps.get(cap_name)
+            cap_url = region.cap_urls.get(cap_name)
         if not cap_url:
             raise ValueError("Don't have a Cap for %s" % cap_name)
         uri = cap_url + match.group(2)
@@ -181,6 +181,9 @@ class BaseAddon(abc.ABC):
     def handle_region_changed(self, session: Session, region: ProxiedRegion):
         pass
 
+    def handle_region_registered(self, session: Session, region: ProxiedRegion):
+        pass
+
     def handle_circuit_created(self, session: Session, region: ProxiedRegion):
         pass
 
@@ -527,6 +527,11 @@ class AddonManager:
         with addon_ctx.push(session, region):
             return cls._call_all_addon_hooks("handle_region_changed", session, region)
 
+    @classmethod
+    def handle_region_registered(cls, session: Session, region: ProxiedRegion):
+        with addon_ctx.push(session, region):
+            return cls._call_all_addon_hooks("handle_region_registered", session, region)
+
     @classmethod
     def handle_circuit_created(cls, session: Session, region: ProxiedRegion):
         with addon_ctx.push(session, region):
@@ -24,6 +24,10 @@ class CapType(enum.Enum):
     WRAPPER = enum.auto()
     PROXY_ONLY = enum.auto()
 
+    @property
+    def fake(self) -> bool:
+        return self == CapType.PROXY_ONLY or self == CapType.WRAPPER
+
 
 class SerializedCapData(typing.NamedTuple):
     cap_name: typing.Optional[str] = None
@@ -20,7 +20,7 @@ class ProxyCapsClient(CapsClient):
     def _get_caps(self) -> Optional[CAPS_DICT]:
         if not self._region:
             return None
-        return self._region.caps
+        return self._region.cap_urls
 
     def _request_fixups(self, cap_or_url: str, headers: Dict, proxy: Optional[bool], ssl: Any):
         # We want to proxy this through Hippolyzer
@@ -90,9 +90,10 @@ class MITMProxyEventManager:
         url = flow.request.url
         cap_data = self.session_manager.resolve_cap(url)
         flow.cap_data = cap_data
-        # Don't do anything special with the proxy's own requests,
-        # we only pass it through for logging purposes.
-        if flow.request_injected:
+        # Don't do anything special with the proxy's own requests unless the requested
+        # URL can only be handled by the proxy. Ideally we only pass the request through
+        # for logging purposes.
+        if flow.request_injected and (not cap_data or not cap_data.type.fake):
             return
 
         # The local asset repo gets first bite at the apple
@@ -104,7 +105,7 @@ class MITMProxyEventManager:
         AddonManager.handle_http_request(flow)
         if cap_data and cap_data.cap_name.endswith("ProxyWrapper"):
             orig_cap_name = cap_data.cap_name.rsplit("ProxyWrapper", 1)[0]
-            orig_cap_url = cap_data.region().caps[orig_cap_name]
+            orig_cap_url = cap_data.region().cap_urls[orig_cap_name]
             split_orig_url = urllib.parse.urlsplit(orig_cap_url)
             orig_cap_host = split_orig_url[1]
 
@@ -135,7 +136,7 @@ class MITMProxyEventManager:
             )
         elif cap_data and cap_data.asset_server_cap:
             # Both the wrapper request and the actual asset server request went through
-            # the proxy
+            # the proxy. Don't bother trying the redirect strategy anymore.
            self._asset_server_proxied = True
            logging.warning("noproxy not used, switching to URI rewrite strategy")
        elif cap_data and cap_data.cap_name == "EventQueueGet":
@@ -159,6 +160,17 @@ class MITMProxyEventManager:
                     "Connection": "close",
                 },
             )
+        elif cap_data and cap_data.cap_name == "Seed":
+            # Drop any proxy-only caps from the seed request we send to the server,
+            # add those cap names as metadata so we know to send their urls in the response
+            parsed_seed: List[str] = llsd.parse_xml(flow.request.content)
+            flow.metadata['needed_proxy_caps'] = []
+            for known_cap_name, (known_cap_type, known_cap_url) in cap_data.region().caps.items():
+                if known_cap_type == CapType.PROXY_ONLY and known_cap_name in parsed_seed:
+                    parsed_seed.remove(known_cap_name)
+                    flow.metadata['needed_proxy_caps'].append(known_cap_name)
+            if flow.metadata['needed_proxy_caps']:
+                flow.request.content = llsd.format_xml(parsed_seed)
         elif not cap_data:
             if self._is_login_request(flow):
                 # Not strictly a Cap, but makes it easier to filter on.
@@ -200,8 +212,9 @@ class MITMProxyEventManager:
         if message_logger:
             message_logger.log_http_response(flow)
 
-        # Don't handle responses for requests injected by the proxy
-        if flow.request_injected:
+        # Don't process responses for requests or responses injected by the proxy.
+        # We already processed it, it came from us!
+        if flow.request_injected or flow.response_injected:
             return
 
         status = flow.response.status_code
@@ -262,6 +275,9 @@ class MITMProxyEventManager:
             for cap_name in wrappable_caps:
                 if cap_name in parsed:
                     parsed[cap_name] = region.register_wrapper_cap(cap_name)
+            # Send the client the URLs for any proxy-only caps it requested
+            for cap_name in flow.metadata['needed_proxy_caps']:
+                parsed[cap_name] = region.cap_urls[cap_name]
             flow.response.content = llsd.format_pretty_xml(parsed)
         elif cap_data.cap_name == "EventQueueGet":
             parsed_eq_resp = llsd.parse_xml(flow.response.content)
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
 import copy
+import multiprocessing
+import weakref
 from typing import *
 from typing import Optional
 
@@ -20,10 +22,11 @@ class HippoHTTPFlow:
     Hides the nastiness of writing to flow.metadata so we can pass
     state back and forth between the two proxies
     """
-    __slots__ = ("flow",)
+    __slots__ = ("flow", "callback_queue")
 
-    def __init__(self, flow: HTTPFlow):
+    def __init__(self, flow: HTTPFlow, callback_queue: Optional[multiprocessing.Queue] = None):
         self.flow: HTTPFlow = flow
+        self.callback_queue = weakref.ref(callback_queue) if callback_queue else None
         meta = self.flow.metadata
         meta.setdefault("taken", False)
         meta.setdefault("can_stream", True)
@@ -91,9 +94,19 @@ class HippoHTTPFlow:
 
     def take(self) -> HippoHTTPFlow:
         """Don't automatically pass this flow back to mitmproxy"""
+        # TODO: Having to explicitly take / release Flows to use them in an async
+        # context is kind of janky. The HTTP callback handling code should probably
+        # be made totally async, including the addon hooks. Would coroutine per-callback
+        # be expensive?
         self.metadata["taken"] = True
         return self
 
+    def release(self):
+        """Release the HTTP flow back to the normal processing flow"""
+        assert self.taken and self.callback_queue
+        self.metadata["taken"] = False
+        self.callback_queue().put(("callback", self.flow.id, self.get_state()))
+
     @property
     def taken(self) -> bool:
         return self.metadata["taken"]
@@ -120,11 +133,14 @@ class HippoHTTPFlow:
         flow: Optional[HTTPFlow] = HTTPFlow.from_state(flow_state)
         assert flow is not None
         cap_data_ser = flow.metadata.get("cap_data_ser")
+        callback_queue = None
+        if session_manager:
+            callback_queue = session_manager.flow_context.to_proxy_queue
         if cap_data_ser is not None:
             flow.metadata["cap_data"] = CapData.deserialize(cap_data_ser, session_manager)
         else:
             flow.metadata["cap_data"] = None
-        return cls(flow)
+        return cls(flow, callback_queue)
 
     def copy(self) -> HippoHTTPFlow:
         # HACK: flow.copy() expects the flow to be fully JSON serializable, but
@@ -189,7 +189,10 @@ class IPCInterceptionAddon:
             flow.response.headers["Content-Encoding"] = "identity"
 
     def response(self, flow: HTTPFlow):
-        if flow.metadata["response_injected"]:
+        cap_data: typing.Optional[SerializedCapData] = flow.metadata.get("cap_data")
+        if flow.metadata["response_injected"] and cap_data and cap_data.asset_server_cap:
+            # Don't bother intercepting asset server requests where we injected a response.
+            # We don't want to log them and they don't need any more processing by user hooks.
             return
         self._queue_flow_interception("response", flow)
 
@@ -51,10 +51,11 @@ class ProxiedRegion(BaseClientRegion):
         self.cache_id: Optional[UUID] = None
         self.circuit: Optional[ProxiedCircuit] = None
         self.circuit_addr = circuit_addr
-        self._caps = CapsMultiDict()
+        self.caps = CapsMultiDict()
+        # Reverse lookup for URL -> cap data
         self._caps_url_lookup: Dict[str, Tuple[CapType, str]] = {}
         if seed_cap:
-            self._caps["Seed"] = (CapType.NORMAL, seed_cap)
+            self.caps["Seed"] = (CapType.NORMAL, seed_cap)
         self.session: Callable[[], Session] = weakref.ref(session)
         self.message_handler: MessageHandler[Message, str] = MessageHandler()
         self.http_message_handler: MessageHandler[HippoHTTPFlow, str] = MessageHandler()
@@ -77,8 +78,8 @@ class ProxiedRegion(BaseClientRegion):
         self._name = val
 
     @property
-    def caps(self):
-        return multidict.MultiDict((x, y[1]) for x, y in self._caps.items())
+    def cap_urls(self) -> multidict.MultiDict[str, str]:
+        return multidict.MultiDict((x, y[1]) for x, y in self.caps.items())
 
     @property
     def global_pos(self) -> Vector3:
@@ -95,12 +96,12 @@ class ProxiedRegion(BaseClientRegion):
     def update_caps(self, caps: Mapping[str, str]):
         for cap_name, cap_url in caps.items():
             if isinstance(cap_url, str) and cap_url.startswith('http'):
-                self._caps.add(cap_name, (CapType.NORMAL, cap_url))
+                self.caps.add(cap_name, (CapType.NORMAL, cap_url))
         self._recalc_caps()
 
     def _recalc_caps(self):
         self._caps_url_lookup.clear()
-        for name, cap_info in self._caps.items():
+        for name, cap_info in self.caps.items():
             cap_type, cap_url = cap_info
             self._caps_url_lookup[cap_url] = (cap_type, name)
 
@@ -111,15 +112,15 @@ class ProxiedRegion(BaseClientRegion):
         caps like ViewerAsset may be the same globally and wouldn't let us infer
         which session / region the request was related to without a wrapper
         """
-        parsed = list(urllib.parse.urlsplit(self._caps[name][1]))
-        seed_id = self._caps["Seed"][1].split("/")[-1].encode("utf8")
+        parsed = list(urllib.parse.urlsplit(self.caps[name][1]))
+        seed_id = self.caps["Seed"][1].split("/")[-1].encode("utf8")
         # Give it a unique domain tied to the current Seed URI
         parsed[1] = f"{name.lower()}-{hashlib.sha256(seed_id).hexdigest()[:16]}.hippo-proxy.localhost"
         # Force the URL to HTTP, we're going to handle the request ourselves so it doesn't need
         # to be secure. This should save on expensive TLS context setup for each req.
         parsed[0] = "http"
         wrapper_url = urllib.parse.urlunsplit(parsed)
-        self._caps.add(name + "ProxyWrapper", (CapType.WRAPPER, wrapper_url))
+        self.caps.add(name + "ProxyWrapper", (CapType.WRAPPER, wrapper_url))
         self._recalc_caps()
         return wrapper_url
 
@@ -127,14 +128,14 @@ class ProxiedRegion(BaseClientRegion):
         """
         Register a cap to be completely handled by the proxy
         """
-        cap_url = f"https://caps.hippo-proxy.localhost/cap/{uuid.uuid4()!s}"
-        self._caps.add(name, (CapType.PROXY_ONLY, cap_url))
+        cap_url = f"http://{uuid.uuid4()!s}.caps.hippo-proxy.localhost"
+        self.caps.add(name, (CapType.PROXY_ONLY, cap_url))
         self._recalc_caps()
         return cap_url
 
     def register_temporary_cap(self, name: str, cap_url: str):
         """Register a Cap that only has meaning the first time it's used"""
-        self._caps.add(name, (CapType.TEMPORARY, cap_url))
+        self.caps.add(name, (CapType.TEMPORARY, cap_url))
         self._recalc_caps()
 
     def resolve_cap(self, url: str, consume=True) -> Optional[Tuple[str, str, CapType]]:
@@ -143,9 +144,9 @@ class ProxiedRegion(BaseClientRegion):
             cap_type, name = self._caps_url_lookup[cap_url]
             if cap_type == CapType.TEMPORARY and consume:
                 # Resolving a temporary cap pops it out of the dict
-                temporary_caps = self._caps.popall(name)
+                temporary_caps = self.caps.popall(name)
                 temporary_caps.remove((cap_type, cap_url))
-                self._caps.extend((name, x) for x in temporary_caps)
+                self.caps.extend((name, x) for x in temporary_caps)
                 self._recalc_caps()
             return name, cap_url, cap_type
         return None
@@ -99,12 +99,12 @@ class Session(BaseClientSession):
 
         for region in self.regions:
             if region.circuit_addr == circuit_addr:
-                if seed_url and region.caps.get("Seed") != seed_url:
+                if seed_url and region.cap_urls.get("Seed") != seed_url:
                     region.update_caps({"Seed": seed_url})
                 if handle:
                     region.handle = handle
                 return region
-            if seed_url and region.caps.get("Seed") == seed_url:
+            if seed_url and region.cap_urls.get("Seed") == seed_url:
                 return region
 
         if not circuit_addr:
@@ -113,6 +113,7 @@ class Session(BaseClientSession):
         logging.info("Registering region for %r" % (circuit_addr,))
         region = ProxiedRegion(circuit_addr, seed_url, self, handle=handle)
         self.regions.append(region)
+        AddonManager.handle_region_registered(self, region)
         return region
 
     def region_by_circuit_addr(self, circuit_addr) -> Optional[ProxiedRegion]: