mirror of
https://github.com/Grasscutters/mitmproxy.git
synced 2024-11-21 22:58:24 +00:00
tests++
This commit is contained in:
parent
bbc65e5f37
commit
5fc20e3e8c
@ -37,4 +37,4 @@ class KeepServing:
|
||||
ctx.options.rfile,
|
||||
]
|
||||
if any(opts) and not ctx.options.keepserving:
|
||||
asyncio.get_event_loop().create_task(self.watch())
|
||||
asyncio.get_running_loop().create_task(self.watch())
|
||||
|
@ -76,7 +76,7 @@ class ReadFile:
|
||||
|
||||
def running(self):
|
||||
if ctx.options.rfile:
|
||||
asyncio.get_event_loop().create_task(self.doread(ctx.options.rfile))
|
||||
asyncio.get_running_loop().create_task(self.doread(ctx.options.rfile))
|
||||
|
||||
@command.command("readfile.reading")
|
||||
def reading(self) -> bool:
|
||||
|
@ -14,6 +14,7 @@ from mitmproxy import command
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import ctx
|
||||
import mitmproxy.types as mtypes
|
||||
from mitmproxy.utils import asyncio_utils
|
||||
|
||||
|
||||
def load_script(path: str) -> typing.Optional[types.ModuleType]:
|
||||
@ -82,7 +83,10 @@ class Script:
|
||||
|
||||
self.reloadtask = None
|
||||
if reload:
|
||||
self.reloadtask = asyncio.ensure_future(self.watcher())
|
||||
self.reloadtask = asyncio_utils.create_task(
|
||||
self.watcher(),
|
||||
name=f"script watcher for {path}",
|
||||
)
|
||||
else:
|
||||
self.loadscript()
|
||||
|
||||
@ -107,10 +111,6 @@ class Script:
|
||||
ctx.master.addons.register(ns)
|
||||
self.ns = ns
|
||||
if self.ns:
|
||||
# We're already running, so we have to explicitly register and
|
||||
# configure the addon
|
||||
if self.is_running:
|
||||
ctx.master.addons.invoke_addon_sync(self.ns, hooks.RunningHook())
|
||||
try:
|
||||
ctx.master.addons.invoke_addon_sync(
|
||||
self.ns,
|
||||
@ -118,6 +118,9 @@ class Script:
|
||||
)
|
||||
except exceptions.OptionsError as e:
|
||||
script_error_handler(self.fullpath, e, msg=str(e))
|
||||
if self.is_running:
|
||||
# We're already running, so we call that on the addon now.
|
||||
ctx.master.addons.invoke_addon_sync(self.ns, hooks.RunningHook())
|
||||
|
||||
async def watcher(self):
|
||||
last_mtime = 0
|
||||
@ -166,11 +169,11 @@ class ScriptLoader:
|
||||
mod = load_script(path)
|
||||
if mod:
|
||||
with addonmanager.safecall():
|
||||
ctx.master.addons.invoke_addon_sync(mod, hooks.RunningHook())
|
||||
ctx.master.addons.invoke_addon_sync(
|
||||
mod,
|
||||
hooks.ConfigureHook(ctx.options.keys()),
|
||||
)
|
||||
ctx.master.addons.invoke_addon_sync(mod, hooks.RunningHook())
|
||||
for f in flows:
|
||||
for evt in eventsequence.iterate(f):
|
||||
ctx.master.addons.invoke_addon_sync(mod, evt)
|
||||
|
@ -241,7 +241,7 @@ class CommandManager:
|
||||
|
||||
return parsed, next_params
|
||||
|
||||
def call(self, command_name: str, *args: typing.Sequence[typing.Any]) -> typing.Any:
|
||||
def call(self, command_name: str, *args: typing.Any) -> typing.Any:
|
||||
"""
|
||||
Call a command with native arguments. May raise CommandError.
|
||||
"""
|
||||
|
@ -0,0 +1,82 @@
|
||||
"""
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
Vendored partial copy of https://github.com/tornadoweb/tornado/blob/master/tornado/platform/asyncio.py @ e18ea03
|
||||
to fix https://github.com/tornadoweb/tornado/issues/3092. Can be removed once tornado >6.1 is out.
|
||||
"""
|
||||
import errno
|
||||
|
||||
import select
|
||||
import tornado
|
||||
import tornado.platform.asyncio
|
||||
|
||||
|
||||
def patch_tornado():
|
||||
if tornado.version != "6.1":
|
||||
return
|
||||
|
||||
def _run_select(self) -> None:
|
||||
while True:
|
||||
with self._select_cond:
|
||||
while self._select_args is None and not self._closing_selector:
|
||||
self._select_cond.wait()
|
||||
if self._closing_selector:
|
||||
return
|
||||
assert self._select_args is not None
|
||||
to_read, to_write = self._select_args
|
||||
self._select_args = None
|
||||
|
||||
# We use the simpler interface of the select module instead of
|
||||
# the more stateful interface in the selectors module because
|
||||
# this class is only intended for use on windows, where
|
||||
# select.select is the only option. The selector interface
|
||||
# does not have well-documented thread-safety semantics that
|
||||
# we can rely on so ensuring proper synchronization would be
|
||||
# tricky.
|
||||
try:
|
||||
# On windows, selecting on a socket for write will not
|
||||
# return the socket when there is an error (but selecting
|
||||
# for reads works). Also select for errors when selecting
|
||||
# for writes, and merge the results.
|
||||
#
|
||||
# This pattern is also used in
|
||||
# https://github.com/python/cpython/blob/v3.8.0/Lib/selectors.py#L312-L317
|
||||
rs, ws, xs = select.select(to_read, to_write, to_write)
|
||||
ws = ws + xs
|
||||
except OSError as e:
|
||||
# After remove_reader or remove_writer is called, the file
|
||||
# descriptor may subsequently be closed on the event loop
|
||||
# thread. It's possible that this select thread hasn't
|
||||
# gotten into the select system call by the time that
|
||||
# happens in which case (at least on macOS), select may
|
||||
# raise a "bad file descriptor" error. If we get that
|
||||
# error, check and see if we're also being woken up by
|
||||
# polling the waker alone. If we are, just return to the
|
||||
# event loop and we'll get the updated set of file
|
||||
# descriptors on the next iteration. Otherwise, raise the
|
||||
# original error.
|
||||
if e.errno == getattr(errno, "WSAENOTSOCK", errno.EBADF):
|
||||
rs, _, _ = select.select([self._waker_r.fileno()], [], [], 0)
|
||||
if rs:
|
||||
ws = []
|
||||
else:
|
||||
raise
|
||||
else:
|
||||
raise
|
||||
|
||||
try:
|
||||
self._real_loop.call_soon_threadsafe(self._handle_select, rs, ws)
|
||||
except RuntimeError:
|
||||
# "Event loop is closed". Swallow the exception for
|
||||
# consistency with PollIOLoop (and logical consistency
|
||||
# with the fact that we can't guarantee that an
|
||||
# add_callback that completes without error will
|
||||
# eventually execute).
|
||||
pass
|
||||
except AttributeError:
|
||||
# ProactorEventLoop may raise this instead of RuntimeError
|
||||
# if call_soon_threadsafe races with a call to close().
|
||||
# Swallow it too for consistency.
|
||||
pass
|
||||
|
||||
tornado.platform.asyncio.AddThreadSelectorEventLoop._run_select = _run_select
|
@ -1,282 +0,0 @@
|
||||
"""
|
||||
SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
Vendored partial copy of https://github.com/tornadoweb/tornado/blob/master/tornado/platform/asyncio.py @ e18ea03
|
||||
to fix https://github.com/tornadoweb/tornado/issues/3092. Can be removed once tornado >6.1 is out.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import atexit
|
||||
import errno
|
||||
import functools
|
||||
import socket
|
||||
import threading
|
||||
import typing
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union
|
||||
|
||||
import select
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from typing import Set # noqa: F401
|
||||
from typing_extensions import Protocol
|
||||
|
||||
|
||||
class _HasFileno(Protocol):
|
||||
def fileno(self) -> int:
|
||||
pass
|
||||
|
||||
|
||||
_FileDescriptorLike = Union[int, _HasFileno]
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
# Collection of selector thread event loops to shut down on exit.
|
||||
_selector_loops = set() # type: Set[AddThreadSelectorEventLoop]
|
||||
|
||||
|
||||
def _atexit_callback() -> None:
|
||||
for loop in _selector_loops:
|
||||
with loop._select_cond:
|
||||
loop._closing_selector = True
|
||||
loop._select_cond.notify()
|
||||
try:
|
||||
loop._waker_w.send(b"a")
|
||||
except BlockingIOError:
|
||||
pass
|
||||
# If we don't join our (daemon) thread here, we may get a deadlock
|
||||
# during interpreter shutdown. I don't really understand why. This
|
||||
# deadlock happens every time in CI (both travis and appveyor) but
|
||||
# I've never been able to reproduce locally.
|
||||
loop._thread.join()
|
||||
_selector_loops.clear()
|
||||
|
||||
|
||||
atexit.register(_atexit_callback)
|
||||
|
||||
|
||||
class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
|
||||
"""Wrap an event loop to add implementations of the ``add_reader`` method family.
|
||||
|
||||
Instances of this class start a second thread to run a selector.
|
||||
This thread is completely hidden from the user; all callbacks are
|
||||
run on the wrapped event loop's thread.
|
||||
|
||||
This class is used automatically by Tornado; applications should not need
|
||||
to refer to it directly.
|
||||
|
||||
It is safe to wrap any event loop with this class, although it only makes sense
|
||||
for event loops that do not implement the ``add_reader`` family of methods
|
||||
themselves (i.e. ``WindowsProactorEventLoop``)
|
||||
|
||||
Closing the ``AddThreadSelectorEventLoop`` also closes the wrapped event loop.
|
||||
|
||||
"""
|
||||
|
||||
# This class is a __getattribute__-based proxy. All attributes other than those
|
||||
# in this set are proxied through to the underlying loop.
|
||||
MY_ATTRIBUTES = {
|
||||
"_consume_waker",
|
||||
"_select_cond",
|
||||
"_select_args",
|
||||
"_closing_selector",
|
||||
"_thread",
|
||||
"_handle_event",
|
||||
"_readers",
|
||||
"_real_loop",
|
||||
"_start_select",
|
||||
"_run_select",
|
||||
"_handle_select",
|
||||
"_wake_selector",
|
||||
"_waker_r",
|
||||
"_waker_w",
|
||||
"_writers",
|
||||
"add_reader",
|
||||
"add_writer",
|
||||
"close",
|
||||
"remove_reader",
|
||||
"remove_writer",
|
||||
}
|
||||
|
||||
def __getattribute__(self, name: str) -> Any:
|
||||
if name in AddThreadSelectorEventLoop.MY_ATTRIBUTES:
|
||||
return super().__getattribute__(name)
|
||||
return getattr(self._real_loop, name)
|
||||
|
||||
def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None:
|
||||
self._real_loop = real_loop
|
||||
|
||||
# Create a thread to run the select system call. We manage this thread
|
||||
# manually so we can trigger a clean shutdown from an atexit hook. Note
|
||||
# that due to the order of operations at shutdown, only daemon threads
|
||||
# can be shut down in this way (non-daemon threads would require the
|
||||
# introduction of a new hook: https://bugs.python.org/issue41962)
|
||||
self._select_cond = threading.Condition()
|
||||
self._select_args = (
|
||||
None
|
||||
) # type: Optional[Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]]
|
||||
self._closing_selector = False
|
||||
self._thread = threading.Thread(
|
||||
name="Tornado selector",
|
||||
daemon=True,
|
||||
target=self._run_select,
|
||||
)
|
||||
self._thread.start()
|
||||
# Start the select loop once the loop is started.
|
||||
self._real_loop.call_soon(self._start_select)
|
||||
|
||||
self._readers = {} # type: Dict[_FileDescriptorLike, Callable]
|
||||
self._writers = {} # type: Dict[_FileDescriptorLike, Callable]
|
||||
|
||||
# Writing to _waker_w will wake up the selector thread, which
|
||||
# watches for _waker_r to be readable.
|
||||
self._waker_r, self._waker_w = socket.socketpair()
|
||||
self._waker_r.setblocking(False)
|
||||
self._waker_w.setblocking(False)
|
||||
_selector_loops.add(self)
|
||||
self.add_reader(self._waker_r, self._consume_waker)
|
||||
|
||||
def __del__(self) -> None:
|
||||
# If the top-level application code uses asyncio interfaces to
|
||||
# start and stop the event loop, no objects created in Tornado
|
||||
# can get a clean shutdown notification. If we're just left to
|
||||
# be GC'd, we must explicitly close our sockets to avoid
|
||||
# logging warnings.
|
||||
_selector_loops.discard(self)
|
||||
self._waker_r.close()
|
||||
self._waker_w.close()
|
||||
|
||||
def close(self) -> None:
|
||||
with self._select_cond:
|
||||
self._closing_selector = True
|
||||
self._select_cond.notify()
|
||||
self._wake_selector()
|
||||
self._thread.join()
|
||||
_selector_loops.discard(self)
|
||||
self._waker_r.close()
|
||||
self._waker_w.close()
|
||||
self._real_loop.close()
|
||||
|
||||
def _wake_selector(self) -> None:
|
||||
try:
|
||||
self._waker_w.send(b"a")
|
||||
except BlockingIOError:
|
||||
pass
|
||||
|
||||
def _consume_waker(self) -> None:
|
||||
try:
|
||||
self._waker_r.recv(1024)
|
||||
except BlockingIOError:
|
||||
pass
|
||||
|
||||
def _start_select(self) -> None:
|
||||
# Capture reader and writer sets here in the event loop
|
||||
# thread to avoid any problems with concurrent
|
||||
# modification while the select loop uses them.
|
||||
with self._select_cond:
|
||||
assert self._select_args is None
|
||||
self._select_args = (list(self._readers.keys()), list(self._writers.keys()))
|
||||
self._select_cond.notify()
|
||||
|
||||
def _run_select(self) -> None:
|
||||
while True:
|
||||
with self._select_cond:
|
||||
while self._select_args is None and not self._closing_selector:
|
||||
self._select_cond.wait()
|
||||
if self._closing_selector:
|
||||
return
|
||||
assert self._select_args is not None
|
||||
to_read, to_write = self._select_args
|
||||
self._select_args = None
|
||||
|
||||
# We use the simpler interface of the select module instead of
|
||||
# the more stateful interface in the selectors module because
|
||||
# this class is only intended for use on windows, where
|
||||
# select.select is the only option. The selector interface
|
||||
# does not have well-documented thread-safety semantics that
|
||||
# we can rely on so ensuring proper synchronization would be
|
||||
# tricky.
|
||||
try:
|
||||
# On windows, selecting on a socket for write will not
|
||||
# return the socket when there is an error (but selecting
|
||||
# for reads works). Also select for errors when selecting
|
||||
# for writes, and merge the results.
|
||||
#
|
||||
# This pattern is also used in
|
||||
# https://github.com/python/cpython/blob/v3.8.0/Lib/selectors.py#L312-L317
|
||||
rs, ws, xs = select.select(to_read, to_write, to_write)
|
||||
ws = ws + xs
|
||||
except OSError as e:
|
||||
# After remove_reader or remove_writer is called, the file
|
||||
# descriptor may subsequently be closed on the event loop
|
||||
# thread. It's possible that this select thread hasn't
|
||||
# gotten into the select system call by the time that
|
||||
# happens in which case (at least on macOS), select may
|
||||
# raise a "bad file descriptor" error. If we get that
|
||||
# error, check and see if we're also being woken up by
|
||||
# polling the waker alone. If we are, just return to the
|
||||
# event loop and we'll get the updated set of file
|
||||
# descriptors on the next iteration. Otherwise, raise the
|
||||
# original error.
|
||||
if e.errno == getattr(errno, "WSAENOTSOCK", errno.EBADF):
|
||||
rs, _, _ = select.select([self._waker_r.fileno()], [], [], 0)
|
||||
if rs:
|
||||
ws = []
|
||||
else:
|
||||
raise
|
||||
else:
|
||||
raise
|
||||
|
||||
try:
|
||||
self._real_loop.call_soon_threadsafe(self._handle_select, rs, ws)
|
||||
except RuntimeError:
|
||||
# "Event loop is closed". Swallow the exception for
|
||||
# consistency with PollIOLoop (and logical consistency
|
||||
# with the fact that we can't guarantee that an
|
||||
# add_callback that completes without error will
|
||||
# eventually execute).
|
||||
pass
|
||||
except AttributeError:
|
||||
# ProactorEventLoop may raise this instead of RuntimeError
|
||||
# if call_soon_threadsafe races with a call to close().
|
||||
# Swallow it too for consistency.
|
||||
pass
|
||||
|
||||
def _handle_select(
|
||||
self, rs: List["_FileDescriptorLike"], ws: List["_FileDescriptorLike"]
|
||||
) -> None:
|
||||
for r in rs:
|
||||
self._handle_event(r, self._readers)
|
||||
for w in ws:
|
||||
self._handle_event(w, self._writers)
|
||||
self._start_select()
|
||||
|
||||
def _handle_event(
|
||||
self,
|
||||
fd: "_FileDescriptorLike",
|
||||
cb_map: Dict["_FileDescriptorLike", Callable],
|
||||
) -> None:
|
||||
try:
|
||||
callback = cb_map[fd]
|
||||
except KeyError:
|
||||
return
|
||||
callback()
|
||||
|
||||
def add_reader(
|
||||
self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
|
||||
) -> None:
|
||||
self._readers[fd] = functools.partial(callback, *args)
|
||||
self._wake_selector()
|
||||
|
||||
def add_writer(
|
||||
self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
|
||||
) -> None:
|
||||
self._writers[fd] = functools.partial(callback, *args)
|
||||
self._wake_selector()
|
||||
|
||||
def remove_reader(self, fd: "_FileDescriptorLike") -> None:
|
||||
del self._readers[fd]
|
||||
self._wake_selector()
|
||||
|
||||
def remove_writer(self, fd: "_FileDescriptorLike") -> None:
|
||||
del self._writers[fd]
|
||||
self._wake_selector()
|
@ -1,5 +1,6 @@
|
||||
import asyncio
|
||||
import traceback
|
||||
from typing import Optional
|
||||
|
||||
from mitmproxy import addonmanager, hooks
|
||||
from mitmproxy import command
|
||||
@ -18,26 +19,32 @@ class Master:
|
||||
|
||||
event_loop: asyncio.AbstractEventLoop
|
||||
|
||||
def __init__(self, opts):
|
||||
def __init__(self, opts, event_loop: Optional[asyncio.AbstractEventLoop] = None):
|
||||
self.should_exit = asyncio.Event()
|
||||
self.options: options.Options = opts or options.Options()
|
||||
self.commands = command.CommandManager(self)
|
||||
self.addons = addonmanager.AddonManager(self)
|
||||
self.log = log.Log(self)
|
||||
self.event_loop = event_loop or asyncio.get_running_loop()
|
||||
|
||||
mitmproxy_ctx.master = self
|
||||
mitmproxy_ctx.log = self.log
|
||||
mitmproxy_ctx.options = self.options
|
||||
|
||||
async def run(self) -> None:
|
||||
self.event_loop = asyncio.get_running_loop()
|
||||
old_handler = self.event_loop.get_exception_handler()
|
||||
self.event_loop.set_exception_handler(self._asyncio_exception_handler)
|
||||
try:
|
||||
self.should_exit.clear()
|
||||
|
||||
# Handle scheduled tasks (configure()) first.
|
||||
await asyncio.sleep(0)
|
||||
await self.running()
|
||||
await self.should_exit.wait()
|
||||
|
||||
await self.done()
|
||||
finally:
|
||||
self.event_loop.set_exception_handler(old_handler)
|
||||
|
||||
def shutdown(self):
|
||||
"""
|
||||
|
@ -17,11 +17,6 @@ import collections.abc
|
||||
import pydivert
|
||||
import pydivert.consts
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
class WindowsError(OSError):
|
||||
@property
|
||||
def winerror(self) -> int:
|
||||
return 42
|
||||
|
||||
REDIRECT_API_HOST = "127.0.0.1"
|
||||
REDIRECT_API_PORT = 8085
|
||||
@ -300,7 +295,7 @@ class Redirect(threading.Thread):
|
||||
while True:
|
||||
try:
|
||||
packet = self.windivert.recv()
|
||||
except WindowsError as e:
|
||||
except OSError as e:
|
||||
if e.winerror == 995:
|
||||
return
|
||||
else:
|
||||
@ -318,8 +313,8 @@ class Redirect(threading.Thread):
|
||||
"""
|
||||
try:
|
||||
return self.windivert.recv()
|
||||
except WindowsError as e:
|
||||
if e.winerror == 995:
|
||||
except OSError as e:
|
||||
if e.winerror == 995: # type: ignore
|
||||
return None
|
||||
else:
|
||||
raise
|
||||
|
@ -21,7 +21,11 @@ class TestAddons(addonmanager.AddonManager):
|
||||
|
||||
class RecordingMaster(mitmproxy.master.Master):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
loop = asyncio.new_event_loop()
|
||||
super().__init__(*args, **kwargs, event_loop=loop)
|
||||
self.addons = TestAddons(self)
|
||||
self.logs = []
|
||||
|
||||
|
@ -12,7 +12,7 @@ import tempfile
|
||||
import contextlib
|
||||
import threading
|
||||
|
||||
from mitmproxy.contrib.tornado.asyncio import AddThreadSelectorEventLoop
|
||||
from tornado.platform.asyncio import AddThreadSelectorEventLoop
|
||||
|
||||
import urwid
|
||||
|
||||
@ -23,6 +23,7 @@ from mitmproxy.addons import intercept
|
||||
from mitmproxy.addons import eventstore
|
||||
from mitmproxy.addons import readfile
|
||||
from mitmproxy.addons import view
|
||||
from mitmproxy.contrib.tornado import patch_tornado
|
||||
from mitmproxy.tools.console import consoleaddons
|
||||
from mitmproxy.tools.console import defaultkeys
|
||||
from mitmproxy.tools.console import keymap
|
||||
@ -209,8 +210,9 @@ class ConsoleMaster(master.Master):
|
||||
|
||||
loop = asyncio.get_running_loop()
|
||||
if isinstance(loop, getattr(asyncio, "ProactorEventLoop", tuple())):
|
||||
patch_tornado()
|
||||
# fix for https://bugs.python.org/issue37373
|
||||
loop = AddThreadSelectorEventLoop(loop)
|
||||
loop = AddThreadSelectorEventLoop(loop) # type: ignore
|
||||
self.loop = urwid.MainLoop(
|
||||
urwid.SolidFill("x"),
|
||||
event_loop=urwid.AsyncioEventLoop(loop=loop),
|
||||
|
@ -63,7 +63,8 @@ class ActionBar(urwid.WidgetWrap):
|
||||
def prep_prompt(self, p):
|
||||
return p.strip() + ": "
|
||||
|
||||
def shorten_message(self, msg, max_width):
|
||||
@staticmethod
|
||||
def shorten_message(msg, max_width):
|
||||
"""
|
||||
Shorten message so that it fits into a single line in the statusbar.
|
||||
"""
|
||||
|
@ -9,7 +9,7 @@ from mitmproxy import exceptions, master
|
||||
from mitmproxy import options
|
||||
from mitmproxy import optmanager
|
||||
from mitmproxy.tools import cmdline
|
||||
from mitmproxy.utils import asyncio_utils, debug, arg_check
|
||||
from mitmproxy.utils import debug, arg_check
|
||||
|
||||
|
||||
def assert_utf8_env():
|
||||
@ -58,6 +58,7 @@ def run(
|
||||
extra: Extra argument processing callable which returns a dict of
|
||||
options.
|
||||
"""
|
||||
async def main() -> master.Master:
|
||||
debug.register_info_dumpers()
|
||||
|
||||
opts = options.Options()
|
||||
@ -99,7 +100,6 @@ def run(
|
||||
print("{}: {}".format(sys.argv[0], e), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
async def main():
|
||||
loop = asyncio.get_running_loop()
|
||||
|
||||
def _sigint(*_):
|
||||
@ -113,11 +113,11 @@ def run(
|
||||
signal.signal(signal.SIGINT, _sigint)
|
||||
signal.signal(signal.SIGTERM, _sigterm)
|
||||
|
||||
return await master.run()
|
||||
|
||||
asyncio.run(main())
|
||||
await master.run()
|
||||
return master
|
||||
|
||||
return asyncio.run(main())
|
||||
|
||||
|
||||
def mitmproxy(args=None) -> typing.Optional[int]: # pragma: no cover
|
||||
if os.name == "nt":
|
||||
|
@ -1,6 +1,5 @@
|
||||
import tornado.httpserver
|
||||
import tornado.ioloop
|
||||
from tornado.platform.asyncio import AsyncIOMainLoop
|
||||
|
||||
from mitmproxy import addons
|
||||
from mitmproxy import log
|
||||
@ -11,6 +10,7 @@ from mitmproxy.addons import intercept
|
||||
from mitmproxy.addons import readfile
|
||||
from mitmproxy.addons import termlog
|
||||
from mitmproxy.addons import view
|
||||
from mitmproxy.contrib.tornado import patch_tornado
|
||||
from mitmproxy.tools.web import app, webaddons, static_viewer
|
||||
|
||||
|
||||
@ -93,6 +93,7 @@ class WebMaster(master.Master):
|
||||
)
|
||||
|
||||
async def running(self):
|
||||
patch_tornado()
|
||||
# Register tornado with the current event loop
|
||||
tornado.ioloop.IOLoop.current()
|
||||
|
||||
|
@ -1,11 +1,8 @@
|
||||
import asyncio
|
||||
import concurrent.futures
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
from asyncio import tasks
|
||||
from collections.abc import Coroutine
|
||||
from typing import Awaitable, Callable, Optional, TypeVar
|
||||
from typing import Optional
|
||||
|
||||
from mitmproxy.utils import human
|
||||
|
||||
|
@ -6,6 +6,7 @@ exclude = mitmproxy/contrib/*,test/mitmproxy/data/*,release/build/*
|
||||
addons = file,open,basestring,xrange,unicode,long,cmp
|
||||
|
||||
[tool:pytest]
|
||||
asyncio_mode = auto
|
||||
testpaths = test
|
||||
addopts = --capture=no --color=yes
|
||||
|
||||
|
2
setup.py
2
setup.py
@ -104,7 +104,7 @@ setup(
|
||||
"parver>=0.1,<2.0",
|
||||
"pdoc>=4.0.0",
|
||||
"pyinstaller==4.7",
|
||||
"pytest-asyncio>=0.10.0,<0.16,!=0.14",
|
||||
"pytest-asyncio>=0.17.0,<0.19",
|
||||
"pytest-cov>=2.7.1,<3.1",
|
||||
"pytest-timeout>=1.3.3,<2.1",
|
||||
"pytest-xdist>=2.1.0,<3",
|
||||
|
@ -53,7 +53,7 @@ class Benchmark:
|
||||
def running(self):
|
||||
if not self.started:
|
||||
self.started = True
|
||||
asyncio.get_event_loop().create_task(self.procs())
|
||||
asyncio.get_running_loop().create_task(self.procs())
|
||||
|
||||
def done(self):
|
||||
self.pr.dump_stats(ctx.options.benchmark_save_path + ".prof")
|
||||
|
@ -4,10 +4,8 @@ from mitmproxy.test import tutils
|
||||
from mitmproxy.test import taddons
|
||||
from mitmproxy.http import Headers
|
||||
|
||||
from ..mitmproxy import tservers
|
||||
|
||||
|
||||
class TestScripts(tservers.MasterTest):
|
||||
class TestScripts:
|
||||
def test_add_header(self, tdata):
|
||||
with taddons.context() as tctx:
|
||||
a = tctx.script(tdata.path("../examples/addons/anatomy2.py"))
|
||||
|
@ -194,7 +194,7 @@ class TestScriptLoader:
|
||||
await tctx.master.await_log("recorder response")
|
||||
debug = [i.msg for i in tctx.master.logs if i.level == "debug"]
|
||||
assert debug == [
|
||||
'recorder running', 'recorder configure',
|
||||
'recorder configure', 'recorder running',
|
||||
'recorder requestheaders', 'recorder request',
|
||||
'recorder responseheaders', 'recorder response'
|
||||
]
|
||||
@ -285,16 +285,16 @@ class TestScriptLoader:
|
||||
debug = [i.msg for i in tctx.master.logs if i.level == "debug"]
|
||||
assert debug == [
|
||||
'a load',
|
||||
'a running',
|
||||
'a configure',
|
||||
'a running',
|
||||
|
||||
'b load',
|
||||
'b running',
|
||||
'b configure',
|
||||
'b running',
|
||||
|
||||
'c load',
|
||||
'c running',
|
||||
'c configure',
|
||||
'c running',
|
||||
]
|
||||
|
||||
tctx.master.clear()
|
||||
@ -330,14 +330,16 @@ class TestScriptLoader:
|
||||
'b done',
|
||||
'a configure',
|
||||
'e load',
|
||||
'e running',
|
||||
'e configure',
|
||||
'e running',
|
||||
]
|
||||
|
||||
# stop reload tasks
|
||||
tctx.configure(sc, scripts=[])
|
||||
|
||||
def test_order(event_loop, tdata, capsys):
|
||||
|
||||
def test_order(tdata, capsys):
|
||||
"""Integration test: Make sure that the runtime hooks are triggered on startup in the correct order."""
|
||||
asyncio.set_event_loop(event_loop)
|
||||
main.mitmdump([
|
||||
"-n",
|
||||
"-s", tdata.path("mitmproxy/data/addonscripts/recorder/recorder.py"),
|
||||
@ -348,6 +350,7 @@ def test_order(event_loop, tdata, capsys):
|
||||
r"\('recorder', 'load', .+\n"
|
||||
r"\('recorder', 'configure', .+\n"
|
||||
r"Loading script.+shutdown.py\n"
|
||||
r"\('recorder', 'running', .+\n$",
|
||||
r"\('recorder', 'running', .+\n"
|
||||
r"\('recorder', 'done', .+\n$",
|
||||
capsys.readouterr().out,
|
||||
)
|
||||
|
@ -4,9 +4,11 @@ import pytest
|
||||
from mitmproxy.addons import termlog
|
||||
from mitmproxy import log
|
||||
from mitmproxy.test import taddons
|
||||
from test.conftest import skip_windows
|
||||
|
||||
|
||||
class TestTermLog:
|
||||
@skip_windows # not sure why this is suddenly necessary (03/2022)
|
||||
@pytest.mark.usefixtures('capfd')
|
||||
@pytest.mark.parametrize('outfile, expected_out, expected_err', [
|
||||
(None, ['one', 'three'], ['four']),
|
||||
|
@ -70,7 +70,7 @@ def test_command():
|
||||
assert tctx.master.commands.execute("test.command") == "here"
|
||||
|
||||
|
||||
def test_halt():
|
||||
async def test_halt():
|
||||
o = options.Options()
|
||||
m = master.Master(o)
|
||||
a = addonmanager.AddonManager(m)
|
||||
@ -218,7 +218,7 @@ async def test_simple():
|
||||
assert ta in a
|
||||
|
||||
|
||||
def test_load_option():
|
||||
async def test_load_option():
|
||||
o = options.Options()
|
||||
m = master.Master(o)
|
||||
a = addonmanager.AddonManager(m)
|
||||
@ -226,7 +226,7 @@ def test_load_option():
|
||||
assert "custom_option" in m.options._options
|
||||
|
||||
|
||||
def test_nesting():
|
||||
async def test_nesting():
|
||||
o = options.Options()
|
||||
m = master.Master(o)
|
||||
a = addonmanager.AddonManager(m)
|
||||
|
@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import re
|
||||
import sys
|
||||
from typing import List
|
||||
@ -5,7 +6,6 @@ from typing import List
|
||||
import pytest
|
||||
|
||||
import mitmproxy.options
|
||||
from mitmproxy import master
|
||||
from mitmproxy.tools.console import window
|
||||
from mitmproxy.tools.console.master import ConsoleMaster
|
||||
|
||||
@ -25,37 +25,42 @@ class ConsoleTestMaster(ConsoleMaster):
|
||||
for key in tokenize(input):
|
||||
self.window.keypress(self.ui.get_cols_rows(), key)
|
||||
|
||||
def screen_contents(self) -> str:
|
||||
return b"\n".join(self.window.render((80, 24), True)._text_content()).decode()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def console(monkeypatch):
|
||||
monkeypatch.setattr(window.Screen, "get_cols_rows", lambda self: (120, 120))
|
||||
monkeypatch.setattr(master.Master, "run_loop", lambda *_: True)
|
||||
def console(monkeypatch) -> ConsoleTestMaster:
|
||||
# monkeypatch.setattr(window.Screen, "get_cols_rows", lambda self: (120, 120))
|
||||
monkeypatch.setattr(window.Screen, "start", lambda *_: True)
|
||||
monkeypatch.setattr(ConsoleTestMaster, "sig_call_in", lambda *_, **__: True)
|
||||
monkeypatch.setattr(sys.stdout, "isatty", lambda: True)
|
||||
|
||||
async def make_master():
|
||||
opts = mitmproxy.options.Options()
|
||||
m = ConsoleTestMaster(opts)
|
||||
m.run()
|
||||
await m.running()
|
||||
return m
|
||||
return asyncio.run(make_master())
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
def test_integration(tdata, console):
|
||||
console.type(f":view.flows.load {tdata.path('mitmproxy/data/dumpfile-7.mitm')}<enter>")
|
||||
console.type("<enter><tab><tab>")
|
||||
console.type("<space><tab><tab>") # view second flow
|
||||
assert "http://example.com/" in console.screen_contents()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
def test_options_home_end(console):
|
||||
console.type("O<home><end>")
|
||||
assert "Options" in console.screen_contents()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
def test_keybindings_home_end(console):
|
||||
console.type("K<home><end>")
|
||||
assert "Key Binding" in console.screen_contents()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
def test_replay_count(console):
|
||||
console.type(":replay.server.count<enter>")
|
||||
assert "Data viewer" in console.screen_contents()
|
||||
|
@ -1,26 +0,0 @@
|
||||
import urwid
|
||||
|
||||
import pytest
|
||||
|
||||
from mitmproxy import options, hooks
|
||||
from mitmproxy.tools import console
|
||||
|
||||
from ... import tservers
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestMaster(tservers.MasterTest):
|
||||
def mkmaster(self, **opts):
|
||||
o = options.Options(**opts)
|
||||
m = console.master.ConsoleMaster(o)
|
||||
m.addons.trigger(hooks.ConfigureHook(o.keys()))
|
||||
return m
|
||||
|
||||
async def test_basic(self):
|
||||
m = self.mkmaster()
|
||||
for i in (1, 2, 3):
|
||||
try:
|
||||
await self.dummy_cycle(m, 1, b"")
|
||||
except urwid.ExitMainLoop:
|
||||
pass
|
||||
assert len(m.view) == i
|
@ -4,7 +4,7 @@ from mitmproxy import options
|
||||
from mitmproxy.tools.console import statusbar, master
|
||||
|
||||
|
||||
def test_statusbar(monkeypatch):
|
||||
async def test_statusbar(monkeypatch):
|
||||
o = options.Options()
|
||||
m = master.ConsoleMaster(o)
|
||||
m.options.update(
|
||||
@ -48,15 +48,9 @@ def test_statusbar(monkeypatch):
|
||||
("warn", "(more in eventlog)")])
|
||||
])
|
||||
def test_shorten_message(message, ready_message):
|
||||
o = options.Options()
|
||||
m = master.ConsoleMaster(o)
|
||||
ab = statusbar.ActionBar(m)
|
||||
assert ab.shorten_message(message, max_width=30) == ready_message
|
||||
assert statusbar.ActionBar.shorten_message(message, max_width=30) == ready_message
|
||||
|
||||
|
||||
def test_shorten_message_narrow():
|
||||
o = options.Options()
|
||||
m = master.ConsoleMaster(o)
|
||||
ab = statusbar.ActionBar(m)
|
||||
shorten_msg = ab.shorten_message("error", max_width=4)
|
||||
shorten_msg = statusbar.ActionBar.shorten_message("error", max_width=4)
|
||||
assert shorten_msg == [(None, "\u2026"), ("warn", "(more in eventlog)")]
|
||||
|
@ -1,7 +1,7 @@
|
||||
import argparse
|
||||
|
||||
from mitmproxy import options
|
||||
from mitmproxy.tools import cmdline, web, dump, console
|
||||
from mitmproxy.tools import cmdline
|
||||
from mitmproxy.tools import main
|
||||
|
||||
|
||||
@ -15,20 +15,17 @@ def test_common():
|
||||
|
||||
def test_mitmproxy():
|
||||
opts = options.Options()
|
||||
console.master.ConsoleMaster(opts)
|
||||
ap = cmdline.mitmproxy(opts)
|
||||
assert ap
|
||||
|
||||
|
||||
def test_mitmdump():
|
||||
opts = options.Options()
|
||||
dump.DumpMaster(opts)
|
||||
ap = cmdline.mitmdump(opts)
|
||||
assert ap
|
||||
|
||||
|
||||
def test_mitmweb():
|
||||
opts = options.Options()
|
||||
web.master.WebMaster(opts)
|
||||
ap = cmdline.mitmweb(opts)
|
||||
assert ap
|
||||
|
@ -13,21 +13,21 @@ class TestDumpMaster:
|
||||
m = dump.DumpMaster(o, with_termlog=False, with_dumper=False)
|
||||
return m
|
||||
|
||||
def test_has_error(self):
|
||||
async def test_has_error(self):
|
||||
m = self.mkmaster()
|
||||
ent = log.LogEntry("foo", "error")
|
||||
m.addons.trigger(log.AddLogHook(ent))
|
||||
assert m.errorcheck.has_errored
|
||||
|
||||
@pytest.mark.parametrize("termlog", [False, True])
|
||||
def test_addons_termlog(self, termlog):
|
||||
async def test_addons_termlog(self, termlog):
|
||||
with mock.patch('sys.stdout'):
|
||||
o = options.Options()
|
||||
m = dump.DumpMaster(o, with_termlog=termlog)
|
||||
assert (m.addons.get('termlog') is not None) == termlog
|
||||
|
||||
@pytest.mark.parametrize("dumper", [False, True])
|
||||
def test_addons_dumper(self, dumper):
|
||||
async def test_addons_dumper(self, dumper):
|
||||
with mock.patch('sys.stdout'):
|
||||
o = options.Options()
|
||||
m = dump.DumpMaster(o, with_dumper=dumper)
|
||||
|
@ -1,4 +1,3 @@
|
||||
import asyncio
|
||||
import io
|
||||
import gzip
|
||||
import json
|
||||
@ -76,7 +75,7 @@ def test_generate_tflow_js(tdata):
|
||||
)
|
||||
|
||||
|
||||
def test_generate_options_js():
|
||||
async def test_generate_options_js():
|
||||
o = options.Options()
|
||||
m = webmaster.WebMaster(o)
|
||||
opt: optmanager._Option
|
||||
@ -117,14 +116,12 @@ def test_generate_options_js():
|
||||
|
||||
@pytest.mark.usefixtures("no_tornado_logging", "tdata")
|
||||
class TestApp(tornado.testing.AsyncHTTPTestCase):
|
||||
def get_new_ioloop(self):
|
||||
io_loop = tornado.platform.asyncio.AsyncIOLoop()
|
||||
asyncio.set_event_loop(io_loop.asyncio_loop)
|
||||
return io_loop
|
||||
|
||||
def get_app(self):
|
||||
async def make_master():
|
||||
o = options.Options(http2=False)
|
||||
m = webmaster.WebMaster(o, with_termlog=False)
|
||||
return webmaster.WebMaster(o, with_termlog=False)
|
||||
|
||||
m = self.io_loop.asyncio_loop.run_until_complete(make_master())
|
||||
f = tflow.tflow(resp=True)
|
||||
f.id = "42"
|
||||
f.request.content = b"foo\nbar"
|
||||
|
@ -1,19 +0,0 @@
|
||||
import pytest
|
||||
|
||||
from mitmproxy import options
|
||||
from mitmproxy.tools.web import master
|
||||
|
||||
from ... import tservers
|
||||
|
||||
|
||||
class TestWebMaster(tservers.MasterTest):
|
||||
def mkmaster(self, **opts):
|
||||
o = options.Options(**opts)
|
||||
return master.WebMaster(o)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_basic(self):
|
||||
m = self.mkmaster()
|
||||
for i in (1, 2, 3):
|
||||
await self.dummy_cycle(m, 1, b"")
|
||||
assert len(m.view) == i
|
@ -1,31 +0,0 @@
|
||||
from unittest import mock
|
||||
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import io
|
||||
from mitmproxy.proxy import server_hooks
|
||||
from mitmproxy.test import tflow
|
||||
from mitmproxy.test import tutils
|
||||
|
||||
|
||||
class MasterTest:
|
||||
|
||||
async def cycle(self, master, content):
|
||||
f = tflow.tflow(req=tutils.treq(content=content))
|
||||
layer = mock.Mock("mitmproxy.proxy.protocol.base.Layer")
|
||||
layer.client_conn = f.client_conn
|
||||
await master.addons.handle_lifecycle(server_hooks.ClientConnectedHook(layer))
|
||||
for e in eventsequence.iterate(f):
|
||||
await master.addons.handle_lifecycle(e)
|
||||
await master.addons.handle_lifecycle(server_hooks.ClientDisconnectedHook(layer))
|
||||
return f
|
||||
|
||||
async def dummy_cycle(self, master, n, content):
|
||||
for i in range(n):
|
||||
await self.cycle(master, content)
|
||||
await master._shutdown()
|
||||
|
||||
def flowfile(self, path):
|
||||
with open(path, "wb") as f:
|
||||
fw = io.FlowWriter(f)
|
||||
t = tflow.tflow(resp=True)
|
||||
fw.add(t)
|
Loading…
Reference in New Issue
Block a user