cleanup mitmproxy.controller, raise Kill in Channel (#1085)

Maximilian Hils 2016-04-14 12:03:29 -07:00
parent bc60c26c7b
commit 423c076c61
16 changed files with 266 additions and 179 deletions

View File

@@ -451,7 +451,7 @@ class ConsoleMaster(flow.FlowMaster):
         self.ui.clear()

     def ticker(self, *userdata):
-        changed = self.tick(self.masterq, timeout=0)
+        changed = self.tick(timeout=0)
         if changed:
             self.loop.draw_screen()
             signals.update_settings.send()
@@ -467,11 +467,6 @@ class ConsoleMaster(flow.FlowMaster):
             handle_mouse = not self.options.no_mouse,
         )

-        self.server.start_slave(
-            controller.Slave,
-            controller.Channel(self.masterq, self.should_exit)
-        )
-
         if self.options.rfile:
             ret = self.load_flows_path(self.options.rfile)
             if ret and self.state.flow_count():
@@ -507,6 +502,7 @@ class ConsoleMaster(flow.FlowMaster):
                 lambda *args: self.view_flowlist()
             )

+        self.start()
         try:
             self.loop.run()
         except Exception:
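
The ConsoleMaster hunks above show the intended embedding pattern after this cleanup: the UI keeps ownership of its own event loop, calls start() once (replacing the removed start_slave() wiring), and polls tick(timeout=0) from its ticker. A minimal, hedged sketch of that pattern; run_with_ui_loop, draw_screen and loop_alive are made-up stand-ins for the host UI's hooks:

def run_with_ui_loop(master, draw_screen, loop_alive):
    # Sketch only: the host UI drives the loop, the master is just polled.
    master.start()                            # starts server threads, clears should_exit
    try:
        while loop_alive():
            changed = master.tick(timeout=0)  # non-blocking drain of master.event_queue
            if changed:
                draw_screen()
    finally:
        master.shutdown()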

View File

@@ -2,44 +2,93 @@ from __future__ import absolute_import
 from six.moves import queue
 import threading

+from .exceptions import Kill

-class DummyReply:
+
+class Master(object):
     """
-    A reply object that does nothing. Useful when we need an object to seem
-    like it has a channel, and during testing.
+    The master handles mitmproxy's main event loop.
     """
     def __init__(self):
-        self.acked = False
+        self.event_queue = queue.Queue()
+        self.should_exit = threading.Event()

-    def __call__(self, msg=False):
-        self.acked = True
+    def start(self):
+        self.should_exit.clear()
+
+    def run(self):
+        self.start()
+        try:
+            while not self.should_exit.is_set():
+                # Don't choose a very small timeout in Python 2:
+                # https://github.com/mitmproxy/mitmproxy/issues/443
+                # TODO: Lower the timeout value if we move to Python 3.
+                self.tick(0.1)
+        finally:
+            self.shutdown()
+
+    def tick(self, timeout):
+        changed = False
+        try:
+            # This endless loop runs until the 'Queue.Empty'
+            # exception is thrown.
+            while True:
+                mtype, obj = self.event_queue.get(timeout=timeout)
+                handle_func = getattr(self, "handle_" + mtype)
+                handle_func(obj)
+                self.event_queue.task_done()
+                changed = True
+        except queue.Empty:
+            pass
+        return changed
+
+    def shutdown(self):
+        self.should_exit.set()

-class Reply:
+
+class ServerMaster(Master):
     """
-    Messages sent through a channel are decorated with a "reply" attribute.
-    This object is used to respond to the message through the return
-    channel.
+    The ServerMaster adds server thread support to the master.
     """
-    def __init__(self, obj):
-        self.obj = obj
-        self.q = queue.Queue()
-        self.acked = False
+    def __init__(self):
+        super(ServerMaster, self).__init__()
+        self.servers = []

-    def __call__(self, msg=None):
-        if not self.acked:
-            self.acked = True
-            if msg is None:
-                self.q.put(self.obj)
-            else:
-                self.q.put(msg)
+    def add_server(self, server):
+        # We give a Channel to the server which can be used to communicate with the master
+        channel = Channel(self.event_queue, self.should_exit)
+        server.set_channel(channel)
+        self.servers.append(server)
+
+    def start(self):
+        super(ServerMaster, self).start()
+        for server in self.servers:
+            ServerThread(server).start()
+
+    def shutdown(self):
+        for server in self.servers:
+            server.shutdown()
+        super(ServerMaster, self).shutdown()

-class Channel:
+
+class ServerThread(threading.Thread):
+    def __init__(self, server):
+        self.server = server
+        super(ServerThread, self).__init__()
+        address = getattr(self.server, "address", None)
+        self.name = "ServerThread ({})".format(repr(address))
+
+    def run(self):
+        self.server.serve_forever()
+
+
+class Channel(object):
+    """
+    The only way for the proxy server to communicate with the master
+    is to use the channel it has been given.
+    """
     def __init__(self, q, should_exit):
         self.q = q
@@ -47,8 +96,11 @@ class Channel:
     def ask(self, mtype, m):
         """
         Decorate a message with a reply attribute, and send it to the
-        master. then wait for a response.
+        master. Then wait for a response.
+
+        Raises:
+            Kill: All connections should be closed immediately.
         """
         m.reply = Reply(m)
         self.q.put((mtype, m))
@@ -58,85 +110,54 @@ class Channel:
                 g = m.reply.q.get(timeout=0.5)
             except queue.Empty:  # pragma: no cover
                 continue
+            if g == Kill:
+                raise Kill()
             return g
+        raise Kill()

     def tell(self, mtype, m):
         """
         Decorate a message with a dummy reply attribute, send it to the
         master, then return immediately.
         """
         m.reply = DummyReply()
         self.q.put((mtype, m))

-class Slave(threading.Thread):
+
+class DummyReply(object):
     """
-    Slaves get a channel end-point through which they can send messages to
-    the master.
+    A reply object that does nothing. Useful when we need an object to seem
+    like it has a channel, and during testing.
     """
-    def __init__(self, channel, server):
-        self.channel, self.server = channel, server
-        self.server.set_channel(channel)
-        threading.Thread.__init__(self)
-        self.name = "SlaveThread ({})".format(repr(self.server.address))
+    def __init__(self):
+        self.acked = False

-    def run(self):
-        self.server.serve_forever()
+    def __call__(self, msg=False):
+        self.acked = True


-class Master(object):
+# Special value to distinguish the case where no reply was sent
+NO_REPLY = object()
+
+
+class Reply(object):
     """
-    Masters get and respond to messages from slaves.
+    Messages sent through a channel are decorated with a "reply" attribute.
+    This object is used to respond to the message through the return
+    channel.
     """
-    def __init__(self, server):
-        """
-        server may be None if no server is needed.
-        """
-        self.server = server
-        self.masterq = queue.Queue()
-        self.should_exit = threading.Event()
+    def __init__(self, obj):
+        self.obj = obj
+        self.q = queue.Queue()
+        self.acked = False

-    def tick(self, q, timeout):
-        changed = False
-        try:
-            # This endless loop runs until the 'Queue.Empty'
-            # exception is thrown. If more than one request is in
-            # the queue, this speeds up every request by 0.1 seconds,
-            # because get_input(..) function is not blocking.
-            while True:
-                msg = q.get(timeout=timeout)
-                self.handle(*msg)
-                q.task_done()
-                changed = True
-        except queue.Empty:
-            pass
-        return changed
+    def __call__(self, msg=NO_REPLY):
+        if not self.acked:
+            self.acked = True
+            if msg is NO_REPLY:
+                self.q.put(self.obj)
+            else:
+                self.q.put(msg)

-    def run(self):
-        self.should_exit.clear()
-        self.server.start_slave(Slave, Channel(self.masterq, self.should_exit))
-        while not self.should_exit.is_set():
-            # Don't choose a very small timeout in Python 2:
-            # https://github.com/mitmproxy/mitmproxy/issues/443
-            # TODO: Lower the timeout value if we move to Python 3.
-            self.tick(self.masterq, 0.1)
-        self.shutdown()
-
-    def handle(self, mtype, obj):
-        c = "handle_" + mtype
-        m = getattr(self, c, None)
-        if m:
-            m(obj)
-        else:
-            obj.reply()
-
-    def shutdown(self):
-        if not self.should_exit.is_set():
-            self.should_exit.set()
-        if self.server:
-            self.server.shutdown()
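
Taken together, the rewritten controller boils down to: subclass Master (or ServerMaster when proxy servers are involved), implement handle_<event> methods, and let run() drain event_queue via tick(). Note that tick() now resolves handle_<mtype> with a plain getattr(), so an event type without a handler raises AttributeError instead of being silently acked by the old default handle() path. A minimal, hedged sketch; GreetingMaster and the "greet" event are made up for illustration, and real mitmproxy events carry a reply attribute that handlers are expected to ack:

from mitmproxy import controller

class GreetingMaster(controller.Master):
    def handle_greet(self, msg):
        print("hello, %s" % msg)
        self.should_exit.set()      # make run() return after this event

m = GreetingMaster()
m.event_queue.put(("greet", "world"))
m.run()                             # start() -> tick(0.1) loop -> shutdown() in finally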

View File

@@ -343,15 +343,8 @@ class DumpMaster(flow.FlowMaster):
             self._process_flow(f)
         return f

-    def shutdown(self):  # pragma: no cover
-        return flow.FlowMaster.shutdown(self)
-
     def run(self):  # pragma: no cover
         if self.o.rfile and not self.o.keepserving:
             self.shutdown()
             return
-        try:
-            return super(DumpMaster, self).run()
-        except BaseException:
-            self.shutdown()
-            raise
+        super(DumpMaster, self).run()

View File

@@ -17,6 +17,13 @@ class ProxyException(Exception):
         super(ProxyException, self).__init__(message)


+class Kill(ProxyException):
+    """
+    Signal that both client and server connection(s) should be killed immediately.
+    """
+    pass
+
+
 class ProtocolException(ProxyException):
     pass
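
With the exception moved here, downstream code has a single canonical import path, and because Kill now derives from ProxyException it is also caught by generic ProxyException handlers. A small sketch of what that buys callers:

from mitmproxy.exceptions import Kill, ProxyException

assert issubclass(Kill, ProxyException)
try:
    raise Kill()
except ProxyException as e:         # broad handlers now see Kill as well
    print(type(e).__name__)         # -> "Kill"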

View File

@@ -8,11 +8,9 @@ from abc import abstractmethod, ABCMeta
 import hashlib
 import six
-from six.moves import http_cookies, http_cookiejar
+from six.moves import http_cookies, http_cookiejar, urllib
 import os
 import re
-import time
-from six.moves import urllib

 from netlib import wsgi
 from netlib.exceptions import HttpException
@@ -21,8 +19,8 @@ from . import controller, tnetstring, filt, script, version, flow_format_compat
 from .onboarding import app
 from .proxy.config import HostMatcher
 from .protocol.http_replay import RequestReplayThread
-from .protocol import Kill
-from .models import ClientConnection, ServerConnection, HTTPResponse, HTTPFlow, HTTPRequest
+from .exceptions import Kill
+from .models import ClientConnection, ServerConnection, HTTPFlow, HTTPRequest


 class AppRegistry:
@@ -630,10 +628,19 @@ class State(object):
         self.flows.kill_all(master)


-class FlowMaster(controller.Master):
+class FlowMaster(controller.ServerMaster):
+
+    @property
+    def server(self):
+        # At some point, we may want to have support for multiple servers.
+        # For now, this suffices.
+        if len(self.servers) > 0:
+            return self.servers[0]
+
     def __init__(self, server, state):
-        controller.Master.__init__(self, server)
+        super(FlowMaster, self).__init__()
+        if server:
+            self.add_server(server)
         self.state = state
         self.server_playback = None
         self.client_playback = None
@@ -695,7 +702,7 @@ class FlowMaster(controller.Master):
         except script.ScriptException as e:
             return traceback.format_exc(e)
         if use_reloader:
-            script.reloader.watch(s, lambda: self.masterq.put(("script_change", s)))
+            script.reloader.watch(s, lambda: self.event_queue.put(("script_change", s)))
         self.scripts.append(s)

     def _run_single_script_hook(self, script_obj, name, *args, **kwargs):
@@ -808,7 +815,7 @@ class FlowMaster(controller.Master):
                 return True
         return None

-    def tick(self, q, timeout):
+    def tick(self, timeout):
         if self.client_playback:
             stop = (
                 self.client_playback.done() and
@@ -833,7 +840,7 @@ class FlowMaster(controller.Master):
                 self.stop_server_playback()
                 if exit:
                     self.shutdown()
-        return super(FlowMaster, self).tick(q, timeout)
+        return super(FlowMaster, self).tick(timeout)

     def duplicate_flow(self, f):
         return self.load_flow(f.copy())
@@ -942,7 +949,7 @@ class FlowMaster(controller.Master):
            rt = RequestReplayThread(
                 self.server.config,
                 f,
-                self.masterq if run_scripthooks else False,
+                self.event_queue if run_scripthooks else False,
                 self.should_exit
            )
            rt.start()  # pragma: no cover
@@ -1066,7 +1073,6 @@ class FlowMaster(controller.Master):
             m.reply()

     def shutdown(self):
-        self.unload_scripts()
         super(FlowMaster, self).shutdown()

         # Add all flows that are still active
@@ -1076,6 +1082,8 @@ class FlowMaster(controller.Master):
                 self.stream.add(i)
             self.stop_stream()

+        self.unload_scripts()
+
     def start_stream(self, fp, filt):
         self.stream = FilteredFlowWriter(fp, filt)
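
FlowMaster now builds on ServerMaster: the proxy server is registered through add_server(), the old masterq becomes event_queue, and the server property keeps existing call sites working. A rough, hedged sketch of the resulting construction, using DummyServer as a stand-in for a real ProxyServer:

from mitmproxy import flow
from mitmproxy.proxy import DummyServer

fm = flow.FlowMaster(DummyServer(None), flow.State())
assert fm.server is fm.servers[0]   # convenience property added above
fm.tick(timeout=0)                  # drains fm.event_queue; no queue argument anymore
fm.shutdown()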

View File

@@ -4,6 +4,7 @@ import uuid

 from .. import stateobject, utils, version
 from .connections import ClientConnection, ServerConnection
+from ..exceptions import Kill


 class Error(stateobject.StateObject):
@@ -139,8 +140,6 @@ class Flow(stateobject.StateObject):
         """
         Kill this request.
         """
-        from ..protocol import Kill
         self.error = Error("Connection killed")
         self.intercepted = False
         self.reply(Kill)
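
Flow.kill() still answers an intercepted flow with the Kill class itself as a sentinel; what changes with this commit is that Channel.ask() converts that sentinel into a raised exception on the proxy side, so callers no longer compare return values. A small, hedged sketch of the round trip using only the classes touched by this commit (the Mock message stands in for a real flow):

from threading import Event, Thread
from six.moves import queue
from mock import Mock

from mitmproxy.controller import Channel
from mitmproxy.exceptions import Kill

q = queue.Queue()
channel = Channel(q, Event())

def master_side():
    mtype, msg = q.get()
    msg.reply(Kill)                 # what Flow.kill() ultimately does

Thread(target=master_side).start()
try:
    channel.ask("request", Mock())  # blocks until the reply, then raises
except Kill:
    print("connections for this flow get torn down")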

View File

@@ -26,7 +26,7 @@ as late as possible; this makes server replay without any outgoing connections p
 """
 from __future__ import (absolute_import, print_function, division)

-from .base import Layer, ServerConnectionMixin, Kill
+from .base import Layer, ServerConnectionMixin
 from .tls import TlsLayer
 from .tls import is_tls_record_magic
 from .tls import TlsClientHello
@@ -36,7 +36,7 @@ from .http2 import Http2Layer
 from .rawtcp import RawTCPLayer

 __all__ = [
-    "Layer", "ServerConnectionMixin", "Kill",
+    "Layer", "ServerConnectionMixin",
     "TlsLayer", "is_tls_record_magic", "TlsClientHello",
     "UpstreamConnectLayer",
     "Http1Layer",

View File

@@ -189,10 +189,3 @@ class ServerConnectionMixin(object):
                 ),
                 sys.exc_info()[2]
             )
-
-
-class Kill(Exception):
-    """
-    Signal that both client and server connection(s) should be killed immediately.
-    """

View File

@@ -21,7 +21,7 @@ from ..models import (
     expect_continue_response
 )

-from .base import Layer, Kill
+from .base import Layer


 class _HttpTransmissionLayer(Layer):
@@ -194,13 +194,9 @@ class HttpLayer(Layer):
                     # response was set by an inline script.
                     # we now need to emulate the responseheaders hook.
                     flow = self.channel.ask("responseheaders", flow)
-                    if flow == Kill:
-                        raise Kill()

                 self.log("response", "debug", [repr(flow.response)])
                 flow = self.channel.ask("response", flow)
-                if flow == Kill:
-                    raise Kill()
                 self.send_response_to_client(flow)

                 if self.check_close_connection(flow):
@@ -315,8 +311,6 @@ class HttpLayer(Layer):
         # call the appropriate script hook - this is an opportunity for an
         # inline script to set flow.stream = True
         flow = self.channel.ask("responseheaders", flow)
-        if flow == Kill:
-            raise Kill()

         if flow.response.stream:
             flow.response.data.content = None
@@ -352,8 +346,6 @@ class HttpLayer(Layer):
             flow.request.scheme = "https" if self.__initial_server_tls else "http"

         request_reply = self.channel.ask("request", flow)
-        if request_reply == Kill:
-            raise Kill()
         if isinstance(request_reply, HTTPResponse):
             flow.response = request_reply
             return

View File

@@ -7,8 +7,7 @@ from netlib.http import http1

 from ..controller import Channel
 from ..models import Error, HTTPResponse, ServerConnection, make_connect_request
-from .base import Kill
+from ..exceptions import Kill


 # TODO: Doesn't really belong into mitmproxy.protocol...
@@ -16,14 +15,14 @@ from .base import Kill
 class RequestReplayThread(threading.Thread):
     name = "RequestReplayThread"

-    def __init__(self, config, flow, masterq, should_exit):
+    def __init__(self, config, flow, event_queue, should_exit):
         """
-            masterqueue can be a queue or None, if no scripthooks should be
+            event_queue can be a queue or None, if no scripthooks should be
             processed.
         """
         self.config, self.flow = config, flow
-        if masterq:
-            self.channel = Channel(masterq, should_exit)
+        if event_queue:
+            self.channel = Channel(event_queue, should_exit)
         else:
             self.channel = None
         super(RequestReplayThread, self).__init__()
@@ -37,9 +36,7 @@ class RequestReplayThread(threading.Thread):
             # If we have a channel, run script hooks.
             if self.channel:
                 request_reply = self.channel.ask("request", self.flow)
-                if request_reply == Kill:
-                    raise Kill()
-                elif isinstance(request_reply, HTTPResponse):
+                if isinstance(request_reply, HTTPResponse):
                     self.flow.response = request_reply

             if not self.flow.response:

View File

@@ -8,8 +8,7 @@ import six
 from netlib import tcp
 from netlib.exceptions import TcpException
 from netlib.http.http1 import assemble_response
-from ..exceptions import ProtocolException, ServerException, ClientHandshakeException
-from ..protocol import Kill
+from ..exceptions import ProtocolException, ServerException, ClientHandshakeException, Kill
 from ..models import ClientConnection, make_error_response
 from .modes import HttpUpstreamProxy, HttpProxy, ReverseProxy, TransparentProxy, Socks5Proxy
 from .root_context import RootContext, Log
@@ -21,7 +20,10 @@ class DummyServer:
     def __init__(self, config):
         self.config = config

-    def start_slave(self, *args):
+    def set_channel(self, channel):
+        pass
+
+    def serve_forever(self):
         pass

     def shutdown(self):
@@ -47,10 +49,6 @@ class ProxyServer(tcp.TCPServer):
             )
         self.channel = None

-    def start_slave(self, klass, channel):
-        slave = klass(channel, self)
-        slave.start()
-
     def set_channel(self, channel):
         self.channel = channel
@@ -112,12 +110,9 @@ class ConnectionHandler(object):
         self.log("clientconnect", "info")

         root_layer = self._create_root_layer()
-        root_layer = self.channel.ask("clientconnect", root_layer)
-        if root_layer == Kill:
-            def root_layer():
-                raise Kill()

         try:
+            root_layer = self.channel.ask("clientconnect", root_layer)
             root_layer()
         except Kill:
             self.log("Connection killed", "info")

View File

@@ -173,20 +173,15 @@ class WebMaster(flow.FlowMaster):
         if self.options.app:
             self.start_app(self.options.app_host, self.options.app_port)

-    def tick(self):
-        flow.FlowMaster.tick(self, self.masterq, timeout=0)
-
     def run(self):  # pragma: no cover
-        self.server.start_slave(
-            controller.Slave,
-            controller.Channel(self.masterq, self.should_exit)
-        )
         iol = tornado.ioloop.IOLoop.instance()
         http_server = tornado.httpserver.HTTPServer(self.app)
         http_server.listen(self.options.wport)
-        tornado.ioloop.PeriodicCallback(self.tick, 5).start()
+        iol.add_callback(self.start)
+        tornado.ioloop.PeriodicCallback(lambda: self.tick(timeout=0), 5).start()
         try:
             iol.start()
         except (Stop, KeyboardInterrupt):
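
The web master no longer needs its own tick() override: start() is scheduled on the IOLoop and a periodic callback drains the event queue. A hedged sketch of the same wiring for any ServerMaster (tornado assumed available, as in the file above; PeriodicCallback's interval is in milliseconds):

import tornado.ioloop

def run_on_ioloop(master):
    iol = tornado.ioloop.IOLoop.instance()
    iol.add_callback(master.start)  # start server threads once the loop is running
    tornado.ioloop.PeriodicCallback(lambda: master.tick(timeout=0), 5).start()
    try:
        iol.start()
    finally:
        master.shutdown()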

View File

@@ -1,11 +1,105 @@
-import mock
-from mitmproxy import controller
+from threading import Thread, Event
+from mock import Mock
+
+from mitmproxy.controller import Reply, DummyReply, Channel, ServerThread, ServerMaster, Master
+from six.moves import queue
+from mitmproxy.exceptions import Kill
+from mitmproxy.proxy import DummyServer
+from netlib.tutils import raises


-class TestMaster:
-    def test_default_handler(self):
-        m = controller.Master(None)
-        msg = mock.MagicMock()
-        m.handle("type", msg)
-        assert msg.reply.call_count == 1
+class TestMaster(object):
+    def test_simple(self):
+        class DummyMaster(Master):
+            def handle_panic(self, _):
+                m.should_exit.set()
+
+            def tick(self, timeout):
+                # Speed up test
+                super(DummyMaster, self).tick(0)
+
+        m = DummyMaster()
+        assert not m.should_exit.is_set()
+        m.event_queue.put(("panic", 42))
+        m.run()
+        assert m.should_exit.is_set()
+
+
+class TestServerMaster(object):
+    def test_simple(self):
+        m = ServerMaster()
+        s = DummyServer(None)
+        m.add_server(s)
+        m.start()
+        m.shutdown()
+        m.start()
+        m.shutdown()
+
+
+class TestServerThread(object):
+    def test_simple(self):
+        m = Mock()
+        t = ServerThread(m)
+        t.run()
+        assert m.serve_forever.called
+
+
+class TestChannel(object):
+    def test_tell(self):
+        q = queue.Queue()
+        channel = Channel(q, Event())
+        m = Mock()
+        channel.tell("test", m)
+        assert q.get() == ("test", m)
+        assert m.reply
+
+    def test_ask_simple(self):
+        q = queue.Queue()
+
+        def reply():
+            m, obj = q.get()
+            assert m == "test"
+            obj.reply(42)
+
+        Thread(target=reply).start()
+
+        channel = Channel(q, Event())
+        assert channel.ask("test", Mock()) == 42
+
+    def test_ask_shutdown(self):
+        q = queue.Queue()
+        done = Event()
+        done.set()
+        channel = Channel(q, done)
+        with raises(Kill):
+            channel.ask("test", Mock())
+
+
+class TestDummyReply(object):
+    def test_simple(self):
+        reply = DummyReply()
+        assert not reply.acked
+        reply()
+        assert reply.acked
+
+
+class TestReply(object):
+    def test_simple(self):
+        reply = Reply(42)
+        assert not reply.acked
+        reply("foo")
+        assert reply.acked
+        assert reply.q.get() == "foo"
+
+    def test_default(self):
+        reply = Reply(42)
+        reply()
+        assert reply.q.get() == 42
+
+    def test_reply_none(self):
+        reply = Reply(42)
+        reply(None)
+        assert reply.q.get() is None

View File

@@ -116,9 +116,8 @@ class TestClientPlaybackState:
         c.clear(c.current)
         assert c.done()

-        q = queue.Queue()
         fm.state.clear()
-        fm.tick(q, timeout=0)
+        fm.tick(timeout=0)

         fm.stop_client_playback()
         assert not fm.client_playback
@@ -858,9 +857,8 @@ class TestFlowMaster:
         assert not fm.start_client_playback(pb, False)
         fm.client_playback.testing = True

-        q = queue.Queue()
         assert not fm.state.flow_count()
-        fm.tick(q, 0)
+        fm.tick(0)
         assert fm.state.flow_count()

         f.error = Error("error")
@@ -904,8 +902,7 @@ class TestFlowMaster:
         assert not fm.do_server_playback(r)
         assert fm.do_server_playback(tutils.tflow())

-        q = queue.Queue()
-        fm.tick(q, 0)
+        fm.tick(0)
         assert fm.should_exit.is_set()

         fm.stop_server_playback()

View File

@@ -175,7 +175,7 @@ class TestDummyServer:
     def test_simple(self):
         d = DummyServer(None)
-        d.start_slave()
+        d.set_channel(None)
         d.shutdown()

View File

@@ -13,7 +13,7 @@ from netlib.tutils import raises
 from pathod import pathoc, pathod

 from mitmproxy.proxy.config import HostMatcher
-from mitmproxy.protocol import Kill
+from mitmproxy.exceptions import Kill
 from mitmproxy.models import Error, HTTPResponse

 from . import tutils, tservers
@@ -126,7 +126,7 @@ class TcpMixin:
         i2 = self.pathod("306")
         self._ignore_off()

-        self.master.masterq.join()
+        self.master.event_queue.join()

         assert n.status_code == 304
         assert i.status_code == 305
@@ -172,7 +172,7 @@ class TcpMixin:
         i2 = self.pathod("306")
         self._tcpproxy_off()

-        self.master.masterq.join()
+        self.master.event_queue.join()

         assert n.status_code == 304
         assert i.status_code == 305