mitmproxy/libmproxy/controller.py

from __future__ import absolute_import
import Queue
import threading


class DummyReply:
    """
    A reply object that does nothing. Useful when we need an object to seem
    like it has a channel, and during testing.
    """
    def __init__(self):
        self.acked = False

    def __call__(self, msg=False):
        self.acked = True


class Reply:
    """
    Messages sent through a channel are decorated with a "reply" attribute.
    This object is used to respond to the message through the return
    channel.
    """
    def __init__(self, obj):
        self.obj = obj
        self.q = Queue.Queue()
        self.acked = False

    def __call__(self, msg=None):
        if not self.acked:
            self.acked = True
            if msg is None:
                self.q.put(self.obj)
            else:
                self.q.put(msg)
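
# A minimal sketch (not from the original module) of the reply handshake,
# using a hypothetical message object:
#
#     msg = SomeMessage()            # hypothetical message type
#     msg.reply = Reply(msg)
#     # ... master-side code eventually acknowledges:
#     msg.reply("result")
#     msg.reply.q.get()              # -> "result"
#
# Calling msg.reply() with no argument puts the original message object back
# on the queue instead, which is how Master.handle acknowledges messages that
# have no matching handler.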


class Channel:
    def __init__(self, q, should_exit):
        self.q = q
        self.should_exit = should_exit

    def ask(self, mtype, m):
        """
        Decorate a message with a reply attribute, and send it to the
        master, then wait for a response.
        """
        m.reply = Reply(m)
        self.q.put((mtype, m))
        while not self.should_exit.is_set():
            try:
                # The timeout is here so we can handle a should_exit event.
                g = m.reply.q.get(timeout=0.5)
            except Queue.Empty:  # pragma: no cover
                continue
            return g

    def tell(self, mtype, m):
        """
        Decorate a message with a dummy reply attribute, send it to the
        master, then return immediately.
        """
        m.reply = DummyReply()
        self.q.put((mtype, m))
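
# A rough usage sketch (assumed, not part of the original file) of the two
# slave-side calls; "flow" and "entry" stand in for whatever message objects
# the master's handlers expect:
#
#     channel = Channel(masterq, should_exit)
#     verdict = channel.ask("request", flow)   # blocks until a handler replies
#     channel.tell("log", entry)               # returns immediately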


class Slave(threading.Thread):
    """
    Slaves get a channel end-point through which they can send messages to
    the master.
    """
    def __init__(self, channel, server):
        self.channel, self.server = channel, server
        self.server.set_channel(channel)
        threading.Thread.__init__(self)
        self.name = "SlaveThread (%s:%s)" % (
            self.server.address.host, self.server.address.port)

    def run(self):
        self.server.serve_forever()
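
# The server passed to Slave and Master is assumed, from the calls made in
# this file, to expose roughly the following interface; this is a sketch, not
# the actual proxy server class:
#
#     class HypotheticalServer(object):
#         address = ...                        # object with .host and .port
#         def set_channel(self, channel): ...
#         def start_slave(self, klass, channel): ...
#         def serve_forever(self): ...
#         def shutdown(self): ...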


class Master(object):
    """
    Masters get and respond to messages from slaves.
    """
    def __init__(self, server):
        """
        server may be None if no server is needed.
        """
        self.server = server
        self.masterq = Queue.Queue()
        self.should_exit = threading.Event()

    def tick(self, q, timeout):
        changed = False
        try:
            # This loop runs until the Queue.Empty exception is thrown.
            # Draining every message already in the queue during a single
            # tick means queued messages don't each wait out the timeout.
            while True:
                msg = q.get(timeout=timeout)
                self.handle(*msg)
                q.task_done()
                changed = True
        except Queue.Empty:
            pass
        return changed
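
    # A minimal sketch (assumption, not part of the original module) of
    # driving tick() directly, e.g. from a test or an external event loop:
    #
    #     m = Master(None)
    #     channel = Channel(m.masterq, m.should_exit)
    #     channel.tell("log", entry)       # "entry" is a hypothetical message
    #     m.tick(m.masterq, timeout=0.01)  # True if anything was handled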

    def run(self):
        self.should_exit.clear()
        self.server.start_slave(Slave, Channel(self.masterq, self.should_exit))
        while not self.should_exit.is_set():
            # Don't choose a very small timeout in Python 2:
            # https://github.com/mitmproxy/mitmproxy/issues/443
            # TODO: Lower the timeout value if we move to Python 3.
            self.tick(self.masterq, 0.1)
        self.shutdown()

    def handle(self, mtype, obj):
        c = "handle_" + mtype
        m = getattr(self, c, None)
        if m:
            m(obj)
        else:
            obj.reply()

    def shutdown(self):
        if not self.should_exit.is_set():
            self.should_exit.set()
            if self.server:
                self.server.shutdown()
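
# A rough sketch (assumed names, not part of the original file) of how
# handle() dispatches on message type: a Master subclass provides
# handle_<mtype> methods, and message types without a handler are simply
# acknowledged via obj.reply().
#
#     class HypotheticalMaster(Master):
#         def handle_request(self, msg):
#             # inspect or modify msg here, then unblock the asking slave
#             msg.reply()
#
#     # A slave-side channel.ask("request", msg) then blocks until
#     # handle_request calls msg.reply(...).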