"""
A simple proxy server implementation, which always reads all of a server
response into memory, performs some transformation, and then writes it back
to the client.

Development started from Neil Schemenauer's munchy.py
"""
|
2011-03-10 22:56:10 +00:00
|
|
|
import sys, os, string, socket, urlparse, re, select, copy, base64, time, Cookie
|
2011-03-09 07:05:30 +00:00
|
|
|
from email.utils import parsedate_tz, formatdate, mktime_tz
|
2011-02-20 00:29:41 +00:00
|
|
|
import shutil, tempfile
|
2011-02-19 23:53:42 +00:00
|
|
|
import optparse, SocketServer, ssl
|
2011-07-16 09:47:06 +00:00
|
|
|
import utils, controller, encoding
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
NAME = "mitmproxy"
|
|
|
|
|
|
|
|
|
|
|
|
class ProxyError(Exception):
    """
    An HTTP-level proxying failure, carrying a numeric HTTP status code
    and a human-readable message.
    """
    def __init__(self, code, msg):
        # Keep both pieces so handlers can build a proper error response.
        self.code = code
        self.msg = msg

    def __str__(self):
        return "ProxyError(%s, %s)" % (self.code, self.msg)
|
|
|
|
|
|
|
|
|
2011-03-18 03:45:31 +00:00
|
|
|
class SSLConfig:
    """
    Bundle of SSL interception settings: our certificate material, the
    cipher list to offer, and how long to pause after generating a
    dummy certificate.
    """
    def __init__(self, certfile = None, ciphers = None, cacert = None, cert_wait_time=None):
        self.certfile, self.ciphers = certfile, ciphers
        self.cacert = cacert
        # Directory for generated per-host certs; populated later.
        self.certdir = None
        self.cert_wait_time = cert_wait_time
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
|
2010-11-12 15:01:17 +00:00
|
|
|
def read_chunked(fp):
    """
    Read an HTTP message body encoded with chunked transfer-encoding from
    the file-like object fp and return it as a single string.

    Raises IOError if the connection closes prematurely or the chunked
    framing is malformed.
    """
    content = ""
    while 1:
        line = fp.readline()
        if line == "":
            raise IOError("Connection closed")
        if line == '\r\n' or line == '\n':
            continue
        # The chunk-size may be followed by a chunk-extension after a
        # semicolon (RFC 2616, 3.6.1) - discard it. A non-hex size is a
        # framing error, not a programming error, so report it as IOError.
        try:
            length = int(line.split(";")[0], 16)
        except ValueError:
            raise IOError("Invalid chunked encoding length: %s"%line)
        if not length:
            break
        content += fp.read(length)
        line = fp.readline()
        if line != '\r\n':
            raise IOError("Malformed chunked body")
    # Consume optional trailers, up to the terminating blank line.
    while 1:
        line = fp.readline()
        if line == "":
            raise IOError("Connection closed")
        if line == '\r\n' or line == '\n':
            break
    return content
|
2011-07-16 09:47:06 +00:00
|
|
|
|
2010-11-12 15:01:17 +00:00
|
|
|
|
|
|
|
def read_http_body(rfile, connection, headers, all):
    """
    Read a message body from rfile, honouring the Transfer-Encoding and
    Content-Length headers. When neither is present and ``all`` is true,
    read to EOF and mark the connection for closing; otherwise assume an
    empty body.
    """
    if 'transfer-encoding' in headers:
        # Chunked is the only transfer-encoding we know how to read.
        if ",".join(headers["transfer-encoding"]) != "chunked":
            raise IOError('Invalid transfer-encoding')
        return read_chunked(rfile)
    if "content-length" in headers:
        return rfile.read(int(headers["content-length"][0]))
    if all:
        body = rfile.read()
        connection.close = True
        return body
    return ""
|
|
|
|
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
def parse_url(url):
    """
    Returns a (scheme, host, port, path) tuple, or None on error.
    """
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    if not scheme:
        return None
    if ':' in netloc:
        # Split on the LAST colon, so a netloc with multiple colons can't
        # blow up the unpacking. A non-numeric port is an error, and per
        # our contract errors yield None rather than an exception.
        host, port = netloc.rsplit(':', 1)
        try:
            port = int(port)
        except ValueError:
            return None
    else:
        host = netloc
        if scheme == "https":
            port = 443
        else:
            port = 80
    path = urlparse.urlunparse(('', '', path, params, query, fragment))
    if not path.startswith("/"):
        path = "/" + path
    return scheme, host, port, path
|
|
|
|
|
|
|
|
|
2010-11-12 15:01:17 +00:00
|
|
|
def parse_request_line(request):
    """
    Parse a proxy request line. Return (method, scheme, host, port, path, minor).
    Raise ProxyError on error.
    """
    try:
        method, url, protocol = request.split()
    except ValueError:
        raise ProxyError(400, "Can't parse request")
    if method == 'CONNECT':
        scheme = None
        path = None
        # A CONNECT target is a bare host:port. A missing or non-numeric
        # port is a client error (400), not an internal ValueError.
        try:
            host, port = url.split(":")
            port = int(port)
        except ValueError:
            raise ProxyError(400, "Can't parse request")
    else:
        if url.startswith("/") or url == "*":
            # Origin-form request: no absolute URL to pull host info from.
            scheme, port, host, path = None, None, None, url
        else:
            parts = parse_url(url)
            if not parts:
                raise ProxyError(400, "Invalid url: %s"%url)
            scheme, host, port, path = parts
    if not protocol.startswith("HTTP/"):
        raise ProxyError(400, "Unsupported protocol")
    # A garbled version string ("HTTP/x.y") is likewise a 400, not a crash.
    try:
        major, minor = protocol.split('/')[1].split('.')
        major = int(major)
        minor = int(minor)
    except ValueError:
        raise ProxyError(400, "Unsupported protocol")
    if major != 1:
        raise ProxyError(400, "Unsupported protocol")
    return method, scheme, host, port, path, minor
|
2011-07-31 22:41:12 +00:00
|
|
|
|
|
|
|
|
|
|
|
class HTTPMsg(controller.Msg):
    """
    Common base for Request and Response: content-encoding helpers
    shared by both message types.
    """
    def decode(self):
        """
        Alters Response object, decoding its content based on the current
        Content-Encoding header and changing Content-Encoding header to
        'identity'.
        """
        ce = self.headers["content-encoding"] or ["identity"]
        self.content = encoding.decode(ce[0], self.content)
        self.headers["content-encoding"] = ["identity"]

    def encode(self, e):
        """
        Alters Response object, encoding its content with the specified
        coding. This method should only be called on Responses with
        Content-Encoding headers of 'identity'.
        """
        self.content = encoding.encode(e, self.content)
        self.headers["content-encoding"] = [e]
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
|
2011-07-31 22:41:12 +00:00
|
|
|
class Request(HTTPMsg):
    """
    An HTTP request: the parsed request line, headers and body, plus the
    client connection it arrived on. Convertible to and from a plain
    state dict via get_state()/from_state() for saving and replay.
    """
    # Wire format for a direct server connection, and the variant used
    # when forwarding through an upstream proxy (absolute URL form).
    FMT = '%s %s HTTP/1.1\r\n%s\r\n%s'
    FMT_PROXY = '%s %s://%s:%s%s HTTP/1.1\r\n%s\r\n%s'
    def __init__(self, client_conn, host, port, scheme, method, path, headers, content, timestamp=None):
        self.client_conn = client_conn
        self.host, self.port, self.scheme = host, port, scheme
        self.method, self.path, self.headers, self.content = method, path, headers, content
        self.timestamp = timestamp or utils.timestamp()
        # Set when the connection should close after this request.
        self.close = False
        controller.Msg.__init__(self)

        # Have this request's cookies been modified by sticky cookies or auth?
        self.stickycookie = False
        self.stickyauth = False

    def anticache(self):
        """
        Modifies this request to remove headers that might produce a cached
        response. That is, we remove ETags and If-Modified-Since headers.
        """
        delheaders = [
            "if-modified-since",
            "if-none-match",
        ]
        for i in delheaders:
            del self.headers[i]

    def anticomp(self):
        """
        Modifies this request to remove headers that will compress the
        resource's data.
        """
        self.headers["accept-encoding"] = ["identity"]

    def constrain_encoding(self):
        """
        Limits the permissible Accept-Encoding values, based on what we can
        decode appropriately.
        """
        if self.headers["accept-encoding"]:
            self.headers["accept-encoding"] = [', '.join([
                e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0]
            ])]

    def set_replay(self):
        # Replayed requests are marked by having no client connection.
        self.client_conn = None

    def is_replay(self):
        # See set_replay(): no client connection means this is a replay.
        if self.client_conn:
            return False
        else:
            return True

    def load_state(self, state):
        # Restore in place from a state dict produced by get_state().
        if state["client_conn"]:
            if self.client_conn:
                self.client_conn.load_state(state["client_conn"])
            else:
                self.client_conn = ClientConnect.from_state(state["client_conn"])
        else:
            self.client_conn = None
        self.host = state["host"]
        self.port = state["port"]
        self.scheme = state["scheme"]
        self.method = state["method"]
        self.path = state["path"]
        self.headers = utils.Headers.from_state(state["headers"])
        self.content = base64.decodestring(state["content"])
        self.timestamp = state["timestamp"]

    def get_state(self):
        # Flat dict representation; content is base64-wrapped so the state
        # is safe to serialise as text.
        return dict(
            client_conn = self.client_conn.get_state() if self.client_conn else None,
            host = self.host,
            port = self.port,
            scheme = self.scheme,
            method = self.method,
            path = self.path,
            headers = self.headers.get_state(),
            content = base64.encodestring(self.content),
            timestamp = self.timestamp,
        )

    @classmethod
    def from_state(klass, state):
        # Inverse of get_state(). The str() calls normalise values that may
        # have come back as unicode from serialised state.
        return klass(
            ClientConnect.from_state(state["client_conn"]),
            str(state["host"]),
            state["port"],
            str(state["scheme"]),
            str(state["method"]),
            str(state["path"]),
            utils.Headers.from_state(state["headers"]),
            base64.decodestring(state["content"]),
            state["timestamp"]
        )

    def __hash__(self):
        # Identity hash: requests are mutable, so a state-based hash would
        # be unstable.
        return id(self)

    def __eq__(self, other):
        return self.get_state() == other.get_state()

    def copy(self):
        # Shallow copy, but with an independent headers object so edits to
        # the copy don't leak back into the original.
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def hostport(self):
        # Omit the port when it is the scheme's default.
        if (self.port, self.scheme) in [(80, "http"), (443, "https")]:
            host = self.host
        else:
            host = "%s:%s"%(self.host, self.port)
        return host

    def url(self):
        return "%s://%s%s"%(self.scheme, self.hostport(), self.path)

    def set_url(self, url):
        # Returns False (leaving the request unchanged) if url is invalid.
        parts = parse_url(url)
        if not parts:
            return False
        self.scheme, self.host, self.port, self.path = parts
        return True

    def is_response(self):
        return False

    def assemble(self, _proxy = False):
        """
        Assembles the request for transmission to the server. We make some
        modifications to make sure interception works properly.
        """
        headers = self.headers.copy()
        # Strip hop-by-hop and framing headers; the message is re-framed
        # below based on the (possibly modified) content.
        utils.try_del(headers, 'proxy-connection')
        utils.try_del(headers, 'keep-alive')
        utils.try_del(headers, 'connection')
        utils.try_del(headers, 'content-length')
        utils.try_del(headers, 'transfer-encoding')
        if not 'host' in headers:
            headers["host"] = [self.hostport()]
        content = self.content
        if content is not None:
            headers["content-length"] = [str(len(content))]
        else:
            content = ""
        if self.close:
            headers["connection"] = ["close"]
        if not _proxy:
            return self.FMT % (self.method, self.path, str(headers), content)
        else:
            return self.FMT_PROXY % (self.method, self.scheme, self.host, self.port, self.path, str(headers), content)

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in both the headers
        and the body of the request. Returns the number of replacements
        made.
        """
        self.content, c = re.subn(pattern, repl, self.content, *args, **kwargs)
        self.path, pc = re.subn(pattern, repl, self.path, *args, **kwargs)
        c += pc
        c += self.headers.replace(pattern, repl, *args, **kwargs)
        return c
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
|
2011-07-31 22:41:12 +00:00
|
|
|
class Response(HTTPMsg):
    """
    An HTTP response, tied to the Request that triggered it. Convertible
    to and from a plain state dict via get_state()/from_state().
    """
    # Wire format: status line, headers, body.
    FMT = '%s\r\n%s\r\n%s'
    def __init__(self, request, code, msg, headers, content, timestamp=None):
        self.request = request
        self.code, self.msg = code, msg
        self.headers, self.content = headers, content
        self.timestamp = timestamp or utils.timestamp()
        controller.Msg.__init__(self)
        # True when this response was produced by replay rather than read
        # from a server.
        self.replay = False

    def _refresh_cookie(self, c, delta):
        """
        Takes a cookie string c and a time delta in seconds, and returns
        a refreshed cookie string.
        """
        c = Cookie.SimpleCookie(str(c))
        for i in c.values():
            if "expires" in i:
                d = parsedate_tz(i["expires"])
                if d:
                    d = mktime_tz(d) + delta
                    i["expires"] = formatdate(d)
                else:
                    # This can happen when the expires tag is invalid.
                    # reddit.com sends an expires tag like this: "Thu, 31 Dec
                    # 2037 23:59:59 GMT", which is valid RFC 1123, but not
                    # strictly correct according to the cookie spec. Browsers
                    # appear to parse this tolerantly - maybe we should too.
                    # For now, we just ignore this.
                    del i["expires"]
        return c.output(header="").strip()

    def refresh(self, now=None):
        """
        This fairly complex and heuristic function refreshes a server
        response for replay.

            - It adjusts date, expires and last-modified headers.
            - It adjusts cookie expiration.
        """
        if not now:
            now = time.time()
        delta = now - self.timestamp
        refresh_headers = [
            "date",
            "expires",
            "last-modified",
        ]
        for i in refresh_headers:
            if i in self.headers:
                d = parsedate_tz(self.headers[i][0])
                # Unparseable dates are silently left as-is.
                if d:
                    new = mktime_tz(d) + delta
                    self.headers[i] = [formatdate(new)]
        c = []
        for i in self.headers["set-cookie"]:
            c.append(self._refresh_cookie(i, delta))
        if c:
            self.headers["set-cookie"] = c

    def set_replay(self):
        self.replay = True

    def is_replay(self):
        return self.replay

    def load_state(self, state):
        # Restore in place from a state dict produced by get_state().
        self.code = state["code"]
        self.msg = state["msg"]
        self.headers = utils.Headers.from_state(state["headers"])
        self.content = base64.decodestring(state["content"])
        self.timestamp = state["timestamp"]

    def get_state(self):
        # Content is base64-wrapped so the state is safe to serialise as
        # text. The originating request is deliberately not included.
        return dict(
            code = self.code,
            msg = self.msg,
            headers = self.headers.get_state(),
            timestamp = self.timestamp,
            content = base64.encodestring(self.content)
        )

    @classmethod
    def from_state(klass, request, state):
        # Inverse of get_state(); the request must be supplied separately.
        return klass(
            request,
            state["code"],
            str(state["msg"]),
            utils.Headers.from_state(state["headers"]),
            base64.decodestring(state["content"]),
            state["timestamp"],
        )

    def __eq__(self, other):
        return self.get_state() == other.get_state()

    def copy(self):
        # Shallow copy, but with an independent headers object so edits to
        # the copy don't leak back into the original.
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def is_response(self):
        return True

    def assemble(self):
        """
        Assembles the response for transmission to the client. We make some
        modifications to make sure interception works properly.
        """
        headers = self.headers.copy()
        # Strip hop-by-hop and framing headers; the message is re-framed
        # below based on the (possibly modified) content.
        utils.try_del(headers, 'proxy-connection')
        utils.try_del(headers, 'connection')
        utils.try_del(headers, 'keep-alive')
        utils.try_del(headers, 'transfer-encoding')
        content = self.content
        if content is not None:
            headers["content-length"] = [str(len(content))]
        else:
            content = ""
        if self.request.client_conn.close:
            headers["connection"] = ["close"]
        proto = "HTTP/1.1 %s %s"%(self.code, str(self.msg))
        data = (proto, str(headers), content)
        return self.FMT%data

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in both the headers
        and the body of the response. Returns the number of replacements
        made.
        """
        self.content, c = re.subn(pattern, repl, self.content, *args, **kwargs)
        c += self.headers.replace(pattern, repl, *args, **kwargs)
        return c
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
|
2011-02-19 04:00:24 +00:00
|
|
|
class ClientDisconnect(controller.Msg):
    """
    Flow event sent through the master queue when a client connection
    closes.
    """
    def __init__(self, client_conn):
        controller.Msg.__init__(self)
        # The ClientConnect this disconnection corresponds to.
        self.client_conn = client_conn
|
|
|
|
|
|
|
|
|
|
|
|
class ClientConnect(controller.Msg):
    """
    Flow event representing a single client connection to the proxy.
    Tracks per-connection bookkeeping: whether it should close, how many
    requests it has carried, and any connection-level error.
    """
    def __init__(self, address):
        """
        address is an (address, port) tuple, or None if this connection has
        been replayed from within mitmproxy.
        """
        self.address = address
        self.close, self.requestcount = False, 0
        self.connection_error = None
        controller.Msg.__init__(self)

    def __eq__(self, other):
        return self.get_state() == other.get_state()

    def load_state(self, state):
        # The serialised state is just the address.
        self.address = state

    def get_state(self):
        if self.address:
            return list(self.address)
        return None

    @classmethod
    def from_state(klass, state):
        return klass(state) if state else None

    def copy(self):
        return copy.copy(self)
|
|
|
|
|
|
|
|
|
|
|
|
class Error(controller.Msg):
    """
    Flow event recording an error associated with a request.
    """
    def __init__(self, request, msg, timestamp=None):
        self.request, self.msg = request, msg
        self.timestamp = timestamp or utils.timestamp()
        controller.Msg.__init__(self)

    def load_state(self, state):
        # Restore message and timestamp in place; the request reference is
        # left untouched.
        self.msg = state["msg"]
        self.timestamp = state["timestamp"]

    def copy(self):
        return copy.copy(self)

    def get_state(self):
        return dict(
            msg = self.msg,
            timestamp = self.timestamp,
        )

    @classmethod
    def from_state(klass, state):
        # The originating request is not serialised, so it comes back None.
        return klass(None, state["msg"], state["timestamp"])

    def __eq__(self, other):
        return self.get_state() == other.get_state()

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in the error
        message. Returns the number of replacements made.
        """
        self.msg, c = re.subn(pattern, repl, self.msg, *args, **kwargs)
        return c
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
class FileLike:
    """
    Wraps a raw (typically socket-backed) object, giving it a
    best-effort file interface: read() loops over short reads until the
    requested amount arrives, readline() is built on read(), and flush()
    is a no-op. All other attributes delegate to the wrapped object.
    """
    def __init__(self, o):
        self.o = o

    def __getattr__(self, attr):
        # Delegate anything we don't implement to the wrapped object.
        return getattr(self.o, attr)

    def flush(self):
        pass

    def read(self, length):
        """
        Read exactly ``length`` characters, retrying on short reads.
        Returns fewer only on EOF (or if the underlying object loses its
        read method mid-stream).
        """
        result = ''
        while len(result) < length:
            try:
                # Only request what is still missing. Previously this asked
                # for the full ``length`` on every iteration, so after a
                # short read the method could return MORE data than the
                # caller asked for.
                data = self.o.read(length - len(result))
            except AttributeError:
                break
            if not data:
                break
            result += data
        return result

    def readline(self):
        result = ''
        while True:
            ch = self.read(1)
            if not ch:
                break
            else:
                result += ch
                if ch == '\n':
                    break
        return result
|
|
|
|
|
|
|
|
|
2011-02-02 23:16:03 +00:00
|
|
|
#begin nocover
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
class ServerConnection:
    """
    An outgoing connection to the origin server for a request, carrying
    its own socket plus buffered read/write file objects. Socket-level
    failures are surfaced as ProxyError 502/504.
    """
    def __init__(self, request):
        self.host = request.host
        self.port = request.port
        self.scheme = request.scheme
        self.close = False
        self.server, self.rfile, self.wfile = None, None, None
        self.connect()

    def connect(self):
        # Resolve and connect, wrapping the socket in SSL for https.
        try:
            addr = socket.gethostbyname(self.host)
            server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if self.scheme == "https":
                server = ssl.wrap_socket(server)
            server.connect((addr, self.port))
        except socket.error, err:
            raise ProxyError(504, 'Error connecting to "%s": %s' % (self.host, err))
        self.server = server
        self.rfile, self.wfile = server.makefile('rb'), server.makefile('wb')

    def send_request(self, request):
        # Serialise the request and push it out; socket errors become 504s.
        self.request = request
        request.close = self.close
        try:
            self.wfile.write(request.assemble())
            self.wfile.flush()
        except socket.error, err:
            raise ProxyError(504, 'Error sending data to "%s": %s' % (request.host, err))

    def read_response(self):
        """
        Parse the status line and headers from the server, then read the
        body according to the request method and status code. Returns a
        Response; raises ProxyError 502 on a blank or malformed reply.
        """
        line = self.rfile.readline()
        if line == "\r\n" or line == "\n": # Possible leftover from previous message
            line = self.rfile.readline()
        if not line:
            raise ProxyError(502, "Blank server response.")
        parts = line.strip().split(" ", 2)
        if not len(parts) == 3:
            raise ProxyError(502, "Invalid server response: %s."%line)
        proto, code, msg = parts
        code = int(code)
        headers = utils.Headers()
        headers.read(self.rfile)
        if code >= 100 and code <= 199:
            # 1xx responses are informational and precede the real one;
            # skip and read again.
            return self.read_response()
        if self.request.method == "HEAD" or code == 204 or code == 304:
            # These responses carry no message body by definition.
            content = ""
        else:
            content = read_http_body(self.rfile, self, headers, True)
        return Response(self.request, code, msg, headers, content)

    def terminate(self):
        # Best-effort shutdown; the peer may already have gone away.
        try:
            if not self.wfile.closed:
                self.wfile.flush()
            self.server.close()
        except IOError:
            pass
|
|
|
|
|
|
|
|
|
|
|
|
class ProxyHandler(SocketServer.StreamRequestHandler):
|
2011-02-19 23:53:42 +00:00
|
|
|
def __init__(self, config, request, client_address, server, q):
|
|
|
|
self.config = config
|
2010-02-16 04:09:07 +00:00
|
|
|
self.mqueue = q
|
|
|
|
SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)
|
|
|
|
|
|
|
|
def handle(self):
|
2011-02-19 04:00:24 +00:00
|
|
|
cc = ClientConnect(self.client_address)
|
2011-02-16 09:37:04 +00:00
|
|
|
cc.send(self.mqueue)
|
2010-11-12 15:01:17 +00:00
|
|
|
while not cc.close:
|
|
|
|
self.handle_request(cc)
|
2011-02-19 04:00:24 +00:00
|
|
|
cd = ClientDisconnect(cc)
|
|
|
|
cd.send(self.mqueue)
|
2010-11-12 15:01:17 +00:00
|
|
|
self.finish()
|
|
|
|
|
|
|
|
def handle_request(self, cc):
|
2011-03-11 00:06:51 +00:00
|
|
|
server, request, err = None, None, None
|
2010-02-16 04:09:07 +00:00
|
|
|
try:
|
2011-07-23 01:37:06 +00:00
|
|
|
try:
|
|
|
|
request = self.read_request(cc)
|
|
|
|
except IOError, v:
|
|
|
|
raise IOError, "Reading request: %s"%v
|
2010-11-12 15:01:17 +00:00
|
|
|
if request is None:
|
|
|
|
cc.close = True
|
|
|
|
return
|
2011-07-23 01:37:06 +00:00
|
|
|
cc.requestcount += 1
|
2010-02-16 04:09:07 +00:00
|
|
|
request = request.send(self.mqueue)
|
2011-01-27 00:32:24 +00:00
|
|
|
if request is None:
|
2010-11-12 15:01:17 +00:00
|
|
|
cc.close = True
|
2010-02-16 04:09:07 +00:00
|
|
|
return
|
2011-02-16 09:37:04 +00:00
|
|
|
|
2010-11-12 12:18:42 +00:00
|
|
|
if request.is_response():
|
|
|
|
response = request
|
|
|
|
request = False
|
|
|
|
response = response.send(self.mqueue)
|
|
|
|
else:
|
|
|
|
server = ServerConnection(request)
|
2010-11-12 15:01:17 +00:00
|
|
|
server.send_request(request)
|
2011-07-23 01:37:06 +00:00
|
|
|
try:
|
|
|
|
response = server.read_response()
|
|
|
|
except IOError, v:
|
|
|
|
raise IOError, "Reading response: %s"%v
|
2010-11-12 12:18:42 +00:00
|
|
|
response = response.send(self.mqueue)
|
|
|
|
if response is None:
|
|
|
|
server.terminate()
|
2011-01-27 00:32:24 +00:00
|
|
|
if response is None:
|
2010-11-12 15:01:17 +00:00
|
|
|
cc.close = True
|
2010-02-16 04:09:07 +00:00
|
|
|
return
|
|
|
|
self.send_response(response)
|
2011-07-23 01:37:06 +00:00
|
|
|
except IOError, v:
|
|
|
|
cc.connection_error = v
|
2011-06-23 05:00:55 +00:00
|
|
|
cc.close = True
|
2010-02-16 04:09:07 +00:00
|
|
|
except ProxyError, e:
|
2010-11-12 15:01:17 +00:00
|
|
|
cc.close = True
|
2011-07-23 01:37:06 +00:00
|
|
|
cc.connection_error = "%s: %s"%(e.code, e.msg)
|
|
|
|
if request:
|
|
|
|
err = Error(request, e.msg)
|
|
|
|
err.send(self.mqueue)
|
|
|
|
self.send_error(e.code, e.msg)
|
2010-02-16 04:09:07 +00:00
|
|
|
if server:
|
|
|
|
server.terminate()
|
|
|
|
|
2011-02-19 23:53:42 +00:00
|
|
|
def find_cert(self, host):
|
|
|
|
if self.config.certfile:
|
|
|
|
return self.config.certfile
|
2011-02-08 17:00:59 +00:00
|
|
|
else:
|
2011-02-20 00:29:41 +00:00
|
|
|
ret = utils.dummy_cert(self.config.certdir, self.config.cacert, host)
|
2011-06-27 04:10:17 +00:00
|
|
|
time.sleep(self.config.cert_wait_time)
|
2011-02-19 23:53:42 +00:00
|
|
|
if not ret:
|
|
|
|
raise ProxyError(400, "mitmproxy: Unable to generate dummy cert.")
|
|
|
|
return ret
|
2011-02-08 17:00:59 +00:00
|
|
|
|
2011-02-03 01:51:32 +00:00
|
|
|
def read_request(self, client_conn):
|
2010-11-12 15:01:17 +00:00
|
|
|
line = self.rfile.readline()
|
|
|
|
if line == "\r\n" or line == "\n": # Possible leftover from previous message
|
|
|
|
line = self.rfile.readline()
|
|
|
|
if line == "":
|
|
|
|
return None
|
|
|
|
method, scheme, host, port, path, httpminor = parse_request_line(line)
|
2010-02-16 04:09:07 +00:00
|
|
|
if method == "CONNECT":
|
|
|
|
# Discard additional headers sent to the proxy. Should I expose
|
|
|
|
# these to users?
|
|
|
|
while 1:
|
|
|
|
d = self.rfile.readline()
|
2010-11-12 15:01:17 +00:00
|
|
|
if d == '\r\n' or d == '\n':
|
2010-02-16 04:09:07 +00:00
|
|
|
break
|
2011-02-08 14:07:29 +00:00
|
|
|
self.wfile.write(
|
|
|
|
'HTTP/1.1 200 Connection established\r\n' +
|
|
|
|
('Proxy-agent: %s\r\n'%NAME) +
|
|
|
|
'\r\n'
|
|
|
|
)
|
2010-02-16 04:09:07 +00:00
|
|
|
self.wfile.flush()
|
2011-02-10 22:06:30 +00:00
|
|
|
kwargs = dict(
|
2011-02-19 23:53:42 +00:00
|
|
|
certfile = self.find_cert(host),
|
|
|
|
keyfile = self.config.certfile or self.config.cacert,
|
2010-02-16 04:09:07 +00:00
|
|
|
server_side = True,
|
|
|
|
ssl_version = ssl.PROTOCOL_SSLv23,
|
2011-07-23 04:59:48 +00:00
|
|
|
do_handshake_on_connect = True,
|
2010-02-16 04:09:07 +00:00
|
|
|
)
|
2011-02-10 22:06:30 +00:00
|
|
|
if sys.version_info[1] > 6:
|
2011-02-19 23:53:42 +00:00
|
|
|
kwargs["ciphers"] = self.config.ciphers
|
2011-02-10 22:06:30 +00:00
|
|
|
self.connection = ssl.wrap_socket(self.connection, **kwargs)
|
2010-02-16 04:09:07 +00:00
|
|
|
self.rfile = FileLike(self.connection)
|
|
|
|
self.wfile = FileLike(self.connection)
|
2010-11-12 15:01:17 +00:00
|
|
|
method, scheme, host, port, path, httpminor = parse_request_line(self.rfile.readline())
|
|
|
|
if scheme is None:
|
2010-02-16 04:09:07 +00:00
|
|
|
scheme = "https"
|
|
|
|
headers = utils.Headers()
|
|
|
|
headers.read(self.rfile)
|
2011-07-14 03:59:27 +00:00
|
|
|
if host is None and "host" in headers:
|
2010-11-12 15:01:17 +00:00
|
|
|
netloc = headers["host"][0]
|
|
|
|
if ':' in netloc:
|
|
|
|
host, port = string.split(netloc, ':')
|
|
|
|
port = int(port)
|
|
|
|
else:
|
|
|
|
host = netloc
|
|
|
|
if scheme == "https":
|
|
|
|
port = 443
|
|
|
|
else:
|
|
|
|
port = 80
|
|
|
|
port = int(port)
|
|
|
|
if host is None:
|
|
|
|
raise ProxyError(400, 'Invalid request: %s'%request)
|
2011-07-14 03:59:27 +00:00
|
|
|
if "expect" in headers:
|
2010-11-12 15:01:17 +00:00
|
|
|
expect = ",".join(headers['expect'])
|
|
|
|
if expect == "100-continue" and httpminor >= 1:
|
|
|
|
self.wfile.write('HTTP/1.1 100 Continue\r\n')
|
|
|
|
self.wfile.write('Proxy-agent: %s\r\n'%NAME)
|
|
|
|
self.wfile.write('\r\n')
|
|
|
|
del headers['expect']
|
|
|
|
else:
|
|
|
|
raise ProxyError(417, 'Unmet expect: %s'%expect)
|
|
|
|
if httpminor == 0:
|
|
|
|
client_conn.close = True
|
2011-07-14 03:59:27 +00:00
|
|
|
if "connection" in headers:
|
2010-11-12 15:01:17 +00:00
|
|
|
for value in ",".join(headers['connection']).split(","):
|
|
|
|
value = value.strip()
|
|
|
|
if value == "close":
|
|
|
|
client_conn.close = True
|
|
|
|
if value == "keep-alive":
|
|
|
|
client_conn.close = False
|
|
|
|
content = read_http_body(self.rfile, client_conn, headers, False)
|
2011-02-03 01:51:32 +00:00
|
|
|
return Request(client_conn, host, port, scheme, method, path, headers, content)
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
def send_response(self, response):
|
|
|
|
self.wfile.write(response.assemble())
|
|
|
|
self.wfile.flush()
|
|
|
|
|
|
|
|
def terminate(self, connection, wfile, rfile):
|
2011-01-27 00:32:24 +00:00
|
|
|
self.request.close()
|
2010-02-16 04:09:07 +00:00
|
|
|
try:
|
|
|
|
if not getattr(wfile, "closed", False):
|
|
|
|
wfile.flush()
|
|
|
|
connection.close()
|
|
|
|
except IOError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
def finish(self):
|
|
|
|
self.terminate(self.connection, self.wfile, self.rfile)
|
|
|
|
|
|
|
|
def send_error(self, code, body):
|
2011-01-27 01:19:48 +00:00
|
|
|
try:
|
|
|
|
import BaseHTTPServer
|
|
|
|
response = BaseHTTPServer.BaseHTTPRequestHandler.responses[code][0]
|
2010-11-12 15:01:17 +00:00
|
|
|
self.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response))
|
2011-01-27 01:19:48 +00:00
|
|
|
self.wfile.write("Server: %s\r\n"%NAME)
|
2010-11-12 15:01:17 +00:00
|
|
|
self.wfile.write("Connection: close\r\n")
|
2011-01-27 01:19:48 +00:00
|
|
|
self.wfile.write("Content-type: text/html\r\n")
|
|
|
|
self.wfile.write("\r\n")
|
|
|
|
self.wfile.write('<html><head>\n<title>%d %s</title>\n</head>\n'
|
|
|
|
'<body>\n%s\n</body>\n</html>' % (code, response, body))
|
|
|
|
self.wfile.flush()
|
2011-06-23 05:00:55 +00:00
|
|
|
except:
|
2011-01-27 01:19:48 +00:00
|
|
|
pass
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
|
2011-03-12 00:47:37 +00:00
|
|
|
class ProxyServerError(Exception): pass
|
|
|
|
|
2010-02-16 04:09:07 +00:00
|
|
|
# Handle each proxied connection on its own thread.
ServerBase = SocketServer.ThreadingTCPServer
ServerBase.daemon_threads = True # Terminate workers when main thread terminates
|
2010-02-16 04:09:07 +00:00
|
|
|
class ProxyServer(ServerBase):
|
2011-01-27 01:19:48 +00:00
|
|
|
request_queue_size = 20
|
2010-02-16 04:09:07 +00:00
|
|
|
allow_reuse_address = True
|
2011-02-19 23:53:42 +00:00
|
|
|
def __init__(self, config, port, address=''):
|
2011-03-12 00:47:37 +00:00
|
|
|
"""
|
|
|
|
Raises ProxyServerError if there's a startup problem.
|
|
|
|
"""
|
2011-02-19 23:53:42 +00:00
|
|
|
self.config, self.port, self.address = config, port, address
|
2011-03-12 00:47:37 +00:00
|
|
|
try:
|
|
|
|
ServerBase.__init__(self, (address, port), ProxyHandler)
|
|
|
|
except socket.error, v:
|
|
|
|
raise ProxyServerError('Error starting proxy server: ' + v.strerror)
|
2010-02-16 04:09:07 +00:00
|
|
|
self.masterq = None
|
2011-02-20 00:29:41 +00:00
|
|
|
self.certdir = tempfile.mkdtemp(prefix="mitmproxy")
|
|
|
|
config.certdir = self.certdir
|
2010-02-16 04:09:07 +00:00
|
|
|
|
|
|
|
def set_mqueue(self, q):
|
|
|
|
self.masterq = q
|
|
|
|
|
|
|
|
def finish_request(self, request, client_address):
|
2011-02-19 23:53:42 +00:00
|
|
|
self.RequestHandlerClass(self.config, request, client_address, self, self.masterq)
|
2010-02-16 04:09:07 +00:00
|
|
|
|
2011-02-19 23:12:55 +00:00
|
|
|
def shutdown(self):
|
2011-02-20 00:29:41 +00:00
|
|
|
shutil.rmtree(self.certdir)
|
2011-02-19 23:12:55 +00:00
|
|
|
ServerBase.shutdown(self)
|
|
|
|
|
2011-02-19 23:53:42 +00:00
|
|
|
|
|
|
|
# Command-line utils
|
|
|
|
def certificate_option_group(parser):
    """
        Attach the SSL-related command-line options to the given
        optparse.OptionParser, grouped under an "SSL" heading.
    """
    group = optparse.OptionGroup(parser, "SSL")
    group.add_option(
        "--cert",
        action="store", type="str", dest="cert", default=None,
        help="User-created SSL certificate file."
    )
    group.add_option(
        "--ciphers",
        action="store", type="str", dest="ciphers", default=None,
        help="SSL ciphers."
    )
    parser.add_option_group(group)
|
|
|
|
|
|
|
|
|
|
|
|
def process_certificate_option_group(parser, options):
    """
        Validate the SSL options parsed from the command line and build an
        SSLConfig from them. Generates a dummy CA certificate under
        options.confdir if one doesn't exist yet.

        Calls parser.error() (which exits) when --cert points at a
        non-existent file.

        Removed: a dead local `conf = {}` that was never read or returned.
    """
    if options.cert:
        options.cert = os.path.expanduser(options.cert)
        if not os.path.exists(options.cert):
            parser.error("Manually created certificate does not exist: %s"%options.cert)

    cacert = os.path.join(options.confdir, "mitmproxy-ca.pem")
    cacert = os.path.expanduser(cacert)
    if not os.path.exists(cacert):
        # First run: create the CA used to sign per-host certificates.
        utils.dummy_ca(cacert)
    if getattr(options, "cache", None) is not None:
        options.cache = os.path.expanduser(options.cache)
    return SSLConfig(
        certfile = options.cert,
        cacert = cacert,
        ciphers = options.ciphers,
        cert_wait_time = options.cert_wait_time
    )
|