"""
    This module provides more sophisticated flow tracking. These match requests
    with their responses, and provide filtering and interception facilities.
"""
import hashlib, Cookie, cookielib, copy, re, urlparse, os, threading
import time, urllib
import tnetstring, filt, script, utils, encoding, proxy
from email.utils import parsedate_tz, formatdate, mktime_tz
from netlib import odict, http, certutils
import controller, version
import app

HDR_FORM_URLENCODED = "application/x-www-form-urlencoded"
CONTENT_MISSING = 0

ODict = odict.ODict
ODictCaseless = odict.ODictCaseless


class ReplaceHooks:
    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for i in r:
            self.add(*i)

    def add(self, fpatt, rex, s):
        """
            Add a replacement hook.

            fpatt: a string specifying a filter pattern.
            rex: a regular expression.
            s: the replacement string

            Returns True if the hook was added, False if the pattern could not
            be parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        try:
            re.compile(rex)
        except re.error:
            return False
        self.lst.append((fpatt, rex, s, cpatt))
        return True

    def get_specs(self):
        """
            Retrieve the hook specifications. Returns a list of
            (fpatt, rex, s) tuples.
        """
        return [i[:3] for i in self.lst]

    def count(self):
        return len(self.lst)

    def run(self, f):
        for _, rex, s, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.replace(rex, s)
                else:
                    f.request.replace(rex, s)

    def clear(self):
        self.lst = []
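
# Usage sketch (illustrative; `f` is a hypothetical flow object): the hook
# below rewrites "foo" to "bar" in every response ("~s" is the response
# filter expression):
#
#   hooks = ReplaceHooks()
#   assert hooks.add("~s", "foo", "bar")
#   hooks.run(f)    # calls f.response.replace("foo", "bar")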


class SetHeaders:
    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for i in r:
            self.add(*i)

    def add(self, fpatt, header, value):
        """
            Add a set header hook.

            fpatt: String specifying a filter pattern.
            header: Header name.
            value: Header value string

            Returns True if hook was added, False if the pattern could not be
            parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        self.lst.append((fpatt, header, value, cpatt))
        return True

    def get_specs(self):
        """
            Retrieve the hook specifications. Returns a list of
            (fpatt, header, value) tuples.
        """
        return [i[:3] for i in self.lst]

    def count(self):
        return len(self.lst)

    def clear(self):
        self.lst = []

    def run(self, f):
        # First pass: remove existing values for every matching header, so
        # that the second pass replaces rather than appends.
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    del f.response.headers[header]
                else:
                    del f.request.headers[header]
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.headers.add(header, value)
                else:
                    f.request.headers.add(header, value)
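
# Usage sketch (illustrative; `f` is a hypothetical flow still in the request
# phase - "~q" matches flows with no response yet):
#
#   sh = SetHeaders()
#   assert sh.add("~q", "User-Agent", "my-agent")
#   sh.run(f)    # deletes existing User-Agent values, then adds the new one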


class ScriptContext:
    def __init__(self, master):
        self._master = master

    def log(self, *args, **kwargs):
        """
            Logs an event.

            How this is handled depends on the front-end. mitmdump will display
            events if the eventlog flag ("-e") was passed. mitmproxy sends
            output to the eventlog for display ("v" keyboard shortcut).
        """
        self._master.add_event(*args, **kwargs)

    def duplicate_flow(self, f):
        """
            Returns a duplicate of the specified flow. The flow is also
            injected into the current state, and is ready for editing, replay,
            etc.
        """
        self._master.pause_scripts = True
        f = self._master.duplicate_flow(f)
        self._master.pause_scripts = False
        return f

    def replay_request(self, f):
        """
            Replay the request on the current flow. The response will be added
            to the flow object.
        """
        self._master.replay_request(f)


class decoded(object):
    """
        A context manager that decodes a request, response or error, and then
        re-encodes it with the same encoding after execution of the block.

        Example:

            with decoded(request):
                request.content = request.content.replace("foo", "bar")
    """
    def __init__(self, o):
        self.o = o
        ce = o.headers.get_first("content-encoding")
        if ce in encoding.ENCODINGS:
            self.ce = ce
        else:
            self.ce = None

    def __enter__(self):
        if self.ce:
            self.o.decode()

    def __exit__(self, type, value, tb):
        if self.ce:
            self.o.encode(self.ce)


class StateObject:
    def __eq__(self, other):
        try:
            return self._get_state() == other._get_state()
        except AttributeError:
            return False


class HTTPMsg(StateObject):
    def get_decoded_content(self):
        """
            Returns the decoded content based on the current Content-Encoding
            header. Doesn't change the message itself or its headers.
        """
        ce = self.headers.get_first("content-encoding")
        if not self.content or ce not in encoding.ENCODINGS:
            return self.content
        return encoding.decode(ce, self.content)

    def decode(self):
        """
            Decodes content based on the current Content-Encoding header, then
            removes the header. If there is no Content-Encoding header, no
            action is taken.

            Returns True if decoding succeeded, False otherwise.
        """
        ce = self.headers.get_first("content-encoding")
        if not self.content or ce not in encoding.ENCODINGS:
            return False
        data = encoding.decode(
            ce,
            self.content
        )
        if data is None:
            return False
        self.content = data
        del self.headers["content-encoding"]
        return True

    def encode(self, e):
        """
            Encodes content with the encoding e, where e is "gzip", "deflate"
            or "identity".
        """
        # FIXME: Error if there's an existing encoding header?
        self.content = encoding.encode(e, self.content)
        self.headers["content-encoding"] = [e]

    def size(self, **kwargs):
        """
            Size in bytes of a fully rendered message, including headers and
            HTTP lead-in.
        """
        hl = len(self._assemble_head(**kwargs))
        if self.content:
            return hl + len(self.content)
        else:
            return hl

    def get_content_type(self):
        return self.headers.get_first("content-type")

    def get_transmitted_size(self):
        # FIXME: this is imprecise in case chunking is used
        # (we should count the chunking headers)
        if not self.content:
            return 0
        return len(self.content)
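
# Encoding round-trip sketch (illustrative; `msg` stands for any HTTPMsg
# subclass instance, e.g. a Request or Response with gzipped content):
#
#   if msg.decode():            # strips the content-encoding header
#       msg.content += "tail"   # operate on the plaintext body
#       msg.encode("gzip")      # re-adds "content-encoding: gzip"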


class Request(HTTPMsg):
    """
        An HTTP request.

        Exposes the following attributes:

            client_conn: ClientConnect object, or None if this is a replay.

            headers: ODictCaseless object

            content: Content of the request, None, or CONTENT_MISSING if there
            is content associated, but not present. CONTENT_MISSING evaluates
            to False to make checking for the presence of content natural.

            scheme: URL scheme (http/https)

            host: Host portion of the URL

            port: Destination port

            path: Path portion of the URL

            timestamp_start: Seconds since the epoch signifying request transmission started

            method: HTTP method

            timestamp_end: Seconds since the epoch signifying request transmission ended

            tcp_setup_timestamp: Seconds since the epoch signifying remote TCP connection setup completion time
            (or None, if the request didn't result in a TCP setup)

            ssl_setup_timestamp: Seconds since the epoch signifying remote SSL encryption setup completion time
            (or None, if the request didn't result in an SSL setup)
    """
    def __init__(
            self, client_conn, httpversion, host, port, scheme, method, path, headers, content, timestamp_start=None,
            timestamp_end=None, tcp_setup_timestamp=None, ssl_setup_timestamp=None, ip=None):
        assert isinstance(headers, ODictCaseless)
        self.client_conn = client_conn
        self.httpversion = httpversion
        self.host, self.port, self.scheme = host, port, scheme
        self.method, self.path, self.headers, self.content = method, path, headers, content
        self.timestamp_start = timestamp_start or utils.timestamp()
        self.timestamp_end = max(timestamp_end or utils.timestamp(), timestamp_start)
        self.close = False
        self.tcp_setup_timestamp = tcp_setup_timestamp
        self.ssl_setup_timestamp = ssl_setup_timestamp
        self.ip = ip

        # Have this request's cookies been modified by sticky cookies or auth?
        self.stickycookie = False
        self.stickyauth = False

    def anticache(self):
        """
            Modifies this request to remove headers that might produce a cached
            response. That is, we remove ETags and If-Modified-Since headers.
        """
        delheaders = [
            "if-modified-since",
            "if-none-match",
        ]
        for i in delheaders:
            del self.headers[i]

    def anticomp(self):
        """
            Modifies this request to remove headers that will compress the
            resource's data.
        """
        self.headers["accept-encoding"] = ["identity"]

    def constrain_encoding(self):
        """
            Limits the permissible Accept-Encoding values, based on what we can
            decode appropriately.
        """
        if self.headers["accept-encoding"]:
            self.headers["accept-encoding"] = [', '.join(
                e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0]
            )]

    def _set_replay(self):
        self.client_conn = None

    def is_replay(self):
        """
            Is this request a replay?
        """
        if self.client_conn:
            return False
        else:
            return True

    def _load_state(self, state):
        if state["client_conn"]:
            if self.client_conn:
                self.client_conn._load_state(state["client_conn"])
            else:
                self.client_conn = ClientConnect._from_state(state["client_conn"])
        else:
            self.client_conn = None
        self.host = state["host"]
        self.port = state["port"]
        self.scheme = state["scheme"]
        self.method = state["method"]
        self.path = state["path"]
        self.headers = ODictCaseless._from_state(state["headers"])
        self.content = state["content"]
        self.timestamp_start = state["timestamp_start"]
        self.timestamp_end = state["timestamp_end"]
        self.tcp_setup_timestamp = state["tcp_setup_timestamp"]
        self.ssl_setup_timestamp = state["ssl_setup_timestamp"]
        self.ip = state["ip"]

    def _get_state(self):
        return dict(
            client_conn = self.client_conn._get_state() if self.client_conn else None,
            httpversion = self.httpversion,
            host = self.host,
            port = self.port,
            scheme = self.scheme,
            method = self.method,
            path = self.path,
            headers = self.headers._get_state(),
            content = self.content,
            timestamp_start = self.timestamp_start,
            timestamp_end = self.timestamp_end,
            tcp_setup_timestamp = self.tcp_setup_timestamp,
            ssl_setup_timestamp = self.ssl_setup_timestamp,
            ip = self.ip
        )

    @classmethod
    def _from_state(klass, state):
        return klass(
            ClientConnect._from_state(state["client_conn"]),
            tuple(state["httpversion"]),
            str(state["host"]),
            state["port"],
            str(state["scheme"]),
            str(state["method"]),
            str(state["path"]),
            ODictCaseless._from_state(state["headers"]),
            state["content"],
            state["timestamp_start"],
            state["timestamp_end"],
            state["tcp_setup_timestamp"],
            state["ssl_setup_timestamp"],
            state["ip"]
        )

    def __hash__(self):
        return id(self)

    def copy(self):
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def get_form_urlencoded(self):
        """
            Retrieves the URL-encoded form data, returning an ODict object.
            Returns an empty ODict if there is no data or the content-type
            indicates non-form data.
        """
        if self.content and self.headers.in_any("content-type", HDR_FORM_URLENCODED, True):
            return ODict(utils.urldecode(self.content))
        return ODict([])

    def set_form_urlencoded(self, odict):
        """
            Sets the body to the URL-encoded form data, and adds the
            appropriate content-type header. Note that this will destroy the
            existing body if there is one.
        """
        # FIXME: If there's an existing content-type header indicating a
        # url-encoded form, leave it alone.
        self.headers["Content-Type"] = [HDR_FORM_URLENCODED]
        self.content = utils.urlencode(odict.lst)

    def get_path_components(self):
        """
            Returns the path components of the URL as a list of strings.

            Components are unquoted.
        """
        _, _, path, _, _, _ = urlparse.urlparse(self.get_url())
        return [urllib.unquote(i) for i in path.split("/") if i]

    def set_path_components(self, lst):
        """
            Takes a list of strings, and sets the path component of the URL.

            Components are quoted.
        """
        lst = [urllib.quote(i, safe="") for i in lst]
        path = "/" + "/".join(lst)
        scheme, netloc, _, params, query, fragment = urlparse.urlparse(self.get_url())
        self.set_url(urlparse.urlunparse([scheme, netloc, path, params, query, fragment]))

    def get_query(self):
        """
            Gets the request query string. Returns an ODict object.
        """
        _, _, _, _, query, _ = urlparse.urlparse(self.get_url())
        if query:
            return ODict(utils.urldecode(query))
        return ODict([])

    def set_query(self, odict):
        """
            Takes an ODict object, and sets the request query string.
        """
        scheme, netloc, path, params, _, fragment = urlparse.urlparse(self.get_url())
        query = utils.urlencode(odict.lst)
        self.set_url(urlparse.urlunparse([scheme, netloc, path, params, query, fragment]))
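
    # Query round-trip sketch (illustrative; `req` is a hypothetical Request
    # whose URL is http://example.com/path?a=1):
    #
    #   q = req.get_query()         # ODict([("a", "1")])
    #   q["b"] = ["2"]
    #   req.set_query(q)            # URL becomes http://example.com/path?a=1&b=2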

    def get_url(self, hostheader=False):
        """
            Returns a URL string, constructed from the Request's URL components.

            If hostheader is True, we use the value specified in the request
            Host header to construct the URL.
        """
        if hostheader:
            host = self.headers.get_first("host") or self.host
        else:
            host = self.host
        host = host.encode("idna")
        return utils.unparse_url(self.scheme, host, self.port, self.path).encode('ascii')

    def set_url(self, url):
        """
            Parses a URL specification, and updates the Request's information
            accordingly.

            Returns False if the URL was invalid, True if the update succeeded.
        """
        parts = http.parse_url(url)
        if not parts:
            return False
        self.scheme, self.host, self.port, self.path = parts
        return True
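
    # URL round-trip sketch (illustrative; `req` is a hypothetical Request):
    #
    #   assert req.set_url("http://example.com:8080/p?x=1")
    #   req.get_url()               # "http://example.com:8080/p?x=1"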

    def get_cookies(self):
        # Returns a dict mapping cookie name to a (value, attributes) tuple,
        # or None if the request carries no Cookie header.
        cookie_headers = self.headers.get("cookie")
        if not cookie_headers:
            return None

        cookies = []
        for header in cookie_headers:
            pairs = [pair.partition("=") for pair in header.split(';')]
            cookies.extend((pair[0], (pair[2], {})) for pair in pairs)
        return dict(cookies)

    def get_header_size(self):
        FMT = '%s %s HTTP/%s.%s\r\n%s\r\n'
        assembled_header = FMT % (
            self.method,
            self.path,
            self.httpversion[0],
            self.httpversion[1],
            str(self.headers)
        )
        return len(assembled_header)

    def _assemble_head(self, proxy=False):
        FMT = '%s %s HTTP/%s.%s\r\n%s\r\n'
        FMT_PROXY = '%s %s://%s:%s%s HTTP/%s.%s\r\n%s\r\n'

        headers = self.headers.copy()
        utils.del_all(
            headers,
            [
                'proxy-connection',
                'keep-alive',
                'connection',
                'transfer-encoding'
            ]
        )
        if 'host' not in headers:
            headers["host"] = [utils.hostport(self.scheme, self.host, self.port)]
        content = self.content
        if content:
            headers["Content-Length"] = [str(len(content))]
        else:
            content = ""
        if self.close:
            headers["connection"] = ["close"]
        if not proxy:
            return FMT % (
                self.method,
                self.path,
                self.httpversion[0],
                self.httpversion[1],
                str(headers)
            )
        else:
            return FMT_PROXY % (
                self.method,
                self.scheme,
                self.host,
                self.port,
                self.path,
                self.httpversion[0],
                self.httpversion[1],
                str(headers)
            )

    def _assemble(self, _proxy = False):
        """
            Assembles the request for transmission to the server. We make some
            modifications to make sure interception works properly.

            Returns None if the request cannot be assembled.
        """
        if self.content == CONTENT_MISSING:
            return None
        head = self._assemble_head(_proxy)
        if self.content:
            return head + self.content
        else:
            return head

    def replace(self, pattern, repl, *args, **kwargs):
        """
            Replaces a regular expression pattern with repl in both the headers
            and the body of the request. Encoded content will be decoded before
            replacement, and re-encoded afterwards.

            Returns the number of replacements made.
        """
        with decoded(self):
            self.content, c = utils.safe_subn(pattern, repl, self.content, *args, **kwargs)
            self.path, pc = utils.safe_subn(pattern, repl, self.path, *args, **kwargs)
        c += pc
        c += self.headers.replace(pattern, repl, *args, **kwargs)
        return c


class Response(HTTPMsg):
    """
        An HTTP response.

        Exposes the following attributes:

            request: Request object.

            code: HTTP response code

            msg: HTTP response message

            headers: ODictCaseless object

            content: Content of the response, None, or CONTENT_MISSING if there
            is content associated, but not present. CONTENT_MISSING evaluates
            to False to make checking for the presence of content natural.

            timestamp_start: Seconds since the epoch signifying response transmission started

            timestamp_end: Seconds since the epoch signifying response transmission ended
    """
    def __init__(self, request, httpversion, code, msg, headers, content, cert, timestamp_start=None, timestamp_end=None):
        assert isinstance(headers, ODictCaseless)
        self.request = request
        self.httpversion, self.code, self.msg = httpversion, code, msg
        self.headers, self.content = headers, content
        self.cert = cert
        self.timestamp_start = timestamp_start or utils.timestamp()
        self.timestamp_end = timestamp_end or utils.timestamp()
        self.replay = False

    def _refresh_cookie(self, c, delta):
        """
            Takes a cookie string c and a time delta in seconds, and returns
            a refreshed cookie string.
        """
        c = Cookie.SimpleCookie(str(c))
        for i in c.values():
            if "expires" in i:
                d = parsedate_tz(i["expires"])
                if d:
                    d = mktime_tz(d) + delta
                    i["expires"] = formatdate(d)
                else:
                    # This can happen when the expires tag is invalid.
                    # reddit.com sends an expires tag like this: "Thu, 31 Dec
                    # 2037 23:59:59 GMT", which is valid RFC 1123, but not
                    # strictly correct according to the cookie spec. Browsers
                    # appear to parse this tolerantly - maybe we should too.
                    # For now, we just ignore this.
                    del i["expires"]
        return c.output(header="").strip()

    def refresh(self, now=None):
        """
            This fairly complex and heuristic function refreshes a server
            response for replay.

                - It adjusts date, expires and last-modified headers.
                - It adjusts cookie expiration.
        """
        if not now:
            now = time.time()
        delta = now - self.timestamp_start
        refresh_headers = [
            "date",
            "expires",
            "last-modified",
        ]
        for i in refresh_headers:
            if i in self.headers:
                d = parsedate_tz(self.headers[i][0])
                if d:
                    new = mktime_tz(d) + delta
                    self.headers[i] = [formatdate(new)]
        c = []
        for i in self.headers["set-cookie"]:
            c.append(self._refresh_cookie(i, delta))
        if c:
            self.headers["set-cookie"] = c

    def _set_replay(self):
        self.replay = True

    def is_replay(self):
        """
            Is this response a replay?
        """
        return self.replay

    def _load_state(self, state):
        self.code = state["code"]
        self.msg = state["msg"]
        self.headers = ODictCaseless._from_state(state["headers"])
        self.content = state["content"]
        self.timestamp_start = state["timestamp_start"]
        self.timestamp_end = state["timestamp_end"]
        self.cert = certutils.SSLCert.from_pem(state["cert"]) if state["cert"] else None

    def _get_state(self):
        return dict(
            httpversion = self.httpversion,
            code = self.code,
            msg = self.msg,
            headers = self.headers._get_state(),
            timestamp_start = self.timestamp_start,
            timestamp_end = self.timestamp_end,
            cert = self.cert.to_pem() if self.cert else None,
            content = self.content,
        )

    @classmethod
    def _from_state(klass, request, state):
        return klass(
            request,
            state["httpversion"],
            state["code"],
            str(state["msg"]),
            ODictCaseless._from_state(state["headers"]),
            state["content"],
            certutils.SSLCert.from_pem(state["cert"]) if state["cert"] else None,
            state["timestamp_start"],
            state["timestamp_end"],
        )

    def copy(self):
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def _assemble_head(self):
        FMT = '%s\r\n%s\r\n'
        headers = self.headers.copy()
        utils.del_all(
            headers,
            ['proxy-connection', 'transfer-encoding']
        )
        if self.content:
            headers["Content-Length"] = [str(len(self.content))]
        elif 'Transfer-Encoding' in self.headers:
            headers["Content-Length"] = ["0"]
        proto = "HTTP/%s.%s %s %s"%(self.httpversion[0], self.httpversion[1], self.code, str(self.msg))
        data = (proto, str(headers))
        return FMT%data

    def _assemble(self):
        """
            Assembles the response for transmission to the client. We make some
            modifications to make sure interception works properly.

            Returns None if the response cannot be assembled.
        """
        if self.content == CONTENT_MISSING:
            return None
        head = self._assemble_head()
        if self.content:
            return head + self.content
        else:
            return head

    def replace(self, pattern, repl, *args, **kwargs):
        """
            Replaces a regular expression pattern with repl in both the headers
            and the body of the response. Encoded content will be decoded
            before replacement, and re-encoded afterwards.

            Returns the number of replacements made.
        """
        with decoded(self):
            self.content, c = utils.safe_subn(pattern, repl, self.content, *args, **kwargs)
        c += self.headers.replace(pattern, repl, *args, **kwargs)
        return c

    def get_header_size(self):
        FMT = '%s\r\n%s\r\n'
        proto = "HTTP/%s.%s %s %s"%(self.httpversion[0], self.httpversion[1], self.code, str(self.msg))
        assembled_header = FMT % (proto, str(self.headers))
        return len(assembled_header)

    def get_cookies(self):
        # Returns a dict mapping cookie name to a (value, attributes) tuple,
        # or None if the response sets no cookies.
        cookie_headers = self.headers.get("set-cookie")
        if not cookie_headers:
            return None

        cookies = []
        for header in cookie_headers:
            pairs = [pair.partition("=") for pair in header.split(';')]
            cookie_name = pairs[0][0]   # the key of the first key/value pair
            cookie_value = pairs[0][2]  # the value of the first key/value pair
            cookie_parameters = {key.strip().lower(): value.strip() for key, sep, value in pairs[1:]}
            cookies.append((cookie_name, (cookie_value, cookie_parameters)))
        return dict(cookies)
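
# Cookie extraction sketch (illustrative; `resp` is a hypothetical Response
# carrying "Set-Cookie: SSID=abc; Path=/; HttpOnly"):
#
#   resp.get_cookies()
#   # {"SSID": ("abc", {"path": "/", "httponly": ""})}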


class ClientDisconnect:
    """
        A client disconnection event.

        Exposes the following attributes:

            client_conn: ClientConnect object.
    """
    def __init__(self, client_conn):
        self.client_conn = client_conn


class ClientConnect(StateObject):
    """
        A single client connection. Each connection can result in multiple
        HTTP requests.

        Exposes the following attributes:

            address: (address, port) tuple, or None if the connection is replayed.
            requestcount: Number of requests created by this client connection.
            close: Is the client connection closed?
            error: Error string or None.
    """
    def __init__(self, address):
        """
            address is an (address, port) tuple, or None if this connection has
            been replayed from within mitmproxy.
        """
        self.address = address
        self.close = False
        self.requestcount = 0
        self.error = None

    def __str__(self):
        if self.address:
            return "%s:%d"%(self.address[0], self.address[1])

    def _load_state(self, state):
        self.close = True
        self.error = state["error"]
        self.requestcount = state["requestcount"]

    def _get_state(self):
        return dict(
            address = list(self.address),
            requestcount = self.requestcount,
            error = self.error,
        )

    @classmethod
    def _from_state(klass, state):
        if state:
            k = klass(state["address"])
            k._load_state(state)
            return k
        else:
            return None

    def copy(self):
        return copy.copy(self)


class Error(StateObject):
    """
        An Error.

        This is distinct from an HTTP error response (say, a code 500), which
        is represented by a normal Response object. This class is responsible
        for indicating errors that fall outside of normal HTTP communications,
        like interrupted connections, timeouts, and protocol errors.

        Exposes the following attributes:

            request: Request object
            msg: Message describing the error
            timestamp: Seconds since the epoch
    """
    def __init__(self, request, msg, timestamp=None):
        self.request, self.msg = request, msg
        self.timestamp = timestamp or utils.timestamp()

    def _load_state(self, state):
        self.msg = state["msg"]
        self.timestamp = state["timestamp"]

    def copy(self):
        c = copy.copy(self)
        return c

    def _get_state(self):
        return dict(
            msg = self.msg,
            timestamp = self.timestamp,
        )

    @classmethod
    def _from_state(klass, request, state):
        return klass(
            request,
            state["msg"],
            state["timestamp"],
        )

    def replace(self, pattern, repl, *args, **kwargs):
        """
            Replaces a regular expression pattern with repl in the error
            message. Returns the number of replacements made.

            FIXME: Is replace useful on an Error object??
        """
        self.msg, c = utils.safe_subn(pattern, repl, self.msg, *args, **kwargs)
        return c


class ClientPlaybackState:
    def __init__(self, flows, exit):
        self.flows, self.exit = flows, exit
        self.current = None

    def count(self):
        return len(self.flows)

    def done(self):
        if len(self.flows) == 0 and not self.current:
            return True
        return False

    def clear(self, flow):
        """
            A request has returned in some way - if this is the one we're
            servicing, go to the next flow.
        """
        if flow is self.current:
            self.current = None

    def tick(self, master, testing=False):
        """
            testing: Disables actual replay for testing.
        """
        if self.flows and not self.current:
            n = self.flows.pop(0)
            n.request.reply = controller.DummyReply()
            n.request.client_conn = None
            self.current = master.handle_request(n.request)
            if not testing and not self.current.response:
                master.replay_request(self.current) # pragma: no cover
            elif self.current.response:
                master.handle_response(self.current.response)


class ServerPlaybackState:
    def __init__(self, headers, flows, exit, nopop):
        """
            headers: Case-insensitive list of request headers that should be
            included in request-response matching.
        """
        self.headers, self.exit, self.nopop = headers, exit, nopop
        self.fmap = {}
        for i in flows:
            if i.response:
                l = self.fmap.setdefault(self._hash(i), [])
                l.append(i)

    def count(self):
        return sum(len(i) for i in self.fmap.values())

    def _hash(self, flow):
        """
            Calculates a loose hash of the flow request.
        """
        r = flow.request
        key = [
            str(r.host),
            str(r.port),
            str(r.scheme),
            str(r.method),
            str(r.path),
            str(r.content),
        ]
        if self.headers:
            hdrs = []
            for i in self.headers:
                v = r.headers[i]
                # Slightly subtle: we need to convert everything to strings
                # to prevent a mismatch between unicode/non-unicode.
                v = [str(x) for x in v]
                hdrs.append((i, v))
            key.append(repr(hdrs))
        return hashlib.sha256(repr(key)).digest()

    def next_flow(self, request):
        """
            Returns the next flow object, or None if no matching flow was
            found.
        """
        l = self.fmap.get(self._hash(request))
        if not l:
            return None

        if self.nopop:
            return l[0]
        else:
            return l.pop(0)
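
# Playback sketch (illustrative; `recorded_flows` and `live_flow` are
# hypothetical): flows whose requests hash identically are served back in
# recording order:
#
#   s = ServerPlaybackState(None, recorded_flows, exit=False, nopop=False)
#   replayed = s.next_flow(live_flow)   # pops the first match, or None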


class StickyCookieState:
    def __init__(self, flt):
        """
            flt: Compiled filter.
        """
        self.jar = {}
        self.flt = flt

    def ckey(self, m, f):
        """
            Returns a (domain, port, path) tuple.
        """
        return (
            m["domain"] or f.request.host,
            f.request.port,
            m["path"] or "/"
        )

    def domain_match(self, a, b):
        if cookielib.domain_match(a, b):
            return True
        elif cookielib.domain_match(a, b.strip(".")):
            return True
        return False

    def handle_response(self, f):
        for i in f.response.headers["set-cookie"]:
            # FIXME: We now know that Cookie.py screws up some cookies with
            # valid RFC 822/1123 datetime specifications for expiry. Sigh.
            c = Cookie.SimpleCookie(str(i))
            m = c.values()[0]
            k = self.ckey(m, f)
            if self.domain_match(f.request.host, k[0]):
                self.jar[self.ckey(m, f)] = m

    def handle_request(self, f):
        l = []
        if f.match(self.flt):
            for i in self.jar.keys():
                match = [
                    self.domain_match(f.request.host, i[0]),
                    f.request.port == i[1],
                    f.request.path.startswith(i[2])
                ]
                if all(match):
                    l.append(self.jar[i].output(header="").strip())
        if l:
            f.request.stickycookie = True
            f.request.headers["cookie"] = l


class StickyAuthState:
    def __init__(self, flt):
        """
            flt: Compiled filter.
        """
        self.flt = flt
        self.hosts = {}

    def handle_request(self, f):
        if "authorization" in f.request.headers:
            self.hosts[f.request.host] = f.request.headers["authorization"]
        elif f.match(self.flt):
            if f.request.host in self.hosts:
                f.request.headers["authorization"] = self.hosts[f.request.host]


class Flow:
    """
        A Flow is a collection of objects representing a single HTTP
        transaction. The main attributes are:

            request: Request object
            response: Response object
            error: Error object

        Note that it's possible for a Flow to have both a response and an error
        object. This might happen, for instance, when a response was received
        from the server, but there was an error sending it back to the client.

        The following additional attributes are exposed:

            intercepting: Is this flow currently being intercepted?
    """
    def __init__(self, request):
        self.request = request
        self.response, self.error = None, None
        self.intercepting = False
        self._backup = None

    def copy(self):
        rc = self.request.copy()
        f = Flow(rc)
        if self.response:
            f.response = self.response.copy()
            f.response.request = rc
        if self.error:
            f.error = self.error.copy()
            f.error.request = rc
        return f

    @classmethod
    def _from_state(klass, state):
        f = klass(None)
        f._load_state(state)
        return f

    def _get_state(self):
        d = dict(
            request = self.request._get_state() if self.request else None,
            response = self.response._get_state() if self.response else None,
            error = self.error._get_state() if self.error else None,
            version = version.IVERSION
        )
        return d

    def _load_state(self, state):
        if self.request:
            self.request._load_state(state["request"])
        else:
            self.request = Request._from_state(state["request"])

        if state["response"]:
            if self.response:
                self.response._load_state(state["response"])
            else:
                self.response = Response._from_state(self.request, state["response"])
        else:
            self.response = None

        if state["error"]:
            if self.error:
                self.error._load_state(state["error"])
            else:
                self.error = Error._from_state(self.request, state["error"])
        else:
            self.error = None

    def modified(self):
        """
            Has this Flow been modified?
        """
        # FIXME: Save a serialization in backup, compare current with
        # backup to detect if flow has _really_ been modified.
        if self._backup:
            return True
        else:
            return False

    def backup(self, force=False):
        """
            Save a backup of this Flow, which can be reverted to using a
            call to .revert().
        """
        if not self._backup:
            self._backup = self._get_state()

    def revert(self):
        """
            Revert to the last backed up state.
        """
        if self._backup:
            self._load_state(self._backup)
            self._backup = None

    def match(self, f):
        """
            Match this flow against a compiled filter expression. Returns True
            if matched, False if not.

            If f is a string, it will be compiled as a filter expression. If
            the expression is invalid, ValueError is raised.
        """
        if isinstance(f, basestring):
            f = filt.parse(f)
            if not f:
                raise ValueError("Invalid filter expression.")
        if f:
            return f(self)
        return True
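
    # Filter-matching sketch (illustrative): string expressions are compiled
    # on the fly, so for a flow `f` these two calls are equivalent:
    #
    #   f.match("~d example.com")
    #   f.match(filt.parse("~d example.com"))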

    def kill(self, master):
        """
            Kill this request.
        """
        self.error = Error(self.request, "Connection killed")
        self.error.reply = controller.DummyReply()
        if self.request and not self.request.reply.acked:
            self.request.reply(proxy.KILL)
        elif self.response and not self.response.reply.acked:
            self.response.reply(proxy.KILL)
        master.handle_error(self.error)
        self.intercepting = False

    def intercept(self):
        """
            Intercept this Flow. Processing will stop until accept_intercept is
            called.
        """
        self.intercepting = True

    def accept_intercept(self):
        """
            Continue with the flow - called after an intercept().
        """
        if self.request:
            if not self.request.reply.acked:
                self.request.reply()
            elif self.response and not self.response.reply.acked:
                self.response.reply()
            self.intercepting = False

    def replace(self, pattern, repl, *args, **kwargs):
        """
            Replaces a regular expression pattern with repl in all parts of the
            flow. Encoded content will be decoded before replacement, and
            re-encoded afterwards.

            Returns the number of replacements made.
        """
        c = self.request.replace(pattern, repl, *args, **kwargs)
        if self.response:
            c += self.response.replace(pattern, repl, *args, **kwargs)
        if self.error:
            c += self.error.replace(pattern, repl, *args, **kwargs)
        return c
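
# Flow-wide replacement sketch (illustrative; `f` is a hypothetical Flow with
# an encoded response body):
#
#   n = f.replace("foo", "bar")   # decodes, substitutes, re-encodes; n is the
#                                 # total count across request, response, error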


class State(object):
    def __init__(self):
        self._flow_map = {}
        self._flow_list = []
        self.view = []

        # These are compiled filt expressions:
        self._limit = None
        self.intercept = None
        self._limit_txt = None

    @property
    def limit_txt(self):
        return self._limit_txt

    def flow_count(self):
        return len(self._flow_map)

    def index(self, f):
        return self._flow_list.index(f)

    def active_flow_count(self):
        c = 0
        for i in self._flow_list:
            if not i.response and not i.error:
                c += 1
        return c

    def add_request(self, req):
        """
            Add a request to the state. Returns the matching flow.
        """
        f = Flow(req)
        self._flow_list.append(f)
        self._flow_map[req] = f
        assert len(self._flow_list) == len(self._flow_map)
        if f.match(self._limit):
            self.view.append(f)
        return f

    def add_response(self, resp):
        """
            Add a response to the state. Returns the matching flow.
        """
        f = self._flow_map.get(resp.request)
        if not f:
            return False
        f.response = resp
        if f.match(self._limit) and not f in self.view:
            self.view.append(f)
        return f

    def add_error(self, err):
        """
            Add an error response to the state. Returns the matching flow, or
            None if there isn't one.
        """
        f = self._flow_map.get(err.request)
        if not f:
            return None
        f.error = err
        if f.match(self._limit) and not f in self.view:
            self.view.append(f)
        return f

    def load_flows(self, flows):
        self._flow_list.extend(flows)
        for i in flows:
            self._flow_map[i.request] = i
        self.recalculate_view()

    def set_limit(self, txt):
        if txt:
            f = filt.parse(txt)
            if not f:
                return "Invalid filter expression."
            self._limit = f
            self._limit_txt = txt
        else:
            self._limit = None
            self._limit_txt = None
        self.recalculate_view()

    def set_intercept(self, txt):
        if txt:
            f = filt.parse(txt)
            if not f:
                return "Invalid filter expression."
            self.intercept = f
            self.intercept_txt = txt
        else:
            self.intercept = None
            self.intercept_txt = None

    def recalculate_view(self):
        if self._limit:
            self.view = [i for i in self._flow_list if i.match(self._limit)]
        else:
            self.view = self._flow_list[:]

    def delete_flow(self, f):
        if f.request in self._flow_map:
            del self._flow_map[f.request]
        self._flow_list.remove(f)
        if f in self.view:
            self.view.remove(f)
        return True

    def clear(self):
        for i in self._flow_list[:]:
            self.delete_flow(i)

    def accept_all(self):
        for i in self._flow_list[:]:
            i.accept_intercept()

    def revert(self, f):
        f.revert()

    def killall(self, master):
        for i in self._flow_list:
            i.kill(master)


class FlowMaster(controller.Master):
    def __init__(self, server, state):
        controller.Master.__init__(self, server)
        self.state = state
        self.server_playback = None
        self.client_playback = None
        self.kill_nonreplay = False
        self.scripts = []
        self.pause_scripts = False

        self.stickycookie_state = False
        self.stickycookie_txt = None

        self.stickyauth_state = False
        self.stickyauth_txt = None

        self.anticache = False
        self.anticomp = False
        self.refresh_server_playback = False
        self.replacehooks = ReplaceHooks()
        self.setheaders = SetHeaders()

        self.stream = None
        app.mapp.config["PMASTER"] = self

    def start_app(self, host, port, external):
        if not external:
            self.server.apps.add(
                app.mapp,
                host,
                port
            )
        else:
            print host
            threading.Thread(
                target=app.mapp.run,
                kwargs={
                    "use_reloader": False,
                    "host": host,
                    "port": port
                }
            ).start()

    def add_event(self, e, level="info"):
        """
            level: info, error
        """
        pass

    def get_script(self, script_argv):
        """
            Returns an (error, script) tuple.
        """
        s = script.Script(script_argv, ScriptContext(self))
        try:
            s.load()
        except script.ScriptError, v:
            return (v.args[0], None)
        return (None, s)

    def unload_script(self, script):
        script.unload()
        self.scripts.remove(script)

    def load_script(self, script_argv):
        """
            Loads a script. Returns an error description if something went
            wrong.
        """
        r = self.get_script(script_argv)
        if r[0]:
            return r[0]
        else:
            self.scripts.append(r[1])
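
    # Sketch (the script path is a hypothetical example): load_script wraps
    # get_script's (error, script) convention and surfaces only the error
    # description, so callers can report failures without unpacking a tuple:
    #
    #   err = master.load_script(["examples/add_header.py"])
    #   if err:
    #       master.add_event(err, "error")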

    def run_single_script_hook(self, script, name, *args, **kwargs):
        if script and not self.pause_scripts:
            ret = script.run(name, *args, **kwargs)
            if not ret[0] and ret[1]:
                e = "Script error:\n" + ret[1][1]
                self.add_event(e, "error")

    def run_script_hook(self, name, *args, **kwargs):
        for script in self.scripts:
            self.run_single_script_hook(script, name, *args, **kwargs)

    def set_stickycookie(self, txt):
        if txt:
            flt = filt.parse(txt)
            if not flt:
                return "Invalid filter expression."
            self.stickycookie_state = StickyCookieState(flt)
            self.stickycookie_txt = txt
        else:
            self.stickycookie_state = None
            self.stickycookie_txt = None

    def set_stickyauth(self, txt):
        if txt:
            flt = filt.parse(txt)
            if not flt:
                return "Invalid filter expression."
            self.stickyauth_state = StickyAuthState(flt)
            self.stickyauth_txt = txt
        else:
            self.stickyauth_state = None
            self.stickyauth_txt = None
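
    # Sketch (assumed filter expressions): sticky cookies and sticky auth are
    # enabled by passing a filter expression and disabled with a falsy value;
    # both return an error string when the expression doesn't parse:
    #
    #   master.set_stickycookie("~d example.com")   # cookies for one domain
    #   master.set_stickyauth("~d example.com")     # auth for the same domain
    #   master.set_stickycookie(None)               # turn it off again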

    def start_client_playback(self, flows, exit):
        """
            flows: List of flows.
        """
        self.client_playback = ClientPlaybackState(flows, exit)

    def stop_client_playback(self):
        self.client_playback = None

    def start_server_playback(self, flows, kill, headers, exit, nopop):
        """
            flows: List of flows.
            kill: Boolean, should we kill requests not part of the replay?
            headers: List of request headers to consider when matching live
            requests against recorded flows.
            exit: Boolean, shut the master down once playback is complete?
            nopop: Boolean, leave played flows in the buffer so they can be
            served more than once?
        """
        self.server_playback = ServerPlaybackState(headers, flows, exit, nopop)
        self.kill_nonreplay = kill

    def stop_server_playback(self):
        if self.server_playback.exit:
            self.shutdown()
        self.server_playback = None

    def do_server_playback(self, flow):
        """
            This method should be called by child classes in the handle_request
            handler. Returns True if playback has taken place, None if not.
        """
        if self.server_playback:
            rflow = self.server_playback.next_flow(flow)
            if not rflow:
                return None
            response = Response._from_state(flow.request, rflow.response._get_state())
            response._set_replay()
            flow.response = response
            if self.refresh_server_playback:
                response.refresh()
            flow.request.reply(response)
            if self.server_playback.count() == 0:
                self.stop_server_playback()
            return True
        return None
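
    # Playback sketch (keyword names mirror the positional parameters above):
    # handle_request routes through process_new_request into
    # do_server_playback, which clones a recorded response onto the live flow
    # and replies directly, so the request never reaches the upstream server:
    #
    #   master.start_server_playback(
    #       flows, kill=True, headers=[], exit=True, nopop=False
    #   )
    #   # When the last recorded flow is consumed, playback stops and,
    #   # because exit=True, stop_server_playback shuts the master down.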

    def tick(self, q):
        if self.client_playback:
            e = [
                self.client_playback.done(),
                self.client_playback.exit,
                self.state.active_flow_count() == 0
            ]
            if all(e):
                self.shutdown()
            self.client_playback.tick(self)

        return controller.Master.tick(self, q)

    def duplicate_flow(self, f):
        return self.load_flow(f.copy())

    def load_flow(self, f):
        """
            Loads a flow, and returns a new flow object.
        """
        fr = None  # a flow without a request loads nothing and returns None
        if f.request:
            f.request.reply = controller.DummyReply()
            fr = self.handle_request(f.request)
        if f.response:
            f.response.reply = controller.DummyReply()
            self.handle_response(f.response)
        if f.error:
            f.error.reply = controller.DummyReply()
            self.handle_error(f.error)
        return fr

    def load_flows(self, fr):
        """
            Load flows from a FlowReader object.
        """
        for i in fr.stream():
            self.load_flow(i)
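
    # Sketch (assumed file path): replaying a saved dump through the master,
    # using the FlowReader defined at the bottom of this module:
    #
    #   with open("flows.dump", "rb") as fo:
    #       master.load_flows(FlowReader(fo))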

    def process_new_request(self, f):
        if self.stickycookie_state:
            self.stickycookie_state.handle_request(f)
        if self.stickyauth_state:
            self.stickyauth_state.handle_request(f)

        if self.anticache:
            f.request.anticache()
        if self.anticomp:
            f.request.anticomp()

        if self.server_playback:
            pb = self.do_server_playback(f)
            if not pb:
                if self.kill_nonreplay:
                    f.kill(self)
                else:
                    f.request.reply()

    def process_new_response(self, f):
        if self.stickycookie_state:
            self.stickycookie_state.handle_response(f)

    def replay_request(self, f, block=False):
        """
            Returns None if successful, or error message if not.
        """
        if f.intercepting:
            return "Can't replay while intercepting..."
        if f.request.content == CONTENT_MISSING:
            return "Can't replay request with missing content..."
        if f.request:
            f.request._set_replay()
            if f.request.content:
                f.request.headers["Content-Length"] = [str(len(f.request.content))]
            f.response = None
            f.error = None
            self.process_new_request(f)
            rt = proxy.RequestReplayThread(
                self.server.config,
                f,
                self.masterq,
            )
            rt.start() # pragma: no cover
            if block:
                rt.join()
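
    # Sketch: replay_request reports failure as an error string rather than
    # an exception; block=True joins the replay thread, which is convenient
    # in scripts and tests:
    #
    #   err = master.replay_request(f, block=True)
    #   if err:
    #       master.add_event(err, "error")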

    def handle_clientconnect(self, cc):
        self.run_script_hook("clientconnect", cc)
        cc.reply()

    def handle_clientdisconnect(self, r):
        self.run_script_hook("clientdisconnect", r)
        r.reply()

    def handle_serverconnection(self, sc):
        # To unify the mitmproxy script API, we call the script hook
        # "serverconnect" rather than "serverconnection". As things are
        # handled differently in libmproxy (ClientConnect + ClientDisconnect
        # vs. the ServerConnection class), there is no "serverdisconnect"
        # event at the moment.
        self.run_script_hook("serverconnect", sc)
        sc.reply()

    def handle_error(self, r):
        f = self.state.add_error(r)
        if f:
            self.run_script_hook("error", f)
        if self.client_playback:
            self.client_playback.clear(f)
        r.reply()
        return f

    def handle_request(self, r):
        f = self.state.add_request(r)
        self.replacehooks.run(f)
        self.setheaders.run(f)
        self.run_script_hook("request", f)
        self.process_new_request(f)
        return f

    def handle_response(self, r):
        f = self.state.add_response(r)
        if f:
            self.replacehooks.run(f)
            self.setheaders.run(f)
            self.run_script_hook("response", f)
            if self.client_playback:
                self.client_playback.clear(f)
            self.process_new_response(f)
            if self.stream:
                self.stream.add(f)
        else:
            r.reply()
        return f
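
    # Event-order sketch (an assumption drawn from the handlers above): a
    # typical flow produces clientconnect, then request, then response (or
    # error), then clientdisconnect. The connection handlers reply
    # immediately; request and response replies are left to subclasses such
    # as the console and dump masters, which decide when the flow continues.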

    def shutdown(self):
        # Iterate over a copy: unload_script removes entries from self.scripts.
        for script in self.scripts[:]:
            self.unload_script(script)
        controller.Master.shutdown(self)
        if self.stream:
            for i in self.state._flow_list:
                if not i.response:
                    self.stream.add(i)
            self.stop_stream()

    def start_stream(self, fp, filt):
        self.stream = FilteredFlowWriter(fp, filt)

    def stop_stream(self):
        self.stream.fo.close()
        self.stream = None
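
    # Sketch (assumed file name and filter): streaming appends each completed
    # flow to an open file as it arrives, optionally filtered:
    #
    #   fo = open("out.dump", "wb")
    #   master.start_stream(fo, filt.parse("~c 200"))   # only 200 responses
    #   ...
    #   master.stop_stream()                            # closes the file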


class FlowWriter:
    def __init__(self, fo):
        self.fo = fo

    def add(self, flow):
        d = flow._get_state()
        tnetstring.dump(d, self.fo)


class FlowReadError(Exception):
    @property
    def strerror(self):
        return self.args[0]


class FlowReader:
    def __init__(self, fo):
        self.fo = fo

    def stream(self):
        """
            Yields Flow objects from the dump.
        """
        off = 0
        try:
            while 1:
                data = tnetstring.load(self.fo)
                if tuple(data["version"][:2]) != version.IVERSION[:2]:
                    v = ".".join(str(i) for i in data["version"])
                    raise FlowReadError("Incompatible serialized data version: %s"%v)
                off = self.fo.tell()
                yield Flow._from_state(data)
        except ValueError, v:
            # A ValueError at the end of the file is just EOF.
            if self.fo.tell() == off and self.fo.read() == '':
                return
            raise FlowReadError("Invalid data format.")
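

# Round-trip sketch (assumed file paths; `state` is an assumed State
# instance): FlowWriter and FlowReader share the tnetstring-encoded state
# produced by Flow._get_state():
#
#   with open("flows.dump", "wb") as fo:
#       w = FlowWriter(fo)
#       for f in state.view:
#           w.add(f)
#
#   with open("flows.dump", "rb") as fo:
#       flows = list(FlowReader(fo).stream())  # FlowReadError on bad data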


class FilteredFlowWriter:
    def __init__(self, fo, filt):
        self.fo = fo
        self.filt = filt

    def add(self, f):
        if self.filt and not f.match(self.filt):
            return
        d = f._get_state()
        tnetstring.dump(d, self.fo)