2011-01-25 02:02:48 +00:00
|
|
|
"""
|
2014-09-08 10:20:40 +00:00
|
|
|
This module provides more sophisticated flow tracking and provides filtering and interception facilities.
|
2011-01-25 02:02:48 +00:00
|
|
|
"""
|
2014-03-10 21:36:47 +00:00
|
|
|
from __future__ import absolute_import
|
2014-11-26 03:18:21 +00:00
|
|
|
from abc import abstractmethod, ABCMeta
|
2014-09-16 21:40:25 +00:00
|
|
|
import hashlib
|
|
|
|
import Cookie
|
|
|
|
import cookielib
|
2015-01-02 00:26:22 +00:00
|
|
|
import os
|
2014-09-16 21:40:25 +00:00
|
|
|
import re
|
2015-02-12 04:42:48 +00:00
|
|
|
from netlib import odict, wsgi, tcp
|
2014-03-10 21:36:47 +00:00
|
|
|
import netlib.http
|
2014-09-09 23:34:58 +00:00
|
|
|
from . import controller, protocol, tnetstring, filt, script, version
|
2014-09-12 00:30:03 +00:00
|
|
|
from .onboarding import app
|
2014-07-27 00:10:39 +00:00
|
|
|
from .protocol import http, handle
|
2014-10-18 16:29:35 +00:00
|
|
|
from .proxy.config import HostMatcher
|
2015-02-12 04:42:48 +00:00
|
|
|
from .proxy.connection import ClientConnection, ServerConnection
|
2014-10-03 10:29:44 +00:00
|
|
|
import urlparse
|
2012-02-10 01:27:39 +00:00
|
|
|
|
2011-01-31 00:26:56 +00:00
|
|
|
|
2014-01-04 01:35:11 +00:00
|
|
|
class AppRegistry:

    """
    Maps (domain, port) pairs to WSGI adaptors so that requests aimed at
    a registered app are served internally rather than proxied.
    """

    def __init__(self):
        self.apps = {}

    def add(self, app, domain, port):
        """
        Add a WSGI app to the registry, to be served for requests to the
        specified domain, on the specified port.
        """
        adaptor = wsgi.WSGIAdaptor(
            app,
            domain,
            port,
            version.NAMEVERSION
        )
        self.apps[(domain, port)] = adaptor

    def get(self, request):
        """
        Returns a WSGIAdaptor instance if the request matches a
        registered app, or None.

        The literal connection address is consulted first; failing that,
        the Host header is used.
        """
        direct = self.apps.get((request.host, request.port))
        if direct is not None:
            return direct
        if "host" in request.headers:
            hostname = request.headers["host"][0]
            return self.apps.get((hostname, request.port), None)
|
2014-01-04 01:35:11 +00:00
|
|
|
|
|
|
|
|
2012-03-16 22:31:05 +00:00
|
|
|
class ReplaceHooks:

    """
    Manages a list of (filter, regex, replacement) hooks that rewrite
    the content of flows matching the filter.
    """

    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for spec in r:
            self.add(*spec)

    def add(self, fpatt, rex, s):
        """
        Add a replacement hook.

        fpatt: a string specifying a filter pattern.
        rex: a regular expression.
        s: the replacement string

        Returns True if the hook was added, False if the pattern could
        not be parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        # Validate the regex up front so run() never sees a bad pattern.
        try:
            re.compile(rex)
        except re.error:
            return False
        self.lst.append((fpatt, rex, s, cpatt))
        return True

    def get_specs(self):
        """
        Retrieve the hook specifications. Returns a list of
        (fpatt, rex, s) tuples.
        """
        return [hook[:3] for hook in self.lst]

    def count(self):
        return len(self.lst)

    def run(self, f):
        # Apply every matching hook to the response if one exists,
        # otherwise to the request.
        for _, rex, s, cpatt in self.lst:
            if not cpatt(f):
                continue
            target = f.response if f.response else f.request
            target.replace(rex, s)

    def clear(self):
        self.lst = []
|
|
|
|
|
|
|
|
|
2012-08-18 11:39:52 +00:00
|
|
|
class SetHeaders:

    """
    Manages a list of (filter, header, value) hooks that overwrite a
    header on flows matching the filter.
    """

    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for spec in r:
            self.add(*spec)

    def add(self, fpatt, header, value):
        """
        Add a set-header hook.

        fpatt: String specifying a filter pattern.
        header: Header name.
        value: Header value string

        Returns True if the hook was added, False if the pattern could
        not be parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        self.lst.append((fpatt, header, value, cpatt))
        return True

    def get_specs(self):
        """
        Retrieve the hook specifications. Returns a list of
        (fpatt, header, value) tuples.
        """
        return [hook[:3] for hook in self.lst]

    def count(self):
        return len(self.lst)

    def clear(self):
        self.lst = []

    def run(self, f):
        # First pass: delete all hooked headers from matching flows, so
        # the second pass replaces rather than appends.
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                target = f.response if f.response else f.request
                del target.headers[header]
        # Second pass: add the configured values.
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                target = f.response if f.response else f.request
                target.headers.add(header, value)
|
|
|
|
|
|
|
|
|
2014-07-21 19:06:55 +00:00
|
|
|
class StreamLargeBodies(object):

    """
    Marks message bodies for streaming when their declared size is
    unknown or exceeds a configured maximum.
    """

    def __init__(self, max_size):
        self.max_size = max_size

    def run(self, flow, is_request):
        if is_request:
            r = flow.request
        else:
            r = flow.response
        code = flow.response.code if flow.response else None
        expected_size = netlib.http.expected_http_body_size(
            r.headers, is_request, flow.request.method, code
        )
        # Stream anything outside the [0, max_size] window (including
        # unknown/negative sizes).
        if not (0 <= expected_size <= self.max_size):
            # r.stream may already be a callable, which we want to preserve.
            r.stream = r.stream or True
|
2014-07-21 19:06:55 +00:00
|
|
|
|
2014-09-16 21:40:25 +00:00
|
|
|
|
2011-03-04 00:08:43 +00:00
|
|
|
class ClientPlaybackState:

    """
    Tracks a queue of flows to be replayed from the client side, one at
    a time.
    """

    def __init__(self, flows, exit):
        self.flows, self.exit = flows, exit
        self.current = None
        # Disables actual replay for testing.
        self.testing = False

    def count(self):
        return len(self.flows)

    def done(self):
        """True once the queue is drained and nothing is in flight."""
        return not self.flows and not self.current

    def clear(self, flow):
        """
        A request has returned in some way - if this is the one we're
        servicing, go to the next flow.
        """
        if flow is self.current:
            self.current = None

    def tick(self, master):
        # Start the next replay only when idle and work remains.
        if not self.flows or self.current:
            return
        self.current = self.flows.pop(0).copy()
        if not self.testing:
            master.replay_request(self.current)
        else:
            # Testing path: push the flow through the master's handlers
            # directly instead of performing a network replay.
            self.current.reply = controller.DummyReply()
            master.handle_request(self.current)
            if self.current.response:
                master.handle_response(self.current)
|
2011-03-04 00:08:43 +00:00
|
|
|
|
|
|
|
|
2011-02-20 19:47:19 +00:00
|
|
|
class ServerPlaybackState:

    def __init__(
            self,
            headers,
            flows,
            exit,
            nopop,
            ignore_params,
            ignore_content,
            ignore_payload_params,
            ignore_host):
        """
            headers: Case-insensitive list of request headers that should be
            included in request-response matching.

            flows: recorded flows to serve from; only flows that already
            have a response are indexed.
            exit: if true, shut down once playback is exhausted.
            nopop: if true, served flows stay in the pool (replay forever).
            ignore_params / ignore_content / ignore_payload_params /
            ignore_host: knobs that loosen request matching (see _hash).
        """
        self.headers = headers
        self.exit = exit
        self.nopop = nopop
        self.ignore_params = ignore_params
        self.ignore_content = ignore_content
        self.ignore_payload_params = ignore_payload_params
        self.ignore_host = ignore_host
        # fmap maps a request hash to the list of recorded flows whose
        # requests hash identically, preserving recording order.
        self.fmap = {}
        for i in flows:
            if i.response:
                l = self.fmap.setdefault(self._hash(i), [])
                l.append(i)

    def count(self):
        # Total number of responses still available across all buckets.
        return sum(len(i) for i in self.fmap.values())

    def _hash(self, flow):
        """
            Calculates a loose hash of the flow request.

            The key always includes port, scheme, method and path;
            content, host, query parameters and selected headers are
            folded in depending on the ignore_* settings.
        """
        r = flow.request

        _, _, path, _, query, _ = urlparse.urlparse(r.url)
        queriesArray = urlparse.parse_qsl(query, keep_blank_values=True)

        key = [
            str(r.port),
            str(r.scheme),
            str(r.method),
            str(path),
        ]

        if not self.ignore_content:
            form_contents = r.get_form()
            if self.ignore_payload_params and form_contents:
                # Hash only the form params that are not ignored.
                key.extend(
                    p for p in form_contents
                    if p[0] not in self.ignore_payload_params
                )
            else:
                # No form (or nothing to ignore): hash the raw body.
                key.append(str(r.content))

        if not self.ignore_host:
            key.append(r.host)

        # Fold in query parameters, skipping any ignored names.
        filtered = []
        ignore_params = self.ignore_params or []
        for p in queriesArray:
            if p[0] not in ignore_params:
                filtered.append(p)
        for p in filtered:
            key.append(p[0])
            key.append(p[1])

        if self.headers:
            hdrs = []
            for i in self.headers:
                v = r.headers[i]
                # Slightly subtle: we need to convert everything to strings
                # to prevent a mismatch between unicode/non-unicode.
                v = [str(x) for x in v]
                hdrs.append((i, v))
            key.append(hdrs)
        # NOTE: hashing repr(key) as a str is Python 2 specific; under
        # Python 3 sha256 would require bytes.
        return hashlib.sha256(repr(key)).digest()

    def next_flow(self, request):
        """
            Returns the next flow object, or None if no matching flow was
            found.

            With nopop set, the flow is returned but kept in the pool;
            otherwise it is consumed.
        """
        l = self.fmap.get(self._hash(request))
        if not l:
            return None

        if self.nopop:
            return l[0]
        else:
            return l.pop(0)
|
|
|
|
|
2011-02-20 19:47:19 +00:00
|
|
|
|
2011-02-23 21:33:39 +00:00
|
|
|
class StickyCookieState:

    def __init__(self, flt):
        """
            flt: Compiled filter.
        """
        # jar maps (domain, port, path) -> Cookie.Morsel
        self.jar = {}
        self.flt = flt

    def ckey(self, m, f):
        """
            Returns a (domain, port, path) tuple.

            m: a Cookie.Morsel; missing domain/path attributes fall back
            to the request's host and "/".
        """
        return (
            m["domain"] or f.request.host,
            f.request.port,
            m["path"] or "/"
        )

    def domain_match(self, a, b):
        # Accept a match against the stored domain either verbatim or
        # with leading/trailing dots stripped (cookielib is strict about
        # the leading-dot form).
        if cookielib.domain_match(a, b):
            return True
        elif cookielib.domain_match(a, b.strip(".")):
            return True
        return False

    def handle_response(self, f):
        # Record every Set-Cookie whose domain matches the request host.
        for i in f.response.headers["set-cookie"]:
            # FIXME: We now know that Cookie.py screws up some cookies with
            # valid RFC 822/1123 datetime specifications for expiry. Sigh.
            c = Cookie.SimpleCookie(str(i))
            for m in c.values():
                k = self.ckey(m, f)
                if self.domain_match(f.request.host, k[0]):
                    self.jar[k] = m

    def handle_request(self, f):
        # Re-attach stored cookies to matching outgoing requests.
        l = []
        if f.match(self.flt):
            for i in self.jar.keys():
                match = [
                    self.domain_match(f.request.host, i[0]),
                    f.request.port == i[1],
                    f.request.path.startswith(i[2])
                ]
                if all(match):
                    l.append(self.jar[i].output(header="").strip())
        if l:
            # Flag the flow so UIs can show the cookie was injected.
            f.request.stickycookie = True
            f.request.headers["cookie"] = l
|
2011-02-23 21:33:39 +00:00
|
|
|
|
|
|
|
|
2011-03-20 04:31:54 +00:00
|
|
|
class StickyAuthState:

    """
    Remembers the Authorization header sent to each host, and re-applies
    it to later matching requests that lack one.
    """

    def __init__(self, flt):
        """
        flt: Compiled filter.
        """
        self.flt = flt
        # host -> last seen Authorization header value
        self.hosts = {}

    def handle_request(self, f):
        host = f.request.host
        headers = f.request.headers
        if "authorization" in headers:
            # Record the credentials most recently sent to this host.
            self.hosts[host] = headers["authorization"]
        elif f.match(self.flt) and host in self.hosts:
            headers["authorization"] = self.hosts[host]
|
2011-03-20 04:31:54 +00:00
|
|
|
|
2011-02-23 21:33:39 +00:00
|
|
|
|
2014-11-26 03:18:21 +00:00
|
|
|
class FlowList(object):
    # Abstract sequence of flows backed by self._list, which concrete
    # subclasses (FlowView, FlowStore) must provide.
    # Python 2 style ABC declaration.
    __metaclass__ = ABCMeta

    def __iter__(self):
        return iter(self._list)

    def __contains__(self, item):
        return item in self._list

    def __getitem__(self, item):
        return self._list[item]

    # Python 2 truthiness hook (__bool__ in Python 3).
    def __nonzero__(self):
        return bool(self._list)

    def __len__(self):
        return len(self._list)

    def index(self, f):
        return self._list.index(f)

    # Mutation hooks: subclasses define how a flow is added, updated
    # and removed so stores can fan changes out to their views.
    @abstractmethod
    def _add(self, f):
        return

    @abstractmethod
    def _update(self, f):
        return

    @abstractmethod
    def _remove(self, f):
        return
|
|
|
|
|
|
|
|
|
|
|
|
class FlowView(FlowList):

    """
    A filtered, live view onto a flow store. The view registers itself
    with the store and is kept in sync through _add/_update/_remove.
    """

    def __init__(self, store, filt=None):
        self._list = []
        if not filt:
            # No filter means the view shows everything.
            filt = lambda flow: True
        self._build(store, filt)

        self.store = store
        self.store.views.append(self)

    def _close(self):
        # Deregister; the view receives no further updates afterwards.
        self.store.views.remove(self)

    def _build(self, flows, filt=None):
        if filt:
            self.filt = filt
        self._list = [flow for flow in flows if self.filt(flow)]

    def _add(self, f):
        if self.filt(f):
            self._list.append(f)

    def _update(self, f):
        # A changed flow may enter or leave the view.
        if f not in self._list:
            self._add(f)
        elif not self.filt(f):
            self._remove(f)

    def _remove(self, f):
        if f in self._list:
            self._list.remove(f)

    def _recalculate(self, flows):
        self._build(flows)
|
|
|
|
|
|
|
|
|
|
|
|
class FlowStore(FlowList):

    """
    Responsible for handling flows in the state:
    Keeps a list of all flows and provides views on them.
    """

    def __init__(self):
        self._list = []
        self._set = set()  # Used for O(1) lookups
        self.views = []
        self._recalculate_views()

    def get(self, flow_id):
        """Return the flow with the given id, or None if absent."""
        for flow in self._list:
            if flow.id == flow_id:
                return flow

    def __contains__(self, f):
        return f in self._set

    def _add(self, f):
        """
        Adds a flow to the state.
        The flow to add must not be present in the state.
        """
        self._list.append(f)
        self._set.add(f)
        for view in self.views:
            view._add(f)

    def _update(self, f):
        """
        Notifies the state that a flow has been updated.
        The flow must be present in the state.
        """
        if f in self:
            for view in self.views:
                view._update(f)

    def _remove(self, f):
        """
        Deletes a flow from the state.
        The flow must be present in the state.
        """
        self._list.remove(f)
        self._set.remove(f)
        for view in self.views:
            view._remove(f)

    # Expensive bulk operations

    def _extend(self, flows):
        """
        Adds a list of flows to the state.
        The list of flows to add must not contain flows that are
        already in the state.
        """
        self._list.extend(flows)
        self._set.update(flows)
        self._recalculate_views()

    def _clear(self):
        # Marked flows survive a clear — unless every flow is marked,
        # in which case everything is dropped.
        marked_flows = [f for f in self._list if f.marked]
        if len(marked_flows) == len(self._list):
            marked_flows = []

        self._list = []
        self._set = set()

        for f in marked_flows:
            self._add(f)

        self._recalculate_views()

    def _recalculate_views(self):
        """
        Expensive operation: Recalculate all the views after a bulk change.
        """
        for view in self.views:
            view._recalculate(self)

    # Utility functions.
    # There are some common cases where we need to argue about all flows
    # irrespective of filters on the view etc (i.e. on shutdown).

    def active_count(self):
        """Number of flows with neither a response nor an error yet."""
        return sum(
            1 for flow in self._list
            if not flow.response and not flow.error
        )

    # TODO: Should accept_all operate on views or on all flows?
    def accept_all(self, master):
        for f in self._list:
            f.accept_intercept(master)

    def kill_all(self, master):
        for f in self._list:
            f.kill(master)
|
|
|
|
|
|
|
|
|
2011-08-02 04:14:33 +00:00
|
|
|
class State(object):

    """
    Mutable application state: the full flow store, the currently
    active (possibly filtered) view, and the intercept expression.
    """

    def __init__(self):
        self.flows = FlowStore()
        self.view = FlowView(self.flows, None)

        # These are compiled filt expressions:
        self.intercept = None

    @property
    def limit_txt(self):
        # Textual form of the view filter, or None for "no limit".
        return getattr(self.view.filt, "pattern", None)

    def flow_count(self):
        return len(self.flows)

    # TODO: All functions regarding flows that don't cause side-effects
    # should be moved into FlowStore.
    def index(self, f):
        return self.flows.index(f)

    def active_flow_count(self):
        return self.flows.active_count()

    def add_flow(self, f):
        """
        Add a request to the state.
        """
        self.flows._add(f)
        return f

    def update_flow(self, f):
        """
        Add a response to the state.
        """
        self.flows._update(f)
        return f

    def delete_flow(self, f):
        self.flows._remove(f)

    def load_flows(self, flows):
        self.flows._extend(flows)

    def set_limit(self, txt):
        """
        Replace the view filter. Returns an error string for an invalid
        expression, None otherwise.
        """
        if txt == self.limit_txt:
            return
        flt = None
        if txt:
            flt = filt.parse(txt)
            if not flt:
                return "Invalid filter expression."
        self.view._close()
        self.view = FlowView(self.flows, flt)

    def set_intercept(self, txt):
        """
        Set (or clear) the intercept expression. Returns an error string
        for an invalid expression.
        """
        if not txt:
            self.intercept = None
            return
        flt = filt.parse(txt)
        if not flt:
            return "Invalid filter expression."
        self.intercept = flt

    @property
    def intercept_txt(self):
        return getattr(self.intercept, "pattern", None)

    def clear(self):
        self.flows._clear()

    def accept_all(self, master):
        self.flows.accept_all(master)

    def backup(self, f):
        f.backup()
        self.update_flow(f)

    def revert(self, f):
        f.revert()
        self.update_flow(f)

    def killall(self, master):
        self.flows.kill_all(master)
|
2011-07-31 23:17:01 +00:00
|
|
|
|
2011-02-16 01:33:04 +00:00
|
|
|
|
2011-02-16 03:03:22 +00:00
|
|
|
class FlowMaster(controller.Master):
|
|
|
|
def __init__(self, server, state):
    # server: proxy server instance (passed through to controller.Master);
    # state: a State object holding the flow store and views.
    controller.Master.__init__(self, server)
    self.state = state
    # Playback state objects; None/False while inactive.
    self.server_playback = None
    self.client_playback = None
    self.kill_nonreplay = False
    self.scripts = []
    self.pause_scripts = False

    # Sticky cookie/auth: the state object plus the raw filter text it
    # was built from (kept so the setting can be displayed/restored).
    self.stickycookie_state = False
    self.stickycookie_txt = None

    self.stickyauth_state = False
    self.stickyauth_txt = None

    self.anticache = False
    self.anticomp = False
    # Either False or a StreamLargeBodies instance.
    self.stream_large_bodies = False
    self.refresh_server_playback = False
    self.replacehooks = ReplaceHooks()
    self.setheaders = SetHeaders()
    self.replay_ignore_params = False
    self.replay_ignore_content = None
    self.replay_ignore_host = False

    self.stream = None
    self.apps = AppRegistry()
|
2012-07-08 22:18:37 +00:00
|
|
|
|
2014-09-13 23:46:01 +00:00
|
|
|
def start_app(self, host, port):
    """Register the onboarding app to be served at host:port."""
    self.apps.add(app.mapp, host, port)
|
2013-07-23 22:32:56 +00:00
|
|
|
|
2014-03-13 00:04:45 +00:00
|
|
|
def add_event(self, e, level="info"):
    """
        level: debug, info, error

        No-op in the base class; subclasses are expected to override
        this to record or display the event.
    """
    pass
|
|
|
|
|
2014-01-13 01:20:34 +00:00
|
|
|
def unload_scripts(self):
    # Iterate over a copy: unload_script mutates self.scripts.
    for loaded in list(self.scripts):
        self.unload_script(loaded)
|
|
|
|
|
|
|
|
def unload_script(self, script):
    # Give the script a chance to clean up, then drop it from the
    # active list.
    script.unload()
    self.scripts.remove(script)
|
2011-08-03 05:35:18 +00:00
|
|
|
|
2014-01-12 10:01:59 +00:00
|
|
|
def load_script(self, command):
    """
        Loads a script. Returns an error description if something went
        wrong.

        command: the script command line (path plus arguments).
    """
    try:
        s = script.Script(command, self)
    except script.ScriptError as v:
        # Surface the parse/load error message to the caller.
        return v.args[0]
    self.scripts.append(s)
|
2013-06-13 14:04:04 +00:00
|
|
|
|
|
|
|
def run_single_script_hook(self, script, name, *args, **kwargs):
    """Run one hook on one script, reporting any script error."""
    if not script or self.pause_scripts:
        return
    ret = script.run(name, *args, **kwargs)
    # ret is (ok, result); on failure result holds the error details.
    if not ret[0] and ret[1]:
        self.add_event("Script error:\n" + ret[1][1], "error")
|
2011-02-20 22:40:49 +00:00
|
|
|
|
2013-06-13 14:04:04 +00:00
|
|
|
def run_script_hook(self, name, *args, **kwargs):
    """Fire the named hook on every loaded script, in load order."""
    for loaded in self.scripts:
        self.run_single_script_hook(loaded, name, *args, **kwargs)
|
2013-12-24 01:28:20 +00:00
|
|
|
|
2014-10-18 16:29:35 +00:00
|
|
|
def get_ignore_filter(self):
    # Return the raw host patterns currently configured for ignoring.
    return self.server.config.check_ignore.patterns
|
2014-09-08 10:20:40 +00:00
|
|
|
|
2014-10-18 16:29:35 +00:00
|
|
|
def set_ignore_filter(self, host_patterns):
    # Replace the ignore matcher with one built from the new patterns.
    self.server.config.check_ignore = HostMatcher(host_patterns)
|
|
|
|
|
|
|
|
def get_tcp_filter(self):
    # Return the raw host patterns currently configured for raw-TCP
    # passthrough.
    return self.server.config.check_tcp.patterns
|
|
|
|
|
|
|
|
def set_tcp_filter(self, host_patterns):
    # Replace the raw-TCP matcher with one built from the new patterns.
    self.server.config.check_tcp = HostMatcher(host_patterns)
|
2014-09-08 10:20:40 +00:00
|
|
|
|
2011-02-24 02:15:51 +00:00
|
|
|
def set_stickycookie(self, txt):
    """
    Enable sticky cookies for flows matching the filter text, or
    disable them when txt is empty. Returns an error string for an
    invalid filter expression.
    """
    if not txt:
        self.stickycookie_state = None
        self.stickycookie_txt = None
        return
    compiled = filt.parse(txt)
    if not compiled:
        return "Invalid filter expression."
    self.stickycookie_state = StickyCookieState(compiled)
    self.stickycookie_txt = txt
|
2011-02-24 02:15:51 +00:00
|
|
|
|
2014-07-21 19:06:55 +00:00
|
|
|
def set_stream_large_bodies(self, max_size):
    """
    Stream bodies larger than max_size bytes; None disables streaming.
    """
    if max_size is None:
        self.stream_large_bodies = False
    else:
        self.stream_large_bodies = StreamLargeBodies(max_size)
|
|
|
|
|
2011-03-20 04:31:54 +00:00
|
|
|
def set_stickyauth(self, txt):
    """
    Enable sticky auth for flows matching the filter text, or disable
    it when txt is empty. Returns an error string for an invalid
    filter expression.
    """
    if not txt:
        self.stickyauth_state = None
        self.stickyauth_txt = None
        return
    compiled = filt.parse(txt)
    if not compiled:
        return "Invalid filter expression."
    self.stickyauth_state = StickyAuthState(compiled)
    self.stickyauth_txt = txt
|
|
|
|
|
2011-03-06 03:54:49 +00:00
|
|
|
def start_client_playback(self, flows, exit):
    """
    flows: List of flows.
    exit: if true, shut down once playback finishes (see tick()).
    """
    self.client_playback = ClientPlaybackState(flows, exit)
|
2011-03-05 00:03:26 +00:00
|
|
|
|
2011-03-17 20:38:51 +00:00
|
|
|
def stop_client_playback(self):
    # Discard any in-progress client playback.
    self.client_playback = None
|
|
|
|
|
2015-05-30 00:03:28 +00:00
|
|
|
def start_server_playback(
        self,
        flows,
        kill,
        headers,
        exit,
        nopop,
        ignore_params,
        ignore_content,
        ignore_payload_params,
        ignore_host):
    """
        flows: List of flows.
        kill: Boolean, should we kill requests not part of the replay?
        headers: list of request headers to include in matching
        (see ServerPlaybackState).
        exit: shut down once playback is exhausted.
        nopop: serve recorded flows without consuming them.
        ignore_params: list of parameters to ignore in server replay
        ignore_content: true if request content should be ignored in server replay
        ignore_payload_params: list of content params to ignore in server replay
        ignore_host: true if request host should be ignored in server replay
    """
    self.server_playback = ServerPlaybackState(
        headers,
        flows,
        exit,
        nopop,
        ignore_params,
        ignore_content,
        ignore_payload_params,
        ignore_host)
    self.kill_nonreplay = kill
|
|
|
|
|
2011-03-17 20:43:47 +00:00
|
|
|
def stop_server_playback(self):
    # Honour the exit flag before discarding the playback state.
    if self.server_playback.exit:
        self.shutdown()
    self.server_playback = None
|
|
|
|
|
2011-03-05 00:03:26 +00:00
|
|
|
def do_server_playback(self, flow):
    """
        This method should be called by child classes in the handle_request
        handler. Returns True if playback has taken place, None if not.
    """
    if self.server_playback:
        rflow = self.server_playback.next_flow(flow)
        if not rflow:
            # No recorded response matches this request.
            return None
        # Serve a copy of the recorded response, flagged as a replay.
        response = http.HTTPResponse.from_state(rflow.response.get_state())
        response.is_replay = True
        if self.refresh_server_playback:
            response.refresh()
        flow.reply(response)
        # Playback pool exhausted: tear down (may also shut down the
        # master if the exit flag was set).
        if self.server_playback.count() == 0:
            self.stop_server_playback()
        return True
    return None
|
2011-02-16 03:03:22 +00:00
|
|
|
|
2014-09-13 23:30:00 +00:00
|
|
|
def tick(self, q, timeout):
    # Drive client playback one step per tick, then delegate to the
    # base controller loop.
    if self.client_playback:
        # Shut down only when playback is done, exit was requested,
        # and no flows are still in flight.
        e = [
            self.client_playback.done(),
            self.client_playback.exit,
            self.state.active_flow_count() == 0
        ]
        if all(e):
            self.shutdown()
        self.client_playback.tick(self)
        if self.client_playback.done():
            self.client_playback = None

    return super(FlowMaster, self).tick(q, timeout)
|
2011-03-05 00:03:26 +00:00
|
|
|
|
2012-02-18 10:56:40 +00:00
|
|
|
def duplicate_flow(self, f):
    """Copy a flow into the state; copies never inherit the mark."""
    dup = f.copy()
    dup.marked = False
    return self.load_flow(dup)
|
2012-02-18 10:56:40 +00:00
|
|
|
|
2015-02-11 19:05:49 +00:00
|
|
|
    def create_request(self, method, scheme, host, port, path):
        """
        Create a new artificial, minimal request flow and register it in
        the flow list (via load_flow).

        method, scheme, host, port and path are used verbatim to build an
        absolute-form HTTPRequest with HTTP version 1.1, empty headers and
        no body.
        """
        # Synthetic client connection. NOTE(review): it reuses the target's
        # (host, port) as the client address — placeholder data only.
        c = ClientConnection.from_state(dict(
            address=dict(address=(host, port), use_ipv6=False),
            clientcert=None
        ))

        # Synthetic server connection pointing at the target host.
        # NOTE(review): ssl_established is forced True regardless of scheme
        # — confirm this is intentional for artificial flows.
        s = ServerConnection.from_state(dict(
            address=dict(address=(host, port), use_ipv6=False),
            state=[],
            source_address=None,
            # source_address=dict(address=(host, port), use_ipv6=False),
            cert=None,
            sni=host,
            ssl_established=True
        ))
        f = http.HTTPFlow(c, s)
        headers = odict.ODictCaseless()

        # Absolute-form request line, HTTP/1.1, empty headers, no content.
        req = http.HTTPRequest(
            "absolute",
            method,
            scheme,
            host,
            port,
            path,
            (1, 1),
            headers,
            None,
            None,
            None,
            None)
        f.request = req
        return self.load_flow(f)
|
|
|
|
|
2012-02-18 10:56:40 +00:00
|
|
|
    def load_flow(self, f):
        """
        Loads a flow, and returns a new flow object.

        The flow is given a dummy reply channel and pushed through the
        normal request/response/error handlers as if it arrived live.
        """

        # In reverse proxy mode, rewrite the request to point at the
        # configured upstream destination. NOTE(review): mode compares
        # equal to "reverse" yet also carries a .dst attribute whose [1]
        # element looks like an SSL flag and [2:] like (host, port) —
        # confirm against proxy.config.
        if self.server and self.server.config.mode == "reverse":
            f.request.host, f.request.port = self.server.config.mode.dst[2:]
            f.request.scheme = "https" if self.server.config.mode.dst[
                1] else "http"

        # Loaded flows have no live connection to answer on.
        f.reply = controller.DummyReply()
        if f.request:
            self.handle_request(f)
        if f.response:
            self.handle_responseheaders(f)
            self.handle_response(f)
        if f.error:
            self.handle_error(f)
        return f
|
2012-02-18 10:56:40 +00:00
|
|
|
|
2011-05-14 23:22:35 +00:00
|
|
|
def load_flows(self, fr):
|
|
|
|
"""
|
|
|
|
Load flows from a FlowReader object.
|
|
|
|
"""
|
2015-01-02 00:26:22 +00:00
|
|
|
cnt = 0
|
2011-05-14 23:22:35 +00:00
|
|
|
for i in fr.stream():
|
2015-01-02 00:26:22 +00:00
|
|
|
cnt += 1
|
2012-02-18 10:56:40 +00:00
|
|
|
self.load_flow(i)
|
2015-01-02 00:26:22 +00:00
|
|
|
return cnt
|
|
|
|
|
|
|
|
def load_flows_file(self, path):
|
|
|
|
path = os.path.expanduser(path)
|
|
|
|
try:
|
|
|
|
f = file(path, "rb")
|
|
|
|
freader = FlowReader(f)
|
2015-05-30 00:03:28 +00:00
|
|
|
except IOError as v:
|
2015-01-02 00:26:22 +00:00
|
|
|
raise FlowReadError(v.strerror)
|
|
|
|
return self.load_flows(freader)
|
2011-05-14 23:22:35 +00:00
|
|
|
|
2011-03-15 02:11:03 +00:00
|
|
|
    def process_new_request(self, f):
        """
        Apply per-request features (sticky cookies/auth, anticache,
        anticomp, server playback) to a freshly received request flow.
        """
        if self.stickycookie_state:
            self.stickycookie_state.handle_request(f)
        if self.stickyauth_state:
            self.stickyauth_state.handle_request(f)

        if self.anticache:
            f.request.anticache()
        if self.anticomp:
            f.request.anticomp()

        if self.server_playback:
            pb = self.do_server_playback(f)
            # No recorded response matched: either kill the flow or let it
            # through to the server, depending on kill_nonreplay.
            if not pb:
                if self.kill_nonreplay:
                    f.kill(self)
                else:
                    f.reply()
|
2011-03-15 02:11:03 +00:00
|
|
|
|
2011-05-14 23:22:35 +00:00
|
|
|
def process_new_response(self, f):
|
|
|
|
if self.stickycookie_state:
|
|
|
|
self.stickycookie_state.handle_response(f)
|
|
|
|
|
2015-02-07 15:26:19 +00:00
|
|
|
    def replay_request(self, f, block=False, run_scripthooks=True):
        """
        Replay a flow's request against the server in a background thread.

        block: wait for the replay thread to finish before returning.
        run_scripthooks: route the replayed flow through the master queue
        so script hooks fire.

        Returns None if successful, or error message if not.
        """
        if f.live and run_scripthooks:
            return "Can't replay live request."
        if f.intercepted:
            return "Can't replay while intercepting..."
        if f.request.content == http.CONTENT_MISSING:
            return "Can't replay request with missing content..."
        if f.request:
            f.backup()
            f.request.is_replay = True
            # The body may have been edited since capture; keep
            # Content-Length in sync with the actual content.
            if f.request.content:
                f.request.headers[
                    "Content-Length"] = [str(len(f.request.content))]
            f.response = None
            f.error = None
            self.process_new_request(f)
            # Passing False instead of the master queue suppresses hooks.
            rt = http.RequestReplayThread(
                self.server.config,
                f,
                self.masterq if run_scripthooks else False,
                self.should_exit
            )
            rt.start()  # pragma: no cover
            if block:
                rt.join()
|
2011-03-15 02:11:03 +00:00
|
|
|
|
2014-03-13 00:04:45 +00:00
|
|
|
def handle_log(self, l):
|
|
|
|
self.add_event(l.msg, l.level)
|
|
|
|
l.reply()
|
|
|
|
|
2011-08-03 04:36:20 +00:00
|
|
|
def handle_clientconnect(self, cc):
|
2011-08-05 02:03:10 +00:00
|
|
|
self.run_script_hook("clientconnect", cc)
|
2013-02-16 23:42:48 +00:00
|
|
|
cc.reply()
|
2011-02-19 04:00:24 +00:00
|
|
|
|
|
|
|
def handle_clientdisconnect(self, r):
|
2011-08-05 02:03:10 +00:00
|
|
|
self.run_script_hook("clientdisconnect", r)
|
2013-02-16 23:42:48 +00:00
|
|
|
r.reply()
|
2011-02-16 03:03:22 +00:00
|
|
|
|
2014-07-26 10:02:18 +00:00
|
|
|
def handle_serverconnect(self, sc):
|
2013-11-18 16:25:52 +00:00
|
|
|
self.run_script_hook("serverconnect", sc)
|
|
|
|
sc.reply()
|
|
|
|
|
2014-09-03 14:57:56 +00:00
|
|
|
def handle_error(self, f):
|
2014-11-26 03:18:21 +00:00
|
|
|
self.state.update_flow(f)
|
2014-09-03 14:57:56 +00:00
|
|
|
self.run_script_hook("error", f)
|
2011-03-05 00:03:26 +00:00
|
|
|
if self.client_playback:
|
|
|
|
self.client_playback.clear(f)
|
2014-09-03 14:57:56 +00:00
|
|
|
f.reply()
|
2011-02-16 03:03:22 +00:00
|
|
|
return f
|
|
|
|
|
2014-09-03 14:57:56 +00:00
|
|
|
    def handle_request(self, f):
        """
        Handle a request flow: serve registered WSGI apps in-line, record
        the flow in state, run replace/setheaders hooks and the "request"
        script hook, then apply request processing.

        Returns the flow, or None if it was consumed by a WSGI app.
        """
        if f.live:
            # If the request targets a registered WSGI app, answer it
            # directly and kill the flow so it never reaches a server.
            app = self.apps.get(f.request)
            if app:
                err = app.serve(
                    f,
                    f.client_conn.wfile,
                    **{"mitmproxy.master": self}
                )
                if err:
                    self.add_event("Error in wsgi app. %s" % err, "error")
                f.reply(protocol.KILL)
                return
        if f not in self.state.flows:  # don't add again on replay
            self.state.add_flow(f)
        self.replacehooks.run(f)
        self.setheaders.run(f)
        self.run_script_hook("request", f)
        self.process_new_request(f)
        return f
|
2011-02-16 03:03:22 +00:00
|
|
|
|
2014-09-03 14:57:56 +00:00
|
|
|
    def handle_responseheaders(self, f):
        """
        Handle a flow once response headers have arrived: run the
        "responseheaders" script hook and decide whether the body should
        be streamed, then acknowledge the flow.

        Returns the flow, or None if it was killed.
        """
        self.run_script_hook("responseheaders", f)

        try:
            if self.stream_large_bodies:
                self.stream_large_bodies.run(f, False)
        except netlib.http.HttpError:
            # Deciding whether to stream can raise (e.g. an invalid
            # Content-Length header); kill the flow rather than crash.
            f.reply(protocol.KILL)
            return

        f.reply()
        return f
|
2014-07-18 05:43:26 +00:00
|
|
|
|
2014-09-03 14:57:56 +00:00
|
|
|
    def handle_response(self, f):
        """
        Handle a completed response: update state, run replace/setheaders
        hooks and the "response" script hook, service client playback,
        apply response processing, and stream the flow if streaming is on.

        Returns the flow.
        """
        self.state.update_flow(f)
        self.replacehooks.run(f)
        self.setheaders.run(f)
        self.run_script_hook("response", f)
        if self.client_playback:
            self.client_playback.clear(f)
        self.process_new_response(f)
        if self.stream:
            self.stream.add(f)
        return f
|
2011-02-16 03:03:22 +00:00
|
|
|
|
2014-12-23 19:33:42 +00:00
|
|
|
def handle_intercept(self, f):
|
|
|
|
self.state.update_flow(f)
|
|
|
|
|
|
|
|
def handle_accept_intercept(self, f):
|
|
|
|
self.state.update_flow(f)
|
|
|
|
|
2011-08-05 02:03:10 +00:00
|
|
|
def shutdown(self):
|
2014-01-13 01:20:34 +00:00
|
|
|
self.unload_scripts()
|
2011-08-05 02:03:10 +00:00
|
|
|
controller.Master.shutdown(self)
|
2012-07-08 22:18:37 +00:00
|
|
|
if self.stream:
|
2014-11-26 03:18:21 +00:00
|
|
|
for i in self.state.flows:
|
2012-07-08 22:18:37 +00:00
|
|
|
if not i.response:
|
|
|
|
self.stream.add(i)
|
|
|
|
self.stop_stream()
|
|
|
|
|
2013-03-13 20:19:43 +00:00
|
|
|
def start_stream(self, fp, filt):
|
|
|
|
self.stream = FilteredFlowWriter(fp, filt)
|
2012-07-08 22:18:37 +00:00
|
|
|
|
|
|
|
def stop_stream(self):
|
2012-07-26 12:19:18 +00:00
|
|
|
self.stream.fo.close()
|
2012-07-08 22:18:37 +00:00
|
|
|
self.stream = None
|
|
|
|
|
2011-08-05 02:03:10 +00:00
|
|
|
|
2015-02-05 13:44:45 +00:00
|
|
|
def read_flows_from_paths(paths):
    """
    Given a list of filepaths, read all flows and return a list of them.
    From a performance perspective, streaming would be advisable -
    however, if there's an error with one of the files, we want it to be
    raised immediately.

    If an error occurs, a FlowReadError will be raised.
    """
    # Built outside the try: list creation cannot raise IOError.
    flows = []
    try:
        for path in paths:
            path = os.path.expanduser(path)
            # open() rather than the Python-2-only file() builtin.
            with open(path, "rb") as f:
                flows.extend(FlowReader(f).stream())
    except IOError as e:
        raise FlowReadError(e.strerror)
    return flows
|
|
|
|
|
|
|
|
|
2011-02-16 01:33:04 +00:00
|
|
|
class FlowWriter:
    """Serializes flows to a file-like object as tnetstrings."""

    def __init__(self, fo):
        # fo: writable binary file-like object.
        self.fo = fo

    def add(self, flow):
        """Append a single flow's serialized state to the output."""
        state = flow.get_state()
        tnetstring.dump(state, self.fo)
|
2011-02-16 01:33:04 +00:00
|
|
|
|
2011-03-12 00:47:37 +00:00
|
|
|
|
2011-03-11 02:16:31 +00:00
|
|
|
class FlowReadError(Exception):
    """Raised when serialized flow data cannot be read or parsed."""

    @property
    def strerror(self):
        # Mirror the .strerror attribute of EnvironmentError instances.
        return self.args[0]
|
2011-02-16 01:33:04 +00:00
|
|
|
|
2011-03-12 00:47:37 +00:00
|
|
|
|
2011-02-16 01:33:04 +00:00
|
|
|
class FlowReader:
    """Deserializes flows from a tnetstring dump produced by FlowWriter."""

    def __init__(self, fo):
        # fo: readable binary file-like object positioned at the dump start.
        self.fo = fo

    def stream(self):
        """
        Yields Flow objects from the dump.

        Raises FlowReadError on a serialization version mismatch or on
        corrupt data.
        """
        off = 0
        try:
            while True:
                data = tnetstring.load(self.fo)
                # Refuse dumps written under an incompatible (major, minor)
                # serialization version.
                if tuple(data["version"][:2]) != version.IVERSION[:2]:
                    v = ".".join(str(i) for i in data["version"])
                    raise FlowReadError(
                        "Incompatible serialized data version: %s" % v
                    )
                # Remember where the last complete record ended so EOF can
                # be distinguished from mid-record corruption below.
                off = self.fo.tell()
                yield handle.protocols[data["type"]]["flow"].from_state(data)
        except ValueError as v:
            # Error is due to EOF
            if self.fo.tell() == off and self.fo.read() == '':
                return
            raise FlowReadError("Invalid data format.")
|
2011-02-16 01:33:04 +00:00
|
|
|
|
2013-03-13 20:19:43 +00:00
|
|
|
|
|
|
|
class FilteredFlowWriter:
    """Like FlowWriter, but only records flows matching a filter."""

    def __init__(self, fo, filt):
        # fo: writable binary file-like object; filt: compiled filter
        # expression, or a falsy value to record everything.
        self.fo = fo
        self.filt = filt

    def add(self, f):
        """Serialize f to the output unless it fails the filter."""
        if self.filt and not f.match(self.filt):
            return
        state = f.get_state()
        tnetstring.dump(state, self.fo)
|