mitmproxy/libmproxy/protocol/http.py

1576 lines
54 KiB
Python
Raw Normal View History

from __future__ import absolute_import
2014-09-16 21:54:17 +00:00
import Cookie
import urllib
import urlparse
import time
import copy
from email.utils import parsedate_tz, formatdate, mktime_tz
2014-08-08 17:04:58 +00:00
import threading
from netlib import http, tcp, http_status, http_cookies
import netlib.utils
from netlib import odict
2014-08-30 18:15:19 +00:00
from .tcp import TCPHandler
from .primitives import KILL, ProtocolHandler, Flow, Error
2014-03-10 20:57:50 +00:00
from ..proxy.connection import ServerConnection
2014-08-03 00:34:29 +00:00
from .. import encoding, utils, controller, stateobject, proxy
# Content-Type values used to detect form submissions in request bodies.
HDR_FORM_URLENCODED = "application/x-www-form-urlencoded"
HDR_FORM_MULTIPART = "multipart/form-data"
# Sentinel meaning "a body exists but was not stored" (e.g. streamed
# responses). Deliberately falsy (0) so `if message.content:` is False for it;
# code distinguishes it from None/"" via `content == CONTENT_MISSING`.
CONTENT_MISSING = 0
class KillSignal(Exception):
    # Raised internally when a channel.ask() call returns KILL, i.e. the
    # master or an inline script decided to kill this flow/connection.
    pass
def send_connect_request(conn, host, port, update_state=True):
    """
    Send an HTTP CONNECT request for host:port over the given server
    connection and read the upstream proxy's reply.

    If update_state is True, the CONNECT is recorded in conn.state so it
    can be replayed after a reconnect (see HTTPHandler.handle_server_reconnect).

    Returns the HTTPResponse received from the upstream proxy.
    Raises proxy.ProxyError if the upstream proxy does not answer with 200.
    """
    upstream_request = HTTPRequest(
        "authority",  # CONNECT uses the authority request form (host:port)
        "CONNECT",
        None,
        host,
        port,
        None,
        (1, 1),
        odict.ODictCaseless(),
        ""
    )
    conn.send(upstream_request.assemble())
    resp = HTTPResponse.from_stream(conn.rfile, upstream_request.method)
    if resp.code != 200:
        raise proxy.ProxyError(resp.code,
                               "Cannot establish SSL " +
                               "connection with upstream proxy: \r\n" +
                               str(resp.assemble()))
    if update_state:
        conn.state.append(("http", {
            "state": "connect",
            "host": host,
            "port": port}
        ))
    return resp
class decoded(object):
    """
    A context manager that decodes a request or response on entry and
    re-encodes it with the original encoding on exit.

    Example:

        with decoded(request):
            request.content = request.content.replace("foo", "bar")
    """

    def __init__(self, o):
        self.o = o
        current = o.headers.get_first("content-encoding")
        # Remember the encoding only if we actually know how to decode it;
        # otherwise enter/exit are no-ops.
        self.ce = current if current in encoding.ENCODINGS else None

    def __enter__(self):
        if self.ce:
            self.o.decode()

    def __exit__(self, exc_type, exc_value, exc_tb):
        if self.ce:
            self.o.encode(self.ce)
2014-01-30 05:03:41 +00:00
class HTTPMessage(stateobject.StateObject):
    """
    Base class for HTTPRequest and HTTPResponse.

    Holds the parts shared by both message types: HTTP version, headers,
    body content and transmission timestamps, plus the content
    encoding/decoding helpers.
    """

    def __init__(self, httpversion, headers, content, timestamp_start=None,
                 timestamp_end=None):
        self.httpversion = httpversion
        self.headers = headers
        """@type: odict.ODictCaseless"""
        # content may be a str, None (no body), or CONTENT_MISSING (body
        # existed but was not stored, e.g. when streaming).
        self.content = content
        self.timestamp_start = timestamp_start
        self.timestamp_end = timestamp_end

    # Attribute name -> type map consumed by stateobject.StateObject for
    # (de)serialization.
    _stateobject_attributes = dict(
        httpversion=tuple,
        headers=odict.ODictCaseless,
        content=str,
        timestamp_start=float,
        timestamp_end=float
    )
    # Attributes omitted from the "short" state representation.
    _stateobject_long_attributes = {"content"}

    def get_state(self, short=False):
        ret = super(HTTPMessage, self).get_state(short)
        if short:
            # Short state drops the body but reports its length.
            if self.content:
                ret["contentLength"] = len(self.content)
            elif self.content == CONTENT_MISSING:
                # Body exists but was not stored: length unknown.
                ret["contentLength"] = None
            else:
                ret["contentLength"] = 0
        return ret

    def get_decoded_content(self):
        """
        Returns the decoded content based on the current Content-Encoding
        header.
        Doesn't change the message itself or its headers.
        """
        ce = self.headers.get_first("content-encoding")
        if not self.content or ce not in encoding.ENCODINGS:
            return self.content
        return encoding.decode(ce, self.content)

    def decode(self):
        """
        Decodes content based on the current Content-Encoding header, then
        removes the header. If there is no Content-Encoding header, no
        action is taken.

        Returns True if decoding succeeded, False otherwise.
        """
        ce = self.headers.get_first("content-encoding")
        if not self.content or ce not in encoding.ENCODINGS:
            return False
        data = encoding.decode(ce, self.content)
        if data is None:
            # Decoder rejected the payload; leave message untouched.
            return False
        self.content = data
        del self.headers["content-encoding"]
        return True

    def encode(self, e):
        """
        Encodes content with the encoding e, where e is "gzip", "deflate"
        or "identity".
        """
        # FIXME: Error if there's an existing encoding header?
        self.content = encoding.encode(e, self.content)
        self.headers["content-encoding"] = [e]

    def size(self, **kwargs):
        """
        Size in bytes of a fully rendered message, including headers and
        HTTP lead-in. kwargs are forwarded to the subclass _assemble_head.
        """
        hl = len(self._assemble_head(**kwargs))
        if self.content:
            return hl + len(self.content)
        else:
            return hl

    def copy(self):
        # Shallow copy, but with an independent headers object so header
        # edits on the copy don't leak back.
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in both the headers
        and the body of the message. Encoded content will be decoded
        before replacement, and re-encoded afterwards.

        Returns the number of replacements made.
        """
        with decoded(self):
            self.content, c = utils.safe_subn(
                pattern, repl, self.content, *args, **kwargs
            )
        c += self.headers.replace(pattern, repl, *args, **kwargs)
        return c

    def _assemble_first_line(self):
        """
        Returns the assembled request/response line
        """
        raise NotImplementedError()  # pragma: nocover

    def _assemble_headers(self):
        """
        Returns the assembled headers
        """
        raise NotImplementedError()  # pragma: nocover

    def _assemble_head(self):
        """
        Returns the assembled request/response line plus headers
        """
        raise NotImplementedError()  # pragma: nocover

    def assemble(self):
        """
        Returns the assembled request/response
        """
        raise NotImplementedError()  # pragma: nocover
class HTTPRequest(HTTPMessage):
    """
    An HTTP request.

    Exposes the following attributes:

        method: HTTP method

        scheme: URL scheme (http/https)

        host: Target hostname of the request. This is not necessarily the
        direct upstream server (which could be another proxy), but it's always
        the target server we want to reach at the end. This attribute is either
        inferred from the request itself (absolute-form, authority-form) or from
        the connection metadata (e.g. the host in reverse proxy mode).

        port: Destination port

        path: Path portion of the URL (not present in authority-form)

        httpversion: HTTP version tuple, e.g. (1,1)

        headers: odict.ODictCaseless object

        content: Content of the request, None, or CONTENT_MISSING if there
        is content associated, but not present. CONTENT_MISSING evaluates
        to False to make checking for the presence of content natural.

        form_in: The request form which mitmproxy has received. The following
        values are possible:

            - relative (GET /index.html, OPTIONS *) (covers origin form and
              asterisk form)
            - absolute (GET http://example.com:80/index.html)
            - authority-form (CONNECT example.com:443)

        Details: http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-25#section-5.3

        form_out: The request form which mitmproxy will send out to the
        destination

        timestamp_start: Timestamp indicating when request transmission started

        timestamp_end: Timestamp indicating when request transmission ended
    """

    def __init__(
            self,
            form_in,
            method,
            scheme,
            host,
            port,
            path,
            httpversion,
            headers,
            content,
            timestamp_start=None,
            timestamp_end=None,
            form_out=None
    ):
        assert isinstance(headers, odict.ODictCaseless) or not headers
        HTTPMessage.__init__(
            self,
            httpversion,
            headers,
            content,
            timestamp_start,
            timestamp_end
        )
        self.form_in = form_in
        self.method = method
        self.scheme = scheme
        self.host = host
        self.port = port
        self.path = path
        self.httpversion = httpversion
        # Unless explicitly overridden, send the request out in the same
        # form it was received in.
        self.form_out = form_out or form_in
        # Have this request's cookies been modified by sticky cookies or auth?
        self.stickycookie = False
        self.stickyauth = False
        # Is this request replayed?
        self.is_replay = False

    _stateobject_attributes = HTTPMessage._stateobject_attributes.copy()
    _stateobject_attributes.update(
        form_in=str,
        method=str,
        scheme=str,
        host=str,
        port=int,
        path=str,
        form_out=str,
        is_replay=bool
    )

    @classmethod
    def from_state(cls, state):
        # Build an empty shell, then populate it from the serialized state.
        f = cls(
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None,
            None)
        f.load_state(state)
        return f

    def __repr__(self):
        # Render the request line, dropping the trailing " HTTP/x.y"
        # (always 9 characters, hence the [:-9] slice).
        return "<HTTPRequest: {0}>".format(
            self._assemble_first_line(self.form_in)[:-9]
        )

    @classmethod
    def from_stream(
            cls,
            rfile,
            include_body=True,
            body_size_limit=None,
            wfile=None):
        """
        Parse an HTTP request from a file stream

        Args:
            rfile (file): Input file to read from
            include_body (bool): Read response body as well
            body_size_limit (bool): Maximum body size
            wfile (file): If specified, HTTP Expect headers are handled
                automatically, by writing a HTTP 100 CONTINUE response to
                the stream.

        Returns:
            HTTPRequest: The HTTP request

        Raises:
            HttpError: If the input is invalid.
        """
        timestamp_start, timestamp_end = None, None
        timestamp_start = utils.timestamp()
        if hasattr(rfile, "reset_timestamps"):
            rfile.reset_timestamps()

        req = http.read_request(
            rfile,
            include_body = include_body,
            body_size_limit = body_size_limit,
            wfile = wfile
        )

        if hasattr(rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = rfile.first_byte_timestamp

        timestamp_end = utils.timestamp()
        return HTTPRequest(
            req.form_in,
            req.method,
            req.scheme,
            req.host,
            req.port,
            req.path,
            req.httpversion,
            req.headers,
            req.content,
            timestamp_start,
            timestamp_end
        )

    def _assemble_first_line(self, form=None):
        # Assemble the request line in the given form, defaulting to the
        # form we intend to send out.
        form = form or self.form_out
        if form == "relative":
            request_line = '%s %s HTTP/%s.%s' % (
                self.method, self.path, self.httpversion[0], self.httpversion[1]
            )
        elif form == "authority":
            request_line = '%s %s:%s HTTP/%s.%s' % (
                self.method, self.host, self.port, self.httpversion[0],
                self.httpversion[1]
            )
        elif form == "absolute":
            request_line = '%s %s://%s:%s%s HTTP/%s.%s' % (
                self.method, self.scheme, self.host,
                self.port, self.path, self.httpversion[0],
                self.httpversion[1]
            )
        else:
            raise http.HttpError(400, "Invalid request form")
        return request_line

    # This list is adopted legacy code.
    # We probably don't need to strip off keep-alive.
    _headers_to_strip_off = ['Proxy-Connection',
                             'Keep-Alive',
                             'Connection',
                             'Transfer-Encoding',
                             'Upgrade']

    def _assemble_headers(self):
        headers = self.headers.copy()
        # Hop-by-hop headers must not be forwarded.
        for k in self._headers_to_strip_off:
            del headers[k]
        if 'host' not in headers and self.scheme and self.host and self.port:
            headers["Host"] = [utils.hostport(self.scheme,
                                              self.host,
                                              self.port)]

        # If content is defined (i.e. not None or CONTENT_MISSING), we always
        # add a content-length header.
        if self.content or self.content == "":
            headers["Content-Length"] = [str(len(self.content))]

        return headers.format()

    def _assemble_head(self, form=None):
        return "%s\r\n%s\r\n" % (
            self._assemble_first_line(form), self._assemble_headers()
        )

    def assemble(self, form=None):
        """
        Assembles the request for transmission to the server. We make some
        modifications to make sure interception works properly.

        Raises an Exception if the request cannot be assembled.
        """
        if self.content == CONTENT_MISSING:
            raise proxy.ProxyError(
                502,
                "Cannot assemble flow with CONTENT_MISSING"
            )
        head = self._assemble_head(form)
        if self.content:
            return head + self.content
        else:
            return head

    def __hash__(self):
        # Identity-based hash: two distinct request objects never compare
        # equal for hashing purposes.
        return id(self)

    def anticache(self):
        """
        Modifies this request to remove headers that might produce a cached
        response. That is, we remove ETags and If-Modified-Since headers.
        """
        delheaders = [
            "if-modified-since",
            "if-none-match",
        ]
        for i in delheaders:
            del self.headers[i]

    def anticomp(self):
        """
        Modifies this request to remove headers that will compress the
        resource's data.
        """
        self.headers["accept-encoding"] = ["identity"]

    def constrain_encoding(self):
        """
        Limits the permissible Accept-Encoding values, based on what we can
        decode appropriately.
        """
        if self.headers["accept-encoding"]:
            # Keep only the encodings we know how to decode.
            self.headers["accept-encoding"] = [
                ', '.join(
                    e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0])]

    def update_host_header(self):
        """
        Update the host header to reflect the current target.
        """
        self.headers["Host"] = [self.host]

    def get_form(self):
        """
        Retrieves the URL-encoded or multipart form data, returning an ODict object.
        Returns an empty ODict if there is no data or the content-type
        indicates non-form data.
        """
        if self.content:
            if self.headers.in_any("content-type", HDR_FORM_URLENCODED, True):
                return self.get_form_urlencoded()
            elif self.headers.in_any("content-type", HDR_FORM_MULTIPART, True):
                return self.get_form_multipart()
        return odict.ODict([])

    def get_form_urlencoded(self):
        """
        Retrieves the URL-encoded form data, returning an ODict object.
        Returns an empty ODict if there is no data or the content-type
        indicates non-form data.
        """
        if self.content and self.headers.in_any(
                "content-type",
                HDR_FORM_URLENCODED,
                True):
            return odict.ODict(utils.urldecode(self.content))
        return odict.ODict([])

    def get_form_multipart(self):
        # Multipart counterpart of get_form_urlencoded.
        if self.content and self.headers.in_any(
                "content-type",
                HDR_FORM_MULTIPART,
                True):
            return odict.ODict(
                utils.multipartdecode(
                    self.headers,
                    self.content))
        return odict.ODict([])

    def set_form_urlencoded(self, odict):
        """
        Sets the body to the URL-encoded form data, and adds the
        appropriate content-type header. Note that this will destroy the
        existing body if there is one.
        """
        # FIXME: If there's an existing content-type header indicating a
        # url-encoded form, leave it alone.
        self.headers["Content-Type"] = [HDR_FORM_URLENCODED]
        self.content = utils.urlencode(odict.lst)

    def get_path_components(self):
        """
        Returns the path components of the URL as a list of strings.
        Components are unquoted.
        """
        _, _, path, _, _, _ = urlparse.urlparse(self.url)
        return [urllib.unquote(i) for i in path.split("/") if i]

    def set_path_components(self, lst):
        """
        Takes a list of strings, and sets the path component of the URL.
        Components are quoted.
        """
        lst = [urllib.quote(i, safe="") for i in lst]
        path = "/" + "/".join(lst)
        scheme, netloc, _, params, query, fragment = urlparse.urlparse(self.url)
        self.url = urlparse.urlunparse(
            [scheme, netloc, path, params, query, fragment]
        )

    def get_query(self):
        """
        Gets the request query string. Returns an ODict object.
        """
        _, _, _, _, query, _ = urlparse.urlparse(self.url)
        if query:
            return odict.ODict(utils.urldecode(query))
        return odict.ODict([])

    def set_query(self, odict):
        """
        Takes an ODict object, and sets the request query string.
        """
        scheme, netloc, path, params, _, fragment = urlparse.urlparse(self.url)
        query = utils.urlencode(odict.lst)
        self.url = urlparse.urlunparse(
            [scheme, netloc, path, params, query, fragment]
        )

    def pretty_host(self, hostheader):
        """
        Heuristic to get the host of the request.

        Note that pretty_host() does not always return the TCP destination
        of the request, e.g. if an upstream proxy is in place

        If hostheader is set to True, the Host: header will be used as
        additional (and preferred) data source. This is handy in transparent
        mode, where only the ip of the destination is known, but not the
        resolved name. This is disabled by default, as an attacker may spoof
        the host header to confuse an analyst.
        """
        host = None
        if hostheader:
            host = self.headers.get_first("host")
        if not host:
            host = self.host
        if host:
            # IDNA-encode so non-ASCII hostnames render consistently.
            return host.encode("idna")
        else:
            return None

    def pretty_url(self, hostheader):
        if self.form_out == "authority":  # upstream proxy mode
            return "%s:%s" % (self.pretty_host(hostheader), self.port)
        return utils.unparse_url(self.scheme,
                                 self.pretty_host(hostheader),
                                 self.port,
                                 self.path).encode('ascii')

    @property
    def url(self):
        """
        Returns a URL string, constructed from the Request's URL components.
        """
        return utils.unparse_url(
            self.scheme,
            self.host,
            self.port,
            self.path
        ).encode('ascii')

    @url.setter
    def url(self, url):
        """
        Parses a URL specification, and updates the Request's information
        accordingly.

        Raises ValueError if the URL was invalid.
        """
        parts = http.parse_url(url)
        if not parts:
            raise ValueError("Invalid URL: %s" % url)
        self.scheme, self.host, self.port, self.path = parts

    def get_cookies(self):
        """
        Returns a possibly empty netlib.odict.ODict object.
        """
        ret = odict.ODict()
        for i in self.headers["cookie"]:
            ret.extend(http_cookies.parse_cookie_header(i))
        return ret

    def set_cookies(self, odict):
        """
        Takes an netlib.odict.ODict object. Over-writes any existing Cookie
        headers.
        """
        v = http_cookies.format_cookie_header(odict)
        self.headers["Cookie"] = [v]

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in the headers, the
        request path and the body of the request. Encoded content will be
        decoded before replacement, and re-encoded afterwards.

        Returns the number of replacements made.
        """
        c = HTTPMessage.replace(self, pattern, repl, *args, **kwargs)
        self.path, pc = utils.safe_subn(
            pattern, repl, self.path, *args, **kwargs
        )
        c += pc
        return c
class HTTPResponse(HTTPMessage):
    """
    An HTTP response.

    Exposes the following attributes:

        httpversion: HTTP version tuple, e.g. (1,1)

        code: HTTP response code

        msg: HTTP response message

        headers: ODict object

        content: Content of the request, None, or CONTENT_MISSING if there
        is content associated, but not present. CONTENT_MISSING evaluates
        to False to make checking for the presence of content natural.

        timestamp_start: Timestamp indicating when request transmission started

        timestamp_end: Timestamp indicating when request transmission ended
    """

    def __init__(
            self,
            httpversion,
            code,
            msg,
            headers,
            content,
            timestamp_start=None,
            timestamp_end=None):
        assert isinstance(headers, odict.ODictCaseless) or headers is None
        HTTPMessage.__init__(
            self,
            httpversion,
            headers,
            content,
            timestamp_start,
            timestamp_end
        )
        self.code = code
        self.msg = msg
        # Is this request replayed?
        self.is_replay = False
        # When True, the body is streamed to the client instead of being
        # buffered (set by inline scripts via the responseheaders hook).
        self.stream = False

    _stateobject_attributes = HTTPMessage._stateobject_attributes.copy()
    _stateobject_attributes.update(
        code=int,
        msg=str
    )

    @classmethod
    def from_state(cls, state):
        f = cls(None, None, None, None, None)
        f.load_state(state)
        return f

    def __repr__(self):
        if self.content:
            size = netlib.utils.pretty_size(len(self.content))
        else:
            size = "content missing"
        return "<HTTPResponse: {code} {msg} ({contenttype}, {size})>".format(
            code=self.code,
            msg=self.msg,
            contenttype=self.headers.get_first(
                "content-type", "unknown content type"
            ),
            size=size
        )

    @classmethod
    def from_stream(
            cls,
            rfile,
            request_method,
            include_body=True,
            body_size_limit=None):
        """
        Parse an HTTP response from a file stream
        """
        timestamp_start = utils.timestamp()
        if hasattr(rfile, "reset_timestamps"):
            rfile.reset_timestamps()

        httpversion, code, msg, headers, content = http.read_response(
            rfile,
            request_method,
            body_size_limit,
            include_body=include_body)

        if hasattr(rfile, "first_byte_timestamp"):
            # more accurate timestamp_start
            timestamp_start = rfile.first_byte_timestamp

        if include_body:
            timestamp_end = utils.timestamp()
        else:
            # Body not read yet; the caller sets timestamp_end later.
            timestamp_end = None

        return HTTPResponse(
            httpversion,
            code,
            msg,
            headers,
            content,
            timestamp_start,
            timestamp_end
        )

    def _assemble_first_line(self):
        return 'HTTP/%s.%s %s %s' % \
            (self.httpversion[0], self.httpversion[1], self.code, self.msg)

    # Headers stripped before forwarding the response to the client.
    _headers_to_strip_off = ['Proxy-Connection',
                             'Alternate-Protocol',
                             'Alt-Svc']

    def _assemble_headers(self, preserve_transfer_encoding=False):
        headers = self.headers.copy()
        for k in self._headers_to_strip_off:
            del headers[k]
        if not preserve_transfer_encoding:
            del headers['Transfer-Encoding']

        # If content is defined (i.e. not None or CONTENT_MISSING), we always
        # add a content-length header.
        if self.content or self.content == "":
            headers["Content-Length"] = [str(len(self.content))]

        return headers.format()

    def _assemble_head(self, preserve_transfer_encoding=False):
        return '%s\r\n%s\r\n' % (
            self._assemble_first_line(),
            self._assemble_headers(
                preserve_transfer_encoding=preserve_transfer_encoding
            )
        )

    def assemble(self):
        """
        Assembles the response for transmission to the client. We make some
        modifications to make sure interception works properly.

        Raises an Exception if the request cannot be assembled.
        """
        if self.content == CONTENT_MISSING:
            raise proxy.ProxyError(
                502,
                "Cannot assemble flow with CONTENT_MISSING"
            )
        head = self._assemble_head()
        if self.content:
            return head + self.content
        else:
            return head

    def _refresh_cookie(self, c, delta):
        """
        Takes a cookie string c and a time delta in seconds, and returns
        a refreshed cookie string.
        """
        c = Cookie.SimpleCookie(str(c))
        for i in c.values():
            if "expires" in i:
                d = parsedate_tz(i["expires"])
                if d:
                    d = mktime_tz(d) + delta
                    i["expires"] = formatdate(d)
                else:
                    # This can happen when the expires tag is invalid.
                    # reddit.com sends an expires tag like this: "Thu, 31 Dec
                    # 2037 23:59:59 GMT", which is valid RFC 1123, but not
                    # strictly correct according to the cookie spec. Browsers
                    # appear to parse this tolerantly - maybe we should too.
                    # For now, we just ignore this.
                    del i["expires"]
        return c.output(header="").strip()

    def refresh(self, now=None):
        """
        This fairly complex and heuristic function refreshes a server
        response for replay.

            - It adjusts date, expires and last-modified headers.
            - It adjusts cookie expiration.
        """
        if not now:
            now = time.time()
        delta = now - self.timestamp_start
        refresh_headers = [
            "date",
            "expires",
            "last-modified",
        ]
        for i in refresh_headers:
            if i in self.headers:
                d = parsedate_tz(self.headers[i][0])
                if d:
                    new = mktime_tz(d) + delta
                    self.headers[i] = [formatdate(new)]
        c = []
        for i in self.headers["set-cookie"]:
            c.append(self._refresh_cookie(i, delta))
        if c:
            self.headers["set-cookie"] = c

    def get_cookies(self):
        """
        Get the contents of all Set-Cookie headers.

        Returns a possibly empty ODict, where keys are cookie name strings,
        and values are [value, attr] lists. Value is a string, and attr is
        an ODictCaseless containing cookie attributes. Within attrs, unary
        attributes (e.g. HTTPOnly) are indicated by a Null value.
        """
        ret = []
        for header in self.headers["set-cookie"]:
            v = http_cookies.parse_set_cookie_header(header)
            if v:
                name, value, attrs = v
                ret.append([name, [value, attrs]])
        return odict.ODict(ret)

    def set_cookies(self, odict):
        """
        Set the Set-Cookie headers on this response, over-writing existing
        headers.

        Accepts an ODict of the same format as that returned by get_cookies.
        """
        values = []
        for i in odict.lst:
            values.append(
                http_cookies.format_set_cookie_header(
                    i[0],
                    i[1][0],
                    i[1][1]
                )
            )
        self.headers["Set-Cookie"] = values
class HTTPFlow(Flow):
    """
    A HTTPFlow is a collection of objects representing a single HTTP
    transaction. The main attributes are:

        request: HTTPRequest object
        response: HTTPResponse object
        error: Error object
        server_conn: ServerConnection object
        client_conn: ClientConnection object

    Note that it's possible for a Flow to have both a response and an error
    object. This might happen, for instance, when a response was received
    from the server, but there was an error sending it back to the client.

    The following additional attributes are exposed:

        intercepted: Is this flow currently being intercepted?
        live: Does this flow have a live client connection?
    """

    def __init__(self, client_conn, server_conn, live=None):
        super(HTTPFlow, self).__init__("http", client_conn, server_conn, live)
        self.request = None
        """@type: HTTPRequest"""
        self.response = None
        """@type: HTTPResponse"""

    _stateobject_attributes = Flow._stateobject_attributes.copy()
    _stateobject_attributes.update(
        request=HTTPRequest,
        response=HTTPResponse
    )

    @classmethod
    def from_state(cls, state):
        f = cls(None, None)
        f.load_state(state)
        return f

    def __repr__(self):
        # Build a format template mentioning only the attributes that are
        # set, then render it against this flow.
        s = "<HTTPFlow"
        for a in ("request", "response", "error", "client_conn", "server_conn"):
            if getattr(self, a, False):
                s += "\r\n %s = {flow.%s}" % (a, a)
        s += ">"
        return s.format(flow=self)

    def copy(self):
        f = super(HTTPFlow, self).copy()
        # Deep-copy request/response so edits on the copy don't affect us.
        if self.request:
            f.request = self.request.copy()
        if self.response:
            f.response = self.response.copy()
        return f

    def match(self, f):
        """
        Match this flow against a compiled filter expression. Returns True
        if matched, False if not.

        If f is a string, it will be compiled as a filter expression. If
        the expression is invalid, ValueError is raised.
        """
        if isinstance(f, basestring):
            # Import here to avoid a circular import at module load time.
            from .. import filt
            f = filt.parse(f)
            if not f:
                raise ValueError("Invalid filter expression.")
        if f:
            return f(self)
        return True

    def replace(self, pattern, repl, *args, **kwargs):
        """
        Replaces a regular expression pattern with repl in both request and
        response of the flow. Encoded content will be decoded before
        replacement, and re-encoded afterwards.

        Returns the number of replacements made.
        """
        c = self.request.replace(pattern, repl, *args, **kwargs)
        if self.response:
            c += self.response.replace(pattern, repl, *args, **kwargs)
        return c
class HttpAuthenticationError(Exception):
    """
    Signals that the client must authenticate with the proxy (HTTP 407).
    Carries the WWW-Authenticate-style headers to send back to the client.
    """

    def __init__(self, auth_headers=None):
        message = "Proxy Authentication Required"
        Exception.__init__(self, message)
        self.code = 407
        self.headers = auth_headers

    def __repr__(self):
        return "Proxy Authentication Required"
2014-08-24 12:22:11 +00:00
class HTTPHandler(ProtocolHandler):
    """
    HTTPHandler implements mitmproxy's understanding of the HTTP protocol.
    """

    def __init__(self, c):
        super(HTTPHandler, self).__init__(c)
        # Request forms (relative/absolute/authority) expected in and sent
        # out for the configured proxy mode.
        self.expected_form_in = c.config.mode.http_form_in
        self.expected_form_out = c.config.mode.http_form_out
        # NOTE(review): presumably set to True once proxy authentication has
        # succeeded on this connection, so later requests skip the check —
        # confirm against process_request (not visible in this chunk).
        self.skip_authentication = False
    def handle_messages(self):
        # Process flows on this connection until handle_flow returns a
        # falsy value (connection closed, killed, or not reusable).
        while self.handle_flow():
            pass
2014-09-05 17:39:05 +00:00
    def get_response_from_server(self, flow):
        """
        Send flow.request upstream and populate flow.response.

        Reads the response headers first, asks the "responseheaders" hook
        (which may set flow.response.stream), and only then reads the body —
        or marks it CONTENT_MISSING when streaming. Retries the send once
        after reconnecting if the server connection turns out to be dead.
        """
        self.c.establish_server_connection()
        request_raw = flow.request.assemble()

        for attempt in (0, 1):
            try:
                self.c.server_conn.send(request_raw)
                # Only get the headers at first...
                flow.response = HTTPResponse.from_stream(
                    self.c.server_conn.rfile, flow.request.method,
                    body_size_limit=self.c.config.body_size_limit,
                    include_body=False
                )
                break
            except (tcp.NetLibError, http.HttpErrorConnClosed) as v:
                self.c.log(
                    "error in server communication: %s" % repr(v),
                    level="debug"
                )
                if attempt == 0:
                    # In any case, we try to reconnect at least once. This is
                    # necessary because it might be possible that we already
                    # initiated an upstream connection after clientconnect that
                    # has already been expired, e.g consider the following event
                    # log:
                    # > clientconnect (transparent mode destination known)
                    # > serverconnect
                    # > read n% of large request
                    # > server detects timeout, disconnects
                    # > read (100-n)% of large request
                    # > send large request upstream
                    self.c.server_reconnect()
                else:
                    raise

        # call the appropriate script hook - this is an opportunity for an
        # inline script to set flow.stream = True
        flow = self.c.channel.ask("responseheaders", flow)
        if flow is None or flow == KILL:
            raise KillSignal()
        else:
            # now get the rest of the request body, if body still needs to be
            # read but not streaming this response
            if flow.response.stream:
                flow.response.content = CONTENT_MISSING
            else:
                flow.response.content = http.read_http_body(
                    self.c.server_conn.rfile, flow.response.headers,
                    self.c.config.body_size_limit,
                    flow.request.method, flow.response.code, False
                )
        flow.response.timestamp_end = utils.timestamp()
2014-09-05 17:39:05 +00:00
    def handle_flow(self):
        """
        Handle one complete request/response cycle on the client connection.

        Returns True if the connection can be reused for another flow, and
        False (also on any handled error) if it must be closed.
        """
        flow = HTTPFlow(self.c.client_conn, self.c.server_conn, self.live)

        try:
            try:
                req = HTTPRequest.from_stream(
                    self.c.client_conn.rfile,
                    body_size_limit=self.c.config.body_size_limit,
                    wfile=self.c.client_conn.wfile
                )
            except tcp.NetLibError:
                # don't throw an error for disconnects that happen
                # before/between requests.
                return False

            self.c.log(
                "request",
                "debug",
                [req._assemble_first_line(req.form_in)]
            )
            ret = self.process_request(flow, req)
            if ret is not None:
                return ret

            # Be careful NOT to assign the request to the flow before
            # process_request completes. This is because the call can raise an
            # exception. If the request object is already attached, this results
            # in an Error object that has an attached request that has not been
            # sent through to the Master.
            flow.request = req
            request_reply = self.c.channel.ask("request", flow)

            if request_reply is None or request_reply == KILL:
                raise KillSignal()

            # The inline script may have changed request.host
            self.process_server_address(flow)

            if isinstance(request_reply, HTTPResponse):
                # A script supplied the response directly; skip the server.
                flow.response = request_reply
            else:
                self.get_response_from_server(flow)

            # no further manipulation of self.c.server_conn beyond this point
            # we can safely set it as the final attribute value here.
            flow.server_conn = self.c.server_conn

            self.c.log(
                "response", "debug", [
                    flow.response._assemble_first_line()])
            response_reply = self.c.channel.ask("response", flow)
            if response_reply is None or response_reply == KILL:
                raise KillSignal()

            self.send_response_to_client(flow)

            if self.check_close_connection(flow):
                return False

            # We sent a CONNECT request to an upstream proxy.
            if flow.request.form_in == "authority" and flow.response.code == 200:
                # TODO: Possibly add headers (memory consumption/usefulness
                # tradeoff) Make sure to add state info before the actual
                # processing of the CONNECT request happens. During an SSL
                # upgrade, we may receive an SNI indication from the client,
                # which resets the upstream connection. If this is the case, we
                # must already re-issue the CONNECT request at this point.
                self.c.server_conn.state.append(
                    (
                        "http", {
                            "state": "connect",
                            "host": flow.request.host,
                            "port": flow.request.port
                        }
                    )
                )
                if not self.process_connect_request(
                        (flow.request.host, flow.request.port)):
                    return False

            # If the user has changed the target server on this connection,
            # restore the original target server
            flow.live.restore_server()

            return True  # Next flow please.
        except (
                HttpAuthenticationError,
                http.HttpError,
                proxy.ProxyError,
                tcp.NetLibError,
        ) as e:
            self.handle_error(e, flow)
        except KillSignal:
            self.c.log("Connection killed", "info")
        finally:
            flow.live = None  # Connection is not live anymore.
        return False
def handle_server_reconnect(self, state):
if state["state"] == "connect":
2014-10-26 05:32:45 +00:00
send_connect_request(
self.c.server_conn,
state["host"],
state["port"],
update_state=False
)
else: # pragma: nocover
raise RuntimeError("Unknown State: %s" % state["state"])
def handle_error(self, error, flow=None):
2014-05-15 16:16:42 +00:00
message = repr(error)
2014-09-08 11:34:08 +00:00
message_debug = None
2014-05-15 16:16:42 +00:00
2015-02-05 14:24:32 +00:00
if isinstance(error, tcp.NetLibError):
2014-09-08 11:34:08 +00:00
message = None
message_debug = "TCP connection closed unexpectedly."
elif "tlsv1 alert unknown ca" in message:
message = "TLSv1 Alert Unknown CA: The client does not trust the proxy's certificate."
elif "handshake error" in message:
message_debug = message
message = "SSL handshake error: The client may not trust the proxy's certificate."
if message:
self.c.log(message, level="info")
if message_debug:
2014-10-08 19:41:03 +00:00
self.c.log(message_debug, level="debug")
if flow:
2014-10-26 05:32:45 +00:00
# TODO: no flows without request or with both request and response
# at the moment.
if flow.request and not flow.response:
2014-09-08 11:34:08 +00:00
flow.error = Error(message or message_debug)
self.c.channel.ask("error", flow)
2014-05-15 16:16:42 +00:00
try:
2014-09-02 16:13:18 +00:00
code = getattr(error, "code", 502)
headers = getattr(error, "headers", None)
2014-09-08 11:34:08 +00:00
html_message = message or ""
if message_debug:
html_message += "<pre>%s</pre>" % message_debug
self.send_error(code, html_message, headers)
2014-05-15 16:16:42 +00:00
except:
pass
def send_error(self, code, message, headers):
response = http_status.RESPONSES.get(code, "Unknown")
2014-10-26 05:32:45 +00:00
html_content = """
<html>
<head>
<title>%d %s</title>
</head>
2014-11-26 03:56:17 +00:00
<body>%s</body>
2014-10-26 05:32:45 +00:00
</html>
""" % (code, response, message)
self.c.client_conn.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response))
2014-10-26 05:32:45 +00:00
self.c.client_conn.wfile.write(
"Server: %s\r\n" % self.c.config.server_version
)
self.c.client_conn.wfile.write("Content-type: text/html\r\n")
2014-10-26 05:32:45 +00:00
self.c.client_conn.wfile.write(
"Content-Length: %d\r\n" % len(html_content)
)
if headers:
for key, value in headers.items():
self.c.client_conn.wfile.write("%s: %s\r\n" % (key, value))
self.c.client_conn.wfile.write("Connection: close\r\n")
self.c.client_conn.wfile.write("\r\n")
self.c.client_conn.wfile.write(html_content)
self.c.client_conn.wfile.flush()
    def process_request(self, flow, request):
        """
        Normalize the request's .scheme/.host/.port attributes and handle
        CONNECT requests, depending on the proxy mode.

        @returns:
            True, if the request should not be sent upstream
            False, if the connection should be aborted
            None, if the request should be sent upstream
            (a status code != None should be returned directly by handle_flow)
        """

        if not self.skip_authentication:
            self.authenticate(request)

        # Determine .scheme, .host and .port attributes
        # For absolute-form requests, they are directly given in the request.
        # For authority-form requests, we only need to determine the request scheme.
        # For relative-form requests, we need to determine host and port as
        # well.
        if not request.scheme:
            request.scheme = "https" if flow.server_conn and flow.server_conn.ssl_established else "http"
        if not request.host:
            # Host/Port Complication: In upstream mode, use the server we CONNECTed to,
            # not the upstream proxy.
            if flow.server_conn:
                for s in flow.server_conn.state:
                    if s[0] == "http" and s[1]["state"] == "connect":
                        request.host, request.port = s[1]["host"], s[1]["port"]
            # Fall back to the current server connection's address.
            if not request.host and flow.server_conn:
                request.host, request.port = flow.server_conn.address.host, flow.server_conn.address.port

        # Now we can process the request.
        if request.form_in == "authority":
            if self.c.client_conn.ssl_established:
                raise http.HttpError(
                    400,
                    "Must not CONNECT on already encrypted connection"
                )

            if self.c.config.mode == "regular":
                self.c.set_server_address((request.host, request.port))
                # Update server_conn attribute on the flow
                flow.server_conn = self.c.server_conn
                self.c.establish_server_connection()
                # Reply 200 to the client ourselves; the CONNECT is consumed
                # by the proxy and never sent upstream in regular mode.
                self.c.client_conn.send(
                    ('HTTP/%s.%s 200 ' % (request.httpversion[0], request.httpversion[1])) +
                    'Connection established\r\n' +
                    'Content-Length: 0\r\n' +
                    ('Proxy-agent: %s\r\n' % self.c.config.server_version) +
                    '\r\n'
                )
                return self.process_connect_request(self.c.server_conn.address)
            elif self.c.config.mode == "upstream":
                # Forward the CONNECT upstream unmodified.
                return None
            else:
                # CONNECT should never occur if we don't expect absolute-form
                # requests. Fall through to the HttpError raised below.
                pass

        elif request.form_in == self.expected_form_in:

            request.form_out = self.expected_form_out

            if request.form_in == "absolute":
                if request.scheme != "http":
                    raise http.HttpError(
                        400,
                        "Invalid request scheme: %s" % request.scheme
                    )
                if self.c.config.mode == "regular":
                    # Update info so that an inline script sees the correct
                    # value at flow.server_conn
                    self.c.set_server_address((request.host, request.port))
                    flow.server_conn = self.c.server_conn

            return None

        raise http.HttpError(
            400, "Invalid HTTP request form (expected: %s, got: %s)" % (
                self.expected_form_in, request.form_in
            )
        )
    def process_server_address(self, flow):
        """
        Point the live server connection at the flow's target server,
        reconnecting and/or re-issuing CONNECT as required by the proxy mode.
        """
        # Depending on the proxy mode, server handling is entirely different
        # We provide a mostly unified API to the user, which needs to be
        # unfiddled here
        # ( See also: https://github.com/mitmproxy/mitmproxy/issues/337 )
        address = netlib.tcp.Address((flow.request.host, flow.request.port))

        ssl = (flow.request.scheme == "https")
        if self.c.config.mode == "upstream":
            # The connection to the upstream proxy may have a state we may need
            # to take into account.
            connected_to = None
            for s in flow.server_conn.state:
                if s[0] == "http" and s[1]["state"] == "connect":
                    connected_to = tcp.Address((s[1]["host"], s[1]["port"]))

            # We need to reconnect if the current flow either requires a
            # (possibly impossible) change to the connection state, e.g. the
            # host has changed but we already CONNECTed somewhere else.
            needs_server_change = (
                ssl != self.c.server_conn.ssl_established
                or
                # HTTP proxying is "stateless", CONNECT isn't.
                (connected_to and address != connected_to)
            )

            if needs_server_change:
                # force create new connection to the proxy server to reset
                # state
                self.live.change_server(self.c.server_conn.address, force=True)
                if ssl:
                    # Re-issue CONNECT and upgrade the upstream link to TLS.
                    send_connect_request(
                        self.c.server_conn,
                        address.host,
                        address.port
                    )
                    self.c.establish_ssl(server=True)
        else:
            # If we're not in upstream mode, we just want to update the host
            # and possibly establish TLS. This is a no op if the addresses
            # match.
            self.live.change_server(address, ssl=ssl)

        # Record the (possibly new) server connection on the flow.
        flow.server_conn = self.c.server_conn
def send_response_to_client(self, flow):
if not flow.response.stream:
# no streaming:
2015-04-20 23:05:37 +00:00
# we already received the full response from the server and can
# send it to the client straight away.
2014-09-05 17:39:05 +00:00
self.c.client_conn.send(flow.response.assemble())
else:
# streaming:
2014-10-26 05:32:45 +00:00
# First send the headers and then transfer the response
# incrementally:
2014-09-05 17:39:05 +00:00
h = flow.response._assemble_head(preserve_transfer_encoding=True)
self.c.client_conn.send(h)
2015-02-27 14:24:27 +00:00
chunks = http.read_http_body_chunked(
self.c.server_conn.rfile,
flow.response.headers,
self.c.config.body_size_limit,
flow.request.method,
flow.response.code,
False,
4096
)
if callable(flow.response.stream):
chunks = flow.response.stream(chunks)
for chunk in chunks:
2014-09-05 17:39:05 +00:00
for part in chunk:
self.c.client_conn.wfile.write(part)
self.c.client_conn.wfile.flush()
2014-09-21 20:46:48 +00:00
flow.response.timestamp_end = utils.timestamp()
2014-09-05 17:39:05 +00:00
def check_close_connection(self, flow):
"""
2014-10-26 05:32:45 +00:00
Checks if the connection should be closed depending on the HTTP
semantics. Returns True, if so.
2014-09-05 17:39:05 +00:00
"""
close_connection = (
2015-05-30 00:03:28 +00:00
http.connection_close(
flow.request.httpversion,
flow.request.headers) or http.connection_close(
flow.response.httpversion,
flow.response.headers) or http.expected_http_body_size(
flow.response.headers,
False,
flow.request.method,
flow.response.code) == -1)
2014-09-05 17:39:05 +00:00
if close_connection:
if flow.request.form_in == "authority" and flow.response.code == 200:
2015-04-20 23:05:37 +00:00
# Workaround for
# https://github.com/mitmproxy/mitmproxy/issues/313: Some
# proxies (e.g. Charles) send a CONNECT response with HTTP/1.0
# and no Content-Length header
2014-09-05 17:39:05 +00:00
pass
else:
return True
return False
def process_connect_request(self, address):
"""
Process a CONNECT request.
Returns True if the CONNECT request has been processed successfully.
Returns False, if the connection should be closed immediately.
"""
address = tcp.Address.wrap(address)
2014-10-18 16:29:35 +00:00
if self.c.config.check_ignore(address):
2014-09-05 17:39:05 +00:00
self.c.log("Ignore host: %s:%s" % address(), "info")
2014-10-18 16:29:35 +00:00
TCPHandler(self.c, log=False).handle_messages()
2014-09-05 17:39:05 +00:00
return False
else:
self.expected_form_in = "relative"
self.expected_form_out = "relative"
self.skip_authentication = True
2015-04-20 23:05:37 +00:00
# In practice, nobody issues a CONNECT request to send unencrypted
# HTTP requests afterwards. If we don't delegate to TCP mode, we
# should always negotiate a SSL connection.
#
2015-04-20 23:05:37 +00:00
# FIXME: Turns out the previous statement isn't entirely true.
# Chrome on Windows CONNECTs to :80 if an explicit proxy is
# configured and a websocket connection should be established. We
# don't support websocket at the moment, so it fails anyway, but we
# should come up with a better solution to this if we start to
# support WebSockets.
2014-10-21 13:08:39 +00:00
should_establish_ssl = (
address.port in self.c.config.ssl_ports
or
not self.c.config.check_tcp(address)
)
if should_establish_ssl:
2015-04-20 23:05:37 +00:00
self.c.log(
"Received CONNECT request to SSL port. "
"Upgrading to SSL...", "debug"
)
2014-09-05 17:39:05 +00:00
self.c.establish_ssl(server=True, client=True)
self.c.log("Upgrade to SSL completed.", "debug")
2014-10-18 16:29:35 +00:00
if self.c.config.check_tcp(address):
2015-04-20 23:05:37 +00:00
self.c.log(
"Generic TCP mode for host: %s:%s" % address(),
"info"
)
2014-10-18 16:29:35 +00:00
TCPHandler(self.c).handle_messages()
return False
2014-09-05 17:39:05 +00:00
return True
def authenticate(self, request):
if self.c.config.authenticator:
if self.c.config.authenticator.authenticate(request.headers):
self.c.config.authenticator.clean(request.headers)
else:
2014-05-15 16:16:42 +00:00
raise HttpAuthenticationError(
self.c.config.authenticator.auth_challenge_headers())
return request.headers
2014-08-08 17:04:58 +00:00
class RequestReplayThread(threading.Thread):
    """
    Replays a flow's request on a fresh server connection, storing the
    new response (and any error) back on the flow. Script hooks are run
    only if a master queue was supplied.
    """
    name = "RequestReplayThread"

    def __init__(self, config, flow, masterq, should_exit):
        """
        masterqueue can be a queue or None, if no scripthooks should be
        processed.
        """
        self.config, self.flow = config, flow
        if masterq:
            self.channel = controller.Channel(masterq, should_exit)
        else:
            self.channel = None
        super(RequestReplayThread, self).__init__()

    def run(self):
        r = self.flow.request
        # r.form_out is mutated below depending on the proxy mode; restore
        # it in the finally block so the flow object is left unchanged.
        form_out_backup = r.form_out
        try:
            self.flow.response = None

            # If we have a channel, run script hooks.
            if self.channel:
                request_reply = self.channel.ask("request", self.flow)
                if request_reply is None or request_reply == KILL:
                    raise KillSignal()
                elif isinstance(request_reply, HTTPResponse):
                    # A hook provided the response directly; skip the server.
                    self.flow.response = request_reply

            if not self.flow.response:
                # In all modes, we directly connect to the server displayed
                if self.config.mode == "upstream":
                    server_address = self.config.mode.get_upstream_server(
                        self.flow.client_conn
                    )[2:]
                    server = ServerConnection(server_address)
                    server.connect()
                    if r.scheme == "https":
                        # CONNECT through the upstream proxy, then upgrade
                        # to TLS and send the request in relative form.
                        send_connect_request(server, r.host, r.port)
                        server.establish_ssl(
                            self.config.clientcerts,
                            sni=self.flow.server_conn.sni
                        )
                        r.form_out = "relative"
                    else:
                        r.form_out = "absolute"
                else:
                    server_address = (r.host, r.port)
                    server = ServerConnection(server_address)
                    server.connect()
                    if r.scheme == "https":
                        server.establish_ssl(
                            self.config.clientcerts,
                            sni=self.flow.server_conn.sni
                        )
                    r.form_out = "relative"
                server.send(r.assemble())
                self.flow.server_conn = server
                self.flow.response = HTTPResponse.from_stream(
                    server.rfile,
                    r.method,
                    body_size_limit=self.config.body_size_limit
                )
            if self.channel:
                response_reply = self.channel.ask("response", self.flow)
                if response_reply is None or response_reply == KILL:
                    raise KillSignal()
        except (proxy.ProxyError, http.HttpError, tcp.NetLibError) as v:
            self.flow.error = Error(repr(v))
            if self.channel:
                self.channel.ask("error", self.flow)
        except KillSignal:
            # KillSignal should only be raised if there's a channel in the
            # first place.
            self.channel.tell("log", proxy.Log("Connection killed", "info"))
        finally:
            r.form_out = form_out_backup