mirror of https://github.com/Grasscutters/mitmproxy.git
synced 2024-11-23 00:01:36 +00:00

adjust to new netlib Headers class

This commit is contained in:
parent 00561d280c
commit 5125c669cc
@@ -1,2 +1,2 @@
 def response(context, flow):
-    flow.response.headers["newheader"] = ["foo"]
+    flow.response.headers["newheader"] = "foo"
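Aside, not part of the diff: the hunk above is the whole migration in miniature — with the new netlib Headers class, header values are plain strings rather than one-element lists, and .get() with a default replaces the old .get_first(). A minimal sketch under that assumption:

    from netlib.http import Headers

    h = Headers()
    h["newheader"] = "foo"                 # plain string, no more one-element list
    assert h["newheader"] == "foo"
    assert h.get("missing", "") == ""      # .get() with a default replaces .get_first()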
@@ -147,8 +147,8 @@ def response(context, flow):
     response_body_size = len(flow.response.content)
     response_body_decoded_size = len(flow.response.get_decoded_content())
     response_body_compression = response_body_decoded_size - response_body_size
-    response_mime_type = flow.response.headers.get_first('Content-Type', '')
-    response_redirect_url = flow.response.headers.get_first('Location', '')
+    response_mime_type = flow.response.headers.get('Content-Type', '')
+    response_redirect_url = flow.response.headers.get('Location', '')

     entry = HAR.entries(
         {
@@ -201,12 +201,12 @@ def response(context, flow):
     # Lookup the referer in the page_ref of context.HARLog to point this entries
     # pageref attribute to the right pages object, then set it as a new
     # reference to build a reference tree.
-    elif context.HARLog.get_page_ref(flow.request.headers.get('Referer', (None, ))[0]) is not None:
+    elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:
         entry['pageref'] = context.HARLog.get_page_ref(
-            flow.request.headers['Referer'][0]
+            flow.request.headers['Referer']
         )
         context.HARLog.set_page_ref(
-            flow.request.headers['Referer'][0], entry['pageref']
+            flow.request.headers['Referer'], entry['pageref']
        )

     context.HARLog.add(entry)
@@ -1,5 +1,5 @@
 def request(context, flow):
-    if "application/x-www-form-urlencoded" in flow.request.headers["content-type"]:
+    if "application/x-www-form-urlencoded" in flow.request.headers.get("content-type", ""):
         form = flow.request.get_form_urlencoded()
         form["mitmproxy"] = ["rocks"]
         flow.request.set_form_urlencoded(form)
@@ -2,8 +2,7 @@
 This example shows two ways to redirect flows to other destinations.
 """
 from libmproxy.models import HTTPResponse
-from netlib.odict import ODictCaseless
+from netlib.http import Headers


 def request(context, flow):
     # pretty_host(hostheader=True) takes the Host: header of the request into account,
@@ -14,7 +13,7 @@ def request(context, flow):
     if flow.request.pretty_host(hostheader=True).endswith("example.com"):
         resp = HTTPResponse(
             [1, 1], 200, "OK",
-            ODictCaseless([["Content-Type", "text/html"]]),
+            Headers(Content_Type="text/html"),
             "helloworld")
         flow.reply(resp)

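Aside, not part of the diff: the keyword form Headers(Content_Type="text/html") used above appears to map underscores in keyword names to dashes in the header name, which is why the commit writes Content_Type rather than Content-Type. A small sketch under that assumption:

    from netlib.http import Headers

    h = Headers(Content_Type="text/html")        # keyword Content_Type becomes the Content-Type header
    assert h.get("content-type") == "text/html"  # lookups are case-insensitive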
@@ -23,16 +23,16 @@ class StickyMaster(controller.Master):

     def handle_request(self, flow):
         hid = (flow.request.host, flow.request.port)
-        if flow.request.headers["cookie"]:
-            self.stickyhosts[hid] = flow.request.headers["cookie"]
+        if "cookie" in flow.request.headers:
+            self.stickyhosts[hid] = flow.request.headers.get_all("cookie")
         elif hid in self.stickyhosts:
-            flow.request.headers["cookie"] = self.stickyhosts[hid]
+            flow.request.headers.set_all("cookie", self.stickyhosts[hid])
         flow.reply()

     def handle_response(self, flow):
         hid = (flow.request.host, flow.request.port)
-        if flow.response.headers["set-cookie"]:
-            self.stickyhosts[hid] = flow.response.headers["set-cookie"]
+        if "set-cookie" in flow.response.headers:
+            self.stickyhosts[hid] = flow.response.headers.get_all("set-cookie")
         flow.reply()


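Aside, not part of the diff: for headers that can legitimately occur several times (cookie, set-cookie), the hunk above switches from indexing to a membership test plus get_all()/set_all(). A minimal sketch, assuming those methods behave as the example script uses them:

    from netlib.http import Headers

    h = Headers()
    h.set_all("set-cookie", ["a=1", "b=2"])   # several values under one name
    if "set-cookie" in h:                     # membership test replaces truthiness of h["set-cookie"]
        for value in h.get_all("set-cookie"):
            print(value)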
@@ -4,7 +4,7 @@ from libmproxy.models import decoded


 def response(context, flow):
-    if flow.response.headers.get_first("content-type", "").startswith("image"):
+    if flow.response.headers.get("content-type", "").startswith("image"):
         with decoded(flow.response): # automatically decode gzipped responses.
             try:
                 s = cStringIO.StringIO(flow.response.content)
@@ -12,6 +12,6 @@ def response(context, flow):
                 s2 = cStringIO.StringIO()
                 img.save(s2, "png")
                 flow.response.content = s2.getvalue()
-                flow.response.headers["content-type"] = ["image/png"]
+                flow.response.headers["content-type"] = "image/png"
             except: # Unknown image types etc.
                 pass
@@ -415,9 +415,9 @@ def format_flow(f, focus, extended=False, hostheader=False, padding=2,
             resp_clen = contentdesc,
             roundtrip = roundtrip,
         ))
-        t = f.response.headers["content-type"]
+        t = f.response.headers.get("content-type")
         if t:
-            d["resp_ctype"] = t[0].split(";")[0]
+            d["resp_ctype"] = t.split(";")[0]
         else:
             d["resp_ctype"] = ""
     return flowcache.get(
@@ -12,7 +12,7 @@ import urwid
 import html2text

 import netlib.utils
-from netlib import odict, encoding
+from netlib import encoding

 from . import common, signals
 from .. import utils
@@ -74,7 +74,7 @@ class ViewAuto:
     content_types = []

     def __call__(self, hdrs, content, limit):
-        ctype = hdrs.get_first("content-type")
+        ctype = hdrs.get("content-type")
         if ctype:
             ct = netlib.utils.parse_content_type(ctype) if ctype else None
             ct = "%s/%s" % (ct[0], ct[1])
@@ -508,7 +508,7 @@ def get(name):
             return i


-def get_content_view(viewmode, hdrItems, content, limit, is_request):
+def get_content_view(viewmode, headers, content, limit, is_request):
    """
    Returns a (msg, body) tuple.
    """
@@ -519,16 +519,14 @@ def get_content_view(viewmode, hdrItems, content, limit, is_request):
        return "No content", ""
    msg = []

-    hdrs = odict.ODictCaseless([list(i) for i in hdrItems])
-
-    enc = hdrs.get_first("content-encoding")
+    enc = headers.get("content-encoding")
    if enc and enc != "identity":
        decoded = encoding.decode(enc, content)
        if decoded:
            content = decoded
            msg.append("[decoded %s]" % enc)
    try:
-        ret = viewmode(hdrs, content, limit)
+        ret = viewmode(headers, content, limit)
    # Third-party viewers can fail in unexpected ways...
    except Exception:
        s = traceback.format_exc()
@@ -536,7 +534,7 @@ def get_content_view(viewmode, hdrItems, content, limit, is_request):
        signals.add_event(s, "error")
        ret = None
    if not ret:
-        ret = get("Raw")(hdrs, content, limit)
+        ret = get("Raw")(headers, content, limit)
        msg.append("Couldn't parse: falling back to Raw")
    else:
        msg.append(ret[0])
@@ -4,7 +4,7 @@ import sys
 import urwid

 from netlib import odict
-from netlib.http.semantics import CONTENT_MISSING
+from netlib.http.semantics import CONTENT_MISSING, Headers

 from . import common, grideditor, contentview, signals, searchable, tabs
 from . import flowdetailview
@@ -182,7 +182,7 @@ class FlowView(tabs.Tabs):
         description, text_objects = cache.get(
             contentview.get_content_view,
             viewmode,
-            tuple(tuple(i) for i in conn.headers.lst),
+            conn.headers,
             conn.content,
             limit,
             isinstance(conn, HTTPRequest)
@@ -199,7 +199,7 @@ class FlowView(tabs.Tabs):
     def conn_text(self, conn):
         if conn:
             txt = common.format_keyvals(
-                [(h + ":", v) for (h, v) in conn.headers.lst],
+                [(h + ":", v) for (h, v) in conn.headers.fields],
                 key = "header",
                 val = "text"
             )
@@ -284,8 +284,8 @@ class FlowView(tabs.Tabs):
         response.msg = msg
         signals.flow_change.send(self, flow = self.flow)

-    def set_headers(self, lst, conn):
-        conn.headers = odict.ODictCaseless(lst)
+    def set_headers(self, fields, conn):
+        conn.headers = Headers(fields)
         signals.flow_change.send(self, flow = self.flow)

     def set_query(self, lst, conn):
@@ -330,7 +330,7 @@ class FlowView(tabs.Tabs):
         if not self.flow.response:
             self.flow.response = HTTPResponse(
                 self.flow.request.httpversion,
-                200, "OK", odict.ODictCaseless(), ""
+                200, "OK", Headers(), ""
             )
             self.flow.response.reply = controller.DummyReply()
         message = self.flow.response
@@ -381,7 +381,7 @@ class FlowView(tabs.Tabs):
             self.master.view_grideditor(
                 grideditor.HeaderEditor(
                     self.master,
-                    message.headers.lst,
+                    message.headers.fields,
                     self.set_headers,
                     message
                 )
@@ -616,8 +616,7 @@ class FlowView(tabs.Tabs):
                key = None
            elif key == "v":
                if conn.content:
-                    t = conn.headers["content-type"] or [None]
-                    t = t[0]
+                    t = conn.headers.get("content-type")
                    if "EDITOR" in os.environ or "PAGER" in os.environ:
                        self.master.spawn_external_viewer(conn.content, t)
                    else:
@@ -626,7 +625,7 @@ class FlowView(tabs.Tabs):
                    )
            elif key == "z":
                self.flow.backup()
-                e = conn.headers.get_first("content-encoding", "identity")
+                e = conn.headers.get("content-encoding", "identity")
                if e != "identity":
                    if not conn.decode():
                        signals.status_message.send(
@@ -174,7 +174,7 @@ class DumpMaster(flow.FlowMaster):

     def _print_message(self, message):
         if self.o.flow_detail >= 2:
-            print(self.indent(4, message.headers.format()), file=self.outfile)
+            print(self.indent(4, str(message.headers)), file=self.outfile)
         if self.o.flow_detail >= 3:
             if message.content == CONTENT_MISSING:
                 print(self.indent(4, "(content missing)"), file=self.outfile)
@@ -77,17 +77,19 @@ class FResp(_Action):


 class _Rex(_Action):
+    flags = 0
+
     def __init__(self, expr):
         self.expr = expr
         try:
-            self.re = re.compile(self.expr)
+            self.re = re.compile(self.expr, self.flags)
         except:
             raise ValueError("Cannot compile expression.")


 def _check_content_type(expr, o):
-    val = o.headers["content-type"]
-    if val and re.search(expr, val[0]):
+    val = o.headers.get("content-type")
+    if val and re.search(expr, val):
         return True
     return False

@@ -145,11 +147,12 @@ class FResponseContentType(_Rex):
 class FHead(_Rex):
     code = "h"
     help = "Header"
+    flags = re.MULTILINE

     def __call__(self, f):
-        if f.request.headers.match_re(self.expr):
+        if f.request and self.re.search(str(f.request.headers)):
             return True
-        elif f.response and f.response.headers.match_re(self.expr):
+        if f.response and self.re.search(str(f.response.headers)):
             return True
         return False

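Aside, not part of the diff: the filter now compiles its pattern once with a per-class flags attribute and runs it against str(headers), so re.MULTILINE lets ^ and $ anchor on each "name: value" line. A condensed, self-contained sketch of that pattern (illustrative names only):

    import re

    class _Rex(object):
        flags = 0

        def __init__(self, expr):
            self.expr = expr
            self.re = re.compile(self.expr, self.flags)

    class FHead(_Rex):
        flags = re.MULTILINE  # anchor per header line in the stringified headers

    f = FHead(r"^content-type:.*json")
    assert f.re.search("host: example.com\ncontent-type: application/json")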
@@ -157,18 +160,20 @@ class FHead(_Rex):
 class FHeadRequest(_Rex):
     code = "hq"
     help = "Request header"
+    flags = re.MULTILINE

     def __call__(self, f):
-        if f.request.headers.match_re(self.expr):
+        if f.request and self.re.search(str(f.request.headers)):
             return True


 class FHeadResponse(_Rex):
     code = "hs"
     help = "Response header"
+    flags = re.MULTILINE

     def __call__(self, f):
-        if f.response and f.response.headers.match_re(self.expr):
+        if f.response and self.re.search(str(f.response.headers)):
             return True


@@ -178,10 +183,10 @@ class FBod(_Rex):

     def __call__(self, f):
         if f.request and f.request.content:
-            if re.search(self.expr, f.request.get_decoded_content()):
+            if self.re.search(f.request.get_decoded_content()):
                 return True
         if f.response and f.response.content:
-            if re.search(self.expr, f.response.get_decoded_content()):
+            if self.re.search(f.response.get_decoded_content()):
                 return True
         return False

@@ -192,7 +197,7 @@ class FBodRequest(_Rex):

     def __call__(self, f):
         if f.request and f.request.content:
-            if re.search(self.expr, f.request.get_decoded_content()):
+            if self.re.search(f.request.get_decoded_content()):
                 return True


@@ -202,24 +207,26 @@ class FBodResponse(_Rex):

     def __call__(self, f):
         if f.response and f.response.content:
-            if re.search(self.expr, f.response.get_decoded_content()):
+            if self.re.search(f.response.get_decoded_content()):
                 return True


 class FMethod(_Rex):
     code = "m"
     help = "Method"
+    flags = re.IGNORECASE

     def __call__(self, f):
-        return bool(re.search(self.expr, f.request.method, re.IGNORECASE))
+        return bool(self.re.search(f.request.method))


 class FDomain(_Rex):
     code = "d"
     help = "Domain"
+    flags = re.IGNORECASE

     def __call__(self, f):
-        return bool(re.search(self.expr, f.request.host, re.IGNORECASE))
+        return bool(self.re.search(f.request.host))


 class FUrl(_Rex):
@@ -234,21 +241,24 @@ class FUrl(_Rex):
         return klass(*toks)

     def __call__(self, f):
-        return re.search(self.expr, f.request.url)
+        return self.re.search(f.request.url)


 class FSrc(_Rex):
     code = "src"
     help = "Match source address"

     def __call__(self, f):
-        return f.client_conn.address and re.search(self.expr, repr(f.client_conn.address))
+        return f.client_conn.address and self.re.search(repr(f.client_conn.address))


 class FDst(_Rex):
     code = "dst"
     help = "Match destination address"

     def __call__(self, f):
-        return f.server_conn.address and re.search(self.expr, repr(f.server_conn.address))
+        return f.server_conn.address and self.re.search(repr(f.server_conn.address))


 class _Int(_Action):
     def __init__(self, num):
@@ -11,8 +11,8 @@ import re
 import urlparse


-from netlib import odict, wsgi
-from netlib.http.semantics import CONTENT_MISSING
+from netlib import wsgi
+from netlib.http.semantics import CONTENT_MISSING, Headers
 import netlib.http
 from . import controller, tnetstring, filt, script, version
 from .onboarding import app
@@ -45,7 +45,7 @@ class AppRegistry:
         if (request.host, request.port) in self.apps:
             return self.apps[(request.host, request.port)]
         if "host" in request.headers:
-            host = request.headers["host"][0]
+            host = request.headers["host"]
             return self.apps.get((host, request.port), None)


@@ -144,15 +144,15 @@ class SetHeaders:
         for _, header, value, cpatt in self.lst:
             if cpatt(f):
                 if f.response:
-                    del f.response.headers[header]
+                    f.response.headers.pop(header, None)
                 else:
-                    del f.request.headers[header]
+                    f.request.headers.pop(header, None)
         for _, header, value, cpatt in self.lst:
             if cpatt(f):
                 if f.response:
-                    f.response.headers.add(header, value)
+                    f.response.headers.fields.append((header, value))
                 else:
-                    f.request.headers.add(header, value)
+                    f.request.headers.fields.append((header, value))


 class StreamLargeBodies(object):
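Aside, not part of the diff: the two idioms introduced above are pop(name, None), which deletes a header without raising when it is absent, and appending to headers.fields, which adds a field without replacing values already present. A small sketch under those assumptions:

    from netlib.http import Headers

    h = Headers(host="example.com")
    h.pop("host", None)                    # remove if present
    h.pop("host", None)                    # safe to call again, unlike del h["host"]
    h.fields.append(("X-Injected", "1"))   # append a raw (name, value) field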
@@ -278,14 +278,11 @@ class ServerPlaybackState:
                 key.append(p[1])

        if self.headers:
-            hdrs = []
+            headers = []
            for i in self.headers:
-                v = r.headers[i]
-                # Slightly subtle: we need to convert everything to strings
-                # to prevent a mismatch between unicode/non-unicode.
-                v = [str(x) for x in v]
-                hdrs.append((i, v))
-            key.append(hdrs)
+                v = r.headers.get(i)
+                headers.append((i, v))
+            key.append(headers)
        return hashlib.sha256(repr(key)).digest()

    def next_flow(self, request):
@@ -329,7 +326,7 @@ class StickyCookieState:
         return False

     def handle_response(self, f):
-        for i in f.response.headers["set-cookie"]:
+        for i in f.response.headers.get_all("set-cookie"):
             # FIXME: We now know that Cookie.py screws up some cookies with
             # valid RFC 822/1123 datetime specifications for expiry. Sigh.
             c = Cookie.SimpleCookie(str(i))
@@ -351,7 +348,7 @@ class StickyCookieState:
             l.append(self.jar[i].output(header="").strip())
         if l:
             f.request.stickycookie = True
-            f.request.headers["cookie"] = l
+            f.request.headers.set_all("cookie",l)


 class StickyAuthState:
@@ -836,7 +833,7 @@ class FlowMaster(controller.Master):
             ssl_established=True
         ))
         f = HTTPFlow(c, s)
-        headers = odict.ODictCaseless()
+        headers = Headers()

         req = HTTPRequest(
             "absolute",
@@ -930,8 +927,7 @@ class FlowMaster(controller.Master):
         f.backup()
         f.request.is_replay = True
         if f.request.content:
-            f.request.headers[
-                "Content-Length"] = [str(len(f.request.content))]
+            f.request.headers["Content-Length"] = str(len(f.request.content))
         f.response = None
         f.error = None
         self.process_new_request(f)
@@ -5,8 +5,8 @@ from email.utils import parsedate_tz, formatdate, mktime_tz
 import time

 from libmproxy import utils
-from netlib import odict, encoding
-from netlib.http import status_codes
+from netlib import encoding
+from netlib.http import status_codes, Headers
 from netlib.tcp import Address
 from netlib.http.semantics import Request, Response, CONTENT_MISSING
 from .. import version, stateobject
@@ -16,7 +16,7 @@ from .flow import Flow
 class MessageMixin(stateobject.StateObject):
     _stateobject_attributes = dict(
         httpversion=tuple,
-        headers=odict.ODictCaseless,
+        headers=Headers,
         body=str,
         timestamp_start=float,
         timestamp_end=float
@@ -40,7 +40,7 @@ class MessageMixin(stateobject.StateObject):
         header.
         Doesn't change the message iteself or its headers.
         """
-        ce = self.headers.get_first("content-encoding")
+        ce = self.headers.get("content-encoding")
         if not self.body or ce not in encoding.ENCODINGS:
             return self.body
         return encoding.decode(ce, self.body)
@@ -53,14 +53,14 @@ class MessageMixin(stateobject.StateObject):

         Returns True if decoding succeeded, False otherwise.
         """
-        ce = self.headers.get_first("content-encoding")
+        ce = self.headers.get("content-encoding")
         if not self.body or ce not in encoding.ENCODINGS:
             return False
         data = encoding.decode(ce, self.body)
         if data is None:
             return False
         self.body = data
-        del self.headers["content-encoding"]
+        self.headers.pop("content-encoding", None)
         return True

     def encode(self, e):
@@ -70,7 +70,7 @@ class MessageMixin(stateobject.StateObject):
         """
         # FIXME: Error if there's an existing encoding header?
         self.body = encoding.encode(e, self.body)
-        self.headers["content-encoding"] = [e]
+        self.headers["content-encoding"] = e

     def copy(self):
         c = copy.copy(self)
@@ -86,11 +86,18 @@ class MessageMixin(stateobject.StateObject):
         Returns the number of replacements made.
         """
         with decoded(self):
-            self.body, c = utils.safe_subn(
+            self.body, count = utils.safe_subn(
                 pattern, repl, self.body, *args, **kwargs
             )
-        c += self.headers.replace(pattern, repl, *args, **kwargs)
-        return c
+        fields = []
+        for name, value in self.headers.fields:
+            name, c = utils.safe_subn(pattern, repl, name, *args, **kwargs)
+            count += c
+            value, c = utils.safe_subn(pattern, repl, value, *args, **kwargs)
+            count += c
+            fields.append([name, value])
+        self.headers.fields = fields
+        return count


 class HTTPRequest(MessageMixin, Request):
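Aside, not part of the diff: the header pass of the new replace() walks headers.fields, rewrites each name and value, and reassigns the list. A rough, self-contained approximation, with re.subn standing in for libmproxy.utils.safe_subn:

    import re
    from netlib.http import Headers

    def replace_in_headers(headers, pattern, repl):
        count = 0
        fields = []
        for name, value in headers.fields:
            name, c = re.subn(pattern, repl, name)
            count += c
            value, c = re.subn(pattern, repl, value)
            count += c
            fields.append([name, value])
        headers.fields = fields   # reassign, mirroring the diff above
        return count

    h = Headers(foo="foo")
    assert replace_in_headers(h, "foo", "bar") == 2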
@@ -115,7 +122,7 @@ class HTTPRequest(MessageMixin, Request):

         httpversion: HTTP version tuple, e.g. (1,1)

-        headers: odict.ODictCaseless object
+        headers: Headers object

         content: Content of the request, None, or CONTENT_MISSING if there
         is content associated, but not present. CONTENT_MISSING evaluates
@@ -266,7 +273,7 @@ class HTTPResponse(MessageMixin, Response):

         msg: HTTP response message

-        headers: ODict Caseless object
+        headers: Headers object

         content: Content of the request, None, or CONTENT_MISSING if there
         is content associated, but not present. CONTENT_MISSING evaluates
@@ -379,15 +386,15 @@ class HTTPResponse(MessageMixin, Response):
         ]
         for i in refresh_headers:
             if i in self.headers:
-                d = parsedate_tz(self.headers[i][0])
+                d = parsedate_tz(self.headers[i])
                 if d:
                     new = mktime_tz(d) + delta
-                    self.headers[i] = [formatdate(new)]
+                    self.headers[i] = formatdate(new)
         c = []
-        for i in self.headers["set-cookie"]:
+        for i in self.headers.get_all("set-cookie"):
             c.append(self._refresh_cookie(i, delta))
         if c:
-            self.headers["set-cookie"] = c
+            self.headers.set_all("set-cookie", c)


 class HTTPFlow(Flow):
@@ -490,7 +497,7 @@ class decoded(object):

     def __init__(self, o):
         self.o = o
-        ce = o.headers.get_first("content-encoding")
+        ce = o.headers.get("content-encoding")
         if ce in encoding.ENCODINGS:
             self.ce = ce
         else:
@@ -517,11 +524,12 @@ def make_error_response(status_code, message, headers=None):
     """.strip() % (status_code, response, message)

     if not headers:
-        headers = odict.ODictCaseless()
-        headers["Server"] = [version.NAMEVERSION]
-        headers["Connection"] = ["close"]
-        headers["Content-Length"] = [len(body)]
-        headers["Content-Type"] = ["text/html"]
+        headers = Headers(
+            Server=version.NAMEVERSION,
+            Connection="close",
+            Content_Length=str(len(body)),
+            Content_Type="text/html"
+        )

     return HTTPResponse(
         (1, 1), # FIXME: Should be a string.
@@ -536,15 +544,15 @@ def make_connect_request(address):
     address = Address.wrap(address)
     return HTTPRequest(
         "authority", "CONNECT", None, address.host, address.port, None, (1, 1),
-        odict.ODictCaseless(), ""
+        Headers(), ""
     )


 def make_connect_response(httpversion):
-    headers = odict.ODictCaseless([
-        ["Content-Length", "0"],
-        ["Proxy-Agent", version.NAMEVERSION]
-    ])
+    headers = Headers(
+        Content_Length="0",
+        Proxy_Agent=version.NAMEVERSION
+    )
     return HTTPResponse(
         httpversion,
         200,
@@ -1,7 +1,7 @@
 from __future__ import (absolute_import, print_function, division)

 from netlib import tcp
-from netlib.http import http1, HttpErrorConnClosed, HttpError
+from netlib.http import http1, HttpErrorConnClosed, HttpError, Headers
 from netlib.http.semantics import CONTENT_MISSING
 from netlib import odict
 from netlib.tcp import NetLibError, Address
@@ -568,10 +568,6 @@ class HttpLayer(Layer):
            self.send_response(make_error_response(
                407,
                "Proxy Authentication Required",
-                odict.ODictCaseless(
-                    [
-                        [k, v] for k, v in
-                        self.config.authenticator.auth_challenge_headers().items()
-                    ])
+                Headers(**self.config.authenticator.auth_challenge_headers())
            ))
            raise InvalidCredentials("Proxy Authentication Required")
@@ -27,8 +27,7 @@ class RequestHandler(tornado.web.RequestHandler):

     @property
     def json(self):
-        if not self.request.headers.get(
-                "Content-Type").startswith("application/json"):
+        if not self.request.headers.get("Content-Type").startswith("application/json"):
             return None
         return json.loads(self.request.body)

@@ -186,12 +185,12 @@ class FlowContent(RequestHandler):
         if not message.content:
             raise APIError(400, "No content.")

-        content_encoding = message.headers.get_first("Content-Encoding", None)
+        content_encoding = message.headers.get("Content-Encoding", None)
         if content_encoding:
             content_encoding = re.sub(r"[^\w]", "", content_encoding)
             self.set_header("Content-Encoding", content_encoding)

-        original_cd = message.headers.get_first("Content-Disposition", None)
+        original_cd = message.headers.get("Content-Disposition", None)
         filename = None
         if original_cd:
             filename = re.search("filename=([\w\" \.\-\(\)]+)", original_cd)
@@ -1,11 +1,13 @@
 import os
 from nose.plugins.skip import SkipTest
+from netlib.http import Headers
+
 if os.name == "nt":
     raise SkipTest("Skipped on Windows.")
 import sys

 import netlib.utils
-from netlib import odict, encoding
+from netlib import encoding

 import libmproxy.console.contentview as cv
 from libmproxy import utils, flow
@@ -33,34 +35,28 @@ class TestContentView:
     def test_view_auto(self):
         v = cv.ViewAuto()
         f = v(
-            odict.ODictCaseless(),
+            Headers(),
             "foo",
             1000
         )
         assert f[0] == "Raw"

         f = v(
-            odict.ODictCaseless(
-                [["content-type", "text/html"]],
-            ),
+            Headers(content_type="text/html"),
             "<html></html>",
             1000
         )
         assert f[0] == "HTML"

         f = v(
-            odict.ODictCaseless(
-                [["content-type", "text/flibble"]],
-            ),
+            Headers(content_type="text/flibble"),
             "foo",
             1000
         )
         assert f[0] == "Raw"

         f = v(
-            odict.ODictCaseless(
-                [["content-type", "text/flibble"]],
-            ),
+            Headers(content_type="text/flibble"),
             "<xml></xml>",
             1000
         )
@@ -168,28 +164,22 @@ Content-Disposition: form-data; name="submit-name"
 Larry
 --AaB03x
 """.strip()
-        h = odict.ODictCaseless(
-            [("Content-Type", "multipart/form-data; boundary=AaB03x")]
-        )
+        h = Headers(content_type="multipart/form-data; boundary=AaB03x"),
         assert view(h, v, 1000)

-        h = odict.ODictCaseless()
+        h = Headers(),
         assert not view(h, v, 1000)

-        h = odict.ODictCaseless(
-            [("Content-Type", "multipart/form-data")]
-        )
+        h = Headers(content_type="multipart/form-data"),
         assert not view(h, v, 1000)

-        h = odict.ODictCaseless(
-            [("Content-Type", "unparseable")]
-        )
+        h = Headers(content_type="unparseable"),
         assert not view(h, v, 1000)

     def test_get_content_view(self):
         r = cv.get_content_view(
             cv.get("Raw"),
-            [["content-type", "application/json"]],
+            Headers(content_type="application/json"),
             "[1, 2, 3]",
             1000,
             False
@@ -198,7 +188,7 @@ Larry

         r = cv.get_content_view(
             cv.get("Auto"),
-            [["content-type", "application/json"]],
+            Headers(content_type="application/json"),
             "[1, 2, 3]",
             1000,
             False
@@ -207,7 +197,7 @@ Larry

         r = cv.get_content_view(
             cv.get("Auto"),
-            [["content-type", "application/json"]],
+            Headers(content_type="application/json"),
             "[1, 2",
             1000,
             False
@@ -216,7 +206,7 @@ Larry

         r = cv.get_content_view(
             cv.get("AMF"),
-            [],
+            Headers(),
             "[1, 2",
             1000,
             False
@@ -225,10 +215,10 @@ Larry

         r = cv.get_content_view(
             cv.get("Auto"),
-            [
-                ["content-type", "application/json"],
-                ["content-encoding", "gzip"]
-            ],
+            Headers(
+                content_type="application/json",
+                content_encoding="gzip"
+            ),
             encoding.encode('gzip', "[1, 2, 3]"),
             1000,
             False
@@ -238,10 +228,10 @@ Larry

         r = cv.get_content_view(
             cv.get("XML"),
-            [
-                ["content-type", "application/json"],
-                ["content-encoding", "gzip"]
-            ],
+            Headers(
+                content_type="application/json",
+                content_encoding="gzip"
+            ),
             encoding.encode('gzip', "[1, 2, 3]"),
             1000,
             False
@@ -145,7 +145,7 @@ class TestDumpMaster:
         o = dump.Options(setheaders=[(".*", "one", "two")])
         m = dump.DumpMaster(None, o, outfile=cs)
         f = self._cycle(m, "content")
-        assert f.request.headers["one"] == ["two"]
+        assert f.request.headers["one"] == "two"

     def test_basic(self):
         for i in (1, 2, 3):
@@ -1,8 +1,8 @@
 import cStringIO
-from netlib import odict
 from libmproxy import filt, flow
 from libmproxy.protocol import http
 from libmproxy.models import Error
+from netlib.http import Headers
 import tutils


@@ -76,8 +76,7 @@ class TestParsing:

 class TestMatching:
     def req(self):
-        headers = odict.ODictCaseless()
-        headers["header"] = ["qvalue"]
+        headers = Headers(header="qvalue")
         req = http.HTTPRequest(
             "absolute",
             "GET",
@@ -98,8 +97,7 @@ class TestMatching:
     def resp(self):
         f = self.req()

-        headers = odict.ODictCaseless()
-        headers["header_response"] = ["svalue"]
+        headers = Headers([["header_response", "svalue"]])
         f.response = http.HTTPResponse(
             (1,
              1),
@@ -123,7 +121,7 @@ class TestMatching:
     def test_asset(self):
         s = self.resp()
         assert not self.q("~a", s)
-        s.response.headers["content-type"] = ["text/javascript"]
+        s.response.headers["content-type"] = "text/javascript"
         assert self.q("~a", s)

     def test_fcontenttype(self):
@@ -132,16 +130,16 @@ class TestMatching:
         assert not self.q("~t content", q)
         assert not self.q("~t content", s)

-        q.request.headers["content-type"] = ["text/json"]
+        q.request.headers["content-type"] = "text/json"
         assert self.q("~t json", q)
         assert self.q("~tq json", q)
         assert not self.q("~ts json", q)

-        s.response.headers["content-type"] = ["text/json"]
+        s.response.headers["content-type"] = "text/json"
         assert self.q("~t json", s)

         del s.response.headers["content-type"]
-        s.request.headers["content-type"] = ["text/json"]
+        s.request.headers["content-type"] = "text/json"
         assert self.q("~t json", s)
         assert self.q("~tq json", s)
         assert not self.q("~ts json", s)
@@ -8,7 +8,7 @@ import mock

 import netlib.utils
 from netlib import odict
-from netlib.http.semantics import CONTENT_MISSING, HDR_FORM_URLENCODED
+from netlib.http.semantics import CONTENT_MISSING, HDR_FORM_URLENCODED, Headers
 from libmproxy import filt, protocol, controller, tnetstring, flow
 from libmproxy.models import Error, Flow, HTTPRequest, HTTPResponse, HTTPFlow, decoded
 from libmproxy.proxy.config import HostMatcher
@@ -34,7 +34,7 @@ def test_app_registry():
     r.host = "domain2"
     r.port = 80
     assert not ar.get(r)
-    r.headers["host"] = ["domain"]
+    r.headers["host"] = "domain"
     assert ar.get(r)


@@ -42,7 +42,7 @@ class TestStickyCookieState:
     def _response(self, cookie, host):
         s = flow.StickyCookieState(filt.parse(".*"))
         f = tutils.tflow(req=netlib.tutils.treq(host=host, port=80), resp=True)
-        f.response.headers["Set-Cookie"] = [cookie]
+        f.response.headers["Set-Cookie"] = cookie
         s.handle_response(f)
         return s, f

@@ -75,13 +75,13 @@ class TestStickyAuthState:
     def test_handle_response(self):
         s = flow.StickyAuthState(filt.parse(".*"))
         f = tutils.tflow(resp=True)
-        f.request.headers["authorization"] = ["foo"]
+        f.request.headers["authorization"] = "foo"
         s.handle_request(f)
         assert "address" in s.hosts

         f = tutils.tflow(resp=True)
         s.handle_request(f)
-        assert f.request.headers["authorization"] == ["foo"]
+        assert f.request.headers["authorization"] == "foo"


 class TestClientPlaybackState:
@@ -133,7 +133,7 @@ class TestServerPlaybackState:

         assert s._hash(r)
         assert s._hash(r) == s._hash(r2)
-        r.request.headers["foo"] = ["bar"]
+        r.request.headers["foo"] = "bar"
         assert s._hash(r) == s._hash(r2)
         r.request.path = "voing"
         assert s._hash(r) != s._hash(r2)
@@ -153,12 +153,12 @@ class TestServerPlaybackState:
             None,
             False)
         r = tutils.tflow(resp=True)
-        r.request.headers["foo"] = ["bar"]
+        r.request.headers["foo"] = "bar"
         r2 = tutils.tflow(resp=True)
         assert not s._hash(r) == s._hash(r2)
-        r2.request.headers["foo"] = ["bar"]
+        r2.request.headers["foo"] = "bar"
         assert s._hash(r) == s._hash(r2)
-        r2.request.headers["oink"] = ["bar"]
+        r2.request.headers["oink"] = "bar"
         assert s._hash(r) == s._hash(r2)

         r = tutils.tflow(resp=True)
@@ -167,10 +167,10 @@ class TestServerPlaybackState:

     def test_load(self):
         r = tutils.tflow(resp=True)
-        r.request.headers["key"] = ["one"]
+        r.request.headers["key"] = "one"

         r2 = tutils.tflow(resp=True)
-        r2.request.headers["key"] = ["two"]
+        r2.request.headers["key"] = "two"

         s = flow.ServerPlaybackState(
             None, [
@@ -179,21 +179,21 @@ class TestServerPlaybackState:
         assert len(s.fmap.keys()) == 1

         n = s.next_flow(r)
-        assert n.request.headers["key"] == ["one"]
+        assert n.request.headers["key"] == "one"
         assert s.count() == 1

         n = s.next_flow(r)
-        assert n.request.headers["key"] == ["two"]
+        assert n.request.headers["key"] == "two"
         assert s.count() == 0

         assert not s.next_flow(r)

     def test_load_with_nopop(self):
         r = tutils.tflow(resp=True)
-        r.request.headers["key"] = ["one"]
+        r.request.headers["key"] = "one"

         r2 = tutils.tflow(resp=True)
-        r2.request.headers["key"] = ["two"]
+        r2.request.headers["key"] = "two"

         s = flow.ServerPlaybackState(
             None, [
@@ -224,12 +224,10 @@ class TestServerPlaybackState:
             None, [], False, False, None, False, [
                 "param1", "param2"], False)
         r = tutils.tflow(resp=True)
-        r.request.headers[
-            "Content-Type"] = ["application/x-www-form-urlencoded"]
+        r.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
         r.request.content = "paramx=x&param1=1"
         r2 = tutils.tflow(resp=True)
-        r2.request.headers[
-            "Content-Type"] = ["application/x-www-form-urlencoded"]
+        r2.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
         r2.request.content = "paramx=x&param1=1"
         # same parameters
         assert s._hash(r) == s._hash(r2)
@@ -254,10 +252,10 @@ class TestServerPlaybackState:
             None, [], False, False, None, False, [
                 "param1", "param2"], False)
         r = tutils.tflow(resp=True)
-        r.request.headers["Content-Type"] = ["application/json"]
+        r.request.headers["Content-Type"] = "application/json"
         r.request.content = '{"param1":"1"}'
         r2 = tutils.tflow(resp=True)
-        r2.request.headers["Content-Type"] = ["application/json"]
+        r2.request.headers["Content-Type"] = "application/json"
         r2.request.content = '{"param1":"1"}'
         # same content
         assert s._hash(r) == s._hash(r2)
@@ -271,12 +269,10 @@ class TestServerPlaybackState:
             None, [], False, False, None, True, [
                 "param1", "param2"], False)
         r = tutils.tflow(resp=True)
-        r.request.headers[
-            "Content-Type"] = ["application/x-www-form-urlencoded"]
+        r.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
         r.request.content = "paramx=y"
         r2 = tutils.tflow(resp=True)
-        r2.request.headers[
-            "Content-Type"] = ["application/x-www-form-urlencoded"]
+        r2.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
         r2.request.content = "paramx=x"
         # same parameters
         assert s._hash(r) == s._hash(r2)
@@ -460,17 +456,17 @@ class TestFlow:

     def test_replace(self):
         f = tutils.tflow(resp=True)
-        f.request.headers["foo"] = ["foo"]
+        f.request.headers["foo"] = "foo"
         f.request.content = "afoob"

-        f.response.headers["foo"] = ["foo"]
+        f.response.headers["foo"] = "foo"
         f.response.content = "afoob"

         assert f.replace("foo", "bar") == 6

-        assert f.request.headers["bar"] == ["bar"]
+        assert f.request.headers["bar"] == "bar"
         assert f.request.content == "abarb"
-        assert f.response.headers["bar"] == ["bar"]
+        assert f.response.headers["bar"] == "bar"
         assert f.response.content == "abarb"

     def test_replace_encoded(self):
@@ -938,14 +934,14 @@ class TestFlowMaster:

         fm.set_stickycookie(".*")
         f = tutils.tflow(resp=True)
-        f.response.headers["set-cookie"] = ["foo=bar"]
+        f.response.headers["set-cookie"] = "foo=bar"
         fm.handle_request(f)
         fm.handle_response(f)
         assert fm.stickycookie_state.jar
         assert not "cookie" in f.request.headers
         f = f.copy()
         fm.handle_request(f)
-        assert f.request.headers["cookie"] == ["foo=bar"]
+        assert f.request.headers["cookie"] == "foo=bar"

     def test_stickyauth(self):
         s = flow.State()
@@ -958,14 +954,14 @@ class TestFlowMaster:

         fm.set_stickyauth(".*")
         f = tutils.tflow(resp=True)
-        f.request.headers["authorization"] = ["foo"]
+        f.request.headers["authorization"] = "foo"
         fm.handle_request(f)

         f = tutils.tflow(resp=True)
         assert fm.stickyauth_state.hosts
         assert not "authorization" in f.request.headers
         fm.handle_request(f)
-        assert f.request.headers["authorization"] == ["foo"]
+        assert f.request.headers["authorization"] == "foo"

     def test_stream(self):
         with tutils.tmpdir() as tdir:
@ -1022,7 +1018,7 @@ class TestRequest:
|
|||||||
assert r.url == "https://address:22/path"
|
assert r.url == "https://address:22/path"
|
||||||
|
|
||||||
assert r.pretty_url(True) == "https://address:22/path"
|
assert r.pretty_url(True) == "https://address:22/path"
|
||||||
r.headers["Host"] = ["foo.com"]
|
r.headers["Host"] = "foo.com"
|
||||||
assert r.pretty_url(False) == "https://address:22/path"
|
assert r.pretty_url(False) == "https://address:22/path"
|
||||||
assert r.pretty_url(True) == "https://foo.com:22/path"
|
assert r.pretty_url(True) == "https://foo.com:22/path"
|
||||||
|
|
||||||
@@ -1048,19 +1044,17 @@ class TestRequest:
def test_getset_form_urlencoded(self):
d = odict.ODict([("one", "two"), ("three", "four")])
r = HTTPRequest.wrap(netlib.tutils.treq(content=netlib.utils.urlencode(d.lst)))
-r.headers["content-type"] = [HDR_FORM_URLENCODED]
+r.headers["content-type"] = HDR_FORM_URLENCODED
assert r.get_form_urlencoded() == d

d = odict.ODict([("x", "y")])
r.set_form_urlencoded(d)
assert r.get_form_urlencoded() == d

-r.headers["content-type"] = ["foo"]
+r.headers["content-type"] = "foo"
assert not r.get_form_urlencoded()

def test_getset_query(self):
-h = odict.ODictCaseless()

r = HTTPRequest.wrap(netlib.tutils.treq())
r.path = "/foo?x=y&a=b"
q = r.get_query()
@@ -1083,11 +1077,10 @@ class TestRequest:
assert r.get_query() == qv

def test_anticache(self):
-h = odict.ODictCaseless()
r = HTTPRequest.wrap(netlib.tutils.treq())
-r.headers = h
+r.headers = Headers()
-h["if-modified-since"] = ["test"]
+r.headers["if-modified-since"] = "test"
-h["if-none-match"] = ["test"]
+r.headers["if-none-match"] = "test"
r.anticache()
assert not "if-modified-since" in r.headers
assert not "if-none-match" in r.headers
@@ -1095,25 +1088,29 @@ class TestRequest:
def test_replace(self):
r = HTTPRequest.wrap(netlib.tutils.treq())
r.path = "path/foo"
-r.headers["Foo"] = ["fOo"]
+r.headers["Foo"] = "fOo"
r.content = "afoob"
assert r.replace("foo(?i)", "boo") == 4
assert r.path == "path/boo"
assert not "foo" in r.content
-assert r.headers["boo"] == ["boo"]
+assert r.headers["boo"] == "boo"

def test_constrain_encoding(self):
r = HTTPRequest.wrap(netlib.tutils.treq())
-r.headers["accept-encoding"] = ["gzip", "oink"]
+r.headers["accept-encoding"] = "gzip, oink"
+r.constrain_encoding()
+assert "oink" not in r.headers["accept-encoding"]
+
+r.headers.set_all("accept-encoding", ["gzip", "oink"])
r.constrain_encoding()
assert "oink" not in r.headers["accept-encoding"]

def test_decodeencode(self):
r = HTTPRequest.wrap(netlib.tutils.treq())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
r.decode()
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
assert r.content == "falafel"

r = HTTPRequest.wrap(netlib.tutils.treq())
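Where a header genuinely needs several values, the updated test reaches for set_all(), which takes a list; plain item assignment stays single-valued. A short sketch of the distinction; it uses only the calls exercised in these tests, and the final assertion assumes get_all() returns the values in the order they were set.

    # Sketch: single string assignment vs. set_all() for repeated header fields.
    from netlib.http import Headers

    headers = Headers()
    headers["accept-encoding"] = "gzip, oink"             # one field, one string value
    headers.set_all("accept-encoding", ["gzip", "oink"])  # two separate fields
    # Assumption: get_all() mirrors set_all(), as the setheaders test below relies on.
    assert headers.get_all("accept-encoding") == ["gzip", "oink"]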
@@ -1121,26 +1118,26 @@ class TestRequest:
assert not r.decode()

r = HTTPRequest.wrap(netlib.tutils.treq())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
r.encode("identity")
-assert r.headers["content-encoding"] == ["identity"]
+assert r.headers["content-encoding"] == "identity"
assert r.content == "falafel"

r = HTTPRequest.wrap(netlib.tutils.treq())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
r.encode("gzip")
-assert r.headers["content-encoding"] == ["gzip"]
+assert r.headers["content-encoding"] == "gzip"
assert r.content != "falafel"
r.decode()
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
assert r.content == "falafel"

def test_get_decoded_content(self):
r = HTTPRequest.wrap(netlib.tutils.treq())
r.content = None
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
assert r.get_decoded_content() == None

r.content = "falafel"
@@ -1148,11 +1145,9 @@ class TestRequest:
assert r.get_decoded_content() == "falafel"

def test_get_content_type(self):
-h = odict.ODictCaseless()
-h["Content-Type"] = ["text/plain"]
resp = HTTPResponse.wrap(netlib.tutils.tresp())
-resp.headers = h
+resp.headers = Headers(content_type="text/plain")
-assert resp.headers.get_first("content-type") == "text/plain"
+assert resp.headers["content-type"] == "text/plain"


class TestResponse:
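The rewritten content-type fixture shows the keyword constructor: keyword arguments become header fields, with the underscore standing in for the dash, as the assertion on content_type just above demonstrates. A small sketch, again assuming the netlib.http import:

    # Sketch: building Headers from keyword arguments; content_type maps to
    # the "content-type" field, per the assertion in test_get_content_type.
    from netlib.http import Headers

    headers = Headers(content_type="text/plain")
    assert headers["content-type"] == "text/plain"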
@@ -1165,19 +1160,18 @@ class TestResponse:
def test_refresh(self):
r = HTTPResponse.wrap(netlib.tutils.tresp())
n = time.time()
-r.headers["date"] = [email.utils.formatdate(n)]
+r.headers["date"] = email.utils.formatdate(n)
pre = r.headers["date"]
r.refresh(n)
assert pre == r.headers["date"]
r.refresh(n + 60)

-d = email.utils.parsedate_tz(r.headers["date"][0])
+d = email.utils.parsedate_tz(r.headers["date"])
d = email.utils.mktime_tz(d)
# Weird that this is not exact...
assert abs(60 - (d - n)) <= 1

-r.headers[
-"set-cookie"] = ["MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure"]
+r.headers["set-cookie"] = "MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure"
r.refresh()

def test_refresh_cookie(self):
@@ -1192,47 +1186,45 @@ class TestResponse:

def test_replace(self):
r = HTTPResponse.wrap(netlib.tutils.tresp())
-r.headers["Foo"] = ["fOo"]
+r.headers["Foo"] = "fOo"
r.content = "afoob"
assert r.replace("foo(?i)", "boo") == 3
assert not "foo" in r.content
-assert r.headers["boo"] == ["boo"]
+assert r.headers["boo"] == "boo"

def test_decodeencode(self):
r = HTTPResponse.wrap(netlib.tutils.tresp())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
assert r.decode()
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
assert r.content == "falafel"

r = HTTPResponse.wrap(netlib.tutils.tresp())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
r.encode("identity")
-assert r.headers["content-encoding"] == ["identity"]
+assert r.headers["content-encoding"] == "identity"
assert r.content == "falafel"

r = HTTPResponse.wrap(netlib.tutils.tresp())
-r.headers["content-encoding"] = ["identity"]
+r.headers["content-encoding"] = "identity"
r.content = "falafel"
r.encode("gzip")
-assert r.headers["content-encoding"] == ["gzip"]
+assert r.headers["content-encoding"] == "gzip"
assert r.content != "falafel"
assert r.decode()
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
assert r.content == "falafel"

-r.headers["content-encoding"] = ["gzip"]
+r.headers["content-encoding"] = "gzip"
assert not r.decode()
assert r.content == "falafel"

def test_get_content_type(self):
-h = odict.ODictCaseless()
-h["Content-Type"] = ["text/plain"]
resp = HTTPResponse.wrap(netlib.tutils.tresp())
-resp.headers = h
+resp.headers = Headers(content_type="text/plain")
-assert resp.headers.get_first("content-type") == "text/plain"
+assert resp.headers["content-type"] == "text/plain"


class TestError:
@@ -1276,12 +1268,12 @@ class TestClientConnection:
def test_decoded():
r = HTTPRequest.wrap(netlib.tutils.treq())
assert r.content == "content"
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
r.encode("gzip")
assert r.headers["content-encoding"]
assert r.content != "content"
with decoded(r):
-assert not r.headers["content-encoding"]
+assert "content-encoding" not in r.headers
assert r.content == "content"
assert r.headers["content-encoding"]
assert r.content != "content"
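test_decoded also illustrates how absence is now checked: instead of asserting that a missing header is falsy, the tests use membership, the natural dict-like idiom for the new class. A sketch of the presence checks these assertions rely on, using only operations that appear in the hunks:

    # Sketch: membership is how these tests probe for a header's presence.
    from netlib.http import Headers

    headers = Headers()
    assert "content-encoding" not in headers   # absent until set
    headers["content-encoding"] = "gzip"
    assert "content-encoding" in headers
    assert headers["content-encoding"]         # present and non-empty, as asserted after encode()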
@@ -1378,18 +1370,18 @@ def test_setheaders():
h.add("~s", "one", "two")
h.add("~s", "one", "three")
f = tutils.tflow(resp=True)
-f.request.headers["one"] = ["xxx"]
+f.request.headers["one"] = "xxx"
-f.response.headers["one"] = ["xxx"]
+f.response.headers["one"] = "xxx"
h.run(f)
-assert f.request.headers["one"] == ["xxx"]
+assert f.request.headers["one"] == "xxx"
-assert f.response.headers["one"] == ["two", "three"]
+assert f.response.headers.get_all("one") == ["two", "three"]

h.clear()
h.add("~q", "one", "two")
h.add("~q", "one", "three")
f = tutils.tflow()
-f.request.headers["one"] = ["xxx"]
+f.request.headers["one"] = "xxx"
h.run(f)
-assert f.request.headers["one"] == ["two", "three"]
+assert f.request.headers.get_all("one") == ["two", "three"]

assert not h.add("~", "foo", "bar")
@@ -5,7 +5,6 @@ from mock import MagicMock

from libmproxy.protocol.http import *
import netlib.http
-from netlib import odict
from netlib.http import http1
from netlib.http.semantics import CONTENT_MISSING

@@ -623,8 +623,7 @@ class MasterRedirectRequest(tservers.TestMaster):

def handle_response(self, f):
f.response.content = str(f.client_conn.address.port)
-f.response.headers[
-"server-conn-id"] = [str(f.server_conn.source_address.port)]
+f.response.headers["server-conn-id"] = str(f.server_conn.source_address.port)
super(MasterRedirectRequest, self).handle_response(f)


@@ -721,7 +720,7 @@ class TestStreamRequest(tservers.HTTPProxTest):
protocol = http.http1.HTTP1Protocol(rfile=fconn)
resp = protocol.read_response("GET", None, include_body=False)

-assert resp.headers["Transfer-Encoding"][0] == 'chunked'
+assert resp.headers["Transfer-Encoding"] == 'chunked'
assert resp.status_code == 200

chunks = list(protocol.read_http_body_chunked(
@@ -743,7 +742,7 @@ class TestFakeResponse(tservers.HTTPProxTest):

def test_fake(self):
f = self.pathod("200")
-assert "header_response" in f.headers.keys()
+assert "header_response" in f.headers


class TestServerConnect(tservers.HTTPProxTest):
@@ -1,6 +1,5 @@
import json
from libmproxy import utils
-from netlib import odict
import tutils

utils.CERT_SLEEP_TIME = 0