Matías Lang 2019-01-13 23:55:27 -03:00
commit e6da62a50a
19 changed files with 32 additions and 29 deletions

View File

@@ -20,9 +20,9 @@ class MyAddon:
         for f in flows:
             totals[f.request.host] = totals.setdefault(f.request.host, 0) + 1
-        fp = open(path, "w+")
-        for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
-            fp.write("%s: %s\n" % (cnt, dom))
+        with open(path, "w+") as fp:
+            for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
+                fp.write("%s: %s\n" % (cnt, dom))
         ctx.log.alert("done")
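Note: the hunk above replaces a file handle that was never explicitly closed with a with block, which closes the file even if a write raises. A minimal sketch of the same pattern, using an invented totals dict and output path rather than the addon's own data:

    # Illustrative only: hypothetical data, not taken from the example addon.
    totals = {"example.com": 3, "mitmproxy.org": 1}
    path = "hosts.txt"

    with open(path, "w") as fp:
        for cnt, dom in sorted((v, k) for k, v in totals.items()):
            fp.write("%s: %s\n" % (cnt, dom))

    assert fp.closed  # the context manager closed the handle on exit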

View File

@@ -38,7 +38,7 @@ def response(flow: http.HTTPFlow) -> None:
     flow.response.content = flow.response.content.replace(b'https://', b'http://')
     # strip meta tag upgrade-insecure-requests in response body
-    csp_meta_tag_pattern = b'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
+    csp_meta_tag_pattern = br'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
     flow.response.content = re.sub(csp_meta_tag_pattern, b'', flow.response.content, flags=re.IGNORECASE)
     # strip links in 'Location' header
@@ -52,7 +52,7 @@ def response(flow: http.HTTPFlow) -> None:
     # strip upgrade-insecure-requests in Content-Security-Policy header
     if re.search('upgrade-insecure-requests', flow.response.headers.get('Content-Security-Policy', ''), flags=re.IGNORECASE):
         csp = flow.response.headers['Content-Security-Policy']
-        flow.response.headers['Content-Security-Policy'] = re.sub('upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
+        flow.response.headers['Content-Security-Policy'] = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
     # strip secure flag from 'Set-Cookie' headers
     cookies = flow.response.headers.get_all('Set-Cookie')
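Note on the raw-string changes above: a backslash escape such as \s inside a plain (non-raw) string literal is an unrecognized escape, which CPython has flagged with a DeprecationWarning since Python 3.6; the raw prefix spells the identical pattern without the warning. A minimal sketch with an invented Content-Security-Policy value, not taken from the example script:

    import re

    # The two spellings produce the same pattern string.
    assert r'upgrade-insecure-requests[;\s]*' == 'upgrade-insecure-requests[;\\s]*'

    # Hypothetical header value, for illustration only.
    csp = "upgrade-insecure-requests; default-src 'self'"
    stripped = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
    assert stripped == "default-src 'self'"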

View File

@@ -1,4 +1,4 @@
-"""
+r"""
  __ __ _____ _____ _____
 \ \ / // ____/ ____| / ____|

View File

@@ -87,8 +87,8 @@ class SessionDB:
     def _create_session(self):
         script_path = pkg_data.path("io/sql/session_create.sql")
-        qry = open(script_path, 'r').read()
-        self.con.executescript(qry)
+        with open(script_path, 'r') as qry:
+            self.con.executescript(qry.read())
         self.con.commit()
     @staticmethod

View File

@@ -16,7 +16,7 @@ A custom CSS prettifier. Compared to other prettifiers, its main features are:
 CSS_SPECIAL_AREAS = (
     "'" + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + "'",
     '"' + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + '"',
-    r"/\*" + strutils.MULTILINE_CONTENT + "\*/",
+    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
     "//" + strutils.SINGLELINE_CONTENT + "$"
 )
 CSS_SPECIAL_CHARS = "{};:"

View File

@@ -10,9 +10,9 @@ SPECIAL_AREAS = (
     r"'" + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + "'",
     r'"' + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + '"',
     r'`' + strutils.MULTILINE_CONTENT + strutils.NO_ESCAPE + '`',
-    r"/\*" + strutils.MULTILINE_CONTENT + "\*/",
+    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
     r"//" + strutils.SINGLELINE_CONTENT + "$",
-    r"for\(" + strutils.SINGLELINE_CONTENT + "\)",
+    r"for\(" + strutils.SINGLELINE_CONTENT + r"\)",
 )

View File

@@ -18,7 +18,7 @@ The implementation is split into two main parts: tokenization and formatting of
 """
 # http://www.xml.com/pub/a/2001/07/25/namingparts.html - this is close enough for what we do.
-REGEX_TAG = re.compile("[a-zA-Z0-9._:\-]+(?!=)")
+REGEX_TAG = re.compile(r"[a-zA-Z0-9._:\-]+(?!=)")
 # https://www.w3.org/TR/html5/syntax.html#void-elements
 HTML_VOID_ELEMENTS = {
     "area", "base", "br", "col", "embed", "hr", "img", "input", "keygen", "link", "meta", "param",

View File

@@ -63,7 +63,8 @@ if __name__ == "__main__":
     listOfSamples = os.listdir(samplesDir)
     for filename in listOfSamples:
-        byteWBXML = open(samplesDir + os.sep + filename, "rb").read()
+        with open(samplesDir + os.sep + filename, "rb") as f:
+            byteWBXML = f.read()
         logging.info("-"*100)
         logging.info(filename)

View File

@@ -2,7 +2,7 @@ import ipaddress
 import re
 # Allow underscore in host name
-_label_valid = re.compile(b"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
+_label_valid = re.compile(br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
 def is_valid_host(host: bytes) -> bool:
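Note: the br prefix above just combines a bytes literal with a raw literal; the compiled pattern is identical either way, only the invalid-escape warning goes away. A small sketch with invented host labels, not taken from mitmproxy's tests:

    import re

    # Same bytes value either way; the raw form avoids the invalid-escape warning.
    assert b"(?!-)[A-Z\\d\\-_]{1,63}(?<!-)$" == br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$"

    label = re.compile(br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
    assert label.match(b"my_host-01")             # hypothetical label, accepted
    assert label.match(b"-leading-dash") is None  # labels may not start with "-"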

View File

@@ -11,7 +11,7 @@ def lookup(address, port, s):
     """
     # We may get an ipv4-mapped ipv6 address here, e.g. ::ffff:127.0.0.1.
    # Those still appear as "127.0.0.1" in the table, so we need to strip the prefix.
-    address = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", address)
+    address = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", address)
     s = s.decode()
     spec = "%s:%s" % (address, port)
     for i in s.split("\n"):

View File

@@ -58,7 +58,7 @@ class Resolver:
     def original_addr(self, csock: socket.socket):
         ip, port = csock.getpeername()[:2]
-        ip = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", ip)
+        ip = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", ip)
         ip = ip.split("%", 1)[0]
         with self.lock:
             try:

View File

@@ -91,9 +91,9 @@ class HelpView(tabs.Tabs, layoutwidget.LayoutWidget):
             )
         )
         examples = [
-            ("google\.com", "Url containing \"google.com"),
-            ("~q ~b test", "Requests where body contains \"test\""),
-            ("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
+            (r"google\.com", r"Url containing \"google.com"),
+            ("~q ~b test", r"Requests where body contains \"test\""),
+            (r"!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
         ]
         text.extend(
             common.format_keyvals(examples, indent=4)

View File

@@ -370,7 +370,7 @@ class FlowContent(RequestHandler):
         original_cd = message.headers.get("Content-Disposition", None)
         filename = None
         if original_cd:
-            filename = re.search('filename=([-\w" .()]+)', original_cd)
+            filename = re.search(r'filename=([-\w" .()]+)', original_cd)
             if filename:
                 filename = filename.group(1)
         if not filename:

View File

@@ -169,7 +169,7 @@ def split_special_areas(
     >>> split_special_areas(
     >>>     "test /* don't modify me */ foo",
-    >>>     [r"/\*[\s\S]*?\*/"])  # (regex matching comments)
+    >>>     [r"/\\*[\\s\\S]*?\\*/"])  # (regex matching comments)
     ["test ", "/* don't modify me */", " foo"]
     "".join(split_special_areas(x, ...)) == x always holds true.
@@ -201,7 +201,7 @@ def escape_special_areas(
     >>> x = escape_special_areas(x, "{", ["'" + SINGLELINE_CONTENT + "'"])
     >>> print(x)
     if (true) { console.log('�}'); }
-    >>> x = re.sub(r"\s*{\s*", " {\n ", x)
+    >>> x = re.sub(r"\\s*{\\s*", " {\n ", x)
     >>> x = unescape_special_areas(x)
     >>> print(x)
     if (true) {

View File

@@ -31,7 +31,8 @@ class Benchmark:
             stdout=asyncio.subprocess.PIPE
         )
         stdout, _ = await traf.communicate()
-        open(ctx.options.benchmark_save_path + ".bench", mode="wb").write(stdout)
+        with open(ctx.options.benchmark_save_path + ".bench", mode="wb") as f:
+            f.write(stdout)
         ctx.log.error("Proxy saw %s requests, %s responses" % (self.reqs, self.resps))
         ctx.log.error(stdout.decode("ascii"))
         backend.kill()

View File

@@ -68,7 +68,8 @@ class TestSession:
         os.remove(path)
         con = sqlite3.connect(path)
         script_path = pkg_data.path("io/sql/session_create.sql")
-        qry = open(script_path, 'r').read()
+        with open(script_path) as f:
+            qry = f.read()
         with con:
             con.executescript(qry)
         blob = b'blob_of_data'

View File

@@ -4,4 +4,4 @@ from mitmproxy.coretypes import basethread
 def test_basethread():
     t = basethread.BaseThread('foobar')
-    assert re.match('foobar - age: \d+s', t._threadinfo())
+    assert re.match(r'foobar - age: \d+s', t._threadinfo())

View File

@@ -27,7 +27,7 @@ cookie_pairs = [
         [["one", "uno"], ["two", "due"]]
     ],
     [
-        'one="uno"; two="\due"',
+        'one="uno"; two="\\due"',
         [["one", "uno"], ["two", "due"]]
     ],
     [
@@ -70,7 +70,7 @@ def test_read_key():
 def test_read_quoted_string():
     tokens = [
         [('"foo" x', 0), ("foo", 5)],
-        [('"f\oo" x', 0), ("foo", 6)],
+        [('"f\\oo" x', 0), ("foo", 6)],
         [(r'"f\\o" x', 0), (r"f\o", 6)],
         [(r'"f\\" x', 0), (r"f" + '\\', 5)],
         [('"fo\\\"" x', 0), ("fo\"", 6)],

View File

@@ -102,7 +102,7 @@ class TestServerBind(tservers.ServerTestBase):
             # We may get an ipv4-mapped ipv6 address here, e.g. ::ffff:127.0.0.1.
             # Those still appear as "127.0.0.1" in the table, so we need to strip the prefix.
             peername = self.connection.getpeername()
-            address = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", peername[0])
+            address = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", peername[0])
             port = peername[1]
             self.wfile.write(str((address, port)).encode())