Mirror of https://github.com/Grasscutters/mitmproxy.git (synced 2024-11-29 19:08:44 +00:00)

Commit 4f270b5506: Merge pull request #3443 from BoboTiG/fix-invalid-seq-warnings

Fix several DeprecationWarning: invalid escape sequence
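Every hunk in this merge applies the same fix: a regex or escape-heavy string literal gains an r (or br) prefix, or has its backslashes doubled, so Python no longer sees an invalid escape sequence. A minimal standalone sketch of the warning being silenced (not taken from the commit; the compile() calls and names are illustrative):

import warnings

# Since Python 3.6, an unrecognized backslash escape such as \d in a plain
# string literal warns at compile time; raw strings do not.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile('PATTERN = "\\d+"', "<demo>", "exec")    # plain literal with \d: warns
    compile('PATTERN = r"\\d+"', "<demo>", "exec")   # raw literal: no warning

for w in caught:
    print(w.category.__name__, w.message)
# Expect one warning for the first compile (DeprecationWarning on older
# Pythons, SyntaxWarning on newer ones).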
@@ -38,7 +38,7 @@ def response(flow: http.HTTPFlow) -> None:
     flow.response.content = flow.response.content.replace(b'https://', b'http://')

     # strip meta tag upgrade-insecure-requests in response body
-    csp_meta_tag_pattern = b'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
+    csp_meta_tag_pattern = br'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
     flow.response.content = re.sub(csp_meta_tag_pattern, b'', flow.response.content, flags=re.IGNORECASE)

     # strip links in 'Location' header
@@ -52,7 +52,7 @@ def response(flow: http.HTTPFlow) -> None:
     # strip upgrade-insecure-requests in Content-Security-Policy header
     if re.search('upgrade-insecure-requests', flow.response.headers.get('Content-Security-Policy', ''), flags=re.IGNORECASE):
         csp = flow.response.headers['Content-Security-Policy']
-        flow.response.headers['Content-Security-Policy'] = re.sub('upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
+        flow.response.headers['Content-Security-Policy'] = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)

     # strip secure flag from 'Set-Cookie' headers
     cookies = flow.response.headers.get_all('Set-Cookie')
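The two sslstrip hunks above only change the literal form of the patterns; the substitutions behave the same. A standalone sketch of the same two strip operations applied to plain values (the sample body and header value are invented for illustration):

import re

# The fixed patterns from the hunks above, applied outside mitmproxy.
csp_meta_tag_pattern = br'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'

body = b'<head><meta http-equiv="Content-Security-Policy" content="upgrade-insecure-requests"></head>'
print(re.sub(csp_meta_tag_pattern, b'', body, flags=re.IGNORECASE))
# b'<head></head>'

csp = "upgrade-insecure-requests; default-src 'self'"
print(re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE))
# default-src 'self'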
@@ -1,4 +1,4 @@
-"""
+r"""

  __ __ _____ _____ _____
 \ \ / // ____/ ____| / ____|
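The r prefix on this module docstring is needed because the ASCII-art banner contains backslashes. A tiny illustration (hypothetical module, not from the commit):

# A docstring containing backslash art such as "\ \ / /" would trigger the
# invalid-escape warning unless the docstring is made raw.
r"""
 \ \ / /   <- fine here: the r prefix keeps every backslash literal
"""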
@@ -16,7 +16,7 @@ A custom CSS prettifier. Compared to other prettifiers, its main features are:
 CSS_SPECIAL_AREAS = (
     "'" + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + "'",
     '"' + strutils.SINGLELINE_CONTENT + strutils.NO_ESCAPE + '"',
-    r"/\*" + strutils.MULTILINE_CONTENT + "\*/",
+    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
     "//" + strutils.SINGLELINE_CONTENT + "$"
 )
 CSS_SPECIAL_CHARS = "{};:"
@@ -10,9 +10,9 @@ SPECIAL_AREAS = (
     r"'" + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + "'",
     r'"' + strutils.MULTILINE_CONTENT_LINE_CONTINUATION + strutils.NO_ESCAPE + '"',
     r'`' + strutils.MULTILINE_CONTENT + strutils.NO_ESCAPE + '`',
-    r"/\*" + strutils.MULTILINE_CONTENT + "\*/",
+    r"/\*" + strutils.MULTILINE_CONTENT + r"\*/",
     r"//" + strutils.SINGLELINE_CONTENT + "$",
-    r"for\(" + strutils.SINGLELINE_CONTENT + "\)",
+    r"for\(" + strutils.SINGLELINE_CONTENT + r"\)",
 )


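Writing the fragment as r"\*/" instead of "\*/" produces the same two characters, so the prettifiers behave identically; only the warning disappears. A standalone sketch of the comment pattern, with an assumed stand-in value for strutils.MULTILINE_CONTENT (the real constant lives in mitmproxy.utils.strutils):

import re

# Assumed stand-in, for illustration only.
MULTILINE_CONTENT = r"[\s\S]*?"

comment = re.compile(r"/\*" + MULTILINE_CONTENT + r"\*/")
print(comment.findall("a { } /* keep me intact */ b { } /* and me */"))
# ['/* keep me intact */', '/* and me */']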
@@ -18,7 +18,7 @@ The implementation is split into two main parts: tokenization and formatting of
 """

 # http://www.xml.com/pub/a/2001/07/25/namingparts.html - this is close enough for what we do.
-REGEX_TAG = re.compile("[a-zA-Z0-9._:\-]+(?!=)")
+REGEX_TAG = re.compile(r"[a-zA-Z0-9._:\-]+(?!=)")
 # https://www.w3.org/TR/html5/syntax.html#void-elements
 HTML_VOID_ELEMENTS = {
     "area", "base", "br", "col", "embed", "hr", "img", "input", "keygen", "link", "meta", "param",
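The \- inside the character class is what warned; the compiled pattern is unchanged. A quick standalone check with a made-up tag name:

import re

# The fixed pattern from the hunk above; \- stays a literal hyphen either way.
REGEX_TAG = re.compile(r"[a-zA-Z0-9._:\-]+(?!=)")
print(REGEX_TAG.match("my-element:name").group(0))   # my-element:name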
@@ -2,7 +2,7 @@ import ipaddress
 import re

 # Allow underscore in host name
-_label_valid = re.compile(b"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
+_label_valid = re.compile(br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)


 def is_valid_host(host: bytes) -> bool:
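For byte patterns the fix is the br prefix. A standalone sketch of the label check on invented hostnames:

import re

# The fixed byte pattern from the hunk above, exercised on made-up labels.
_label_valid = re.compile(br"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)

print(bool(_label_valid.match(b"my_host-01")))   # True: underscore and hyphen allowed
print(bool(_label_valid.match(b"-leading")))     # False: leading hyphen rejected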
@@ -11,7 +11,7 @@ def lookup(address, port, s):
     """
     # We may get an ipv4-mapped ipv6 address here, e.g. ::ffff:127.0.0.1.
     # Those still appear as "127.0.0.1" in the table, so we need to strip the prefix.
-    address = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", address)
+    address = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", address)
     s = s.decode()
     spec = "%s:%s" % (address, port)
     for i in s.split("\n"):
@@ -58,7 +58,7 @@ class Resolver:

     def original_addr(self, csock: socket.socket):
         ip, port = csock.getpeername()[:2]
-        ip = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", ip)
+        ip = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", ip)
         ip = ip.split("%", 1)[0]
         with self.lock:
             try:
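The same IPv4-mapped-IPv6 substitution appears in three hunks of this merge (the two above and the test server hunk further down); each one only gains the r prefix. A standalone check, with strip_v4mapped being a helper name made up for this sketch:

import re

def strip_v4mapped(addr: str) -> str:
    # The fixed substitution: strip the ::ffff: prefix so the address matches
    # the plain IPv4 entries in the lookup table.
    return re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", addr)

print(strip_v4mapped("::ffff:127.0.0.1"))   # 127.0.0.1
print(strip_v4mapped("2001:db8::1"))        # 2001:db8::1 (left untouched)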
@@ -91,9 +91,9 @@ class HelpView(tabs.Tabs, layoutwidget.LayoutWidget):
            )
        )
        examples = [
-           ("google\.com", "Url containing \"google.com"),
-           ("~q ~b test", "Requests where body contains \"test\""),
-           ("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
+           (r"google\.com", r"Url containing \"google.com"),
+           ("~q ~b test", r"Requests where body contains \"test\""),
+           (r"!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
        ]
        text.extend(
            common.format_keyvals(examples, indent=4)
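For the console help the raw prefix only changes how the literals are spelled; the filter examples render exactly the same. A quick standalone check:

# The r prefix changes the source spelling, not the resulting string, so the
# help text shown in the console is unchanged.
assert r"google\.com" == "google\\.com"
assert r'Url containing \"google.com' == 'Url containing \\"google.com'
print("identical either way")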
@@ -370,7 +370,7 @@ class FlowContent(RequestHandler):
         original_cd = message.headers.get("Content-Disposition", None)
         filename = None
         if original_cd:
-            filename = re.search('filename=([-\w" .()]+)', original_cd)
+            filename = re.search(r'filename=([-\w" .()]+)', original_cd)
             if filename:
                 filename = filename.group(1)
         if not filename:
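A standalone sketch of the fixed Content-Disposition pattern on a made-up header value:

import re

# The fixed pattern from the hunk above; the header value is invented.
m = re.search(r'filename=([-\w" .()]+)', 'attachment; filename=report (v2).txt')
print(m.group(1))   # report (v2).txt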
@@ -169,7 +169,7 @@ def split_special_areas(

     >>> split_special_areas(
     >>> "test /* don't modify me */ foo",
-    >>> [r"/\*[\s\S]*?\*/"]) # (regex matching comments)
+    >>> [r"/\\*[\\s\\S]*?\\*/"]) # (regex matching comments)
     ["test ", "/* don't modify me */", " foo"]

     "".join(split_special_areas(x, ...)) == x always holds true.
@@ -201,7 +201,7 @@ def escape_special_areas(
     >>> x = escape_special_areas(x, "{", ["'" + SINGLELINE_CONTENT + "'"])
     >>> print(x)
     if (true) { console.log('�}'); }
-    >>> x = re.sub(r"\s*{\s*", " {\n ", x)
+    >>> x = re.sub(r"\\s*{\\s*", " {\n ", x)
     >>> x = unescape_special_areas(x)
     >>> print(x)
     if (true) {
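Inside these (non-raw) docstrings the backslashes are doubled instead of adding an r prefix; the rendered doctest text stays the same. A tiny illustration with a hypothetical function:

# In a regular docstring, "\\s" is stored as the two characters backslash + s,
# so the rendered documentation still reads \s while the literal no longer warns.
def f():
    """Collapse whitespace around braces with the regex \\s*{\\s*."""

print(f.__doc__)   # Collapse whitespace around braces with the regex \s*{\s*.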
@@ -4,4 +4,4 @@ from mitmproxy.coretypes import basethread

 def test_basethread():
     t = basethread.BaseThread('foobar')
-    assert re.match('foobar - age: \d+s', t._threadinfo())
+    assert re.match(r'foobar - age: \d+s', t._threadinfo())
@@ -27,7 +27,7 @@ cookie_pairs = [
         [["one", "uno"], ["two", "due"]]
     ],
     [
-        'one="uno"; two="\due"',
+        'one="uno"; two="\\due"',
         [["one", "uno"], ["two", "due"]]
     ],
     [
@@ -70,7 +70,7 @@ def test_read_key():
 def test_read_quoted_string():
     tokens = [
         [('"foo" x', 0), ("foo", 5)],
-        [('"f\oo" x', 0), ("foo", 6)],
+        [('"f\\oo" x', 0), ("foo", 6)],
         [(r'"f\\o" x', 0), (r"f\o", 6)],
         [(r'"f\\" x', 0), (r"f" + '\\', 5)],
         [('"fo\\\"" x', 0), ("fo\"", 6)],
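These test literals relied on Python passing unknown escapes like \d and \o through unchanged; doubling the backslash spells the same value explicitly. A quick standalone check:

# '\\due' is a backslash followed by "due": the same four characters the old
# '\due' literal produced, without relying on pass-through of unknown escapes.
assert '\\due' == chr(92) + 'due'
assert '"f\\oo" x' == '"f' + chr(92) + 'oo" x'
print(len('\\due'))   # 4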
@@ -102,7 +102,7 @@ class TestServerBind(tservers.ServerTestBase):
         # We may get an ipv4-mapped ipv6 address here, e.g. ::ffff:127.0.0.1.
         # Those still appear as "127.0.0.1" in the table, so we need to strip the prefix.
         peername = self.connection.getpeername()
-        address = re.sub("^::ffff:(?=\d+.\d+.\d+.\d+$)", "", peername[0])
+        address = re.sub(r"^::ffff:(?=\d+.\d+.\d+.\d+$)", "", peername[0])
         port = peername[1]

         self.wfile.write(str((address, port)).encode())