pyupgrade --py36-plus mitmproxy/**/*.py

This commit is contained in:
Thomas Kriechbaumer 2020-11-20 19:25:26 +01:00
parent 5b6d75614e
commit 38cca379df
133 changed files with 354 additions and 372 deletions

View File

@ -20,7 +20,7 @@ class MyAddon:
with open(path, "w+") as fp: with open(path, "w+") as fp:
for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]): for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
fp.write("%s: %s\n" % (cnt, dom)) fp.write(f"{cnt}: {dom}\n")
ctx.log.alert("done") ctx.log.alert("done")

View File

@ -17,4 +17,4 @@ with open(sys.argv[1], "rb") as logfile:
pp.pprint(f.get_state()) pp.pprint(f.get_state())
print("") print("")
except FlowReadException as e: except FlowReadException as e:
print("Flow file corrupted: {}".format(e)) print(f"Flow file corrupted: {e}")

View File

@ -12,6 +12,6 @@ from mitmproxy.script import concurrent
@concurrent # Remove this and see what happens @concurrent # Remove this and see what happens
def request(flow): def request(flow):
# This is ugly in mitmproxy's UI, but you don't want to use mitmproxy.ctx.log from a different thread. # This is ugly in mitmproxy's UI, but you don't want to use mitmproxy.ctx.log from a different thread.
print("handle request: %s%s" % (flow.request.host, flow.request.path)) print(f"handle request: {flow.request.host}{flow.request.path}")
time.sleep(5) time.sleep(5)
print("start request: %s%s" % (flow.request.host, flow.request.path)) print(f"start request: {flow.request.host}{flow.request.path}")

View File

@ -1,3 +1,2 @@
def request(flow): def request(flow):
flow.request.headers["myheader"] = "value" flow.request.headers["myheader"] = "value"

View File

@ -9,9 +9,9 @@ def websocket_message(flow):
# was the message sent from the client or server? # was the message sent from the client or server?
if message.from_client: if message.from_client:
ctx.log.info("Client sent a message: {}".format(message.content)) ctx.log.info(f"Client sent a message: {message.content}")
else: else:
ctx.log.info("Server sent a message: {}".format(message.content)) ctx.log.info(f"Server sent a message: {message.content}")
# manipulate the message content # manipulate the message content
message.content = re.sub(r'^Hello', 'HAPPY', message.content) message.content = re.sub(r'^Hello', 'HAPPY', message.content)

View File

@ -58,7 +58,7 @@ def monkey_dummy_cert(privkey, cacert, commonname, sans):
return Cert(cert) return Cert(cert)
class CheckSSLPinning(object): class CheckSSLPinning:
def load(self, loader): def load(self, loader):
loader.add_option( loader.add_option(
"certbeginon", bool, False, "certbeginon", bool, False,

View File

@ -31,14 +31,14 @@ class Wrapper:
return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state]) return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state])
def enable_proxy_for_service(self, service): def enable_proxy_for_service(self, service):
print('Enabling proxy on {}...'.format(service)) print(f'Enabling proxy on {service}...')
for subcommand in ['-setwebproxy', '-setsecurewebproxy']: for subcommand in ['-setwebproxy', '-setsecurewebproxy']:
self.run_networksetup_command( self.run_networksetup_command(
subcommand, service, '127.0.0.1', str( subcommand, service, '127.0.0.1', str(
self.port)) self.port))
def disable_proxy_for_service(self, service): def disable_proxy_for_service(self, service):
print('Disabling proxy on {}...'.format(service)) print(f'Disabling proxy on {service}...')
for subcommand in ['-setwebproxystate', '-setsecurewebproxystate']: for subcommand in ['-setwebproxystate', '-setsecurewebproxystate']:
self.run_networksetup_command(subcommand, service, 'Off') self.run_networksetup_command(subcommand, service, 'Off')
@ -48,7 +48,7 @@ class Wrapper:
r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$', r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$',
order, order,
re.MULTILINE) re.MULTILINE)
return dict([(b, a) for (a, b) in mapping]) return {b: a for (a, b) in mapping}
def run_command_with_input(self, command, input): def run_command_with_input(self, command, input):
popen = subprocess.Popen( popen = subprocess.Popen(

View File

@ -26,7 +26,7 @@ class TestHARDump:
tctx.configure(a, hardump=path) tctx.configure(a, hardump=path)
tctx.invoke(a, "response", self.flow()) tctx.invoke(a, "response", self.flow())
tctx.invoke(a, "done") tctx.invoke(a, "done")
with open(path, "r") as inp: with open(path) as inp:
har = json.load(inp) har = json.load(inp)
assert len(har["log"]["entries"]) == 1 assert len(har["log"]["entries"]) == 1
@ -40,7 +40,7 @@ class TestHARDump:
a, "response", self.flow(resp_content=b"foo" + b"\xFF" * 10) a, "response", self.flow(resp_content=b"foo" + b"\xFF" * 10)
) )
tctx.invoke(a, "done") tctx.invoke(a, "done")
with open(path, "r") as inp: with open(path) as inp:
har = json.load(inp) har = json.load(inp)
assert har["log"]["entries"][0]["response"]["content"]["encoding"] == "base64" assert har["log"]["entries"][0]["response"]["content"]["encoding"] == "base64"
@ -79,6 +79,6 @@ class TestHARDump:
tctx.invoke(a, "response", f) tctx.invoke(a, "response", f)
tctx.invoke(a, "done") tctx.invoke(a, "done")
with open(path, "r") as inp: with open(path) as inp:
har = json.load(inp) har = json.load(inp)
assert len(har["log"]["entries"]) == 1 assert len(har["log"]["entries"]) == 1

View File

@ -34,7 +34,7 @@ class TestJSONDump:
tctx.configure(a, dump_destination=path) tctx.configure(a, dump_destination=path)
tctx.invoke(a, "response", self.flow()) tctx.invoke(a, "response", self.flow())
tctx.invoke(a, "done") tctx.invoke(a, "done")
with open(path, "r") as inp: with open(path) as inp:
entry = json.loads(inp.readline()) entry = json.loads(inp.readline())
assert entry['response']['content'] == 'message' assert entry['response']['content'] == 'message'
@ -49,7 +49,7 @@ class TestJSONDump:
a, "response", self.flow(resp_content=content) a, "response", self.flow(resp_content=content)
) )
tctx.invoke(a, "done") tctx.invoke(a, "done")
with open(path, "r") as inp: with open(path) as inp:
entry = json.loads(inp.readline()) entry = json.loads(inp.readline())
assert entry['response']['content'] == base64.b64encode(content).decode('utf-8') assert entry['response']['content'] == base64.b64encode(content).decode('utf-8')

View File

@ -83,7 +83,7 @@ class ProbabilisticStrategy(_TlsStrategy):
def __init__(self, p): def __init__(self, p):
self.p = p self.p = p
super(ProbabilisticStrategy, self).__init__() super().__init__()
def should_intercept(self, server_address): def should_intercept(self, server_address):
return random.uniform(0, 1) < self.p return random.uniform(0, 1) < self.p
@ -99,7 +99,7 @@ class TlsFeedback(TlsLayer):
server_address = self.server_conn.address server_address = self.server_conn.address
try: try:
super(TlsFeedback, self)._establish_tls_with_client() super()._establish_tls_with_client()
except TlsProtocolException as e: except TlsProtocolException as e:
tls_strategy.record_failure(server_address) tls_strategy.record_failure(server_address)
raise e raise e

View File

@ -68,7 +68,7 @@ class MappingAddon:
self.filename = filename self.filename = filename
self.persistent = persistent self.persistent = persistent
self.logger = logging.getLogger(self.__class__.__name__) self.logger = logging.getLogger(self.__class__.__name__)
with open(filename, "r") as f: with open(filename) as f:
self.mapping_templates = URLDict.load(f) self.mapping_templates = URLDict.load(f)
def load(self, loader): def load(self, loader):
@ -84,7 +84,7 @@ class MappingAddon:
def configure(self, updated): def configure(self, updated):
if self.OPT_MAPPING_FILE in updated: if self.OPT_MAPPING_FILE in updated:
self.filename = updated[self.OPT_MAPPING_FILE] self.filename = updated[self.OPT_MAPPING_FILE]
with open(self.filename, "r") as f: with open(self.filename) as f:
self.mapping_templates = URLDict.load(f) self.mapping_templates = URLDict.load(f)
if self.OPT_MAP_PERSISTENT in updated: if self.OPT_MAP_PERSISTENT in updated:

View File

@ -125,7 +125,7 @@ class TestMappingAddon:
open_mock = mock.mock_open(read_data="{}") open_mock = mock.mock_open(read_data="{}")
with mock.patch("builtins.open", open_mock): with mock.patch("builtins.open", open_mock):
mapping.done() mapping.done()
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
results = tfile.read() results = tfile.read()
assert len(open_mock.mock_calls) != 0 assert len(open_mock.mock_calls) != 0
assert results == mapping_content assert results == mapping_content
@ -143,7 +143,7 @@ class TestMappingAddon:
mapping.response(f) mapping.response(f)
mapping.done() mapping.done()
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
results = tfile.read() results = tfile.read()
assert mapping_content in results assert mapping_content in results

View File

@ -26,7 +26,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile") tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile: with open(tmpfile, "w") as tfile:
tfile.write(input_file_content_error) tfile.write(input_file_content_error)
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
try: try:
URLDict.load(tfile) URLDict.load(tfile)
except ValueError: except ValueError:
@ -38,7 +38,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile") tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile: with open(tmpfile, "w") as tfile:
tfile.write(input_file_content) tfile.write(input_file_content)
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
urldict = URLDict.load(tfile) urldict = URLDict.load(tfile)
f = tflow.tflow(resp=tutils.tresp()) f = tflow.tflow(resp=tutils.tresp())
@ -68,7 +68,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile") tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile: with open(tmpfile, "w") as tfile:
tfile.write(input_file_content) tfile.write(input_file_content)
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
urldict = URLDict.load(tfile) urldict = URLDict.load(tfile)
dump = urldict.dumps() dump = urldict.dumps()
@ -79,11 +79,11 @@ class TestUrlDict:
outfile = tmpdir.join("outfile") outfile = tmpdir.join("outfile")
with open(tmpfile, "w") as tfile: with open(tmpfile, "w") as tfile:
tfile.write(input_file_content) tfile.write(input_file_content)
with open(tmpfile, "r") as tfile: with open(tmpfile) as tfile:
urldict = URLDict.load(tfile) urldict = URLDict.load(tfile)
with open(outfile, "w") as ofile: with open(outfile, "w") as ofile:
urldict.dump(ofile) urldict.dump(ofile)
with open(outfile, "r") as ofile: with open(outfile) as ofile:
output = ofile.read() output = ofile.read()
assert output == input_file_content assert output == input_file_content

View File

@ -89,7 +89,7 @@ class TestJSONUrlIndexWriter:
writer.add_url(f) writer.add_url(f)
writer.save() writer.save()
with open(tmpfile, "r") as results: with open(tmpfile) as results:
try: try:
content = json.load(results) content = json.load(results)
except JSONDecodeError: except JSONDecodeError:
@ -130,7 +130,7 @@ class TestTestUrlIndexWriter:
code = f.response.status_code code = f.response.status_code
writer.add_url(f) writer.add_url(f)
with open(tmpfile, "r") as results: with open(tmpfile) as results:
content = results.read() content = results.read()
assert url in content assert url in content
assert method in content assert method in content
@ -146,7 +146,7 @@ class TestTestUrlIndexWriter:
writer.add_url(f) writer.add_url(f)
writer.save() writer.save()
with open(tmpfile, "r") as results: with open(tmpfile) as results:
content = results.read() content = results.read()
assert url in content assert url in content
assert method in content assert method in content

View File

@ -157,7 +157,7 @@ class UrlInjectionAddon:
self.name = f"{self.__class__.__name__}-{injection_gen.__class__.__name__}-{self.__hash__()}" self.name = f"{self.__class__.__name__}-{injection_gen.__class__.__name__}-{self.__hash__()}"
self.flt = flowfilter.parse(flt) self.flt = flowfilter.parse(flt)
self.injection_gen = injection_gen self.injection_gen = injection_gen
with open(url_index_file, "r") as f: with open(url_index_file) as f:
self.url_store = json.load(f) self.url_store = json.load(f)
def response(self, flow: HTTPFlow): def response(self, flow: HTTPFlow):

View File

@ -59,20 +59,22 @@ FULL_PAYLOAD = FRONT_WALL + PAYLOAD + BACK_WALL
# - injection_point -> str # - injection_point -> str
# - exploit -> str # - exploit -> str
# - line -> str # - line -> str
XSSData = NamedTuple('XSSData', [('url', str), class XSSData(NamedTuple):
('injection_point', str), url: str
('exploit', str), injection_point: str
('line', str)]) exploit: str
line: str
# A SQLiData is named tuple with the following fields: # A SQLiData is named tuple with the following fields:
# - url -> str # - url -> str
# - injection_point -> str # - injection_point -> str
# - regex -> str # - regex -> str
# - dbms -> str # - dbms -> str
SQLiData = NamedTuple('SQLiData', [('url', str), class SQLiData(NamedTuple):
('injection_point', str), url: str
('regex', str), injection_point: str
('dbms', str)]) regex: str
dbms: str
VulnData = Tuple[Optional[XSSData], Optional[SQLiData]] VulnData = Tuple[Optional[XSSData], Optional[SQLiData]]

View File

@ -244,7 +244,7 @@ class AddonManager:
pass pass
else: else:
raise exceptions.AddonManagerError( raise exceptions.AddonManagerError(
"Addon handler {} ({}) not callable".format(name, a) f"Addon handler {name} ({a}) not callable"
) )
def trigger(self, name, *args, **kwargs): def trigger(self, name, *args, **kwargs):

View File

@ -53,7 +53,7 @@ class Browser:
[ [
cmd, cmd,
"--user-data-dir=%s" % str(self.tdir.name), "--user-data-dir=%s" % str(self.tdir.name),
"--proxy-server=%s:%s" % ( "--proxy-server={}:{}".format(
ctx.options.listen_host or "127.0.0.1", ctx.options.listen_host or "127.0.0.1",
ctx.options.listen_port ctx.options.listen_port
), ),

View File

@ -70,7 +70,7 @@ class Core:
client_certs = os.path.expanduser(opts.client_certs) client_certs = os.path.expanduser(opts.client_certs)
if not os.path.exists(client_certs): if not os.path.exists(client_certs):
raise exceptions.OptionsError( raise exceptions.OptionsError(
"Client certificate path does not exist: {}".format(opts.client_certs) f"Client certificate path does not exist: {opts.client_certs}"
) )
@command.command("set") @command.command("set")
@ -194,7 +194,7 @@ class Core:
req.url = val req.url = val
except ValueError as e: except ValueError as e:
raise exceptions.CommandError( raise exceptions.CommandError(
"URL %s is invalid: %s" % (repr(val), e) "URL {} is invalid: {}".format(repr(val), e)
) from e ) from e
else: else:
self.rupdate = False self.rupdate = False
@ -215,7 +215,7 @@ class Core:
updated.append(f) updated.append(f)
ctx.master.addons.trigger("update", updated) ctx.master.addons.trigger("update", updated)
ctx.log.alert("Set %s on %s flows." % (attr, len(updated))) ctx.log.alert("Set {} on {} flows.".format(attr, len(updated)))
@command.command("flow.decode") @command.command("flow.decode")
def decode(self, flows: typing.Sequence[flow.Flow], part: str) -> None: def decode(self, flows: typing.Sequence[flow.Flow], part: str) -> None:

View File

@ -112,7 +112,7 @@ class Cut:
[strutils.always_str(x) or "" for x in vals] # type: ignore [strutils.always_str(x) or "" for x in vals] # type: ignore
) )
ctx.log.alert("Saved %s cuts over %d flows as CSV." % (len(cuts), len(flows))) ctx.log.alert("Saved %s cuts over %d flows as CSV." % (len(cuts), len(flows)))
except IOError as e: except OSError as e:
ctx.log.error(str(e)) ctx.log.error(str(e))
@command.command("cut.clip") @command.command("cut.clip")

View File

@ -23,7 +23,7 @@ def indent(n: int, text: str) -> str:
def colorful(line, styles): def colorful(line, styles):
yield u" " # we can already indent here yield " " # we can already indent here
for (style, text) in line: for (style, text) in line:
yield click.style(text, **styles.get(style, {})) yield click.style(text, **styles.get(style, {}))
@ -115,8 +115,8 @@ class Dumper:
text=dict(fg="green") text=dict(fg="green")
) )
content = u"\r\n".join( content = "\r\n".join(
u"".join(colorful(line, styles)) for line in lines_to_echo "".join(colorful(line, styles)) for line in lines_to_echo
) )
if content: if content:
self.echo("") self.echo("")
@ -252,7 +252,7 @@ class Dumper:
if f.error: if f.error:
msg = strutils.escape_control_characters(f.error.msg) msg = strutils.escape_control_characters(f.error.msg)
self.echo(" << {}".format(msg), bold=True, fg="red") self.echo(f" << {msg}", bold=True, fg="red")
def match(self, f): def match(self, f):
if ctx.options.flow_detail == 0: if ctx.options.flow_detail == 0:

View File

@ -141,7 +141,7 @@ class Export():
fp.write(v) fp.write(v)
else: else:
fp.write(v.encode("utf-8")) fp.write(v.encode("utf-8"))
except IOError as e: except OSError as e:
ctx.log.error(str(e)) ctx.log.error(str(e))
@command.command("export.clip") @command.command("export.clip")

View File

@ -135,7 +135,7 @@ class MapLocal:
try: try:
contents = local_file.read_bytes() contents = local_file.read_bytes()
except IOError as e: except OSError as e:
ctx.log.warn(f"Could not read file: {e}") ctx.log.warn(f"Could not read file: {e}")
continue continue

View File

@ -43,7 +43,7 @@ class ModifyBody:
if spec.matches(flow): if spec.matches(flow):
try: try:
replacement = spec.read_replacement() replacement = spec.read_replacement()
except IOError as e: except OSError as e:
ctx.log.warn(f"Could not read replacement file: {e}") ctx.log.warn(f"Could not read replacement file: {e}")
continue continue
if flow.response: if flow.response:

View File

@ -42,7 +42,7 @@ def parse_modify_spec(option: str, subject_is_regex: bool) -> ModifySpec:
try: try:
spec.read_replacement() spec.read_replacement()
except IOError as e: except OSError as e:
raise ValueError(f"Invalid file path: {replacement[1:]} ({e})") raise ValueError(f"Invalid file path: {replacement[1:]} ({e})")
return spec return spec
@ -91,7 +91,7 @@ class ModifyHeaders:
if spec.matches(flow): if spec.matches(flow):
try: try:
replacement = spec.read_replacement() replacement = spec.read_replacement()
except IOError as e: except OSError as e:
ctx.log.warn(f"Could not read replacement file: {e}") ctx.log.warn(f"Could not read replacement file: {e}")
continue continue
else: else:

View File

@ -85,12 +85,12 @@ class ProxyAuth:
if self.is_proxy_auth(): if self.is_proxy_auth():
return http.make_error_response( return http.make_error_response(
status_codes.PROXY_AUTH_REQUIRED, status_codes.PROXY_AUTH_REQUIRED,
headers=mitmproxy.net.http.Headers(Proxy_Authenticate='Basic realm="{}"'.format(REALM)), headers=mitmproxy.net.http.Headers(Proxy_Authenticate=f'Basic realm="{REALM}"'),
) )
else: else:
return http.make_error_response( return http.make_error_response(
status_codes.UNAUTHORIZED, status_codes.UNAUTHORIZED,
headers=mitmproxy.net.http.Headers(WWW_Authenticate='Basic realm="{}"'.format(REALM)), headers=mitmproxy.net.http.Headers(WWW_Authenticate=f'Basic realm="{REALM}"'),
) )
def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]: def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]:

View File

@ -48,7 +48,7 @@ class ReadFile:
continue continue
await ctx.master.load_flow(flow) await ctx.master.load_flow(flow)
cnt += 1 cnt += 1
except (IOError, exceptions.FlowReadException) as e: except (OSError, exceptions.FlowReadException) as e:
if cnt: if cnt:
ctx.log.warn("Flow file corrupted - loaded %i flows." % cnt) ctx.log.warn("Flow file corrupted - loaded %i flows." % cnt)
else: else:
@ -62,8 +62,8 @@ class ReadFile:
try: try:
with open(path, "rb") as f: with open(path, "rb") as f:
return await self.load_flows(f) return await self.load_flows(f)
except IOError as e: except OSError as e:
ctx.log.error("Cannot load flows: {}".format(e)) ctx.log.error(f"Cannot load flows: {e}")
raise exceptions.FlowReadException(str(e)) from e raise exceptions.FlowReadException(str(e)) from e
async def doread(self, rfile): async def doread(self, rfile):

View File

@ -38,7 +38,7 @@ class Save:
def start_stream_to_path(self, path, flt): def start_stream_to_path(self, path, flt):
try: try:
f = self.open_file(path) f = self.open_file(path)
except IOError as v: except OSError as v:
raise exceptions.OptionsError(str(v)) raise exceptions.OptionsError(str(v))
self.stream = io.FilteredFlowWriter(f, flt) self.stream = io.FilteredFlowWriter(f, flt)
self.active_flows = set() self.active_flows = set()
@ -68,7 +68,7 @@ class Save:
""" """
try: try:
f = self.open_file(path) f = self.open_file(path)
except IOError as v: except OSError as v:
raise exceptions.CommandError(v) from v raise exceptions.CommandError(v) from v
stream = io.FlowWriter(f) stream = io.FlowWriter(f)
for i in flows: for i in flows:
@ -107,6 +107,6 @@ class Save:
if self.stream: if self.stream:
for f in self.active_flows: for f in self.active_flows:
self.stream.add(f) self.stream.add(f)
self.active_flows = set([]) self.active_flows = set()
self.stream.fo.close() self.stream.fo.close()
self.stream = None self.stream = None

View File

@ -51,7 +51,7 @@ def script_error_handler(path, exc, msg="", tb=False):
lineno = "" lineno = ""
if hasattr(exc, "lineno"): if hasattr(exc, "lineno"):
lineno = str(exc.lineno) lineno = str(exc.lineno)
log_msg = "in script {}:{} {}".format(path, lineno, exception) log_msg = f"in script {path}:{lineno} {exception}"
if tb: if tb:
etype, value, tback = sys.exc_info() etype, value, tback = sys.exc_info()
tback = addonmanager.cut_traceback(tback, "invoke_addon") tback = addonmanager.cut_traceback(tback, "invoke_addon")

View File

@ -17,7 +17,7 @@ from mitmproxy.exceptions import SessionLoadException, CommandError
from mitmproxy.utils.data import pkg_data from mitmproxy.utils.data import pkg_data
class KeyifyList(object): class KeyifyList:
def __init__(self, inner, key): def __init__(self, inner, key):
self.inner = inner self.inner = inner
self.key = key self.key = key
@ -87,7 +87,7 @@ class SessionDB:
def _create_session(self): def _create_session(self):
script_path = pkg_data.path("io/sql/session_create.sql") script_path = pkg_data.path("io/sql/session_create.sql")
with open(script_path, 'r') as qry: with open(script_path) as qry:
self.con.executescript(qry.read()) self.con.executescript(qry.read())
self.con.commit() self.con.commit()

View File

@ -479,7 +479,7 @@ class View(collections.abc.Sequence):
# get new flows each time. It would be more efficient to just have a # get new flows each time. It would be more efficient to just have a
# .newid() method or something. # .newid() method or something.
self.add([i.copy()]) self.add([i.copy()])
except IOError as e: except OSError as e:
ctx.log.error(e.strerror) ctx.log.error(e.strerror)
except exceptions.FlowReadException as e: except exceptions.FlowReadException as e:
ctx.log.error(str(e)) ctx.log.error(str(e))

View File

@ -159,7 +159,7 @@ class CommandManager:
self.add(o.command_name, o) self.add(o.command_name, o)
except exceptions.CommandError as e: except exceptions.CommandError as e:
self.master.log.warn( self.master.log.warn(
"Could not load command %s: %s" % (o.command_name, e) f"Could not load command {o.command_name}: {e}"
) )
def add(self, path: str, func: typing.Callable): def add(self, path: str, func: typing.Callable):

View File

@ -61,7 +61,7 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
def __repr__(self): def __repr__(self):
if self.tls_established: if self.tls_established:
tls = "[{}] ".format(self.tls_version) tls = f"[{self.tls_version}] "
else: else:
tls = "" tls = ""

View File

@ -113,7 +113,7 @@ def get_message_content_view(viewname, message, flow):
) )
if enc: if enc:
description = "{} {}".format(enc, description) description = f"{enc} {description}"
return description, lines, error return description, lines, error

View File

@ -12,7 +12,7 @@ class ViewAuto(base.View):
ctype = headers.get("content-type") ctype = headers.get("content-type")
if data and ctype: if data and ctype:
ct = http.parse_content_type(ctype) if ctype else None ct = http.parse_content_type(ctype) if ctype else None
ct = "%s/%s" % (ct[0], ct[1]) ct = "{}/{}".format(ct[0], ct[1])
if ct in contentviews.content_types_map: if ct in contentviews.content_types_map:
return contentviews.content_types_map[ct][0](data, **metadata) return contentviews.content_types_map[ct][0](data, **metadata)
elif strutils.is_xml(data): elif strutils.is_xml(data):

View File

@ -15,7 +15,7 @@ def parse_png(data: bytes) -> Metadata:
img = png.Png(KaitaiStream(io.BytesIO(data))) img = png.Png(KaitaiStream(io.BytesIO(data)))
parts = [ parts = [
('Format', 'Portable network graphics'), ('Format', 'Portable network graphics'),
('Size', "{0} x {1} px".format(img.ihdr.width, img.ihdr.height)) ('Size', f"{img.ihdr.width} x {img.ihdr.height} px")
] ]
for chunk in img.chunks: for chunk in img.chunks:
if chunk.type == 'gAMA': if chunk.type == 'gAMA':
@ -23,7 +23,7 @@ def parse_png(data: bytes) -> Metadata:
elif chunk.type == 'pHYs': elif chunk.type == 'pHYs':
aspectx = chunk.body.pixels_per_unit_x aspectx = chunk.body.pixels_per_unit_x
aspecty = chunk.body.pixels_per_unit_y aspecty = chunk.body.pixels_per_unit_y
parts.append(('aspect', "{0} x {1}".format(aspectx, aspecty))) parts.append(('aspect', f"{aspectx} x {aspecty}"))
elif chunk.type == 'tEXt': elif chunk.type == 'tEXt':
parts.append((chunk.body.keyword, chunk.body.text)) parts.append((chunk.body.keyword, chunk.body.text))
elif chunk.type == 'iTXt': elif chunk.type == 'iTXt':
@ -38,8 +38,8 @@ def parse_gif(data: bytes) -> Metadata:
descriptor = img.logical_screen_descriptor descriptor = img.logical_screen_descriptor
parts = [ parts = [
('Format', 'Compuserve GIF'), ('Format', 'Compuserve GIF'),
('Version', "GIF{}".format(img.hdr.version)), ('Version', f"GIF{img.hdr.version}"),
('Size', "{} x {} px".format(descriptor.screen_width, descriptor.screen_height)), ('Size', f"{descriptor.screen_width} x {descriptor.screen_height} px"),
('background', str(descriptor.bg_color_index)) ('background', str(descriptor.bg_color_index))
] ]
ext_blocks = [] ext_blocks = []
@ -66,10 +66,10 @@ def parse_jpeg(data: bytes) -> Metadata:
] ]
for segment in img.segments: for segment in img.segments:
if segment.marker._name_ == 'sof0': if segment.marker._name_ == 'sof0':
parts.append(('Size', "{0} x {1} px".format(segment.data.image_width, segment.data.image_height))) parts.append(('Size', f"{segment.data.image_width} x {segment.data.image_height} px"))
if segment.marker._name_ == 'app0': if segment.marker._name_ == 'app0':
parts.append(('jfif_version', "({0}, {1})".format(segment.data.version_major, segment.data.version_minor))) parts.append(('jfif_version', f"({segment.data.version_major}, {segment.data.version_minor})"))
parts.append(('jfif_density', "({0}, {1})".format(segment.data.density_x, segment.data.density_y))) parts.append(('jfif_density', f"({segment.data.density_x}, {segment.data.density_y})"))
parts.append(('jfif_unit', str(segment.data.density_units._value_))) parts.append(('jfif_unit', str(segment.data.density_units._value_)))
if segment.marker._name_ == 'com': if segment.marker._name_ == 'com':
parts.append(('comment', str(segment.data))) parts.append(('comment', str(segment.data)))

View File

@ -41,7 +41,7 @@ class ViewImage(base.View):
("Image Format", image_type or "unknown") ("Image Format", image_type or "unknown")
] ]
if image_type: if image_type:
view_name = "{} Image".format(image_type.upper()) view_name = f"{image_type.upper()} Image"
else: else:
view_name = "Unknown Image" view_name = "Unknown Image"
return view_name, base.format_dict(multidict.MultiDict(image_metadata)) return view_name, base.format_dict(multidict.MultiDict(image_metadata))

View File

@ -10,8 +10,7 @@ class ViewMultipart(base.View):
@staticmethod @staticmethod
def _format(v): def _format(v):
yield [("highlight", "Form data:\n")] yield [("highlight", "Form data:\n")]
for message in base.format_dict(multidict.MultiDict(v)): yield from base.format_dict(multidict.MultiDict(v))
yield message
def __call__(self, data, **metadata): def __call__(self, data, **metadata):
headers = metadata.get("headers", {}) headers = metadata.get("headers", {})

View File

@ -30,7 +30,7 @@ class Gif(KaitaiStruct):
self.hdr = self._root.Header(self._io, self, self._root) self.hdr = self._root.Header(self._io, self, self._root)
self.logical_screen_descriptor = self._root.LogicalScreenDescriptorStruct(self._io, self, self._root) self.logical_screen_descriptor = self._root.LogicalScreenDescriptorStruct(self._io, self, self._root)
if self.logical_screen_descriptor.has_color_table: if self.logical_screen_descriptor.has_color_table:
self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3)) self._raw_global_color_table = self._io.read_bytes(self.logical_screen_descriptor.color_table_size * 3)
io = KaitaiStream(BytesIO(self._raw_global_color_table)) io = KaitaiStream(BytesIO(self._raw_global_color_table))
self.global_color_table = self._root.ColorTable(io, self, self._root) self.global_color_table = self._root.ColorTable(io, self, self._root)
@ -99,7 +99,7 @@ class Gif(KaitaiStruct):
self.height = self._io.read_u2le() self.height = self._io.read_u2le()
self.flags = self._io.read_u1() self.flags = self._io.read_u1()
if self.has_color_table: if self.has_color_table:
self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3)) self._raw_local_color_table = self._io.read_bytes(self.color_table_size * 3)
io = KaitaiStream(BytesIO(self._raw_local_color_table)) io = KaitaiStream(BytesIO(self._raw_local_color_table))
self.local_color_table = self._root.ColorTable(io, self, self._root) self.local_color_table = self._root.ColorTable(io, self, self._root)
@ -168,7 +168,7 @@ class Gif(KaitaiStruct):
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70)) self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70))
self.version = (self._io.read_bytes(3)).decode(u"ASCII") self.version = (self._io.read_bytes(3)).decode("ASCII")
class ExtGraphicControl(KaitaiStruct): class ExtGraphicControl(KaitaiStruct):

View File

@ -94,7 +94,7 @@ class GoogleProtobuf(KaitaiStruct):
if hasattr(self, '_m_wire_type'): if hasattr(self, '_m_wire_type'):
return self._m_wire_type if hasattr(self, '_m_wire_type') else None return self._m_wire_type if hasattr(self, '_m_wire_type') else None
self._m_wire_type = self._root.Pair.WireTypes((self.key.value & 7)) self._m_wire_type = self._root.Pair.WireTypes(self.key.value & 7)
return self._m_wire_type if hasattr(self, '_m_wire_type') else None return self._m_wire_type if hasattr(self, '_m_wire_type') else None
@property @property

View File

@ -79,23 +79,23 @@ class Jpeg(KaitaiStruct):
if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) : if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
_on = self.marker _on = self.marker
if _on == self._root.Segment.MarkerEnum.sos: if _on == self._root.Segment.MarkerEnum.sos:
self._raw_data = self._io.read_bytes((self.length - 2)) self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data)) io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSos(io, self, self._root) self.data = self._root.SegmentSos(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app1: elif _on == self._root.Segment.MarkerEnum.app1:
self._raw_data = self._io.read_bytes((self.length - 2)) self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data)) io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp1(io, self, self._root) self.data = self._root.SegmentApp1(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.sof0: elif _on == self._root.Segment.MarkerEnum.sof0:
self._raw_data = self._io.read_bytes((self.length - 2)) self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data)) io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSof0(io, self, self._root) self.data = self._root.SegmentSof0(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app0: elif _on == self._root.Segment.MarkerEnum.app0:
self._raw_data = self._io.read_bytes((self.length - 2)) self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data)) io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp0(io, self, self._root) self.data = self._root.SegmentApp0(io, self, self._root)
else: else:
self.data = self._io.read_bytes((self.length - 2)) self.data = self._io.read_bytes(self.length - 2)
if self.marker == self._root.Segment.MarkerEnum.sos: if self.marker == self._root.Segment.MarkerEnum.sos:
self.image_data = self._io.read_bytes_full() self.image_data = self._io.read_bytes_full()
@ -131,9 +131,9 @@ class Jpeg(KaitaiStruct):
self._io = _io self._io = _io
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.magic = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII") self.magic = (self._io.read_bytes_term(0, False, True, True)).decode("ASCII")
_on = self.magic _on = self.magic
if _on == u"Exif": if _on == "Exif":
self.body = self._root.ExifInJpeg(self._io, self, self._root) self.body = self._root.ExifInJpeg(self._io, self, self._root)
@ -199,7 +199,7 @@ class Jpeg(KaitaiStruct):
self._io = _io self._io = _io
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.magic = (self._io.read_bytes(5)).decode(u"ASCII") self.magic = (self._io.read_bytes(5)).decode("ASCII")
self.version_major = self._io.read_u1() self.version_major = self._io.read_u1()
self.version_minor = self._io.read_u1() self.version_minor = self._io.read_u1()
self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1()) self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1())
@ -207,4 +207,4 @@ class Jpeg(KaitaiStruct):
self.density_y = self._io.read_u2be() self.density_y = self._io.read_u2be()
self.thumbnail_x = self._io.read_u1() self.thumbnail_x = self._io.read_u1()
self.thumbnail_y = self._io.read_u1() self.thumbnail_y = self._io.read_u1()
self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3)) self.thumbnail = self._io.read_bytes((self.thumbnail_x * self.thumbnail_y) * 3)

View File

@ -37,7 +37,7 @@ class Png(KaitaiStruct):
while True: while True:
_ = self._root.Chunk(self._io, self, self._root) _ = self._root.Chunk(self._io, self, self._root)
self.chunks.append(_) self.chunks.append(_)
if ((_.type == u"IEND") or (self._io.is_eof())) : if ((_.type == "IEND") or (self._io.is_eof())) :
break break
class Rgb(KaitaiStruct): class Rgb(KaitaiStruct):
@ -56,45 +56,45 @@ class Png(KaitaiStruct):
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.len = self._io.read_u4be() self.len = self._io.read_u4be()
self.type = (self._io.read_bytes(4)).decode(u"UTF-8") self.type = (self._io.read_bytes(4)).decode("UTF-8")
_on = self.type _on = self.type
if _on == u"iTXt": if _on == "iTXt":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.InternationalTextChunk(io, self, self._root) self.body = self._root.InternationalTextChunk(io, self, self._root)
elif _on == u"gAMA": elif _on == "gAMA":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.GamaChunk(io, self, self._root) self.body = self._root.GamaChunk(io, self, self._root)
elif _on == u"tIME": elif _on == "tIME":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TimeChunk(io, self, self._root) self.body = self._root.TimeChunk(io, self, self._root)
elif _on == u"PLTE": elif _on == "PLTE":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PlteChunk(io, self, self._root) self.body = self._root.PlteChunk(io, self, self._root)
elif _on == u"bKGD": elif _on == "bKGD":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.BkgdChunk(io, self, self._root) self.body = self._root.BkgdChunk(io, self, self._root)
elif _on == u"pHYs": elif _on == "pHYs":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PhysChunk(io, self, self._root) self.body = self._root.PhysChunk(io, self, self._root)
elif _on == u"tEXt": elif _on == "tEXt":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TextChunk(io, self, self._root) self.body = self._root.TextChunk(io, self, self._root)
elif _on == u"cHRM": elif _on == "cHRM":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.ChrmChunk(io, self, self._root) self.body = self._root.ChrmChunk(io, self, self._root)
elif _on == u"sRGB": elif _on == "sRGB":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.SrgbChunk(io, self, self._root) self.body = self._root.SrgbChunk(io, self, self._root)
elif _on == u"zTXt": elif _on == "zTXt":
self._raw_body = self._io.read_bytes(self.len) self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body)) io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.CompressedTextChunk(io, self, self._root) self.body = self._root.CompressedTextChunk(io, self, self._root)
@ -199,7 +199,7 @@ class Png(KaitaiStruct):
self._io = _io self._io = _io
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8") self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.compression_method = self._io.read_u1() self.compression_method = self._io.read_u1()
self._raw_text_datastream = self._io.read_bytes_full() self._raw_text_datastream = self._io.read_bytes_full()
self.text_datastream = zlib.decompress(self._raw_text_datastream) self.text_datastream = zlib.decompress(self._raw_text_datastream)
@ -264,12 +264,12 @@ class Png(KaitaiStruct):
self._io = _io self._io = _io
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8") self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.compression_flag = self._io.read_u1() self.compression_flag = self._io.read_u1()
self.compression_method = self._io.read_u1() self.compression_method = self._io.read_u1()
self.language_tag = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII") self.language_tag = (self._io.read_bytes_term(0, False, True, True)).decode("ASCII")
self.translated_keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8") self.translated_keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.text = (self._io.read_bytes_full()).decode(u"UTF-8") self.text = (self._io.read_bytes_full()).decode("UTF-8")
class TextChunk(KaitaiStruct): class TextChunk(KaitaiStruct):
@ -277,8 +277,8 @@ class Png(KaitaiStruct):
self._io = _io self._io = _io
self._parent = _parent self._parent = _parent
self._root = _root if _root else self self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"iso8859-1") self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("iso8859-1")
self.text = (self._io.read_bytes_full()).decode(u"iso8859-1") self.text = (self._io.read_bytes_full()).decode("iso8859-1")
class TimeChunk(KaitaiStruct): class TimeChunk(KaitaiStruct):

View File

@ -41,7 +41,7 @@ class ASCommandResponse:
raise ValueError("Empty WBXML body passed") raise ValueError("Empty WBXML body passed")
except Exception as e: except Exception as e:
self.xmlString = None self.xmlString = None
raise ValueError("Error: {0}".format(e)) raise ValueError(f"Error: {e}")
def getWBXMLBytes(self): def getWBXMLBytes(self):
return self.wbxmlBytes return self.wbxmlBytes

View File

@ -861,7 +861,7 @@ class ASWBXML:
if (newCodePage >= 0 and newCodePage < 25): if (newCodePage >= 0 and newCodePage < 25):
self.currentCodePage = newCodePage self.currentCodePage = newCodePage
else: else:
raise InvalidDataException("Unknown code page ID 0x{0:X} encountered in WBXML".format(currentByte)) raise InvalidDataException(f"Unknown code page ID 0x{currentByte:X} encountered in WBXML")
elif ( currentByte == GlobalTokens.END ): elif ( currentByte == GlobalTokens.END ):
if (currentNode != None and currentNode.parentNode != None): if (currentNode != None and currentNode.parentNode != None):
currentNode = currentNode.parentNode currentNode = currentNode.parentNode
@ -878,14 +878,14 @@ class ASWBXML:
currentNode.appendChild(newTextNode) currentNode.appendChild(newTextNode)
elif ( currentByte in unusedArray): elif ( currentByte in unusedArray):
raise InvalidDataException("Encountered unknown global token 0x{0:X}.".format(currentByte)) raise InvalidDataException(f"Encountered unknown global token 0x{currentByte:X}.")
else: else:
hasAttributes = (currentByte & 0x80) > 0 hasAttributes = (currentByte & 0x80) > 0
hasContent = (currentByte & 0x40) > 0 hasContent = (currentByte & 0x40) > 0
token = currentByte & 0x3F token = currentByte & 0x3F
if (hasAttributes): if (hasAttributes):
raise InvalidDataException("Token 0x{0:X} has attributes.".format(token)) raise InvalidDataException(f"Token 0x{token:X} has attributes.")
strTag = self.codePages[self.currentCodePage].getTag(token) strTag = self.codePages[self.currentCodePage].getTag(token)
if (strTag == None): if (strTag == None):

View File

@ -52,7 +52,7 @@ class ASWBXMLByteQueue(Queue):
def dequeueAndLog(self): def dequeueAndLog(self):
singleByte = self.get() singleByte = self.get()
self.bytesDequeued += 1 self.bytesDequeued += 1
logging.debug("Dequeued byte 0x{0:X} ({1} total)".format(singleByte, self.bytesDequeued)) logging.debug(f"Dequeued byte 0x{singleByte:X} ({self.bytesDequeued} total)")
return singleByte return singleByte
""" """

View File

@ -89,7 +89,7 @@ class Reply:
""" """
if self.state != "start": if self.state != "start":
raise exceptions.ControlException( raise exceptions.ControlException(
"Reply is {}, but expected it to be start.".format(self.state) f"Reply is {self.state}, but expected it to be start."
) )
self._state = "taken" self._state = "taken"
@ -101,7 +101,7 @@ class Reply:
""" """
if self.state != "taken": if self.state != "taken":
raise exceptions.ControlException( raise exceptions.ControlException(
"Reply is {}, but expected it to be taken.".format(self.state) f"Reply is {self.state}, but expected it to be taken."
) )
if not self.has_message: if not self.has_message:
raise exceptions.ControlException("There is no reply message.") raise exceptions.ControlException("There is no reply message.")
@ -119,7 +119,7 @@ class Reply:
def send(self, msg, force=False): def send(self, msg, force=False):
if self.state not in {"start", "taken"}: if self.state not in {"start", "taken"}:
raise exceptions.ControlException( raise exceptions.ControlException(
"Reply is {}, but expected it to be start or taken.".format(self.state) f"Reply is {self.state}, but expected it to be start or taken."
) )
if self.has_message and not force: if self.has_message and not force:
raise exceptions.ControlException("There is already a reply message.") raise exceptions.ControlException("There is already a reply message.")

View File

@ -1,5 +1,3 @@
class BiDi: class BiDi:
""" """

View File

@ -60,7 +60,7 @@ class _MultiDict(MutableMapping, metaclass=ABCMeta):
yield key yield key
def __len__(self): def __len__(self):
return len(set(self._kconv(key) for key, _ in self.fields)) return len({self._kconv(key) for key, _ in self.fields})
def __eq__(self, other): def __eq__(self, other):
if isinstance(other, MultiDict): if isinstance(other, MultiDict):

View File

@ -90,6 +90,6 @@ def iterate(f: flow.Flow) -> TEventGenerator:
try: try:
e = _iterate_map[type(f)] e = _iterate_map[type(f)]
except KeyError as err: except KeyError as err:
raise TypeError("Unknown flow type: {}".format(f)) from err raise TypeError(f"Unknown flow type: {f}") from err
else: else:
yield from e(f) yield from e(f)

View File

@ -46,7 +46,7 @@ class HTTPFlow(flow.Flow):
s = "<HTTPFlow" s = "<HTTPFlow"
for a in ("request", "response", "error", "client_conn", "server_conn"): for a in ("request", "response", "error", "client_conn", "server_conn"):
if getattr(self, a, False): if getattr(self, a, False):
s += "\r\n %s = {flow.%s}" % (a, a) s += f"\r\n {a} = {{flow.{a}}}"
s += ">" s += ">"
return s.format(flow=self) return s.format(flow=self)

View File

@ -1,4 +1,3 @@
from .io import FlowWriter, FlowReader, FilteredFlowWriter, read_flows_from_paths from .io import FlowWriter, FlowReader, FilteredFlowWriter, read_flows_from_paths
from .db import DBHandler from .db import DBHandler

View File

@ -36,5 +36,5 @@ class DBHandler:
flows = [] flows = []
self._c.execute('SELECT pbuf_blob FROM FLOWS') self._c.execute('SELECT pbuf_blob FROM FLOWS')
for row in self._c.fetchall(): for row in self._c.fetchall():
flows.append((protobuf.loads(row[0]))) flows.append(protobuf.loads(row[0]))
return flows return flows

View File

@ -82,6 +82,6 @@ def read_flows_from_paths(paths):
path = os.path.expanduser(path) path = os.path.expanduser(path)
with open(path, "rb") as f: with open(path, "rb") as f:
flows.extend(FlowReader(f).stream()) flows.extend(FlowReader(f).stream())
except IOError as e: except OSError as e:
raise exceptions.FlowReadException(e.strerror) raise exceptions.FlowReadException(e.strerror)
return flows return flows

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT! # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: http.proto # source: http.proto
"""Generated protocol buffer code.""" """Generated protocol buffer code."""

View File

@ -222,7 +222,7 @@ def parse(data_type: int, data: bytes) -> TSerializable:
val, data = pop(data) val, data = pop(data)
d[key] = val # type: ignore d[key] = val # type: ignore
return d return d
raise ValueError("unknown type tag: {}".format(data_type)) raise ValueError(f"unknown type tag: {data_type}")
def pop(data: bytes) -> typing.Tuple[TSerializable, bytes]: def pop(data: bytes) -> typing.Tuple[TSerializable, bytes]:
@ -242,7 +242,7 @@ def pop(data: bytes) -> typing.Tuple[TSerializable, bytes]:
except IndexError: except IndexError:
# This fires if len(data) < dlen, meaning we don't need # This fires if len(data) < dlen, meaning we don't need
# to further validate that data is the right length. # to further validate that data is the right length.
raise ValueError("not a tnetstring: invalid length prefix: {}".format(length)) raise ValueError(f"not a tnetstring: invalid length prefix: {length}")
# Parse the data based on the type tag. # Parse the data based on the type tag.
return parse(data_type, data), remain return parse(data_type, data), remain

View File

@ -12,7 +12,7 @@ class LogEntry:
return False return False
def __repr__(self): def __repr__(self):
return "LogEntry({}, {})".format(self.msg, self.level) return f"LogEntry({self.msg}, {self.level})"
class Log: class Log:

View File

@ -201,7 +201,7 @@ def _format_pairs(pairs, specials=(), sep="; "):
if k.lower() not in specials and _has_special(v): if k.lower() not in specials and _has_special(v):
v = ESCAPE.sub(r"\\\1", v) v = ESCAPE.sub(r"\\\1", v)
v = '"%s"' % v v = '"%s"' % v
vals.append("%s=%s" % (k, v)) vals.append(f"{k}={v}")
return sep.join(vals) return sep.join(vals)

View File

@ -178,9 +178,9 @@ def parse_content_type(c: str) -> Optional[Tuple[str, str, Dict[str, str]]]:
def assemble_content_type(type, subtype, parameters): def assemble_content_type(type, subtype, parameters):
if not parameters: if not parameters:
return "{}/{}".format(type, subtype) return f"{type}/{subtype}"
params = "; ".join( params = "; ".join(
"{}={}".format(k, v) f"{k}={v}"
for k, v in parameters.items() for k, v in parameters.items()
) )
return "{}/{}; {}".format( return "{}/{}; {}".format(

View File

@ -124,8 +124,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
max_chunk_size = limit max_chunk_size = limit
if expected_size is None: if expected_size is None:
for x in _read_chunked(rfile, limit): yield from _read_chunked(rfile, limit)
yield x
elif expected_size >= 0: elif expected_size >= 0:
if limit is not None and expected_size > limit: if limit is not None and expected_size > limit:
raise exceptions.HttpException( raise exceptions.HttpException(
@ -151,7 +150,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
bytes_left -= chunk_size bytes_left -= chunk_size
not_done = rfile.read(1) not_done = rfile.read(1)
if not_done: if not_done:
raise exceptions.HttpException("HTTP body too large. Limit is {}.".format(limit)) raise exceptions.HttpException(f"HTTP body too large. Limit is {limit}.")
def connection_close(http_version, headers): def connection_close(http_version, headers):
@ -291,14 +290,14 @@ def _read_response_line(rfile):
_check_http_version(http_version) _check_http_version(http_version)
except ValueError: except ValueError:
raise exceptions.HttpSyntaxException("Bad HTTP response line: {}".format(line)) raise exceptions.HttpSyntaxException(f"Bad HTTP response line: {line}")
return http_version, status_code, message return http_version, status_code, message
def _check_http_version(http_version): def _check_http_version(http_version):
if not re.match(br"^HTTP/\d\.\d$", http_version): if not re.match(br"^HTTP/\d\.\d$", http_version):
raise exceptions.HttpSyntaxException("Unknown HTTP version: {}".format(http_version)) raise exceptions.HttpSyntaxException(f"Unknown HTTP version: {http_version}")
def _read_headers(rfile): def _read_headers(rfile):
@ -354,7 +353,7 @@ def _read_chunked(rfile, limit=sys.maxsize):
try: try:
length = int(line, 16) length = int(line, 16)
except ValueError: except ValueError:
raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line)) raise exceptions.HttpSyntaxException(f"Invalid chunked encoding length: {line}")
total += length total += length
if total > limit: if total > limit:
raise exceptions.HttpException( raise exceptions.HttpException(

View File

@ -134,7 +134,7 @@ class Message(serializable.Serializable):
content = encoding.decode(self.raw_content, ce) content = encoding.decode(self.raw_content, ce)
# A client may illegally specify a byte -> str encoding here (e.g. utf8) # A client may illegally specify a byte -> str encoding here (e.g. utf8)
if isinstance(content, str): if isinstance(content, str):
raise ValueError("Invalid Content-Encoding: {}".format(ce)) raise ValueError(f"Invalid Content-Encoding: {ce}")
return content return content
except ValueError: except ValueError:
if strict: if strict:

View File

@ -39,19 +39,19 @@ def parse(server_spec: str) -> ServerSpec:
""" """
m = server_spec_re.match(server_spec) m = server_spec_re.match(server_spec)
if not m: if not m:
raise ValueError("Invalid server specification: {}".format(server_spec)) raise ValueError(f"Invalid server specification: {server_spec}")
# defaulting to https/port 443 may annoy some folks, but it's secure-by-default. # defaulting to https/port 443 may annoy some folks, but it's secure-by-default.
scheme = m.group("scheme") or "https" scheme = m.group("scheme") or "https"
if scheme not in ("http", "https"): if scheme not in ("http", "https"):
raise ValueError("Invalid server scheme: {}".format(scheme)) raise ValueError(f"Invalid server scheme: {scheme}")
host = m.group("host") host = m.group("host")
# IPv6 brackets # IPv6 brackets
if host.startswith("[") and host.endswith("]"): if host.startswith("[") and host.endswith("]"):
host = host[1:-1] host = host[1:-1]
if not check.is_valid_host(host.encode("idna")): if not check.is_valid_host(host.encode("idna")):
raise ValueError("Invalid hostname: {}".format(host)) raise ValueError(f"Invalid hostname: {host}")
if m.group("port"): if m.group("port"):
port = int(m.group("port")) port = int(m.group("port"))
@ -61,7 +61,7 @@ def parse(server_spec: str) -> ServerSpec:
"https": 443 "https": 443
}[scheme] }[scheme]
if not check.is_valid_port(port): if not check.is_valid_port(port):
raise ValueError("Invalid port: {}".format(port)) raise ValueError(f"Invalid port: {port}")
return ServerSpec(scheme, (host, port)) return ServerSpec(scheme, (host, port))

View File

@ -80,7 +80,7 @@ class Writer(_FileLike):
if hasattr(self.o, "flush"): if hasattr(self.o, "flush"):
try: try:
self.o.flush() self.o.flush()
except (socket.error, IOError) as v: except OSError as v:
raise exceptions.TcpDisconnect(str(v)) raise exceptions.TcpDisconnect(str(v))
def write(self, v): def write(self, v):
@ -97,7 +97,7 @@ class Writer(_FileLike):
r = self.o.write(v) r = self.o.write(v)
self.add_log(v[:r]) self.add_log(v[:r])
return r return r
except (SSL.Error, socket.error) as e: except (SSL.Error, OSError) as e:
raise exceptions.TcpDisconnect(str(e)) raise exceptions.TcpDisconnect(str(e))
@ -134,7 +134,7 @@ class Reader(_FileLike):
raise exceptions.TcpTimeout() raise exceptions.TcpTimeout()
except socket.timeout: except socket.timeout:
raise exceptions.TcpTimeout() raise exceptions.TcpTimeout()
except socket.error as e: except OSError as e:
raise exceptions.TcpDisconnect(str(e)) raise exceptions.TcpDisconnect(str(e))
except SSL.SysCallError as e: except SSL.SysCallError as e:
if e.args == (-1, 'Unexpected EOF'): if e.args == (-1, 'Unexpected EOF'):
@ -178,7 +178,7 @@ class Reader(_FileLike):
raise exceptions.TcpDisconnect() raise exceptions.TcpDisconnect()
else: else:
raise exceptions.TcpReadIncomplete( raise exceptions.TcpReadIncomplete(
"Expected %s bytes, got %s" % (length, len(result)) "Expected {} bytes, got {}".format(length, len(result))
) )
return result return result
@ -197,7 +197,7 @@ class Reader(_FileLike):
if isinstance(self.o, socket_fileobject): if isinstance(self.o, socket_fileobject):
try: try:
return self.o._sock.recv(length, socket.MSG_PEEK) return self.o._sock.recv(length, socket.MSG_PEEK)
except socket.error as e: except OSError as e:
raise exceptions.TcpException(repr(e)) raise exceptions.TcpException(repr(e))
elif isinstance(self.o, SSL.Connection): elif isinstance(self.o, SSL.Connection):
try: try:
@ -268,7 +268,7 @@ def close_socket(sock):
# Now we can close the other half as well. # Now we can close the other half as well.
sock.shutdown(socket.SHUT_RD) sock.shutdown(socket.SHUT_RD)
except socket.error: except OSError:
pass pass
sock.close() sock.close()
@ -442,7 +442,7 @@ class TCPClient(_Connection):
sock.connect(sa) sock.connect(sa)
return sock return sock
except socket.error as _: except OSError as _:
err = _ err = _
if sock is not None: if sock is not None:
sock.close() sock.close()
@ -450,12 +450,12 @@ class TCPClient(_Connection):
if err is not None: if err is not None:
raise err raise err
else: else:
raise socket.error("getaddrinfo returns an empty list") # pragma: no cover raise OSError("getaddrinfo returns an empty list") # pragma: no cover
def connect(self): def connect(self):
try: try:
connection = self.create_connection() connection = self.create_connection()
except (socket.error, IOError) as err: except OSError as err:
raise exceptions.TcpException( raise exceptions.TcpException(
'Error connecting to "%s": %s' % 'Error connecting to "%s": %s' %
(self.address[0], err) (self.address[0], err)
@ -555,7 +555,7 @@ class TCPServer:
self.__shutdown_request = False self.__shutdown_request = False
if self.address[0] == 'localhost': if self.address[0] == 'localhost':
raise socket.error("Binding to 'localhost' is prohibited. Please use '::1' or '127.0.0.1' directly.") raise OSError("Binding to 'localhost' is prohibited. Please use '::1' or '127.0.0.1' directly.")
self.socket = None self.socket = None
@ -568,7 +568,7 @@ class TCPServer:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) self.socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
self.socket.bind(self.address) self.socket.bind(self.address)
except socket.error: except OSError:
if self.socket: if self.socket:
self.socket.close() self.socket.close()
self.socket = None self.socket = None
@ -580,7 +580,7 @@ class TCPServer:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.socket.bind(self.address) self.socket.bind(self.address)
except socket.error: except OSError:
if self.socket: if self.socket:
self.socket.close() self.socket.close()
self.socket = None self.socket = None
@ -620,7 +620,7 @@ class TCPServer:
if self.socket in r: if self.socket in r:
connection, client_address = self.socket.accept() connection, client_address = self.socket.accept()
t = basethread.BaseThread( t = basethread.BaseThread(
"TCPConnectionHandler (%s: %s:%s -> %s:%s)" % ( "TCPConnectionHandler ({}: {}:{} -> {}:{})".format(
self.__class__.__name__, self.__class__.__name__,
client_address[0], client_address[0],
client_address[1], client_address[1],
@ -654,11 +654,11 @@ class TCPServer:
# none. # none.
if traceback: if traceback:
exc = str(traceback.format_exc()) exc = str(traceback.format_exc())
print(u'-' * 40, file=fp) print('-' * 40, file=fp)
print( print(
u"Error in processing of request from %s" % repr(client_address), file=fp) "Error in processing of request from %s" % repr(client_address), file=fp)
print(exc, file=fp) print(exc, file=fp)
print(u'-' * 40, file=fp) print('-' * 40, file=fp)
def handle_client_connection(self, conn, client_address): # pragma: no cover def handle_client_connection(self, conn, client_address): # pragma: no cover
""" """

View File

@ -40,7 +40,7 @@ class _Option:
self.choices = choices self.choices = choices
def __repr__(self): def __repr__(self):
return "{value} [{type}]".format(value=self.current(), type=self.typespec) return f"{self.current()} [{self.typespec}]"
@property @property
def default(self): def default(self):
@ -517,18 +517,18 @@ def load_paths(opts: OptManager, *paths: str) -> None:
for p in paths: for p in paths:
p = os.path.expanduser(p) p = os.path.expanduser(p)
if os.path.exists(p) and os.path.isfile(p): if os.path.exists(p) and os.path.isfile(p):
with open(p, "rt", encoding="utf8") as f: with open(p, encoding="utf8") as f:
try: try:
txt = f.read() txt = f.read()
except UnicodeDecodeError as e: except UnicodeDecodeError as e:
raise exceptions.OptionsError( raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e) f"Error reading {p}: {e}"
) )
try: try:
load(opts, txt) load(opts, txt)
except exceptions.OptionsError as e: except exceptions.OptionsError as e:
raise exceptions.OptionsError( raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e) f"Error reading {p}: {e}"
) )
@ -563,12 +563,12 @@ def save(opts: OptManager, path: str, defaults: bool =False) -> None:
""" """
path = os.path.expanduser(path) path = os.path.expanduser(path)
if os.path.exists(path) and os.path.isfile(path): if os.path.exists(path) and os.path.isfile(path):
with open(path, "rt", encoding="utf8") as f: with open(path, encoding="utf8") as f:
try: try:
data = f.read() data = f.read()
except UnicodeDecodeError as e: except UnicodeDecodeError as e:
raise exceptions.OptionsError( raise exceptions.OptionsError(
"Error trying to modify %s: %s" % (path, e) f"Error trying to modify {path}: {e}"
) )
else: else:
data = "" data = ""

View File

@ -1,3 +1,2 @@
def original_addr(csock): def original_addr(csock):
return csock.getsockname() return csock.getsockname()

View File

@ -15,10 +15,10 @@ def lookup(address, port, s):
s = s.decode() s = s.decode()
# ALL tcp 192.168.1.13:57474 -> 23.205.82.58:443 ESTABLISHED:ESTABLISHED # ALL tcp 192.168.1.13:57474 -> 23.205.82.58:443 ESTABLISHED:ESTABLISHED
specv4 = "%s:%s" % (address, port) specv4 = f"{address}:{port}"
# ALL tcp 2a01:e35:8bae:50f0:9d9b:ef0d:2de3:b733[58505] -> 2606:4700:30::681f:4ad0[443] ESTABLISHED:ESTABLISHED # ALL tcp 2a01:e35:8bae:50f0:9d9b:ef0d:2de3:b733[58505] -> 2606:4700:30::681f:4ad0[443] ESTABLISHED:ESTABLISHED
specv6 = "%s[%s]" % (address, port) specv6 = f"{address}[{port}]"
for i in s.split("\n"): for i in s.split("\n"):
if "ESTABLISHED:ESTABLISHED" in i and specv4 in i: if "ESTABLISHED:ESTABLISHED" in i and specv4 in i:

View File

@ -68,7 +68,7 @@ class Resolver:
if addr is None: if addr is None:
raise RuntimeError("Cannot resolve original destination.") raise RuntimeError("Cannot resolve original destination.")
return tuple(addr) return tuple(addr)
except (EOFError, socket.error): except (EOFError, OSError):
self._connect() self._connect()
return self.original_addr(csock) return self.original_addr(csock)
@ -91,7 +91,7 @@ class APIRequestHandler(socketserver.StreamRequestHandler):
except KeyError: except KeyError:
server = None server = None
write(server, self.wfile) write(server, self.wfile)
except (EOFError, socket.error): except (EOFError, OSError):
pass pass
@ -288,7 +288,7 @@ class Redirect(threading.Thread):
while True: while True:
try: try:
packet = self.windivert.recv() packet = self.windivert.recv()
except WindowsError as e: except OSError as e:
if e.winerror == 995: if e.winerror == 995:
return return
else: else:
@ -306,7 +306,7 @@ class Redirect(threading.Thread):
""" """
try: try:
return self.windivert.recv() return self.windivert.recv()
except WindowsError as e: except OSError as e:
if e.winerror == 995: if e.winerror == 995:
return None return None
else: else:

View File

@ -101,8 +101,8 @@ class UpstreamConnectLayer(base.Layer):
def _send_connect_request(self): def _send_connect_request(self):
self.log("Sending CONNECT request", "debug", [ self.log("Sending CONNECT request", "debug", [
"Proxy Server: {}".format(self.ctx.server_conn.address), f"Proxy Server: {self.ctx.server_conn.address}",
"Connect to: {}:{}".format(self.connect_request.host, self.connect_request.port) f"Connect to: {self.connect_request.host}:{self.connect_request.port}"
]) ])
self.send_request(self.connect_request) self.send_request(self.connect_request)
resp = self.read_response(self.connect_request) resp = self.read_response(self.connect_request)
@ -157,15 +157,15 @@ def validate_request_form(mode, request):
if request.is_http2 and mode is HTTPMode.transparent and request.first_line_format == "absolute": if request.is_http2 and mode is HTTPMode.transparent and request.first_line_format == "absolute":
return # dirty hack: h2 may have authority info. will be fixed properly with sans-io. return # dirty hack: h2 may have authority info. will be fixed properly with sans-io.
if mode == HTTPMode.transparent: if mode == HTTPMode.transparent:
err_message = textwrap.dedent(( err_message = textwrap.dedent(
""" """
Mitmproxy received an {} request even though it is not running Mitmproxy received an {} request even though it is not running
in regular mode. This usually indicates a misconfiguration, in regular mode. This usually indicates a misconfiguration,
please see the mitmproxy mode documentation for details. please see the mitmproxy mode documentation for details.
""" """
)).strip().format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form") ).strip().format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form")
else: else:
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % ( err_message = "Invalid HTTP request form (expected: {}, got: {})".format(
" or ".join(allowed_request_forms), request.first_line_format " or ".join(allowed_request_forms), request.first_line_format
) )
raise exceptions.HttpException(err_message) raise exceptions.HttpException(err_message)
@ -313,7 +313,7 @@ class HttpLayer(base.Layer):
self.log( self.log(
"request", "request",
"warn", "warn",
["HTTP protocol error in client request: {}".format(e)] [f"HTTP protocol error in client request: {e}"]
) )
return False return False
@ -499,7 +499,7 @@ class HttpLayer(base.Layer):
response = http.make_error_response(code, message, headers) response = http.make_error_response(code, message, headers)
self.send_response(response) self.send_response(response)
except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException): except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException):
self.log("Failed to send error response to client: {}".format(message), "debug") self.log(f"Failed to send error response to client: {message}", "debug")
def change_upstream_proxy_server(self, address): def change_upstream_proxy_server(self, address):
# Make set_upstream_proxy_server always available, # Make set_upstream_proxy_server always available,

View File

@ -206,7 +206,7 @@ class Http2Layer(base.Layer):
event.stream_id, event.stream_id,
h2.errors.ErrorCodes.REFUSED_STREAM h2.errors.ErrorCodes.REFUSED_STREAM
) )
self.log("HTTP body too large. Limit is {}.".format(bsl), "info") self.log(f"HTTP body too large. Limit is {bsl}.", "info")
else: else:
self.streams[eid].data_queue.put(event.data) self.streams[eid].data_queue.put(event.data)
self.streams[eid].queued_data_length += len(event.data) self.streams[eid].queued_data_length += len(event.data)
@ -240,7 +240,7 @@ class Http2Layer(base.Layer):
return True return True
def _handle_remote_settings_changed(self, event, other_conn): def _handle_remote_settings_changed(self, event, other_conn):
new_settings = dict([(key, cs.new_value) for (key, cs) in event.changed_settings.items()]) new_settings = {key: cs.new_value for (key, cs) in event.changed_settings.items()}
self.connections[other_conn].safe_update_settings(new_settings) self.connections[other_conn].safe_update_settings(new_settings)
return True return True
@ -410,7 +410,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
def __init__(self, ctx, h2_connection, stream_id: int, request_headers: mitmproxy.net.http.Headers) -> None: def __init__(self, ctx, h2_connection, stream_id: int, request_headers: mitmproxy.net.http.Headers) -> None:
super().__init__( super().__init__(
ctx, name="Http2SingleStreamLayer-{}".format(stream_id) ctx, name=f"Http2SingleStreamLayer-{stream_id}"
) )
self.h2_connection = h2_connection self.h2_connection = h2_connection
self.zombie: Optional[float] = None self.zombie: Optional[float] = None
@ -497,7 +497,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
if self.zombie is not None or connection_closed: if self.zombie is not None or connection_closed:
if pre_command is not None: if pre_command is not None:
pre_command() pre_command()
raise exceptions.Http2ZombieException("Connection or stream already dead: {}, {}".format(self.zombie, connection_closed)) raise exceptions.Http2ZombieException(f"Connection or stream already dead: {self.zombie}, {connection_closed}")
@detect_zombie_stream @detect_zombie_stream
def read_request_headers(self, flow): def read_request_headers(self, flow):
@ -713,7 +713,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
) )
def __call__(self): # pragma: no cover def __call__(self): # pragma: no cover
raise EnvironmentError('Http2SingleStreamLayer must be run as thread') raise OSError('Http2SingleStreamLayer must be run as thread')
def run(self): def run(self):
layer = httpbase.HttpLayer(self, self.mode) layer = httpbase.HttpLayer(self, self.mode)
@ -726,7 +726,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
except exceptions.ProtocolException as e: # pragma: no cover except exceptions.ProtocolException as e: # pragma: no cover
self.log(repr(e), "info") self.log(repr(e), "info")
except exceptions.SetServerNotAllowedException as e: # pragma: no cover except exceptions.SetServerNotAllowedException as e: # pragma: no cover
self.log("Changing the Host server for HTTP/2 connections not allowed: {}".format(e), "info") self.log(f"Changing the Host server for HTTP/2 connections not allowed: {e}", "info")
except exceptions.Kill: # pragma: no cover except exceptions.Kill: # pragma: no cover
self.log(flow.Error.KILLED_MESSAGE, "info") self.log(flow.Error.KILLED_MESSAGE, "info")

View File

@ -63,7 +63,7 @@ class RawTCPLayer(base.Layer):
self.channel.ask("tcp_message", f) self.channel.ask("tcp_message", f)
dst.sendall(tcp_message.content) dst.sendall(tcp_message.content)
except (socket.error, exceptions.TcpException, SSL.Error) as e: except (OSError, exceptions.TcpException, SSL.Error) as e:
if not self.ignore: if not self.ignore:
f.error = flow.Error("TCP connection closed unexpectedly: {}".format(repr(e))) f.error = flow.Error("TCP connection closed unexpectedly: {}".format(repr(e)))
self.channel.tell("tcp_error", f) self.channel.tell("tcp_error", f)

View File

@ -464,7 +464,7 @@ class TlsLayer(base.Layer):
) )
proto = self.alpn_for_client_connection.decode() if self.alpn_for_client_connection else '-' proto = self.alpn_for_client_connection.decode() if self.alpn_for_client_connection else '-'
self.log("ALPN selected by server: {}".format(proto), "debug") self.log(f"ALPN selected by server: {proto}", "debug")
def _find_cert(self): def _find_cert(self):
""" """

View File

@ -220,7 +220,7 @@ class WebSocketLayer(base.Layer):
if not self._handle_event(event, source_conn, other_conn, is_server): if not self._handle_event(event, source_conn, other_conn, is_server):
if not close_received: if not close_received:
close_received = True close_received = True
except (socket.error, exceptions.TcpException, SSL.Error) as e: except (OSError, exceptions.TcpException, SSL.Error) as e:
s = 'server' if is_server else 'client' s = 'server' if is_server else 'client'
self.flow.error = flow.Error("WebSocket connection closed unexpectedly by {}: {}".format(s, repr(e))) self.flow.error = flow.Error("WebSocket connection closed unexpectedly by {}: {}".format(s, repr(e)))
self.channel.tell("websocket_error", self.flow) self.channel.tell("websocket_error", self.flow)

View File

@ -46,7 +46,7 @@ class StateObject(serializable.Serializable):
else: else:
setattr(self, attr, make_object(cls, val)) setattr(self, attr, make_object(cls, val))
if state: if state:
raise RuntimeWarning("Unexpected State in __setstate__: {}".format(state)) raise RuntimeWarning(f"Unexpected State in __setstate__: {state}")
def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any: def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any:
@ -65,7 +65,7 @@ def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.An
elif typename.startswith("typing.Tuple"): elif typename.startswith("typing.Tuple"):
Ts = typecheck.tuple_types(typeinfo) Ts = typecheck.tuple_types(typeinfo)
if len(Ts) != len(val): if len(Ts) != len(val):
raise ValueError("Invalid data. Expected {}, got {}.".format(Ts, val)) raise ValueError(f"Invalid data. Expected {Ts}, got {val}.")
return tuple( return tuple(
_process(T, x, make) for T, x in zip(Ts, val) _process(T, x, make) for T, x in zip(Ts, val)
) )

View File

@ -28,7 +28,7 @@ class RecordingMaster(mitmproxy.master.Master):
def dump_log(self, outf=sys.stdout): def dump_log(self, outf=sys.stdout):
for i in self.logs: for i in self.logs:
print("%s: %s" % (i.level, i.msg), file=outf) print(f"{i.level}: {i.msg}", file=outf)
def has_log(self, txt, level=None): def has_log(self, txt, level=None):
for i in self.logs: for i in self.logs:

View File

@ -86,8 +86,8 @@ def twebsocketflow(client_conn=True, server_conn=True, messages=True, err=None,
if messages is True: if messages is True:
messages = [ messages = [
websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary"), websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary"),
websocket.WebSocketMessage(Opcode.TEXT, True, "hello text".encode()), websocket.WebSocketMessage(Opcode.TEXT, True, b"hello text"),
websocket.WebSocketMessage(Opcode.TEXT, False, "it's me".encode()), websocket.WebSocketMessage(Opcode.TEXT, False, b"it's me"),
] ]
if err is True: if err is True:
err = terr() err = terr()

View File

@ -131,7 +131,7 @@ def run(
master.run() master.run()
except exceptions.OptionsError as e: except exceptions.OptionsError as e:
print("%s: %s" % (sys.argv[0], e), file=sys.stderr) print("{}: {}".format(sys.argv[0], e), file=sys.stderr)
sys.exit(1) sys.exit(1)
except (KeyboardInterrupt, RuntimeError): except (KeyboardInterrupt, RuntimeError):
pass pass

View File

@ -98,23 +98,23 @@ def fcol(s: str, attr: str) -> typing.Tuple[str, int, urwid.Text]:
if urwid.util.detected_encoding: if urwid.util.detected_encoding:
SYMBOL_REPLAY = u"\u21ba" SYMBOL_REPLAY = "\u21ba"
SYMBOL_RETURN = u"\u2190" SYMBOL_RETURN = "\u2190"
SYMBOL_MARK = u"\u25cf" SYMBOL_MARK = "\u25cf"
SYMBOL_UP = u"\u21E7" SYMBOL_UP = "\u21E7"
SYMBOL_DOWN = u"\u21E9" SYMBOL_DOWN = "\u21E9"
SYMBOL_ELLIPSIS = u"\u2026" SYMBOL_ELLIPSIS = "\u2026"
SYMBOL_FROM_CLIENT = u"\u21d2" SYMBOL_FROM_CLIENT = "\u21d2"
SYMBOL_TO_CLIENT = u"\u21d0" SYMBOL_TO_CLIENT = "\u21d0"
else: else:
SYMBOL_REPLAY = u"[r]" SYMBOL_REPLAY = "[r]"
SYMBOL_RETURN = u"<-" SYMBOL_RETURN = "<-"
SYMBOL_MARK = "#" SYMBOL_MARK = "#"
SYMBOL_UP = "^" SYMBOL_UP = "^"
SYMBOL_DOWN = " " SYMBOL_DOWN = " "
SYMBOL_ELLIPSIS = "~" SYMBOL_ELLIPSIS = "~"
SYMBOL_FROM_CLIENT = u"->" SYMBOL_FROM_CLIENT = "->"
SYMBOL_TO_CLIENT = u"<-" SYMBOL_TO_CLIENT = "<-"
SCHEME_STYLES = { SCHEME_STYLES = {
'http': 'scheme_http', 'http': 'scheme_http',
@ -164,7 +164,7 @@ class TruncatedText(urwid.Widget):
self.text = text self.text = text
self.attr = attr self.attr = attr
self.align = align self.align = align
super(TruncatedText, self).__init__() super().__init__()
def pack(self, size, focus=False): def pack(self, size, focus=False):
return (len(self.text), 1) return (len(self.text), 1)

View File

@ -533,7 +533,7 @@ class ConsoleAddon:
[strutils.always_str(x) or "" for x in row] # type: ignore [strutils.always_str(x) or "" for x in row] # type: ignore
) )
ctx.log.alert("Saved %s rows as CSV." % (len(rows))) ctx.log.alert("Saved %s rows as CSV." % (len(rows)))
except IOError as e: except OSError as e:
ctx.log.error(str(e)) ctx.log.error(str(e))
@command.command("console.grideditor.editor") @command.command("console.grideditor.editor")
@ -560,7 +560,7 @@ class ConsoleAddon:
try: try:
self.master.commands.call_strings( self.master.commands.call_strings(
"view.settings.setval", "view.settings.setval",
["@focus", "flowview_mode_%s" % (idx,), mode] ["@focus", f"flowview_mode_{idx}", mode]
) )
except exceptions.CommandError as e: except exceptions.CommandError as e:
ctx.log.error(str(e)) ctx.log.error(str(e))
@ -584,7 +584,7 @@ class ConsoleAddon:
return self.master.commands.call_strings( return self.master.commands.call_strings(
"view.settings.getval", "view.settings.getval",
["@focus", "flowview_mode_%s" % (idx,), self.master.options.console_default_contentview] ["@focus", f"flowview_mode_{idx}", self.master.options.console_default_contentview]
) )
@command.command("console.key.contexts") @command.command("console.key.contexts")

View File

@ -1,4 +1,3 @@
def map(km): def map(km):
km.add(":", "console.command ", ["commonkey", "global"], "Command prompt") km.add(":", "console.command ", ["commonkey", "global"], "Command prompt")
km.add("?", "console.view.help", ["global"], "View help") km.add("?", "console.view.help", ["global"], "View help")

View File

@ -46,7 +46,7 @@ class EventLog(urwid.ListBox, layoutwidget.LayoutWidget):
def add_event(self, event_store, entry: log.LogEntry): def add_event(self, event_store, entry: log.LogEntry):
if log.log_tier(self.master.options.console_eventlog_verbosity) < log.log_tier(entry.level): if log.log_tier(self.master.options.console_eventlog_verbosity) < log.log_tier(entry.level):
return return
txt = "%s: %s" % (entry.level, str(entry.msg)) txt = "{}: {}".format(entry.level, str(entry.msg))
if entry.level in ("error", "warn", "alert"): if entry.level in ("error", "warn", "alert"):
e = urwid.Text((entry.level, txt)) e = urwid.Text((entry.level, txt))
else: else:

View File

@ -16,7 +16,7 @@ def read_file(filename: str, escaped: bool) -> typing.AnyStr:
try: try:
with open(filename, "r" if escaped else "rb") as f: with open(filename, "r" if escaped else "rb") as f:
d = f.read() d = f.read()
except IOError as v: except OSError as v:
raise exceptions.CommandError(v) raise exceptions.CommandError(v)
if escaped: if escaped:
try: try:
@ -155,7 +155,7 @@ class GridWalker(urwid.ListWalker):
def set_value(self, val, focus, focus_col, errors=None): def set_value(self, val, focus, focus_col, errors=None):
if not errors: if not errors:
errors = set([]) errors = set()
row = list(self.lst[focus][0]) row = list(self.lst[focus][0])
row[focus_col] = val row[focus_col] = val
self.lst[focus] = [tuple(row), errors] self.lst[focus] = [tuple(row), errors]
@ -171,7 +171,7 @@ class GridWalker(urwid.ListWalker):
self.focus = pos self.focus = pos
self.lst.insert( self.lst.insert(
self.focus, self.focus,
([c.blank() for c in self.editor.columns], set([])) ([c.blank() for c in self.editor.columns], set())
) )
self.focus_col = 0 self.focus_col = 0
self.start_edit() self.start_edit()

View File

@ -161,7 +161,7 @@ keyAttrs = {
"ctx": lambda x: isinstance(x, list) and [isinstance(v, str) for v in x], "ctx": lambda x: isinstance(x, list) and [isinstance(v, str) for v in x],
"help": lambda x: isinstance(x, str), "help": lambda x: isinstance(x, str),
} }
requiredKeyAttrs = set(["key", "cmd"]) requiredKeyAttrs = {"key", "cmd"}
class KeymapConfig: class KeymapConfig:
@ -186,18 +186,18 @@ class KeymapConfig:
def load_path(self, km, p): def load_path(self, km, p):
if os.path.exists(p) and os.path.isfile(p): if os.path.exists(p) and os.path.isfile(p):
with open(p, "rt", encoding="utf8") as f: with open(p, encoding="utf8") as f:
try: try:
txt = f.read() txt = f.read()
except UnicodeDecodeError as e: except UnicodeDecodeError as e:
raise KeyBindingError( raise KeyBindingError(
"Encoding error - expected UTF8: %s: %s" % (p, e) f"Encoding error - expected UTF8: {p}: {e}"
) )
try: try:
vals = self.parse(txt) vals = self.parse(txt)
except KeyBindingError as e: except KeyBindingError as e:
raise KeyBindingError( raise KeyBindingError(
"Error reading %s: %s" % (p, e) f"Error reading {p}: {e}"
) from e ) from e
for v in vals: for v in vals:
user_ctxs = v.get("ctx", ["global"]) user_ctxs = v.get("ctx", ["global"])
@ -212,7 +212,7 @@ class KeymapConfig:
) )
except ValueError as e: except ValueError as e:
raise KeyBindingError( raise KeyBindingError(
"Error reading %s: %s" % (p, e) f"Error reading {p}: {e}"
) from e ) from e
def parse(self, text): def parse(self, text):

View File

@ -1,5 +1,3 @@
class LayoutWidget: class LayoutWidget:
""" """
All top-level layout widgets and all widgets that may be set in an All top-level layout widgets and all widgets that may be set in an

View File

@ -20,7 +20,7 @@ class PromptPath:
pth = os.path.expanduser(pth) pth = os.path.expanduser(pth)
try: try:
return self.callback(pth, *self.args) return self.callback(pth, *self.args)
except IOError as v: except OSError as v:
signals.status_message.send(message=v.strerror) signals.status_message.send(message=v.strerror)
@ -128,7 +128,7 @@ class ActionBar(urwid.WidgetWrap):
mkup.append(",") mkup.append(",")
prompt.extend(mkup) prompt.extend(mkup)
prompt.append(")? ") prompt.append(")? ")
self.onekey = set(i[1] for i in keys) self.onekey = {i[1] for i in keys}
self._w = urwid.Edit(prompt, "") self._w = urwid.Edit(prompt, "")
self.prompting = PromptStub(callback, args) self.prompting = PromptStub(callback, args)
@ -305,14 +305,14 @@ class StatusBar(urwid.WidgetWrap):
marked = "M" marked = "M"
t = [ t = [
('heading', ("%s %s [%s/%s]" % (arrow, marked, offset, fc)).ljust(11)), ('heading', (f"{arrow} {marked} [{offset}/{fc}]").ljust(11)),
] ]
if self.master.options.server: if self.master.options.server:
host = self.master.options.listen_host host = self.master.options.listen_host
if host == "0.0.0.0" or host == "": if host == "0.0.0.0" or host == "":
host = "*" host = "*"
boundaddr = "[%s:%s]" % (host, self.master.options.listen_port) boundaddr = f"[{host}:{self.master.options.listen_port}]"
else: else:
boundaddr = "" boundaddr = ""
t.extend(self.get_status()) t.extend(self.get_status())

View File

@ -3,7 +3,6 @@ This file must be kept in a python2.7 and python3.5 compatible syntax!
DO NOT use type annotations or other python3.6-only features that makes this file unparsable by older interpreters! DO NOT use type annotations or other python3.6-only features that makes this file unparsable by older interpreters!
""" """
from __future__ import print_function # this is here for the version check to work on Python 2.
import sys import sys

View File

@ -124,7 +124,7 @@ class RequestHandler(tornado.web.RequestHandler):
if isinstance(chunk, list): if isinstance(chunk, list):
chunk = tornado.escape.json_encode(chunk) chunk = tornado.escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8") self.set_header("Content-Type", "application/json; charset=UTF-8")
super(RequestHandler, self).write(chunk) super().write(chunk)
def set_default_headers(self): def set_default_headers(self):
super().set_default_headers() super().set_default_headers()
@ -312,7 +312,7 @@ class FlowHandler(RequestHandler):
elif k == "content": elif k == "content":
request.text = v request.text = v
else: else:
raise APIError(400, "Unknown update request.{}: {}".format(k, v)) raise APIError(400, f"Unknown update request.{k}: {v}")
elif a == "response" and hasattr(flow, "response"): elif a == "response" and hasattr(flow, "response"):
response = flow.response response = flow.response
@ -332,9 +332,9 @@ class FlowHandler(RequestHandler):
elif k == "content": elif k == "content":
response.text = v response.text = v
else: else:
raise APIError(400, "Unknown update response.{}: {}".format(k, v)) raise APIError(400, f"Unknown update response.{k}: {v}")
else: else:
raise APIError(400, "Unknown update {}: {}".format(a, b)) raise APIError(400, f"Unknown update {a}: {b}")
except APIError: except APIError:
flow.revert() flow.revert()
raise raise
@ -395,7 +395,7 @@ class FlowContent(RequestHandler):
filename = self.flow.request.path.split("?")[0].split("/")[-1] filename = self.flow.request.path.split("?")[0].split("/")[-1]
filename = re.sub(r'[^-\w" .()]', "", filename) filename = re.sub(r'[^-\w" .()]', "", filename)
cd = "attachment; filename={}".format(filename) cd = f"attachment; filename={filename}"
self.set_header("Content-Disposition", cd) self.set_header("Content-Disposition", cd)
self.set_header("Content-Type", "application/text") self.set_header("Content-Type", "application/text")
self.set_header("X-Content-Type-Options", "nosniff") self.set_header("X-Content-Type-Options", "nosniff")
@ -456,7 +456,7 @@ class Settings(RequestHandler):
} }
for k in update: for k in update:
if k not in allowed_options: if k not in allowed_options:
raise APIError(400, "Unknown setting {}".format(k)) raise APIError(400, f"Unknown setting {k}")
self.master.options.update(**update) self.master.options.update(**update)
@ -469,7 +469,7 @@ class Options(RequestHandler):
try: try:
self.master.options.update(**update) self.master.options.update(**update)
except Exception as err: except Exception as err:
raise APIError(400, "{}".format(err)) raise APIError(400, f"{err}")
class SaveOptions(RequestHandler): class SaveOptions(RequestHandler):

View File

@ -106,8 +106,8 @@ class WebMaster(master.Master):
iol = tornado.ioloop.IOLoop.instance() iol = tornado.ioloop.IOLoop.instance()
http_server = tornado.httpserver.HTTPServer(self.app) http_server = tornado.httpserver.HTTPServer(self.app)
http_server.listen(self.options.web_port, self.options.web_host) http_server.listen(self.options.web_port, self.options.web_host)
web_url = "http://{}:{}/".format(self.options.web_host, self.options.web_port) web_url = f"http://{self.options.web_host}:{self.options.web_port}/"
self.log.info( self.log.info(
"Web server listening at {}".format(web_url), f"Web server listening at {web_url}",
) )
self.run_loop(iol.start) self.run_loop(iol.start)

View File

@ -24,11 +24,11 @@ class WebAddon:
def running(self): def running(self):
if hasattr(ctx.options, "web_open_browser") and ctx.options.web_open_browser: if hasattr(ctx.options, "web_open_browser") and ctx.options.web_open_browser:
web_url = "http://{}:{}/".format(ctx.options.web_host, ctx.options.web_port) web_url = f"http://{ctx.options.web_host}:{ctx.options.web_port}/"
success = open_browser(web_url) success = open_browser(web_url)
if not success: if not success:
ctx.log.info( ctx.log.info(
"No web browser found. Please open a browser and point it to {}".format(web_url), f"No web browser found. Please open a browser and point it to {web_url}",
) )

View File

@ -117,7 +117,7 @@ def check():
for option in ("-e", "--eventlog", "--norefresh"): for option in ("-e", "--eventlog", "--norefresh"):
if option in args: if option in args:
print("{} has been removed.".format(option)) print(f"{option} has been removed.")
for option in ("--nonanonymous", "--singleuser", "--htpasswd"): for option in ("--nonanonymous", "--singleuser", "--htpasswd"):
if option in args: if option in args:

View File

@ -15,10 +15,10 @@ def dump_system_info():
mitmproxy_version = version.get_dev_version() mitmproxy_version = version.get_dev_version()
data = [ data = [
"Mitmproxy: {}".format(mitmproxy_version), f"Mitmproxy: {mitmproxy_version}",
"Python: {}".format(platform.python_version()), f"Python: {platform.python_version()}",
"OpenSSL: {}".format(SSL.SSLeay_version(SSL.SSLEAY_VERSION).decode()), "OpenSSL: {}".format(SSL.SSLeay_version(SSL.SSLEAY_VERSION).decode()),
"Platform: {}".format(platform.platform()), f"Platform: {platform.platform()}",
] ]
return "\n".join(data) return "\n".join(data)
@ -84,7 +84,7 @@ def dump_info(signal=None, frame=None, file=sys.stdout, testing=False): # pragm
def dump_stacks(signal=None, frame=None, file=sys.stdout, testing=False): def dump_stacks(signal=None, frame=None, file=sys.stdout, testing=False):
id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) id2name = {th.ident: th.name for th in threading.enumerate()}
code = [] code = []
for threadId, stack in sys._current_frames().items(): for threadId, stack in sys._current_frames().items():
code.append( code.append(

View File

@ -24,7 +24,7 @@ def pretty_size(size):
if x == int(x): if x == int(x):
x = int(x) x = int(x)
return str(x) + suf return str(x) + suf
return "%s%s" % (size, SIZE_TABLE[0][0]) return "{}{}".format(size, SIZE_TABLE[0][0])
@functools.lru_cache() @functools.lru_cache()

View File

@ -92,7 +92,7 @@ def bytes_to_escaped_str(data, keep_spacing=False, escape_single_quotes=False):
""" """
if not isinstance(data, bytes): if not isinstance(data, bytes):
raise ValueError("data must be bytes, but is {}".format(data.__class__.__name__)) raise ValueError(f"data must be bytes, but is {data.__class__.__name__}")
# We always insert a double-quote here so that we get a single-quoted string back # We always insert a double-quote here so that we get a single-quoted string back
# https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their # https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their
ret = repr(b'"' + data).lstrip("b")[2:-1] ret = repr(b'"' + data).lstrip("b")[2:-1]
@ -115,7 +115,7 @@ def escaped_str_to_bytes(data):
ValueError, if the escape sequence is invalid. ValueError, if the escape sequence is invalid.
""" """
if not isinstance(data, str): if not isinstance(data, str):
raise ValueError("data must be str, but is {}".format(data.__class__.__name__)) raise ValueError(f"data must be str, but is {data.__class__.__name__}")
# This one is difficult - we use an undocumented Python API here # This one is difficult - we use an undocumented Python API here
# as per http://stackoverflow.com/a/23151714/934719 # as per http://stackoverflow.com/a/23151714/934719
@ -154,12 +154,12 @@ def hexdump(s):
A generator of (offset, hex, str) tuples A generator of (offset, hex, str) tuples
""" """
for i in range(0, len(s), 16): for i in range(0, len(s), 16):
offset = "{:0=10x}".format(i) offset = f"{i:0=10x}"
part = s[i:i + 16] part = s[i:i + 16]
x = " ".join("{:0=2x}".format(i) for i in part) x = " ".join(f"{i:0=2x}" for i in part)
x = x.ljust(47) # 16*2 + 15 x = x.ljust(47) # 16*2 + 15
part_repr = always_str(escape_control_characters( part_repr = always_str(escape_control_characters(
part.decode("ascii", "replace").replace(u"\ufffd", u"."), part.decode("ascii", "replace").replace("\ufffd", "."),
False False
)) ))
yield (offset, x, part_repr) yield (offset, x, part_repr)
@ -230,7 +230,7 @@ def escape_special_areas(
""" """
buf = io.StringIO() buf = io.StringIO()
parts = split_special_areas(data, area_delimiter) parts = split_special_areas(data, area_delimiter)
rex = re.compile(r"[{}]".format(control_characters)) rex = re.compile(fr"[{control_characters}]")
for i, x in enumerate(parts): for i, x in enumerate(parts):
if i % 2: if i % 2:
x = rex.sub(_move_to_private_code_plane, x) x = rex.sub(_move_to_private_code_plane, x)

View File

@ -55,7 +55,7 @@ def check_option_type(name: str, value: typing.Any, typeinfo: Type) -> None:
if len(types) != len(value): if len(types) != len(value):
raise e raise e
for i, (x, T) in enumerate(zip(value, types)): for i, (x, T) in enumerate(zip(value, types)):
check_option_type("{}[{}]".format(name, i), x, T) check_option_type(f"{name}[{i}]", x, T)
return return
elif typename.startswith("typing.Sequence"): elif typename.startswith("typing.Sequence"):
T = sequence_type(typeinfo) T = sequence_type(typeinfo)

View File

@ -70,7 +70,7 @@ class PauseAt(_Action):
return e.setParseAction(lambda x: cls(*x)) return e.setParseAction(lambda x: cls(*x))
def spec(self): def spec(self):
return "p%s,%s" % (self.offset, self.seconds) return f"p{self.offset},{self.seconds}"
def intermediate(self, settings): def intermediate(self, settings):
return (self.offset, "pause", self.seconds) return (self.offset, "pause", self.seconds)
@ -116,7 +116,7 @@ class InjectAt(_Action):
return e.setParseAction(lambda x: cls(*x)) return e.setParseAction(lambda x: cls(*x))
def spec(self): def spec(self):
return "i%s,%s" % (self.offset, self.value.spec()) return f"i{self.offset},{self.value.spec()}"
def intermediate(self, settings): def intermediate(self, settings):
return ( return (

View File

@ -295,7 +295,7 @@ class KeyValue(_Component):
return e.setParseAction(lambda x: cls(*x)) return e.setParseAction(lambda x: cls(*x))
def spec(self): def spec(self):
return "%s%s=%s" % (self.preamble, self.key.spec(), self.value.spec()) return f"{self.preamble}{self.key.spec()}={self.value.spec()}"
def freeze(self, settings): def freeze(self, settings):
return self.__class__( return self.__class__(
@ -369,7 +369,7 @@ class OptionsOrValue(_Component):
s = self.value.spec() s = self.value.spec()
if s[1:-1].lower() in self.options: if s[1:-1].lower() in self.options:
s = s[1:-1].lower() s = s[1:-1].lower()
return "%s%s" % (self.preamble, s) return f"{self.preamble}{s}"
def freeze(self, settings): def freeze(self, settings):
return self.__class__(self.value.freeze(settings)) return self.__class__(self.value.freeze(settings))
@ -403,7 +403,7 @@ class Integer(_Component):
return [self.value] return [self.value]
def spec(self): def spec(self):
return "%s%s" % (self.preamble, self.value.decode()) return f"{self.preamble}{self.value.decode()}"
def freeze(self, settings_): def freeze(self, settings_):
return self return self
@ -430,7 +430,7 @@ class Value(_Component):
return [self.value.get_generator(settings)] return [self.value.get_generator(settings)]
def spec(self): def spec(self):
return "%s%s" % (self.preamble, self.value.spec()) return f"{self.preamble}{self.value.spec()}"
def freeze(self, settings): def freeze(self, settings):
return self.__class__(self.value.freeze(settings)) return self.__class__(self.value.freeze(settings))
@ -454,7 +454,7 @@ class FixedLengthValue(Value):
# This check will fail if we know the length upfront # This check will fail if we know the length upfront
if lenguess is not None and lenguess != self.length: if lenguess is not None and lenguess != self.length:
raise exceptions.RenderError( raise exceptions.RenderError(
"Invalid value length: '%s' is %s bytes, should be %s." % ( "Invalid value length: '{}' is {} bytes, should be {}.".format(
self.spec(), self.spec(),
lenguess, lenguess,
self.length self.length
@ -468,7 +468,7 @@ class FixedLengthValue(Value):
# file inputs # file inputs
if l != self.length: if l != self.length:
raise exceptions.RenderError( raise exceptions.RenderError(
"Invalid value length: '%s' is %s bytes, should be %s." % ( "Invalid value length: '{}' is {} bytes, should be {}.".format(
self.spec(), self.spec(),
l, l,
self.length self.length
@ -503,7 +503,7 @@ class Boolean(_Component):
return e.setParseAction(parse) return e.setParseAction(parse)
def spec(self): def spec(self):
return "%s%s" % ("-" if not self.value else "", self.name) return "{}{}".format("-" if not self.value else "", self.name)
class IntField(_Component): class IntField(_Component):
@ -537,4 +537,4 @@ class IntField(_Component):
return [str(self.value)] return [str(self.value)]
def spec(self): def spec(self):
return "%s%s" % (self.preamble, self.origvalue) return f"{self.preamble}{self.origvalue}"

View File

@ -15,7 +15,7 @@ class ParseException(Exception):
self.col = col self.col = col
def marked(self): def marked(self):
return "%s\n%s" % (self.s, " " * (self.col - 1) + "^") return "{}\n{}".format(self.s, " " * (self.col - 1) + "^")
def __str__(self): def __str__(self):
return "%s at char %s" % (self.msg, self.col) return f"{self.msg} at char {self.col}"

View File

@ -70,7 +70,7 @@ class RandomGenerator:
return rand_byte(chars) return rand_byte(chars)
def __repr__(self): def __repr__(self):
return "%s random from %s" % (self.length, self.dtype) return f"{self.length} random from {self.dtype}"
class FileGenerator: class FileGenerator:

View File

@ -158,7 +158,7 @@ class Response(_HTTP2Message):
) )
def __init__(self, tokens): def __init__(self, tokens):
super(Response, self).__init__(tokens) super().__init__(tokens)
self.rendered_values = None self.rendered_values = None
self.stream_id = 2 self.stream_id = 2
@ -226,7 +226,7 @@ class Request(_HTTP2Message):
logattrs = ["method", "path"] logattrs = ["method", "path"]
def __init__(self, tokens): def __init__(self, tokens):
super(Request, self).__init__(tokens) super().__init__(tokens)
self.rendered_values = None self.rendered_values = None
self.stream_id = 1 self.stream_id = 1

View File

@ -14,7 +14,7 @@ class Message:
logattrs: typing.List[str] = [] logattrs: typing.List[str] = []
def __init__(self, tokens): def __init__(self, tokens):
track = set([]) track = set()
for i in tokens: for i in tokens:
if i.unique_name: if i.unique_name:
if i.unique_name in track: if i.unique_name in track:
@ -133,7 +133,7 @@ class NestedMessage(base.Token):
] ]
def spec(self): def spec(self):
return "%s%s" % (self.preamble, self.value.spec()) return f"{self.preamble}{self.value.spec()}"
def freeze(self, settings): def freeze(self, settings):
f = self.parsed.freeze(settings).spec() f = self.parsed.freeze(settings).spec()

View File

@ -65,7 +65,7 @@ class LogCtx:
strutils.escape_control_characters( strutils.escape_control_characters(
data data
.decode("ascii", "replace") .decode("ascii", "replace")
.replace(u"\ufffd", u".") .replace("\ufffd", ".")
) )
) )
for i in data.split("\n"): for i in data.split("\n"):

View File

@ -47,20 +47,20 @@ class SSLInfo:
parts.append(" Certificate [%s]" % n) parts.append(" Certificate [%s]" % n)
parts.append("\tSubject: ") parts.append("\tSubject: ")
for cn in i.get_subject().get_components(): for cn in i.get_subject().get_components():
parts.append("\t\t%s=%s" % ( parts.append("\t\t{}={}".format(
strutils.always_str(cn[0], "utf8"), strutils.always_str(cn[0], "utf8"),
strutils.always_str(cn[1], "utf8")) strutils.always_str(cn[1], "utf8"))
) )
parts.append("\tIssuer: ") parts.append("\tIssuer: ")
for cn in i.get_issuer().get_components(): for cn in i.get_issuer().get_components():
parts.append("\t\t%s=%s" % ( parts.append("\t\t{}={}".format(
strutils.always_str(cn[0], "utf8"), strutils.always_str(cn[0], "utf8"),
strutils.always_str(cn[1], "utf8")) strutils.always_str(cn[1], "utf8"))
) )
parts.extend( parts.extend(
[ [
"\tVersion: %s" % i.get_version(), "\tVersion: %s" % i.get_version(),
"\tValidity: %s - %s" % ( "\tValidity: {} - {}".format(
strutils.always_str(i.get_notBefore(), "utf8"), strutils.always_str(i.get_notBefore(), "utf8"),
strutils.always_str(i.get_notAfter(), "utf8") strutils.always_str(i.get_notAfter(), "utf8")
), ),
@ -74,7 +74,7 @@ class SSLInfo:
OpenSSL.crypto.TYPE_DSA: "DSA" OpenSSL.crypto.TYPE_DSA: "DSA"
} }
t = types.get(pk.type(), "Uknown") t = types.get(pk.type(), "Uknown")
parts.append("\tPubkey: %s bit %s" % (pk.bits(), t)) parts.append(f"\tPubkey: {pk.bits()} bit {t}")
s = certs.Cert(i) s = certs.Cert(i)
if s.altnames: if s.altnames:
parts.append("\tSANs: %s" % " ".join(strutils.always_str(n, "utf8") for n in s.altnames)) parts.append("\tSANs: %s" % " ".join(strutils.always_str(n, "utf8") for n in s.altnames))
@ -463,7 +463,7 @@ class Pathoc(tcp.TCPClient):
raise raise
finally: finally:
if resp: if resp:
lg("<< %s %s: %s bytes" % ( lg("<< {} {}: {} bytes".format(
resp.status_code, strutils.escape_control_characters(resp.reason) if resp.reason else "", len(resp.content) resp.status_code, strutils.escape_control_characters(resp.reason) if resp.reason else "", len(resp.content)
)) ))
if resp.status_code in self.ignorecodes: if resp.status_code in self.ignorecodes:

Some files were not shown because too many files have changed in this diff Show More