pyupgrade --py36-plus mitmproxy/**/*.py

This commit is contained in:
Thomas Kriechbaumer 2020-11-20 19:25:26 +01:00
parent 5b6d75614e
commit 38cca379df
133 changed files with 354 additions and 372 deletions

View File

@ -20,7 +20,7 @@ class MyAddon:
with open(path, "w+") as fp:
for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
fp.write("%s: %s\n" % (cnt, dom))
fp.write(f"{cnt}: {dom}\n")
ctx.log.alert("done")

View File

@ -17,4 +17,4 @@ with open(sys.argv[1], "rb") as logfile:
pp.pprint(f.get_state())
print("")
except FlowReadException as e:
print("Flow file corrupted: {}".format(e))
print(f"Flow file corrupted: {e}")

View File

@ -12,6 +12,6 @@ from mitmproxy.script import concurrent
@concurrent # Remove this and see what happens
def request(flow):
# This is ugly in mitmproxy's UI, but you don't want to use mitmproxy.ctx.log from a different thread.
print("handle request: %s%s" % (flow.request.host, flow.request.path))
print(f"handle request: {flow.request.host}{flow.request.path}")
time.sleep(5)
print("start request: %s%s" % (flow.request.host, flow.request.path))
print(f"start request: {flow.request.host}{flow.request.path}")

View File

@ -1,3 +1,2 @@
def request(flow):
flow.request.headers["myheader"] = "value"

View File

@ -9,9 +9,9 @@ def websocket_message(flow):
# was the message sent from the client or server?
if message.from_client:
ctx.log.info("Client sent a message: {}".format(message.content))
ctx.log.info(f"Client sent a message: {message.content}")
else:
ctx.log.info("Server sent a message: {}".format(message.content))
ctx.log.info(f"Server sent a message: {message.content}")
# manipulate the message content
message.content = re.sub(r'^Hello', 'HAPPY', message.content)

View File

@ -58,7 +58,7 @@ def monkey_dummy_cert(privkey, cacert, commonname, sans):
return Cert(cert)
class CheckSSLPinning(object):
class CheckSSLPinning:
def load(self, loader):
loader.add_option(
"certbeginon", bool, False,

View File

@ -31,14 +31,14 @@ class Wrapper:
return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state])
def enable_proxy_for_service(self, service):
print('Enabling proxy on {}...'.format(service))
print(f'Enabling proxy on {service}...')
for subcommand in ['-setwebproxy', '-setsecurewebproxy']:
self.run_networksetup_command(
subcommand, service, '127.0.0.1', str(
self.port))
def disable_proxy_for_service(self, service):
print('Disabling proxy on {}...'.format(service))
print(f'Disabling proxy on {service}...')
for subcommand in ['-setwebproxystate', '-setsecurewebproxystate']:
self.run_networksetup_command(subcommand, service, 'Off')
@ -48,7 +48,7 @@ class Wrapper:
r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$',
order,
re.MULTILINE)
return dict([(b, a) for (a, b) in mapping])
return {b: a for (a, b) in mapping}
def run_command_with_input(self, command, input):
popen = subprocess.Popen(

View File

@ -26,7 +26,7 @@ class TestHARDump:
tctx.configure(a, hardump=path)
tctx.invoke(a, "response", self.flow())
tctx.invoke(a, "done")
with open(path, "r") as inp:
with open(path) as inp:
har = json.load(inp)
assert len(har["log"]["entries"]) == 1
@ -40,7 +40,7 @@ class TestHARDump:
a, "response", self.flow(resp_content=b"foo" + b"\xFF" * 10)
)
tctx.invoke(a, "done")
with open(path, "r") as inp:
with open(path) as inp:
har = json.load(inp)
assert har["log"]["entries"][0]["response"]["content"]["encoding"] == "base64"
@ -79,6 +79,6 @@ class TestHARDump:
tctx.invoke(a, "response", f)
tctx.invoke(a, "done")
with open(path, "r") as inp:
with open(path) as inp:
har = json.load(inp)
assert len(har["log"]["entries"]) == 1

View File

@ -34,7 +34,7 @@ class TestJSONDump:
tctx.configure(a, dump_destination=path)
tctx.invoke(a, "response", self.flow())
tctx.invoke(a, "done")
with open(path, "r") as inp:
with open(path) as inp:
entry = json.loads(inp.readline())
assert entry['response']['content'] == 'message'
@ -49,7 +49,7 @@ class TestJSONDump:
a, "response", self.flow(resp_content=content)
)
tctx.invoke(a, "done")
with open(path, "r") as inp:
with open(path) as inp:
entry = json.loads(inp.readline())
assert entry['response']['content'] == base64.b64encode(content).decode('utf-8')

View File

@ -83,7 +83,7 @@ class ProbabilisticStrategy(_TlsStrategy):
def __init__(self, p):
self.p = p
super(ProbabilisticStrategy, self).__init__()
super().__init__()
def should_intercept(self, server_address):
return random.uniform(0, 1) < self.p
@ -99,7 +99,7 @@ class TlsFeedback(TlsLayer):
server_address = self.server_conn.address
try:
super(TlsFeedback, self)._establish_tls_with_client()
super()._establish_tls_with_client()
except TlsProtocolException as e:
tls_strategy.record_failure(server_address)
raise e

View File

@ -68,7 +68,7 @@ class MappingAddon:
self.filename = filename
self.persistent = persistent
self.logger = logging.getLogger(self.__class__.__name__)
with open(filename, "r") as f:
with open(filename) as f:
self.mapping_templates = URLDict.load(f)
def load(self, loader):
@ -84,7 +84,7 @@ class MappingAddon:
def configure(self, updated):
if self.OPT_MAPPING_FILE in updated:
self.filename = updated[self.OPT_MAPPING_FILE]
with open(self.filename, "r") as f:
with open(self.filename) as f:
self.mapping_templates = URLDict.load(f)
if self.OPT_MAP_PERSISTENT in updated:

View File

@ -125,7 +125,7 @@ class TestMappingAddon:
open_mock = mock.mock_open(read_data="{}")
with mock.patch("builtins.open", open_mock):
mapping.done()
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
results = tfile.read()
assert len(open_mock.mock_calls) != 0
assert results == mapping_content
@ -143,7 +143,7 @@ class TestMappingAddon:
mapping.response(f)
mapping.done()
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
results = tfile.read()
assert mapping_content in results

View File

@ -26,7 +26,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write(input_file_content_error)
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
try:
URLDict.load(tfile)
except ValueError:
@ -38,7 +38,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write(input_file_content)
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
urldict = URLDict.load(tfile)
f = tflow.tflow(resp=tutils.tresp())
@ -68,7 +68,7 @@ class TestUrlDict:
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write(input_file_content)
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
urldict = URLDict.load(tfile)
dump = urldict.dumps()
@ -79,11 +79,11 @@ class TestUrlDict:
outfile = tmpdir.join("outfile")
with open(tmpfile, "w") as tfile:
tfile.write(input_file_content)
with open(tmpfile, "r") as tfile:
with open(tmpfile) as tfile:
urldict = URLDict.load(tfile)
with open(outfile, "w") as ofile:
urldict.dump(ofile)
with open(outfile, "r") as ofile:
with open(outfile) as ofile:
output = ofile.read()
assert output == input_file_content

View File

@ -89,7 +89,7 @@ class TestJSONUrlIndexWriter:
writer.add_url(f)
writer.save()
with open(tmpfile, "r") as results:
with open(tmpfile) as results:
try:
content = json.load(results)
except JSONDecodeError:
@ -130,7 +130,7 @@ class TestTestUrlIndexWriter:
code = f.response.status_code
writer.add_url(f)
with open(tmpfile, "r") as results:
with open(tmpfile) as results:
content = results.read()
assert url in content
assert method in content
@ -146,7 +146,7 @@ class TestTestUrlIndexWriter:
writer.add_url(f)
writer.save()
with open(tmpfile, "r") as results:
with open(tmpfile) as results:
content = results.read()
assert url in content
assert method in content

View File

@ -157,7 +157,7 @@ class UrlInjectionAddon:
self.name = f"{self.__class__.__name__}-{injection_gen.__class__.__name__}-{self.__hash__()}"
self.flt = flowfilter.parse(flt)
self.injection_gen = injection_gen
with open(url_index_file, "r") as f:
with open(url_index_file) as f:
self.url_store = json.load(f)
def response(self, flow: HTTPFlow):

View File

@ -59,20 +59,22 @@ FULL_PAYLOAD = FRONT_WALL + PAYLOAD + BACK_WALL
# - injection_point -> str
# - exploit -> str
# - line -> str
XSSData = NamedTuple('XSSData', [('url', str),
('injection_point', str),
('exploit', str),
('line', str)])
class XSSData(NamedTuple):
url: str
injection_point: str
exploit: str
line: str
# A SQLiData is named tuple with the following fields:
# - url -> str
# - injection_point -> str
# - regex -> str
# - dbms -> str
SQLiData = NamedTuple('SQLiData', [('url', str),
('injection_point', str),
('regex', str),
('dbms', str)])
class SQLiData(NamedTuple):
url: str
injection_point: str
regex: str
dbms: str
VulnData = Tuple[Optional[XSSData], Optional[SQLiData]]

View File

@ -244,7 +244,7 @@ class AddonManager:
pass
else:
raise exceptions.AddonManagerError(
"Addon handler {} ({}) not callable".format(name, a)
f"Addon handler {name} ({a}) not callable"
)
def trigger(self, name, *args, **kwargs):

View File

@ -53,7 +53,7 @@ class Browser:
[
cmd,
"--user-data-dir=%s" % str(self.tdir.name),
"--proxy-server=%s:%s" % (
"--proxy-server={}:{}".format(
ctx.options.listen_host or "127.0.0.1",
ctx.options.listen_port
),

View File

@ -70,7 +70,7 @@ class Core:
client_certs = os.path.expanduser(opts.client_certs)
if not os.path.exists(client_certs):
raise exceptions.OptionsError(
"Client certificate path does not exist: {}".format(opts.client_certs)
f"Client certificate path does not exist: {opts.client_certs}"
)
@command.command("set")
@ -194,7 +194,7 @@ class Core:
req.url = val
except ValueError as e:
raise exceptions.CommandError(
"URL %s is invalid: %s" % (repr(val), e)
"URL {} is invalid: {}".format(repr(val), e)
) from e
else:
self.rupdate = False
@ -215,7 +215,7 @@ class Core:
updated.append(f)
ctx.master.addons.trigger("update", updated)
ctx.log.alert("Set %s on %s flows." % (attr, len(updated)))
ctx.log.alert("Set {} on {} flows.".format(attr, len(updated)))
@command.command("flow.decode")
def decode(self, flows: typing.Sequence[flow.Flow], part: str) -> None:

View File

@ -112,7 +112,7 @@ class Cut:
[strutils.always_str(x) or "" for x in vals] # type: ignore
)
ctx.log.alert("Saved %s cuts over %d flows as CSV." % (len(cuts), len(flows)))
except IOError as e:
except OSError as e:
ctx.log.error(str(e))
@command.command("cut.clip")

View File

@ -23,7 +23,7 @@ def indent(n: int, text: str) -> str:
def colorful(line, styles):
yield u" " # we can already indent here
yield " " # we can already indent here
for (style, text) in line:
yield click.style(text, **styles.get(style, {}))
@ -115,8 +115,8 @@ class Dumper:
text=dict(fg="green")
)
content = u"\r\n".join(
u"".join(colorful(line, styles)) for line in lines_to_echo
content = "\r\n".join(
"".join(colorful(line, styles)) for line in lines_to_echo
)
if content:
self.echo("")
@ -252,7 +252,7 @@ class Dumper:
if f.error:
msg = strutils.escape_control_characters(f.error.msg)
self.echo(" << {}".format(msg), bold=True, fg="red")
self.echo(f" << {msg}", bold=True, fg="red")
def match(self, f):
if ctx.options.flow_detail == 0:

View File

@ -141,7 +141,7 @@ class Export():
fp.write(v)
else:
fp.write(v.encode("utf-8"))
except IOError as e:
except OSError as e:
ctx.log.error(str(e))
@command.command("export.clip")

View File

@ -135,7 +135,7 @@ class MapLocal:
try:
contents = local_file.read_bytes()
except IOError as e:
except OSError as e:
ctx.log.warn(f"Could not read file: {e}")
continue

View File

@ -43,7 +43,7 @@ class ModifyBody:
if spec.matches(flow):
try:
replacement = spec.read_replacement()
except IOError as e:
except OSError as e:
ctx.log.warn(f"Could not read replacement file: {e}")
continue
if flow.response:

View File

@ -42,7 +42,7 @@ def parse_modify_spec(option: str, subject_is_regex: bool) -> ModifySpec:
try:
spec.read_replacement()
except IOError as e:
except OSError as e:
raise ValueError(f"Invalid file path: {replacement[1:]} ({e})")
return spec
@ -91,7 +91,7 @@ class ModifyHeaders:
if spec.matches(flow):
try:
replacement = spec.read_replacement()
except IOError as e:
except OSError as e:
ctx.log.warn(f"Could not read replacement file: {e}")
continue
else:

View File

@ -85,12 +85,12 @@ class ProxyAuth:
if self.is_proxy_auth():
return http.make_error_response(
status_codes.PROXY_AUTH_REQUIRED,
headers=mitmproxy.net.http.Headers(Proxy_Authenticate='Basic realm="{}"'.format(REALM)),
headers=mitmproxy.net.http.Headers(Proxy_Authenticate=f'Basic realm="{REALM}"'),
)
else:
return http.make_error_response(
status_codes.UNAUTHORIZED,
headers=mitmproxy.net.http.Headers(WWW_Authenticate='Basic realm="{}"'.format(REALM)),
headers=mitmproxy.net.http.Headers(WWW_Authenticate=f'Basic realm="{REALM}"'),
)
def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]:

View File

@ -48,7 +48,7 @@ class ReadFile:
continue
await ctx.master.load_flow(flow)
cnt += 1
except (IOError, exceptions.FlowReadException) as e:
except (OSError, exceptions.FlowReadException) as e:
if cnt:
ctx.log.warn("Flow file corrupted - loaded %i flows." % cnt)
else:
@ -62,8 +62,8 @@ class ReadFile:
try:
with open(path, "rb") as f:
return await self.load_flows(f)
except IOError as e:
ctx.log.error("Cannot load flows: {}".format(e))
except OSError as e:
ctx.log.error(f"Cannot load flows: {e}")
raise exceptions.FlowReadException(str(e)) from e
async def doread(self, rfile):

View File

@ -38,7 +38,7 @@ class Save:
def start_stream_to_path(self, path, flt):
try:
f = self.open_file(path)
except IOError as v:
except OSError as v:
raise exceptions.OptionsError(str(v))
self.stream = io.FilteredFlowWriter(f, flt)
self.active_flows = set()
@ -68,7 +68,7 @@ class Save:
"""
try:
f = self.open_file(path)
except IOError as v:
except OSError as v:
raise exceptions.CommandError(v) from v
stream = io.FlowWriter(f)
for i in flows:
@ -107,6 +107,6 @@ class Save:
if self.stream:
for f in self.active_flows:
self.stream.add(f)
self.active_flows = set([])
self.active_flows = set()
self.stream.fo.close()
self.stream = None

View File

@ -51,7 +51,7 @@ def script_error_handler(path, exc, msg="", tb=False):
lineno = ""
if hasattr(exc, "lineno"):
lineno = str(exc.lineno)
log_msg = "in script {}:{} {}".format(path, lineno, exception)
log_msg = f"in script {path}:{lineno} {exception}"
if tb:
etype, value, tback = sys.exc_info()
tback = addonmanager.cut_traceback(tback, "invoke_addon")

View File

@ -17,7 +17,7 @@ from mitmproxy.exceptions import SessionLoadException, CommandError
from mitmproxy.utils.data import pkg_data
class KeyifyList(object):
class KeyifyList:
def __init__(self, inner, key):
self.inner = inner
self.key = key
@ -87,7 +87,7 @@ class SessionDB:
def _create_session(self):
script_path = pkg_data.path("io/sql/session_create.sql")
with open(script_path, 'r') as qry:
with open(script_path) as qry:
self.con.executescript(qry.read())
self.con.commit()

View File

@ -479,7 +479,7 @@ class View(collections.abc.Sequence):
# get new flows each time. It would be more efficient to just have a
# .newid() method or something.
self.add([i.copy()])
except IOError as e:
except OSError as e:
ctx.log.error(e.strerror)
except exceptions.FlowReadException as e:
ctx.log.error(str(e))

View File

@ -159,7 +159,7 @@ class CommandManager:
self.add(o.command_name, o)
except exceptions.CommandError as e:
self.master.log.warn(
"Could not load command %s: %s" % (o.command_name, e)
f"Could not load command {o.command_name}: {e}"
)
def add(self, path: str, func: typing.Callable):

View File

@ -61,7 +61,7 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
def __repr__(self):
if self.tls_established:
tls = "[{}] ".format(self.tls_version)
tls = f"[{self.tls_version}] "
else:
tls = ""

View File

@ -113,7 +113,7 @@ def get_message_content_view(viewname, message, flow):
)
if enc:
description = "{} {}".format(enc, description)
description = f"{enc} {description}"
return description, lines, error

View File

@ -12,7 +12,7 @@ class ViewAuto(base.View):
ctype = headers.get("content-type")
if data and ctype:
ct = http.parse_content_type(ctype) if ctype else None
ct = "%s/%s" % (ct[0], ct[1])
ct = "{}/{}".format(ct[0], ct[1])
if ct in contentviews.content_types_map:
return contentviews.content_types_map[ct][0](data, **metadata)
elif strutils.is_xml(data):

View File

@ -15,7 +15,7 @@ def parse_png(data: bytes) -> Metadata:
img = png.Png(KaitaiStream(io.BytesIO(data)))
parts = [
('Format', 'Portable network graphics'),
('Size', "{0} x {1} px".format(img.ihdr.width, img.ihdr.height))
('Size', f"{img.ihdr.width} x {img.ihdr.height} px")
]
for chunk in img.chunks:
if chunk.type == 'gAMA':
@ -23,7 +23,7 @@ def parse_png(data: bytes) -> Metadata:
elif chunk.type == 'pHYs':
aspectx = chunk.body.pixels_per_unit_x
aspecty = chunk.body.pixels_per_unit_y
parts.append(('aspect', "{0} x {1}".format(aspectx, aspecty)))
parts.append(('aspect', f"{aspectx} x {aspecty}"))
elif chunk.type == 'tEXt':
parts.append((chunk.body.keyword, chunk.body.text))
elif chunk.type == 'iTXt':
@ -38,8 +38,8 @@ def parse_gif(data: bytes) -> Metadata:
descriptor = img.logical_screen_descriptor
parts = [
('Format', 'Compuserve GIF'),
('Version', "GIF{}".format(img.hdr.version)),
('Size', "{} x {} px".format(descriptor.screen_width, descriptor.screen_height)),
('Version', f"GIF{img.hdr.version}"),
('Size', f"{descriptor.screen_width} x {descriptor.screen_height} px"),
('background', str(descriptor.bg_color_index))
]
ext_blocks = []
@ -66,10 +66,10 @@ def parse_jpeg(data: bytes) -> Metadata:
]
for segment in img.segments:
if segment.marker._name_ == 'sof0':
parts.append(('Size', "{0} x {1} px".format(segment.data.image_width, segment.data.image_height)))
parts.append(('Size', f"{segment.data.image_width} x {segment.data.image_height} px"))
if segment.marker._name_ == 'app0':
parts.append(('jfif_version', "({0}, {1})".format(segment.data.version_major, segment.data.version_minor)))
parts.append(('jfif_density', "({0}, {1})".format(segment.data.density_x, segment.data.density_y)))
parts.append(('jfif_version', f"({segment.data.version_major}, {segment.data.version_minor})"))
parts.append(('jfif_density', f"({segment.data.density_x}, {segment.data.density_y})"))
parts.append(('jfif_unit', str(segment.data.density_units._value_)))
if segment.marker._name_ == 'com':
parts.append(('comment', str(segment.data)))

View File

@ -41,7 +41,7 @@ class ViewImage(base.View):
("Image Format", image_type or "unknown")
]
if image_type:
view_name = "{} Image".format(image_type.upper())
view_name = f"{image_type.upper()} Image"
else:
view_name = "Unknown Image"
return view_name, base.format_dict(multidict.MultiDict(image_metadata))

View File

@ -10,8 +10,7 @@ class ViewMultipart(base.View):
@staticmethod
def _format(v):
yield [("highlight", "Form data:\n")]
for message in base.format_dict(multidict.MultiDict(v)):
yield message
yield from base.format_dict(multidict.MultiDict(v))
def __call__(self, data, **metadata):
headers = metadata.get("headers", {})

View File

@ -30,7 +30,7 @@ class Gif(KaitaiStruct):
self.hdr = self._root.Header(self._io, self, self._root)
self.logical_screen_descriptor = self._root.LogicalScreenDescriptorStruct(self._io, self, self._root)
if self.logical_screen_descriptor.has_color_table:
self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3))
self._raw_global_color_table = self._io.read_bytes(self.logical_screen_descriptor.color_table_size * 3)
io = KaitaiStream(BytesIO(self._raw_global_color_table))
self.global_color_table = self._root.ColorTable(io, self, self._root)
@ -99,7 +99,7 @@ class Gif(KaitaiStruct):
self.height = self._io.read_u2le()
self.flags = self._io.read_u1()
if self.has_color_table:
self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3))
self._raw_local_color_table = self._io.read_bytes(self.color_table_size * 3)
io = KaitaiStream(BytesIO(self._raw_local_color_table))
self.local_color_table = self._root.ColorTable(io, self, self._root)
@ -168,7 +168,7 @@ class Gif(KaitaiStruct):
self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70))
self.version = (self._io.read_bytes(3)).decode(u"ASCII")
self.version = (self._io.read_bytes(3)).decode("ASCII")
class ExtGraphicControl(KaitaiStruct):

View File

@ -94,7 +94,7 @@ class GoogleProtobuf(KaitaiStruct):
if hasattr(self, '_m_wire_type'):
return self._m_wire_type if hasattr(self, '_m_wire_type') else None
self._m_wire_type = self._root.Pair.WireTypes((self.key.value & 7))
self._m_wire_type = self._root.Pair.WireTypes(self.key.value & 7)
return self._m_wire_type if hasattr(self, '_m_wire_type') else None
@property

View File

@ -79,23 +79,23 @@ class Jpeg(KaitaiStruct):
if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
_on = self.marker
if _on == self._root.Segment.MarkerEnum.sos:
self._raw_data = self._io.read_bytes((self.length - 2))
self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSos(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app1:
self._raw_data = self._io.read_bytes((self.length - 2))
self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp1(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.sof0:
self._raw_data = self._io.read_bytes((self.length - 2))
self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSof0(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app0:
self._raw_data = self._io.read_bytes((self.length - 2))
self._raw_data = self._io.read_bytes(self.length - 2)
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp0(io, self, self._root)
else:
self.data = self._io.read_bytes((self.length - 2))
self.data = self._io.read_bytes(self.length - 2)
if self.marker == self._root.Segment.MarkerEnum.sos:
self.image_data = self._io.read_bytes_full()
@ -131,9 +131,9 @@ class Jpeg(KaitaiStruct):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII")
self.magic = (self._io.read_bytes_term(0, False, True, True)).decode("ASCII")
_on = self.magic
if _on == u"Exif":
if _on == "Exif":
self.body = self._root.ExifInJpeg(self._io, self, self._root)
@ -199,7 +199,7 @@ class Jpeg(KaitaiStruct):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = (self._io.read_bytes(5)).decode(u"ASCII")
self.magic = (self._io.read_bytes(5)).decode("ASCII")
self.version_major = self._io.read_u1()
self.version_minor = self._io.read_u1()
self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1())
@ -207,4 +207,4 @@ class Jpeg(KaitaiStruct):
self.density_y = self._io.read_u2be()
self.thumbnail_x = self._io.read_u1()
self.thumbnail_y = self._io.read_u1()
self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3))
self.thumbnail = self._io.read_bytes((self.thumbnail_x * self.thumbnail_y) * 3)

View File

@ -37,7 +37,7 @@ class Png(KaitaiStruct):
while True:
_ = self._root.Chunk(self._io, self, self._root)
self.chunks.append(_)
if ((_.type == u"IEND") or (self._io.is_eof())) :
if ((_.type == "IEND") or (self._io.is_eof())) :
break
class Rgb(KaitaiStruct):
@ -56,45 +56,45 @@ class Png(KaitaiStruct):
self._parent = _parent
self._root = _root if _root else self
self.len = self._io.read_u4be()
self.type = (self._io.read_bytes(4)).decode(u"UTF-8")
self.type = (self._io.read_bytes(4)).decode("UTF-8")
_on = self.type
if _on == u"iTXt":
if _on == "iTXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.InternationalTextChunk(io, self, self._root)
elif _on == u"gAMA":
elif _on == "gAMA":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.GamaChunk(io, self, self._root)
elif _on == u"tIME":
elif _on == "tIME":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TimeChunk(io, self, self._root)
elif _on == u"PLTE":
elif _on == "PLTE":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PlteChunk(io, self, self._root)
elif _on == u"bKGD":
elif _on == "bKGD":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.BkgdChunk(io, self, self._root)
elif _on == u"pHYs":
elif _on == "pHYs":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PhysChunk(io, self, self._root)
elif _on == u"tEXt":
elif _on == "tEXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TextChunk(io, self, self._root)
elif _on == u"cHRM":
elif _on == "cHRM":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.ChrmChunk(io, self, self._root)
elif _on == u"sRGB":
elif _on == "sRGB":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.SrgbChunk(io, self, self._root)
elif _on == u"zTXt":
elif _on == "zTXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.CompressedTextChunk(io, self, self._root)
@ -199,7 +199,7 @@ class Png(KaitaiStruct):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.compression_method = self._io.read_u1()
self._raw_text_datastream = self._io.read_bytes_full()
self.text_datastream = zlib.decompress(self._raw_text_datastream)
@ -264,12 +264,12 @@ class Png(KaitaiStruct):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.compression_flag = self._io.read_u1()
self.compression_method = self._io.read_u1()
self.language_tag = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII")
self.translated_keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
self.text = (self._io.read_bytes_full()).decode(u"UTF-8")
self.language_tag = (self._io.read_bytes_term(0, False, True, True)).decode("ASCII")
self.translated_keyword = (self._io.read_bytes_term(0, False, True, True)).decode("UTF-8")
self.text = (self._io.read_bytes_full()).decode("UTF-8")
class TextChunk(KaitaiStruct):
@ -277,8 +277,8 @@ class Png(KaitaiStruct):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"iso8859-1")
self.text = (self._io.read_bytes_full()).decode(u"iso8859-1")
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode("iso8859-1")
self.text = (self._io.read_bytes_full()).decode("iso8859-1")
class TimeChunk(KaitaiStruct):

View File

@ -41,7 +41,7 @@ class ASCommandResponse:
raise ValueError("Empty WBXML body passed")
except Exception as e:
self.xmlString = None
raise ValueError("Error: {0}".format(e))
raise ValueError(f"Error: {e}")
def getWBXMLBytes(self):
return self.wbxmlBytes

View File

@ -861,7 +861,7 @@ class ASWBXML:
if (newCodePage >= 0 and newCodePage < 25):
self.currentCodePage = newCodePage
else:
raise InvalidDataException("Unknown code page ID 0x{0:X} encountered in WBXML".format(currentByte))
raise InvalidDataException(f"Unknown code page ID 0x{currentByte:X} encountered in WBXML")
elif ( currentByte == GlobalTokens.END ):
if (currentNode != None and currentNode.parentNode != None):
currentNode = currentNode.parentNode
@ -878,14 +878,14 @@ class ASWBXML:
currentNode.appendChild(newTextNode)
elif ( currentByte in unusedArray):
raise InvalidDataException("Encountered unknown global token 0x{0:X}.".format(currentByte))
raise InvalidDataException(f"Encountered unknown global token 0x{currentByte:X}.")
else:
hasAttributes = (currentByte & 0x80) > 0
hasContent = (currentByte & 0x40) > 0
token = currentByte & 0x3F
if (hasAttributes):
raise InvalidDataException("Token 0x{0:X} has attributes.".format(token))
raise InvalidDataException(f"Token 0x{token:X} has attributes.")
strTag = self.codePages[self.currentCodePage].getTag(token)
if (strTag == None):

View File

@ -52,7 +52,7 @@ class ASWBXMLByteQueue(Queue):
def dequeueAndLog(self):
singleByte = self.get()
self.bytesDequeued += 1
logging.debug("Dequeued byte 0x{0:X} ({1} total)".format(singleByte, self.bytesDequeued))
logging.debug(f"Dequeued byte 0x{singleByte:X} ({self.bytesDequeued} total)")
return singleByte
"""

View File

@ -89,7 +89,7 @@ class Reply:
"""
if self.state != "start":
raise exceptions.ControlException(
"Reply is {}, but expected it to be start.".format(self.state)
f"Reply is {self.state}, but expected it to be start."
)
self._state = "taken"
@ -101,7 +101,7 @@ class Reply:
"""
if self.state != "taken":
raise exceptions.ControlException(
"Reply is {}, but expected it to be taken.".format(self.state)
f"Reply is {self.state}, but expected it to be taken."
)
if not self.has_message:
raise exceptions.ControlException("There is no reply message.")
@ -119,7 +119,7 @@ class Reply:
def send(self, msg, force=False):
if self.state not in {"start", "taken"}:
raise exceptions.ControlException(
"Reply is {}, but expected it to be start or taken.".format(self.state)
f"Reply is {self.state}, but expected it to be start or taken."
)
if self.has_message and not force:
raise exceptions.ControlException("There is already a reply message.")

View File

@ -1,5 +1,3 @@
class BiDi:
"""

View File

@ -60,7 +60,7 @@ class _MultiDict(MutableMapping, metaclass=ABCMeta):
yield key
def __len__(self):
return len(set(self._kconv(key) for key, _ in self.fields))
return len({self._kconv(key) for key, _ in self.fields})
def __eq__(self, other):
if isinstance(other, MultiDict):

View File

@ -90,6 +90,6 @@ def iterate(f: flow.Flow) -> TEventGenerator:
try:
e = _iterate_map[type(f)]
except KeyError as err:
raise TypeError("Unknown flow type: {}".format(f)) from err
raise TypeError(f"Unknown flow type: {f}") from err
else:
yield from e(f)

View File

@ -46,7 +46,7 @@ class HTTPFlow(flow.Flow):
s = "<HTTPFlow"
for a in ("request", "response", "error", "client_conn", "server_conn"):
if getattr(self, a, False):
s += "\r\n %s = {flow.%s}" % (a, a)
s += f"\r\n {a} = {{flow.{a}}}"
s += ">"
return s.format(flow=self)

View File

@ -1,4 +1,3 @@
from .io import FlowWriter, FlowReader, FilteredFlowWriter, read_flows_from_paths
from .db import DBHandler

View File

@ -36,5 +36,5 @@ class DBHandler:
flows = []
self._c.execute('SELECT pbuf_blob FROM FLOWS')
for row in self._c.fetchall():
flows.append((protobuf.loads(row[0])))
flows.append(protobuf.loads(row[0]))
return flows

View File

@ -82,6 +82,6 @@ def read_flows_from_paths(paths):
path = os.path.expanduser(path)
with open(path, "rb") as f:
flows.extend(FlowReader(f).stream())
except IOError as e:
except OSError as e:
raise exceptions.FlowReadException(e.strerror)
return flows

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: http.proto
"""Generated protocol buffer code."""

View File

@ -222,7 +222,7 @@ def parse(data_type: int, data: bytes) -> TSerializable:
val, data = pop(data)
d[key] = val # type: ignore
return d
raise ValueError("unknown type tag: {}".format(data_type))
raise ValueError(f"unknown type tag: {data_type}")
def pop(data: bytes) -> typing.Tuple[TSerializable, bytes]:
@ -242,7 +242,7 @@ def pop(data: bytes) -> typing.Tuple[TSerializable, bytes]:
except IndexError:
# This fires if len(data) < dlen, meaning we don't need
# to further validate that data is the right length.
raise ValueError("not a tnetstring: invalid length prefix: {}".format(length))
raise ValueError(f"not a tnetstring: invalid length prefix: {length}")
# Parse the data based on the type tag.
return parse(data_type, data), remain

View File

@ -12,7 +12,7 @@ class LogEntry:
return False
def __repr__(self):
return "LogEntry({}, {})".format(self.msg, self.level)
return f"LogEntry({self.msg}, {self.level})"
class Log:

View File

@ -201,7 +201,7 @@ def _format_pairs(pairs, specials=(), sep="; "):
if k.lower() not in specials and _has_special(v):
v = ESCAPE.sub(r"\\\1", v)
v = '"%s"' % v
vals.append("%s=%s" % (k, v))
vals.append(f"{k}={v}")
return sep.join(vals)

View File

@ -178,9 +178,9 @@ def parse_content_type(c: str) -> Optional[Tuple[str, str, Dict[str, str]]]:
def assemble_content_type(type, subtype, parameters):
if not parameters:
return "{}/{}".format(type, subtype)
return f"{type}/{subtype}"
params = "; ".join(
"{}={}".format(k, v)
f"{k}={v}"
for k, v in parameters.items()
)
return "{}/{}; {}".format(

View File

@ -124,8 +124,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
max_chunk_size = limit
if expected_size is None:
for x in _read_chunked(rfile, limit):
yield x
yield from _read_chunked(rfile, limit)
elif expected_size >= 0:
if limit is not None and expected_size > limit:
raise exceptions.HttpException(
@ -151,7 +150,7 @@ def read_body(rfile, expected_size, limit=None, max_chunk_size=4096):
bytes_left -= chunk_size
not_done = rfile.read(1)
if not_done:
raise exceptions.HttpException("HTTP body too large. Limit is {}.".format(limit))
raise exceptions.HttpException(f"HTTP body too large. Limit is {limit}.")
def connection_close(http_version, headers):
@ -291,14 +290,14 @@ def _read_response_line(rfile):
_check_http_version(http_version)
except ValueError:
raise exceptions.HttpSyntaxException("Bad HTTP response line: {}".format(line))
raise exceptions.HttpSyntaxException(f"Bad HTTP response line: {line}")
return http_version, status_code, message
def _check_http_version(http_version):
if not re.match(br"^HTTP/\d\.\d$", http_version):
raise exceptions.HttpSyntaxException("Unknown HTTP version: {}".format(http_version))
raise exceptions.HttpSyntaxException(f"Unknown HTTP version: {http_version}")
def _read_headers(rfile):
@ -354,7 +353,7 @@ def _read_chunked(rfile, limit=sys.maxsize):
try:
length = int(line, 16)
except ValueError:
raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line))
raise exceptions.HttpSyntaxException(f"Invalid chunked encoding length: {line}")
total += length
if total > limit:
raise exceptions.HttpException(

View File

@ -134,7 +134,7 @@ class Message(serializable.Serializable):
content = encoding.decode(self.raw_content, ce)
# A client may illegally specify a byte -> str encoding here (e.g. utf8)
if isinstance(content, str):
raise ValueError("Invalid Content-Encoding: {}".format(ce))
raise ValueError(f"Invalid Content-Encoding: {ce}")
return content
except ValueError:
if strict:

View File

@ -39,19 +39,19 @@ def parse(server_spec: str) -> ServerSpec:
"""
m = server_spec_re.match(server_spec)
if not m:
raise ValueError("Invalid server specification: {}".format(server_spec))
raise ValueError(f"Invalid server specification: {server_spec}")
# defaulting to https/port 443 may annoy some folks, but it's secure-by-default.
scheme = m.group("scheme") or "https"
if scheme not in ("http", "https"):
raise ValueError("Invalid server scheme: {}".format(scheme))
raise ValueError(f"Invalid server scheme: {scheme}")
host = m.group("host")
# IPv6 brackets
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
if not check.is_valid_host(host.encode("idna")):
raise ValueError("Invalid hostname: {}".format(host))
raise ValueError(f"Invalid hostname: {host}")
if m.group("port"):
port = int(m.group("port"))
@ -61,7 +61,7 @@ def parse(server_spec: str) -> ServerSpec:
"https": 443
}[scheme]
if not check.is_valid_port(port):
raise ValueError("Invalid port: {}".format(port))
raise ValueError(f"Invalid port: {port}")
return ServerSpec(scheme, (host, port))

View File

@ -80,7 +80,7 @@ class Writer(_FileLike):
if hasattr(self.o, "flush"):
try:
self.o.flush()
except (socket.error, IOError) as v:
except OSError as v:
raise exceptions.TcpDisconnect(str(v))
def write(self, v):
@ -97,7 +97,7 @@ class Writer(_FileLike):
r = self.o.write(v)
self.add_log(v[:r])
return r
except (SSL.Error, socket.error) as e:
except (SSL.Error, OSError) as e:
raise exceptions.TcpDisconnect(str(e))
@ -134,7 +134,7 @@ class Reader(_FileLike):
raise exceptions.TcpTimeout()
except socket.timeout:
raise exceptions.TcpTimeout()
except socket.error as e:
except OSError as e:
raise exceptions.TcpDisconnect(str(e))
except SSL.SysCallError as e:
if e.args == (-1, 'Unexpected EOF'):
@ -178,7 +178,7 @@ class Reader(_FileLike):
raise exceptions.TcpDisconnect()
else:
raise exceptions.TcpReadIncomplete(
"Expected %s bytes, got %s" % (length, len(result))
"Expected {} bytes, got {}".format(length, len(result))
)
return result
@ -197,7 +197,7 @@ class Reader(_FileLike):
if isinstance(self.o, socket_fileobject):
try:
return self.o._sock.recv(length, socket.MSG_PEEK)
except socket.error as e:
except OSError as e:
raise exceptions.TcpException(repr(e))
elif isinstance(self.o, SSL.Connection):
try:
@ -268,7 +268,7 @@ def close_socket(sock):
# Now we can close the other half as well.
sock.shutdown(socket.SHUT_RD)
except socket.error:
except OSError:
pass
sock.close()
@ -442,7 +442,7 @@ class TCPClient(_Connection):
sock.connect(sa)
return sock
except socket.error as _:
except OSError as _:
err = _
if sock is not None:
sock.close()
@ -450,12 +450,12 @@ class TCPClient(_Connection):
if err is not None:
raise err
else:
raise socket.error("getaddrinfo returns an empty list") # pragma: no cover
raise OSError("getaddrinfo returns an empty list") # pragma: no cover
def connect(self):
try:
connection = self.create_connection()
except (socket.error, IOError) as err:
except OSError as err:
raise exceptions.TcpException(
'Error connecting to "%s": %s' %
(self.address[0], err)
@ -555,7 +555,7 @@ class TCPServer:
self.__shutdown_request = False
if self.address[0] == 'localhost':
raise socket.error("Binding to 'localhost' is prohibited. Please use '::1' or '127.0.0.1' directly.")
raise OSError("Binding to 'localhost' is prohibited. Please use '::1' or '127.0.0.1' directly.")
self.socket = None
@ -568,7 +568,7 @@ class TCPServer:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
self.socket.bind(self.address)
except socket.error:
except OSError:
if self.socket:
self.socket.close()
self.socket = None
@ -580,7 +580,7 @@ class TCPServer:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.socket.bind(self.address)
except socket.error:
except OSError:
if self.socket:
self.socket.close()
self.socket = None
@ -620,7 +620,7 @@ class TCPServer:
if self.socket in r:
connection, client_address = self.socket.accept()
t = basethread.BaseThread(
"TCPConnectionHandler (%s: %s:%s -> %s:%s)" % (
"TCPConnectionHandler ({}: {}:{} -> {}:{})".format(
self.__class__.__name__,
client_address[0],
client_address[1],
@ -654,11 +654,11 @@ class TCPServer:
# none.
if traceback:
exc = str(traceback.format_exc())
print(u'-' * 40, file=fp)
print('-' * 40, file=fp)
print(
u"Error in processing of request from %s" % repr(client_address), file=fp)
"Error in processing of request from %s" % repr(client_address), file=fp)
print(exc, file=fp)
print(u'-' * 40, file=fp)
print('-' * 40, file=fp)
def handle_client_connection(self, conn, client_address): # pragma: no cover
"""

View File

@ -40,7 +40,7 @@ class _Option:
self.choices = choices
def __repr__(self):
return "{value} [{type}]".format(value=self.current(), type=self.typespec)
return f"{self.current()} [{self.typespec}]"
@property
def default(self):
@ -517,18 +517,18 @@ def load_paths(opts: OptManager, *paths: str) -> None:
for p in paths:
p = os.path.expanduser(p)
if os.path.exists(p) and os.path.isfile(p):
with open(p, "rt", encoding="utf8") as f:
with open(p, encoding="utf8") as f:
try:
txt = f.read()
except UnicodeDecodeError as e:
raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e)
f"Error reading {p}: {e}"
)
try:
load(opts, txt)
except exceptions.OptionsError as e:
raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e)
f"Error reading {p}: {e}"
)
@ -563,12 +563,12 @@ def save(opts: OptManager, path: str, defaults: bool =False) -> None:
"""
path = os.path.expanduser(path)
if os.path.exists(path) and os.path.isfile(path):
with open(path, "rt", encoding="utf8") as f:
with open(path, encoding="utf8") as f:
try:
data = f.read()
except UnicodeDecodeError as e:
raise exceptions.OptionsError(
"Error trying to modify %s: %s" % (path, e)
f"Error trying to modify {path}: {e}"
)
else:
data = ""

View File

@ -1,3 +1,2 @@
def original_addr(csock):
return csock.getsockname()

View File

@ -15,10 +15,10 @@ def lookup(address, port, s):
s = s.decode()
# ALL tcp 192.168.1.13:57474 -> 23.205.82.58:443 ESTABLISHED:ESTABLISHED
specv4 = "%s:%s" % (address, port)
specv4 = f"{address}:{port}"
# ALL tcp 2a01:e35:8bae:50f0:9d9b:ef0d:2de3:b733[58505] -> 2606:4700:30::681f:4ad0[443] ESTABLISHED:ESTABLISHED
specv6 = "%s[%s]" % (address, port)
specv6 = f"{address}[{port}]"
for i in s.split("\n"):
if "ESTABLISHED:ESTABLISHED" in i and specv4 in i:

View File

@ -68,7 +68,7 @@ class Resolver:
if addr is None:
raise RuntimeError("Cannot resolve original destination.")
return tuple(addr)
except (EOFError, socket.error):
except (EOFError, OSError):
self._connect()
return self.original_addr(csock)
@ -91,7 +91,7 @@ class APIRequestHandler(socketserver.StreamRequestHandler):
except KeyError:
server = None
write(server, self.wfile)
except (EOFError, socket.error):
except (EOFError, OSError):
pass
@ -288,7 +288,7 @@ class Redirect(threading.Thread):
while True:
try:
packet = self.windivert.recv()
except WindowsError as e:
except OSError as e:
if e.winerror == 995:
return
else:
@ -306,7 +306,7 @@ class Redirect(threading.Thread):
"""
try:
return self.windivert.recv()
except WindowsError as e:
except OSError as e:
if e.winerror == 995:
return None
else:

View File

@ -101,8 +101,8 @@ class UpstreamConnectLayer(base.Layer):
def _send_connect_request(self):
self.log("Sending CONNECT request", "debug", [
"Proxy Server: {}".format(self.ctx.server_conn.address),
"Connect to: {}:{}".format(self.connect_request.host, self.connect_request.port)
f"Proxy Server: {self.ctx.server_conn.address}",
f"Connect to: {self.connect_request.host}:{self.connect_request.port}"
])
self.send_request(self.connect_request)
resp = self.read_response(self.connect_request)
@ -157,15 +157,15 @@ def validate_request_form(mode, request):
if request.is_http2 and mode is HTTPMode.transparent and request.first_line_format == "absolute":
return # dirty hack: h2 may have authority info. will be fixed properly with sans-io.
if mode == HTTPMode.transparent:
err_message = textwrap.dedent((
err_message = textwrap.dedent(
"""
Mitmproxy received an {} request even though it is not running
in regular mode. This usually indicates a misconfiguration,
please see the mitmproxy mode documentation for details.
"""
)).strip().format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form")
).strip().format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form")
else:
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % (
err_message = "Invalid HTTP request form (expected: {}, got: {})".format(
" or ".join(allowed_request_forms), request.first_line_format
)
raise exceptions.HttpException(err_message)
@ -313,7 +313,7 @@ class HttpLayer(base.Layer):
self.log(
"request",
"warn",
["HTTP protocol error in client request: {}".format(e)]
[f"HTTP protocol error in client request: {e}"]
)
return False
@ -499,7 +499,7 @@ class HttpLayer(base.Layer):
response = http.make_error_response(code, message, headers)
self.send_response(response)
except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException):
self.log("Failed to send error response to client: {}".format(message), "debug")
self.log(f"Failed to send error response to client: {message}", "debug")
def change_upstream_proxy_server(self, address):
# Make set_upstream_proxy_server always available,

View File

@ -206,7 +206,7 @@ class Http2Layer(base.Layer):
event.stream_id,
h2.errors.ErrorCodes.REFUSED_STREAM
)
self.log("HTTP body too large. Limit is {}.".format(bsl), "info")
self.log(f"HTTP body too large. Limit is {bsl}.", "info")
else:
self.streams[eid].data_queue.put(event.data)
self.streams[eid].queued_data_length += len(event.data)
@ -240,7 +240,7 @@ class Http2Layer(base.Layer):
return True
def _handle_remote_settings_changed(self, event, other_conn):
new_settings = dict([(key, cs.new_value) for (key, cs) in event.changed_settings.items()])
new_settings = {key: cs.new_value for (key, cs) in event.changed_settings.items()}
self.connections[other_conn].safe_update_settings(new_settings)
return True
@ -410,7 +410,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
def __init__(self, ctx, h2_connection, stream_id: int, request_headers: mitmproxy.net.http.Headers) -> None:
super().__init__(
ctx, name="Http2SingleStreamLayer-{}".format(stream_id)
ctx, name=f"Http2SingleStreamLayer-{stream_id}"
)
self.h2_connection = h2_connection
self.zombie: Optional[float] = None
@ -497,7 +497,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
if self.zombie is not None or connection_closed:
if pre_command is not None:
pre_command()
raise exceptions.Http2ZombieException("Connection or stream already dead: {}, {}".format(self.zombie, connection_closed))
raise exceptions.Http2ZombieException(f"Connection or stream already dead: {self.zombie}, {connection_closed}")
@detect_zombie_stream
def read_request_headers(self, flow):
@ -713,7 +713,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
)
def __call__(self): # pragma: no cover
raise EnvironmentError('Http2SingleStreamLayer must be run as thread')
raise OSError('Http2SingleStreamLayer must be run as thread')
def run(self):
layer = httpbase.HttpLayer(self, self.mode)
@ -726,7 +726,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
except exceptions.ProtocolException as e: # pragma: no cover
self.log(repr(e), "info")
except exceptions.SetServerNotAllowedException as e: # pragma: no cover
self.log("Changing the Host server for HTTP/2 connections not allowed: {}".format(e), "info")
self.log(f"Changing the Host server for HTTP/2 connections not allowed: {e}", "info")
except exceptions.Kill: # pragma: no cover
self.log(flow.Error.KILLED_MESSAGE, "info")

View File

@ -63,7 +63,7 @@ class RawTCPLayer(base.Layer):
self.channel.ask("tcp_message", f)
dst.sendall(tcp_message.content)
except (socket.error, exceptions.TcpException, SSL.Error) as e:
except (OSError, exceptions.TcpException, SSL.Error) as e:
if not self.ignore:
f.error = flow.Error("TCP connection closed unexpectedly: {}".format(repr(e)))
self.channel.tell("tcp_error", f)

View File

@ -464,7 +464,7 @@ class TlsLayer(base.Layer):
)
proto = self.alpn_for_client_connection.decode() if self.alpn_for_client_connection else '-'
self.log("ALPN selected by server: {}".format(proto), "debug")
self.log(f"ALPN selected by server: {proto}", "debug")
def _find_cert(self):
"""

View File

@ -220,7 +220,7 @@ class WebSocketLayer(base.Layer):
if not self._handle_event(event, source_conn, other_conn, is_server):
if not close_received:
close_received = True
except (socket.error, exceptions.TcpException, SSL.Error) as e:
except (OSError, exceptions.TcpException, SSL.Error) as e:
s = 'server' if is_server else 'client'
self.flow.error = flow.Error("WebSocket connection closed unexpectedly by {}: {}".format(s, repr(e)))
self.channel.tell("websocket_error", self.flow)

View File

@ -46,7 +46,7 @@ class StateObject(serializable.Serializable):
else:
setattr(self, attr, make_object(cls, val))
if state:
raise RuntimeWarning("Unexpected State in __setstate__: {}".format(state))
raise RuntimeWarning(f"Unexpected State in __setstate__: {state}")
def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any:
@ -65,7 +65,7 @@ def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.An
elif typename.startswith("typing.Tuple"):
Ts = typecheck.tuple_types(typeinfo)
if len(Ts) != len(val):
raise ValueError("Invalid data. Expected {}, got {}.".format(Ts, val))
raise ValueError(f"Invalid data. Expected {Ts}, got {val}.")
return tuple(
_process(T, x, make) for T, x in zip(Ts, val)
)

View File

@ -28,7 +28,7 @@ class RecordingMaster(mitmproxy.master.Master):
def dump_log(self, outf=sys.stdout):
for i in self.logs:
print("%s: %s" % (i.level, i.msg), file=outf)
print(f"{i.level}: {i.msg}", file=outf)
def has_log(self, txt, level=None):
for i in self.logs:

View File

@ -86,8 +86,8 @@ def twebsocketflow(client_conn=True, server_conn=True, messages=True, err=None,
if messages is True:
messages = [
websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary"),
websocket.WebSocketMessage(Opcode.TEXT, True, "hello text".encode()),
websocket.WebSocketMessage(Opcode.TEXT, False, "it's me".encode()),
websocket.WebSocketMessage(Opcode.TEXT, True, b"hello text"),
websocket.WebSocketMessage(Opcode.TEXT, False, b"it's me"),
]
if err is True:
err = terr()

View File

@ -131,7 +131,7 @@ def run(
master.run()
except exceptions.OptionsError as e:
print("%s: %s" % (sys.argv[0], e), file=sys.stderr)
print("{}: {}".format(sys.argv[0], e), file=sys.stderr)
sys.exit(1)
except (KeyboardInterrupt, RuntimeError):
pass

View File

@ -98,23 +98,23 @@ def fcol(s: str, attr: str) -> typing.Tuple[str, int, urwid.Text]:
if urwid.util.detected_encoding:
SYMBOL_REPLAY = u"\u21ba"
SYMBOL_RETURN = u"\u2190"
SYMBOL_MARK = u"\u25cf"
SYMBOL_UP = u"\u21E7"
SYMBOL_DOWN = u"\u21E9"
SYMBOL_ELLIPSIS = u"\u2026"
SYMBOL_FROM_CLIENT = u"\u21d2"
SYMBOL_TO_CLIENT = u"\u21d0"
SYMBOL_REPLAY = "\u21ba"
SYMBOL_RETURN = "\u2190"
SYMBOL_MARK = "\u25cf"
SYMBOL_UP = "\u21E7"
SYMBOL_DOWN = "\u21E9"
SYMBOL_ELLIPSIS = "\u2026"
SYMBOL_FROM_CLIENT = "\u21d2"
SYMBOL_TO_CLIENT = "\u21d0"
else:
SYMBOL_REPLAY = u"[r]"
SYMBOL_RETURN = u"<-"
SYMBOL_REPLAY = "[r]"
SYMBOL_RETURN = "<-"
SYMBOL_MARK = "#"
SYMBOL_UP = "^"
SYMBOL_DOWN = " "
SYMBOL_ELLIPSIS = "~"
SYMBOL_FROM_CLIENT = u"->"
SYMBOL_TO_CLIENT = u"<-"
SYMBOL_FROM_CLIENT = "->"
SYMBOL_TO_CLIENT = "<-"
SCHEME_STYLES = {
'http': 'scheme_http',
@ -164,7 +164,7 @@ class TruncatedText(urwid.Widget):
self.text = text
self.attr = attr
self.align = align
super(TruncatedText, self).__init__()
super().__init__()
def pack(self, size, focus=False):
return (len(self.text), 1)

View File

@ -533,7 +533,7 @@ class ConsoleAddon:
[strutils.always_str(x) or "" for x in row] # type: ignore
)
ctx.log.alert("Saved %s rows as CSV." % (len(rows)))
except IOError as e:
except OSError as e:
ctx.log.error(str(e))
@command.command("console.grideditor.editor")
@ -560,7 +560,7 @@ class ConsoleAddon:
try:
self.master.commands.call_strings(
"view.settings.setval",
["@focus", "flowview_mode_%s" % (idx,), mode]
["@focus", f"flowview_mode_{idx}", mode]
)
except exceptions.CommandError as e:
ctx.log.error(str(e))
@ -584,7 +584,7 @@ class ConsoleAddon:
return self.master.commands.call_strings(
"view.settings.getval",
["@focus", "flowview_mode_%s" % (idx,), self.master.options.console_default_contentview]
["@focus", f"flowview_mode_{idx}", self.master.options.console_default_contentview]
)
@command.command("console.key.contexts")

View File

@ -1,4 +1,3 @@
def map(km):
km.add(":", "console.command ", ["commonkey", "global"], "Command prompt")
km.add("?", "console.view.help", ["global"], "View help")

View File

@ -46,7 +46,7 @@ class EventLog(urwid.ListBox, layoutwidget.LayoutWidget):
def add_event(self, event_store, entry: log.LogEntry):
if log.log_tier(self.master.options.console_eventlog_verbosity) < log.log_tier(entry.level):
return
txt = "%s: %s" % (entry.level, str(entry.msg))
txt = "{}: {}".format(entry.level, str(entry.msg))
if entry.level in ("error", "warn", "alert"):
e = urwid.Text((entry.level, txt))
else:

View File

@ -16,7 +16,7 @@ def read_file(filename: str, escaped: bool) -> typing.AnyStr:
try:
with open(filename, "r" if escaped else "rb") as f:
d = f.read()
except IOError as v:
except OSError as v:
raise exceptions.CommandError(v)
if escaped:
try:
@ -155,7 +155,7 @@ class GridWalker(urwid.ListWalker):
def set_value(self, val, focus, focus_col, errors=None):
if not errors:
errors = set([])
errors = set()
row = list(self.lst[focus][0])
row[focus_col] = val
self.lst[focus] = [tuple(row), errors]
@ -171,7 +171,7 @@ class GridWalker(urwid.ListWalker):
self.focus = pos
self.lst.insert(
self.focus,
([c.blank() for c in self.editor.columns], set([]))
([c.blank() for c in self.editor.columns], set())
)
self.focus_col = 0
self.start_edit()

View File

@ -161,7 +161,7 @@ keyAttrs = {
"ctx": lambda x: isinstance(x, list) and [isinstance(v, str) for v in x],
"help": lambda x: isinstance(x, str),
}
requiredKeyAttrs = set(["key", "cmd"])
requiredKeyAttrs = {"key", "cmd"}
class KeymapConfig:
@ -186,18 +186,18 @@ class KeymapConfig:
def load_path(self, km, p):
if os.path.exists(p) and os.path.isfile(p):
with open(p, "rt", encoding="utf8") as f:
with open(p, encoding="utf8") as f:
try:
txt = f.read()
except UnicodeDecodeError as e:
raise KeyBindingError(
"Encoding error - expected UTF8: %s: %s" % (p, e)
f"Encoding error - expected UTF8: {p}: {e}"
)
try:
vals = self.parse(txt)
except KeyBindingError as e:
raise KeyBindingError(
"Error reading %s: %s" % (p, e)
f"Error reading {p}: {e}"
) from e
for v in vals:
user_ctxs = v.get("ctx", ["global"])
@ -212,7 +212,7 @@ class KeymapConfig:
)
except ValueError as e:
raise KeyBindingError(
"Error reading %s: %s" % (p, e)
f"Error reading {p}: {e}"
) from e
def parse(self, text):

View File

@ -1,5 +1,3 @@
class LayoutWidget:
"""
All top-level layout widgets and all widgets that may be set in an

View File

@ -20,7 +20,7 @@ class PromptPath:
pth = os.path.expanduser(pth)
try:
return self.callback(pth, *self.args)
except IOError as v:
except OSError as v:
signals.status_message.send(message=v.strerror)
@ -128,7 +128,7 @@ class ActionBar(urwid.WidgetWrap):
mkup.append(",")
prompt.extend(mkup)
prompt.append(")? ")
self.onekey = set(i[1] for i in keys)
self.onekey = {i[1] for i in keys}
self._w = urwid.Edit(prompt, "")
self.prompting = PromptStub(callback, args)
@ -305,14 +305,14 @@ class StatusBar(urwid.WidgetWrap):
marked = "M"
t = [
('heading', ("%s %s [%s/%s]" % (arrow, marked, offset, fc)).ljust(11)),
('heading', (f"{arrow} {marked} [{offset}/{fc}]").ljust(11)),
]
if self.master.options.server:
host = self.master.options.listen_host
if host == "0.0.0.0" or host == "":
host = "*"
boundaddr = "[%s:%s]" % (host, self.master.options.listen_port)
boundaddr = f"[{host}:{self.master.options.listen_port}]"
else:
boundaddr = ""
t.extend(self.get_status())

View File

@ -3,7 +3,6 @@ This file must be kept in a python2.7 and python3.5 compatible syntax!
DO NOT use type annotations or other python3.6-only features that makes this file unparsable by older interpreters!
"""
from __future__ import print_function # this is here for the version check to work on Python 2.
import sys

View File

@ -124,7 +124,7 @@ class RequestHandler(tornado.web.RequestHandler):
if isinstance(chunk, list):
chunk = tornado.escape.json_encode(chunk)
self.set_header("Content-Type", "application/json; charset=UTF-8")
super(RequestHandler, self).write(chunk)
super().write(chunk)
def set_default_headers(self):
super().set_default_headers()
@ -312,7 +312,7 @@ class FlowHandler(RequestHandler):
elif k == "content":
request.text = v
else:
raise APIError(400, "Unknown update request.{}: {}".format(k, v))
raise APIError(400, f"Unknown update request.{k}: {v}")
elif a == "response" and hasattr(flow, "response"):
response = flow.response
@ -332,9 +332,9 @@ class FlowHandler(RequestHandler):
elif k == "content":
response.text = v
else:
raise APIError(400, "Unknown update response.{}: {}".format(k, v))
raise APIError(400, f"Unknown update response.{k}: {v}")
else:
raise APIError(400, "Unknown update {}: {}".format(a, b))
raise APIError(400, f"Unknown update {a}: {b}")
except APIError:
flow.revert()
raise
@ -395,7 +395,7 @@ class FlowContent(RequestHandler):
filename = self.flow.request.path.split("?")[0].split("/")[-1]
filename = re.sub(r'[^-\w" .()]', "", filename)
cd = "attachment; filename={}".format(filename)
cd = f"attachment; filename={filename}"
self.set_header("Content-Disposition", cd)
self.set_header("Content-Type", "application/text")
self.set_header("X-Content-Type-Options", "nosniff")
@ -456,7 +456,7 @@ class Settings(RequestHandler):
}
for k in update:
if k not in allowed_options:
raise APIError(400, "Unknown setting {}".format(k))
raise APIError(400, f"Unknown setting {k}")
self.master.options.update(**update)
@ -469,7 +469,7 @@ class Options(RequestHandler):
try:
self.master.options.update(**update)
except Exception as err:
raise APIError(400, "{}".format(err))
raise APIError(400, f"{err}")
class SaveOptions(RequestHandler):

View File

@ -106,8 +106,8 @@ class WebMaster(master.Master):
iol = tornado.ioloop.IOLoop.instance()
http_server = tornado.httpserver.HTTPServer(self.app)
http_server.listen(self.options.web_port, self.options.web_host)
web_url = "http://{}:{}/".format(self.options.web_host, self.options.web_port)
web_url = f"http://{self.options.web_host}:{self.options.web_port}/"
self.log.info(
"Web server listening at {}".format(web_url),
f"Web server listening at {web_url}",
)
self.run_loop(iol.start)

View File

@ -24,11 +24,11 @@ class WebAddon:
def running(self):
if hasattr(ctx.options, "web_open_browser") and ctx.options.web_open_browser:
web_url = "http://{}:{}/".format(ctx.options.web_host, ctx.options.web_port)
web_url = f"http://{ctx.options.web_host}:{ctx.options.web_port}/"
success = open_browser(web_url)
if not success:
ctx.log.info(
"No web browser found. Please open a browser and point it to {}".format(web_url),
f"No web browser found. Please open a browser and point it to {web_url}",
)

View File

@ -117,7 +117,7 @@ def check():
for option in ("-e", "--eventlog", "--norefresh"):
if option in args:
print("{} has been removed.".format(option))
print(f"{option} has been removed.")
for option in ("--nonanonymous", "--singleuser", "--htpasswd"):
if option in args:

View File

@ -15,10 +15,10 @@ def dump_system_info():
mitmproxy_version = version.get_dev_version()
data = [
"Mitmproxy: {}".format(mitmproxy_version),
"Python: {}".format(platform.python_version()),
f"Mitmproxy: {mitmproxy_version}",
f"Python: {platform.python_version()}",
"OpenSSL: {}".format(SSL.SSLeay_version(SSL.SSLEAY_VERSION).decode()),
"Platform: {}".format(platform.platform()),
f"Platform: {platform.platform()}",
]
return "\n".join(data)
@ -84,7 +84,7 @@ def dump_info(signal=None, frame=None, file=sys.stdout, testing=False): # pragm
def dump_stacks(signal=None, frame=None, file=sys.stdout, testing=False):
id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
id2name = {th.ident: th.name for th in threading.enumerate()}
code = []
for threadId, stack in sys._current_frames().items():
code.append(

View File

@ -24,7 +24,7 @@ def pretty_size(size):
if x == int(x):
x = int(x)
return str(x) + suf
return "%s%s" % (size, SIZE_TABLE[0][0])
return "{}{}".format(size, SIZE_TABLE[0][0])
@functools.lru_cache()

View File

@ -92,7 +92,7 @@ def bytes_to_escaped_str(data, keep_spacing=False, escape_single_quotes=False):
"""
if not isinstance(data, bytes):
raise ValueError("data must be bytes, but is {}".format(data.__class__.__name__))
raise ValueError(f"data must be bytes, but is {data.__class__.__name__}")
# We always insert a double-quote here so that we get a single-quoted string back
# https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their
ret = repr(b'"' + data).lstrip("b")[2:-1]
@ -115,7 +115,7 @@ def escaped_str_to_bytes(data):
ValueError, if the escape sequence is invalid.
"""
if not isinstance(data, str):
raise ValueError("data must be str, but is {}".format(data.__class__.__name__))
raise ValueError(f"data must be str, but is {data.__class__.__name__}")
# This one is difficult - we use an undocumented Python API here
# as per http://stackoverflow.com/a/23151714/934719
@ -154,12 +154,12 @@ def hexdump(s):
A generator of (offset, hex, str) tuples
"""
for i in range(0, len(s), 16):
offset = "{:0=10x}".format(i)
offset = f"{i:0=10x}"
part = s[i:i + 16]
x = " ".join("{:0=2x}".format(i) for i in part)
x = " ".join(f"{i:0=2x}" for i in part)
x = x.ljust(47) # 16*2 + 15
part_repr = always_str(escape_control_characters(
part.decode("ascii", "replace").replace(u"\ufffd", u"."),
part.decode("ascii", "replace").replace("\ufffd", "."),
False
))
yield (offset, x, part_repr)
@ -230,7 +230,7 @@ def escape_special_areas(
"""
buf = io.StringIO()
parts = split_special_areas(data, area_delimiter)
rex = re.compile(r"[{}]".format(control_characters))
rex = re.compile(fr"[{control_characters}]")
for i, x in enumerate(parts):
if i % 2:
x = rex.sub(_move_to_private_code_plane, x)

View File

@ -55,7 +55,7 @@ def check_option_type(name: str, value: typing.Any, typeinfo: Type) -> None:
if len(types) != len(value):
raise e
for i, (x, T) in enumerate(zip(value, types)):
check_option_type("{}[{}]".format(name, i), x, T)
check_option_type(f"{name}[{i}]", x, T)
return
elif typename.startswith("typing.Sequence"):
T = sequence_type(typeinfo)

View File

@ -70,7 +70,7 @@ class PauseAt(_Action):
return e.setParseAction(lambda x: cls(*x))
def spec(self):
return "p%s,%s" % (self.offset, self.seconds)
return f"p{self.offset},{self.seconds}"
def intermediate(self, settings):
return (self.offset, "pause", self.seconds)
@ -116,7 +116,7 @@ class InjectAt(_Action):
return e.setParseAction(lambda x: cls(*x))
def spec(self):
return "i%s,%s" % (self.offset, self.value.spec())
return f"i{self.offset},{self.value.spec()}"
def intermediate(self, settings):
return (

View File

@ -295,7 +295,7 @@ class KeyValue(_Component):
return e.setParseAction(lambda x: cls(*x))
def spec(self):
return "%s%s=%s" % (self.preamble, self.key.spec(), self.value.spec())
return f"{self.preamble}{self.key.spec()}={self.value.spec()}"
def freeze(self, settings):
return self.__class__(
@ -369,7 +369,7 @@ class OptionsOrValue(_Component):
s = self.value.spec()
if s[1:-1].lower() in self.options:
s = s[1:-1].lower()
return "%s%s" % (self.preamble, s)
return f"{self.preamble}{s}"
def freeze(self, settings):
return self.__class__(self.value.freeze(settings))
@ -403,7 +403,7 @@ class Integer(_Component):
return [self.value]
def spec(self):
return "%s%s" % (self.preamble, self.value.decode())
return f"{self.preamble}{self.value.decode()}"
def freeze(self, settings_):
return self
@ -430,7 +430,7 @@ class Value(_Component):
return [self.value.get_generator(settings)]
def spec(self):
return "%s%s" % (self.preamble, self.value.spec())
return f"{self.preamble}{self.value.spec()}"
def freeze(self, settings):
return self.__class__(self.value.freeze(settings))
@ -454,7 +454,7 @@ class FixedLengthValue(Value):
# This check will fail if we know the length upfront
if lenguess is not None and lenguess != self.length:
raise exceptions.RenderError(
"Invalid value length: '%s' is %s bytes, should be %s." % (
"Invalid value length: '{}' is {} bytes, should be {}.".format(
self.spec(),
lenguess,
self.length
@ -468,7 +468,7 @@ class FixedLengthValue(Value):
# file inputs
if l != self.length:
raise exceptions.RenderError(
"Invalid value length: '%s' is %s bytes, should be %s." % (
"Invalid value length: '{}' is {} bytes, should be {}.".format(
self.spec(),
l,
self.length
@ -503,7 +503,7 @@ class Boolean(_Component):
return e.setParseAction(parse)
def spec(self):
return "%s%s" % ("-" if not self.value else "", self.name)
return "{}{}".format("-" if not self.value else "", self.name)
class IntField(_Component):
@ -537,4 +537,4 @@ class IntField(_Component):
return [str(self.value)]
def spec(self):
return "%s%s" % (self.preamble, self.origvalue)
return f"{self.preamble}{self.origvalue}"

View File

@ -15,7 +15,7 @@ class ParseException(Exception):
self.col = col
def marked(self):
return "%s\n%s" % (self.s, " " * (self.col - 1) + "^")
return "{}\n{}".format(self.s, " " * (self.col - 1) + "^")
def __str__(self):
return "%s at char %s" % (self.msg, self.col)
return f"{self.msg} at char {self.col}"

View File

@ -70,7 +70,7 @@ class RandomGenerator:
return rand_byte(chars)
def __repr__(self):
return "%s random from %s" % (self.length, self.dtype)
return f"{self.length} random from {self.dtype}"
class FileGenerator:

View File

@ -158,7 +158,7 @@ class Response(_HTTP2Message):
)
def __init__(self, tokens):
super(Response, self).__init__(tokens)
super().__init__(tokens)
self.rendered_values = None
self.stream_id = 2
@ -226,7 +226,7 @@ class Request(_HTTP2Message):
logattrs = ["method", "path"]
def __init__(self, tokens):
super(Request, self).__init__(tokens)
super().__init__(tokens)
self.rendered_values = None
self.stream_id = 1

View File

@ -14,7 +14,7 @@ class Message:
logattrs: typing.List[str] = []
def __init__(self, tokens):
track = set([])
track = set()
for i in tokens:
if i.unique_name:
if i.unique_name in track:
@ -133,7 +133,7 @@ class NestedMessage(base.Token):
]
def spec(self):
return "%s%s" % (self.preamble, self.value.spec())
return f"{self.preamble}{self.value.spec()}"
def freeze(self, settings):
f = self.parsed.freeze(settings).spec()

View File

@ -65,7 +65,7 @@ class LogCtx:
strutils.escape_control_characters(
data
.decode("ascii", "replace")
.replace(u"\ufffd", u".")
.replace("\ufffd", ".")
)
)
for i in data.split("\n"):

View File

@ -47,20 +47,20 @@ class SSLInfo:
parts.append(" Certificate [%s]" % n)
parts.append("\tSubject: ")
for cn in i.get_subject().get_components():
parts.append("\t\t%s=%s" % (
parts.append("\t\t{}={}".format(
strutils.always_str(cn[0], "utf8"),
strutils.always_str(cn[1], "utf8"))
)
parts.append("\tIssuer: ")
for cn in i.get_issuer().get_components():
parts.append("\t\t%s=%s" % (
parts.append("\t\t{}={}".format(
strutils.always_str(cn[0], "utf8"),
strutils.always_str(cn[1], "utf8"))
)
parts.extend(
[
"\tVersion: %s" % i.get_version(),
"\tValidity: %s - %s" % (
"\tValidity: {} - {}".format(
strutils.always_str(i.get_notBefore(), "utf8"),
strutils.always_str(i.get_notAfter(), "utf8")
),
@ -74,7 +74,7 @@ class SSLInfo:
OpenSSL.crypto.TYPE_DSA: "DSA"
}
t = types.get(pk.type(), "Uknown")
parts.append("\tPubkey: %s bit %s" % (pk.bits(), t))
parts.append(f"\tPubkey: {pk.bits()} bit {t}")
s = certs.Cert(i)
if s.altnames:
parts.append("\tSANs: %s" % " ".join(strutils.always_str(n, "utf8") for n in s.altnames))
@ -463,7 +463,7 @@ class Pathoc(tcp.TCPClient):
raise
finally:
if resp:
lg("<< %s %s: %s bytes" % (
lg("<< {} {}: {} bytes".format(
resp.status_code, strutils.escape_control_characters(resp.reason) if resp.reason else "", len(resp.content)
))
if resp.status_code in self.ignorecodes:

Some files were not shown because too many files have changed in this diff Show More