netlib: fix most flake8 offenses

This commit is contained in:
    parent e1cc91900f
    commit e5038c9ab7
@@ -27,6 +27,7 @@ variants. Serialization follows RFC6265.
 
 # TODO: Disallow LHS-only Cookie values
 
+
 def _read_until(s, start, term):
     """
         Read until one of the characters in term is reached.
@@ -14,12 +14,18 @@ from ..utils import always_bytes
 # See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/
 
 if six.PY2:  # pragma: no cover
-    _native = lambda x: x
-    _always_bytes = lambda x: x
+    def _native(x):
+        return x
+
+    def _always_bytes(x):
+        return x
 else:
     # While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
-    _native = lambda x: x.decode("utf-8", "surrogateescape")
-    _always_bytes = lambda x: always_bytes(x, "utf-8", "surrogateescape")
+    def _native(x):
+        return x.decode("utf-8", "surrogateescape")
+
+    def _always_bytes(x):
+        return always_bytes(x, "utf-8", "surrogateescape")
 
 
 class Headers(MultiDict):
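
The hunk above replaces assigned lambdas with def statements, which is what flake8's E731 check asks for. A minimal, hypothetical illustration (not part of the commit) of why the def form is preferred: it gives the function a real name for tracebacks and introspection.

# E731: do not assign a lambda expression, use a def
identity = lambda x: x          # flagged by flake8


def identity_def(x):
    return x


print(identity.__name__)      # '<lambda>'
print(identity_def.__name__)  # 'identity_def'
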
@@ -3,6 +3,7 @@ from __future__ import absolute_import, print_function, division
 from ... import utils
 from ...exceptions import HttpException
 
+
 def assemble_request(request):
     if request.content is None:
         raise HttpException("Cannot assemble flow with missing content")
@@ -6,14 +6,21 @@ import six
 
 from .headers import Headers
 from .. import encoding, utils
+from ..utils import always_bytes
 
 if six.PY2:  # pragma: no cover
-    _native = lambda x: x
-    _always_bytes = lambda x: x
+    def _native(x):
+        return x
+
+    def _always_bytes(x):
+        return x
 else:
-    # While the HTTP head _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
-    _native = lambda x: x.decode("utf-8", "surrogateescape")
-    _always_bytes = lambda x: utils.always_bytes(x, "utf-8", "surrogateescape")
+    # While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
+    def _native(x):
+        return x.decode("utf-8", "surrogateescape")
+
+    def _always_bytes(x):
+        return always_bytes(x, "utf-8", "surrogateescape")
 
 
 class MessageData(utils.Serializable):
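
The two hunks above keep decoding headers with the "surrogateescape" error handler. For context, a minimal sketch (not part of the commit) of what that handler buys: bytes that are not valid UTF-8 are mapped to lone surrogates, so the original bytes can always be recovered on re-encoding.

# Hypothetical illustration: surrogateescape round-trips non-UTF-8 header bytes.
raw = b"X-Example: caf\xe9"                    # 0xe9 is not valid UTF-8 here
text = raw.decode("utf-8", "surrogateescape")  # the stray byte becomes '\udce9'
assert text.encode("utf-8", "surrogateescape") == raw
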
@@ -71,6 +71,7 @@ sslversion_choices = {
     "TLSv1_2": (SSL.TLSv1_2_METHOD, SSL_BASIC_OPTIONS),
 }
 
+
 class SSLKeyLogger(object):
 
     def __init__(self, filename):
@@ -91,8 +91,7 @@ class RaisesContext(object):
 
 test_data = utils.Data(__name__)
 # FIXME: Temporary workaround during repo merge.
-import os
-test_data.dirname = os.path.join(test_data.dirname,"..","test","netlib")
+test_data.dirname = os.path.join(test_data.dirname, "..", "test", "netlib")
 
 
 def treq(**kwargs):
@@ -14,7 +14,7 @@ from netlib import utils
 MAX_16_BIT_INT = (1 << 16)
 MAX_64_BIT_INT = (1 << 64)
 
-DEFAULT=object()
+DEFAULT = object()
 
 OPCODE = utils.BiDi(
     CONTINUE=0x00,
@@ -1,18 +1,18 @@
-# Colleciton of utility functions that implement small portions of the RFC6455
-# WebSockets Protocol Useful for building WebSocket clients and servers.
-#
-# Emphassis is on readabilty, simplicity and modularity, not performance or
-# completeness
-#
-# This is a work in progress and does not yet contain all the utilites need to
-# create fully complient client/servers #
-# Spec: https://tools.ietf.org/html/rfc6455
+"""
+Colleciton of utility functions that implement small portions of the RFC6455
+WebSockets Protocol Useful for building WebSocket clients and servers.
+
+Emphassis is on readabilty, simplicity and modularity, not performance or
+completeness
+
+This is a work in progress and does not yet contain all the utilites need to
+create fully complient client/servers #
+Spec: https://tools.ietf.org/html/rfc6455
 
-# The magic sha that websocket servers must know to prove they understand
-# RFC6455
+The magic sha that websocket servers must know to prove they understand
+RFC6455
+"""
+
 from __future__ import absolute_import
 import base64
 import hashlib
@@ -94,21 +94,18 @@ class WebsocketsProtocol(object):
             upgrade="websocket"
         )
 
-
     @classmethod
     def check_client_handshake(self, headers):
        if headers.get("upgrade") != "websocket":
            return
        return headers.get("sec-websocket-key")
 
-
     @classmethod
     def check_server_handshake(self, headers):
        if headers.get("upgrade") != "websocket":
            return
        return headers.get("sec-websocket-accept")
 
-
     @classmethod
     def create_server_nonce(self, client_nonce):
        return base64.b64encode(hashlib.sha1(client_nonce + websockets_magic).digest())
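
create_server_nonce in the hunk above implements the RFC 6455 accept-key handshake. A self-contained sketch of that computation (the magic GUID and the test vector come from RFC 6455, not from this diff):

import base64
import hashlib

# RFC 6455, section 1.3: GUID appended to the client's Sec-WebSocket-Key.
WEBSOCKETS_MAGIC = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


def make_accept(client_key):
    # Sec-WebSocket-Accept = base64(SHA-1(Sec-WebSocket-Key + magic GUID))
    return base64.b64encode(hashlib.sha1(client_key + WEBSOCKETS_MAGIC).digest())


# Worked example from the RFC:
assert make_accept(b"dGhlIHNhbXBsZSBub25jZQ==") == b"s3pPLBMvzR13GZvsmXiQHl1TM94="
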
@@ -1,15 +1,15 @@
 from __future__ import (absolute_import, print_function, division)
-from io import BytesIO
-import urllib
 import time
 import traceback
 
 import six
+from io import BytesIO
 from six.moves import urllib
 
 from netlib.utils import always_bytes, native
 from . import http, tcp
 
+
 class ClientConn(object):
 
     def __init__(self, address):
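
The hunk above drops the bare `import urllib` in favour of the `from six.moves import urllib` that was already present. A hypothetical illustration (not from the commit) of the difference: six.moves exposes the Python 3 urllib layout on both interpreters, so the same attribute access works under Python 2 and 3.

# Hypothetical illustration: the Python 3 module layout via six.moves.
from six.moves import urllib

print(urllib.parse.quote("hello world"))        # 'hello%20world'
print(urllib.parse.urlencode({"q": "netlib"}))  # 'q=netlib'
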
@@ -140,7 +140,7 @@ class WSGIAdaptor(object):
             elif state["status"]:
                 raise AssertionError('Response already started')
             state["status"] = status
-            state["headers"] = http.Headers([[always_bytes(k), always_bytes(v)] for k,v in headers])
+            state["headers"] = http.Headers([[always_bytes(k), always_bytes(v)] for k, v in headers])
             if exc_info:
                 self.error_page(soc, state["headers_sent"], traceback.format_tb(exc_info[2]))
             state["headers_sent"] = True