Added additional functions for dealing with chunks - needed for mitmproxy streaming capability
parent 273c25a705
commit 280d9b8625
@@ -136,6 +136,49 @@ def read_chunked(fp, headers, limit, is_request):
                break
    return content

def read_next_chunk(fp, headers, is_request):
    """
        Read next piece of a chunked HTTP body. Returns next piece of
        content as a string or None if we hit the end.
    """
    # TODO: see and understand the FIXME in read_chunked and
    # see if we need to apply here?
    content = ""
    code = 400 if is_request else 502
    line = fp.readline(128)
    if line == "":
        raise HttpErrorConnClosed(code, "Connection closed prematurely")
    try:
        length = int(line, 16)
    except ValueError:
        # TODO: see note in this part of read_chunked()
        raise HttpError(code, "Invalid chunked encoding length: %s"%line)
    if length > 0:
        content += fp.read(length)
    print "read content: '%s'" % content
    line = fp.readline(5)
    if line == '':
        raise HttpErrorConnClosed(code, "Connection closed prematurely")
    if line != '\r\n':
        raise HttpError(code, "Malformed chunked body: '%s' (len=%d)" % (line, length))
    if content == "":
        content = None # normalize zero length to None, meaning end of chunked stream
    return content # return this chunk

def write_chunk(fp, content):
    """
        Write a chunk with chunked encoding format, returns True
        if there should be more chunks or False if you passed
        None, meaning this was the last chunk.
    """
    if content == None or content == "":
        fp.write("0\r\n\r\n")
        return False
    fp.write("%x\r\n" % len(content))
    fp.write(content)
    fp.write("\r\n")
    return True


def get_header_tokens(headers, key):
    """
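The two helpers above form a pull/push pair: read_next_chunk decodes one chunk from the wire, and write_chunk re-encodes one chunk, with None marking the terminating zero-length chunk in both directions. A minimal sketch of how they could be combined for the streaming capability mentioned in the commit message follows; stream_chunked_body, server_rfile, client_wfile and the modify hook are illustrative placeholders, not part of this commit, and the sketch assumes it sits in the same module as the helpers.

def stream_chunked_body(server_rfile, client_wfile, headers, is_request, modify=None):
    # Hypothetical sketch: pull one decoded chunk at a time from the server
    # side and push it, re-encoded, to the client side. read_next_chunk
    # returns None at the terminating zero-length chunk, and
    # write_chunk(fp, None) emits the matching "0\r\n\r\n" terminator.
    while True:
        chunk = read_next_chunk(server_rfile, headers, is_request)
        if chunk is not None and modify:
            chunk = modify(chunk)   # optional per-chunk rewrite hook
        if not write_chunk(client_wfile, chunk):
            break                   # last chunk written, stream is done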
@@ -350,4 +393,22 @@ def read_http_body(rfile, headers, limit, is_request):
            not_done = rfile.read(1)
            if not_done:
                raise HttpError(400 if is_request else 509, "HTTP Body too large. Limit is %s," % limit)
            return content
    return content

def expected_http_body_size(headers, is_request):
    """
        Returns length of body expected or -1 if not
        known and we should just read until end of
        stream.
    """
    if "content-length" in headers:
        try:
            l = int(headers["content-length"][0])
            if l < 0:
                raise ValueError()
            return l
        except ValueError:
            raise HttpError(400 if is_request else 502, "Invalid content-length header: %s"%headers["content-length"])
    elif is_request:
        return 0
    return -1
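For orientation, expected_http_body_size complements the chunk helpers: a caller can first check whether the body is chunked and otherwise act on the returned length (0, a positive count, or -1 for read-until-close). A rough sketch of such a dispatch is below; read_body_for is a hypothetical illustration rather than mitmproxy's actual read_http_body, it assumes headers behaves like the ODictCaseless used in the tests (values stored as lists), and read_chunked's signature is taken from the hunk header above.

def read_body_for(rfile, headers, limit, is_request):
    # Hypothetical dispatch: pick a read strategy from the headers.
    if "transfer-encoding" in headers:
        te = [v.lower() for v in headers["transfer-encoding"]]
        if "chunked" in te:
            # Chunked body: decode it with the existing helper.
            return read_chunked(rfile, headers, limit, is_request)
    size = expected_http_body_size(headers, is_request)
    if size == -1:
        # Length unknown: read until the peer closes the connection.
        # (Real code would also enforce the limit; elided here.)
        return rfile.read(limit if limit is not None else -1)
    return rfile.read(size)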
@@ -38,6 +38,57 @@ def test_read_chunked():
    tutils.raises("too large", http.read_chunked, s, None, 2, True)


def test_read_next_chunk():
    s = cStringIO.StringIO(
        "4\r\n" +
        "mitm\r\n" +
        "5\r\n" +
        "proxy\r\n" +
        "e\r\n" +
        " in\r\n\r\nchunks.\r\n" +
        "0\r\n" +
        "\r\n")
    assert http.read_next_chunk(s, None, False) == "mitm"
    assert http.read_next_chunk(s, None, False) == "proxy"
    assert http.read_next_chunk(s, None, False) == " in\r\n\r\nchunks."
    assert http.read_next_chunk(s, None, False) == None

    s = cStringIO.StringIO("")
    tutils.raises("closed prematurely", http.read_next_chunk, s, None, False)

    s = cStringIO.StringIO("1\r\na\r\n0\r\n")
    http.read_next_chunk(s, None, False)
    tutils.raises("closed prematurely", http.read_next_chunk, s, None, False)

    s = cStringIO.StringIO("1\r\nfoo")
    tutils.raises("malformed chunked body", http.read_next_chunk, s, None, False)

    s = cStringIO.StringIO("foo\r\nfoo")
    tutils.raises(http.HttpError, http.read_next_chunk, s, None, False)

def test_write_chunk():

    expected = ("" +
        "4\r\n" +
        "mitm\r\n" +
        "5\r\n" +
        "proxy\r\n" +
        "e\r\n" +
        " in\r\n\r\nchunks.\r\n" +
        "0\r\n" +
        "\r\n")

    s = cStringIO.StringIO()
    http.write_chunk(s, "mitm")
    http.write_chunk(s, "proxy")
    http.write_chunk(s, " in\r\n\r\nchunks.")
    http.write_chunk(s, None)

    print len(s.getvalue())
    print len(expected)

    assert s.getvalue() == expected

def test_connection_close():
    h = odict.ODictCaseless()
    assert http.connection_close((1, 0), h)
@@ -111,6 +162,25 @@ def test_read_http_body():
    s = cStringIO.StringIO("5\r\naaaaa\r\n0\r\n\r\n")
    assert http.read_http_body(s, h, 100, False) == "aaaaa"

def test_expected_http_body_size():
    # gibber in the content-length field
    h = odict.ODictCaseless()
    h["content-length"] = ["foo"]
    tutils.raises(http.HttpError, http.expected_http_body_size, h, False)
    # negative number in the content-length field
    h = odict.ODictCaseless()
    h["content-length"] = ["-7"]
    tutils.raises(http.HttpError, http.expected_http_body_size, h, False)
    # explicit length
    h = odict.ODictCaseless()
    h["content-length"] = ["5"]
    assert http.expected_http_body_size(h, False) == 5
    # no length
    h = odict.ODictCaseless()
    assert http.expected_http_body_size(h, False) == -1
    # no length request
    h = odict.ODictCaseless()
    assert http.expected_http_body_size(h, True) == 0

def test_parse_http_protocol():
    assert http.parse_http_protocol("HTTP/1.1") == (1, 1)