[sans-io] more http testing, more bugfixes!

This commit is contained in:
Maximilian Hils 2020-11-19 16:48:01 +01:00
parent fdcdb28251
commit 38f006eb9a
5 changed files with 120 additions and 20 deletions

View File

@@ -2,7 +2,7 @@ import asyncio
import warnings
from typing import Optional
from mitmproxy import controller, ctx, eventsequence, log, master, options
from mitmproxy import controller, ctx, eventsequence, flow, log, master, options
from mitmproxy.flow import Error
from mitmproxy.proxy2 import commands
from mitmproxy.proxy2 import server
@@ -26,7 +26,7 @@ class AsyncReply(controller.Reply):
def kill(self, force=False):
warnings.warn("reply.kill() is deprecated, set the error attribute instead.", PendingDeprecationWarning)
self.obj.error = Error.KILLED_MESSAGE
self.obj.error = flow.Error(Error.KILLED_MESSAGE)
class ProxyConnectionHandler(server.StreamConnectionHandler):

View File

@@ -192,6 +192,7 @@ class HttpStream(layer.Layer):
if self.flow.request.stream:
if self.flow.response:
raise NotImplementedError("Can't set a response and enable streaming at the same time.")
yield HttpRequestHook(self.flow)
ok = yield from self.make_server_connection()
if not ok:
return
@@ -266,6 +267,7 @@ class HttpStream(layer.Layer):
yield SendHttp(ResponseData(self.stream_id, data), self.context.client)
elif isinstance(event, ResponseEndOfMessage):
self.flow.response.timestamp_end = time.time()
yield HttpResponseHook(self.flow)
yield SendHttp(ResponseEndOfMessage(self.stream_id), self.context.client)
self.server_state = self.state_done

View File

@@ -3,29 +3,62 @@ from mitmproxy.proxy2 import commands
class HttpRequestHeadersHook(commands.Hook):
"""
HTTP request headers were successfully read. At this point, the body is empty.
"""
name = "requestheaders"
flow: http.HTTPFlow
class HttpRequestHook(commands.Hook):
"""
The full HTTP request has been read.
Note: This event fires immediately after requestheaders if the request body is streamed.
This ensures that requestheaders -> request -> responseheaders -> response happen in that order.
"""
name = "request"
flow: http.HTTPFlow
class HttpResponseHook(commands.Hook):
name = "response"
flow: http.HTTPFlow
class HttpResponseHeadersHook(commands.Hook):
"""
The full HTTP response has been read.
"""
name = "responseheaders"
flow: http.HTTPFlow
class HttpConnectHook(commands.Hook):
class HttpResponseHook(commands.Hook):
"""
HTTP response headers were successfully read. At this point, the body is empty.
Note: If response streaming is active, this event fires after the entire body has been streamed.
"""
name = "response"
flow: http.HTTPFlow
class HttpErrorHook(commands.Hook):
"""
An HTTP error has occurred, e.g. invalid server responses, or
interrupted connections. This is distinct from a valid server HTTP
error response, which is simply a response with an HTTP error code.
"""
name = "error"
flow: http.HTTPFlow
class HttpConnectHook(commands.Hook):
"""
An HTTP CONNECT request was received. This event can be ignored for most practical purposes.
This event only occurs in regular and upstream proxy modes
when the client instructs mitmproxy to open a connection to an upstream host.
Setting a non 2xx response on the flow will return the response to the client and abort the connection.
CONNECT requests are HTTP proxy instructions for mitmproxy itself
and not forwarded. They do not generate the usual HTTP handler events,
but all requests going over the newly opened connection will.
"""
flow: http.HTTPFlow

View File

@@ -233,8 +233,12 @@ def test_response_streaming(tctx):
flow.response.stream = lambda x: x.upper()
assert (
Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
Playbook(http.HttpLayer(tctx, HTTPMode.regular))
>> DataReceived(tctx.client, b"GET http://example.com/largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
<< http.HttpRequestHeadersHook(flow)
>> reply()
<< http.HttpRequestHook(flow)
>> reply()
<< OpenConnection(server)
>> reply(None)
<< SendData(server, b"GET /largefile HTTP/1.1\r\nHost: example.com\r\n\r\n")
@@ -244,6 +248,8 @@ def test_response_streaming(tctx):
<< SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\nABC")
>> DataReceived(server, b"def")
<< SendData(tctx.client, b"DEF")
<< http.HttpResponseHook(flow)
>> reply()
)
@@ -256,7 +262,7 @@ def test_request_streaming(tctx, response):
"""
server = Placeholder(Server)
flow = Placeholder(HTTPFlow)
playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular), hooks=False)
playbook = Playbook(http.HttpLayer(tctx, HTTPMode.regular))
def enable_streaming(flow: HTTPFlow):
flow.request.stream = lambda x: x.upper()
@@ -269,6 +275,8 @@ def test_request_streaming(tctx, response):
b"abc")
<< http.HttpRequestHeadersHook(flow)
>> reply(side_effect=enable_streaming)
<< http.HttpRequestHook(flow)
>> reply()
<< OpenConnection(server)
>> reply(None)
<< SendData(server, b"POST / HTTP/1.1\r\n"
@@ -282,6 +290,10 @@ def test_request_streaming(tctx, response):
>> DataReceived(tctx.client, b"def")
<< SendData(server, b"DEF")
>> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
<< http.HttpResponseHeadersHook(flow)
>> reply()
<< http.HttpResponseHook(flow)
>> reply()
<< SendData(tctx.client, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
)
elif response == "early response":
@@ -291,17 +303,28 @@ def test_request_streaming(tctx, response):
assert (
playbook
>> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
<< http.HttpResponseHeadersHook(flow)
>> reply()
<< http.HttpResponseHook(flow)
>> reply()
<< SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
>> DataReceived(tctx.client, b"def")
<< SendData(server, b"DEF")
# Important: no request hook here!
)
elif response == "early close":
assert (
playbook
>> DataReceived(server, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
<< http.HttpResponseHeadersHook(flow)
>> reply()
<< http.HttpResponseHook(flow)
>> reply()
<< SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
>> ConnectionClosed(server)
<< CloseConnection(server)
<< http.HttpErrorHook(flow)
>> reply()
<< CloseConnection(tctx.client)
)
elif response == "early kill":
@@ -310,6 +333,8 @@ def test_request_streaming(tctx, response):
playbook
>> ConnectionClosed(server)
<< CloseConnection(server)
<< http.HttpErrorHook(flow)
>> reply()
<< SendData(tctx.client, err)
<< CloseConnection(tctx.client)
)
@@ -640,6 +665,8 @@ def test_http_client_aborts(tctx, stream):
assert (
playbook
>> reply(side_effect=enable_streaming)
<< http.HttpRequestHook(flow)
>> reply()
<< OpenConnection(server)
>> reply(None)
<< SendData(server, b"POST / HTTP/1.1\r\n"

View File

@@ -102,24 +102,31 @@ def test_simple(tctx):
@pytest.mark.parametrize("stream", [True, False])
def test_http2_client_aborts(tctx, stream):
@pytest.mark.parametrize("when", ["request", "response"])
def test_http2_client_aborts(tctx, stream, when):
"""Test handling of the case where a client aborts during request transmission."""
server = Placeholder(Server)
flow = Placeholder(HTTPFlow)
playbook, cff = start_h2_client(tctx)
resp = Placeholder(bytes)
def enable_streaming(flow: HTTPFlow):
def enable_request_streaming(flow: HTTPFlow):
flow.request.stream = True
def enable_response_streaming(flow: HTTPFlow):
flow.response.stream = True
assert (
playbook
>> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers).serialize())
<< http.HttpRequestHeadersHook(flow)
)
if stream:
if stream and when == "request":
assert (
playbook
>> reply(side_effect=enable_streaming)
>> reply(side_effect=enable_request_streaming)
<< http.HttpRequestHook(flow)
>> reply()
<< OpenConnection(server)
>> reply(None)
<< SendData(server, b"GET / HTTP/1.1\r\n"
@@ -127,16 +134,47 @@ def test_http2_client_aborts(tctx, stream):
)
else:
assert playbook >> reply()
if when == "request":
assert (
playbook
>> ConnectionClosed(tctx.client)
<< CloseConnection(tctx.client)
<< http.HttpErrorHook(flow)
>> reply()
)
assert "peer closed connection" in flow().error.msg
return
assert (
playbook
>> ConnectionClosed(tctx.client)
<< CloseConnection(tctx.client)
<< http.HttpErrorHook(flow)
>> DataReceived(tctx.client, cff.build_data_frame(b"", flags=["END_STREAM"]).serialize())
<< http.HttpRequestHook(flow)
>> reply()
<< OpenConnection(server)
>> reply(None)
<< SendData(server, b"GET / HTTP/1.1\r\n"
b"Host: example.com\r\n\r\n")
>> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 6\r\n\r\n123")
<< http.HttpResponseHeadersHook(flow)
)
if stream:
assert (
playbook
>> reply(side_effect=enable_response_streaming)
<< SendData(tctx.client, resp)
)
else:
assert playbook >> reply()
assert (
playbook
>> ConnectionClosed(tctx.client)
<< CloseConnection(tctx.client)
>> DataReceived(server, b"456")
<< http.HttpResponseHook(flow)
>> reply()
)
assert "peer closed connection" in flow().error.msg
def test_no_normalization(tctx):