use request/response hooks for trailers
This commit is contained in:
parent ed7067d36d
commit 12e4785d44
@@ -17,10 +17,10 @@ from mitmproxy.proxy.utils import expect
 from mitmproxy.utils import human
 from mitmproxy.websocket import WebSocketData
 from ._base import HttpCommand, HttpConnection, ReceiveHttp, StreamId
-from ._events import HttpEvent, RequestData, RequestEndOfMessage, RequestHeaders, RequestProtocolError, ResponseData, RequestTrailers, \
-    ResponseTrailers, ResponseEndOfMessage, ResponseHeaders, ResponseProtocolError
+from ._events import HttpEvent, RequestData, RequestEndOfMessage, RequestHeaders, RequestProtocolError, RequestTrailers, \
+    ResponseData, ResponseEndOfMessage, ResponseHeaders, ResponseProtocolError, ResponseTrailers
 from ._hooks import HttpConnectHook, HttpErrorHook, HttpRequestHeadersHook, HttpRequestHook, HttpResponseHeadersHook, \
-    HttpResponseHook, HttpRequestTrailersHook, HttpResponseTrailersHook
+    HttpResponseHook
 from ._http1 import Http1Client, Http1Server
 from ._http2 import Http2Client, Http2Server
 from ...context import Context
@@ -211,7 +211,6 @@ class HttpStream(layer.Layer):
     def start_request_stream(self) -> layer.CommandGenerator[None]:
         if self.flow.response:
             raise NotImplementedError("Can't set a response and enable streaming at the same time.")
-        yield HttpRequestHook(self.flow)
         ok = yield from self.make_server_connection()
         if not ok:
             return
@@ -227,11 +226,12 @@ class HttpStream(layer.Layer):
             if callable(self.flow.request.stream):
                 event.data = self.flow.request.stream(event.data)
         elif isinstance(event, RequestTrailers):
-            assert self.flow.request
             self.flow.request.trailers = event.trailers
-            yield HttpRequestTrailersHook(self.flow)
+            # we don't do anything further here, we wait for RequestEndOfMessage first to trigger the request hook.
+            return
         elif isinstance(event, RequestEndOfMessage):
             self.flow.request.timestamp_end = time.time()
+            yield HttpRequestHook(self.flow)
             self.client_state = self.state_done
 
             # edge case found while fuzzing:
@@ -245,6 +245,8 @@ class HttpStream(layer.Layer):
                 if isinstance(evt, ResponseProtocolError):
                     return
         if self.flow.request.trailers:
+            # we've delayed sending trailers until after `request` has been triggered.
+            assert isinstance(event, RequestEndOfMessage)
             yield SendHttp(RequestTrailers(self.stream_id, self.flow.request.trailers), self.context.server)
         yield SendHttp(event, self.context.server)
 
@@ -255,7 +257,6 @@ class HttpStream(layer.Layer):
         elif isinstance(event, RequestTrailers):
             assert self.flow.request
             self.flow.request.trailers = event.trailers
-            yield HttpRequestTrailersHook(self.flow)
         elif isinstance(event, RequestEndOfMessage):
             self.flow.request.timestamp_end = time.time()
             self.flow.request.data.content = self.request_body_buf
@@ -309,7 +310,7 @@ class HttpStream(layer.Layer):
         elif isinstance(event, ResponseTrailers):
             assert self.flow.response
             self.flow.response.trailers = event.trailers
-            yield HttpResponseTrailersHook(self.flow)
+            # will be sent in send_response() after the response hook.
         elif isinstance(event, ResponseEndOfMessage):
             yield from self.send_response(already_streamed=True)
 
@@ -320,7 +321,6 @@ class HttpStream(layer.Layer):
         elif isinstance(event, ResponseTrailers):
             assert self.flow.response
             self.flow.response.trailers = event.trailers
-            yield HttpResponseTrailersHook(self.flow)
         elif isinstance(event, ResponseEndOfMessage):
             assert self.flow.response
             self.flow.response.data.content = self.response_body_buf
@@ -356,9 +356,9 @@ class HttpStream(layer.Layer):
         yield SendHttp(ResponseHeaders(self.stream_id, self.flow.response, not content), self.context.client)
         if content:
             yield SendHttp(ResponseData(self.stream_id, content), self.context.client)
-        if self.flow.response.trailers:
-            yield SendHttp(ResponseTrailers(self.stream_id, self.flow.response.trailers), self.context.client)
 
+        if self.flow.response.trailers:
+            yield SendHttp(ResponseTrailers(self.stream_id, self.flow.response.trailers), self.context.client)
         yield SendHttp(ResponseEndOfMessage(self.stream_id), self.context.client)
 
         if self.flow.response.status_code == 101:
@@ -49,11 +49,19 @@ class ResponseData(HttpEvent):
 class RequestTrailers(HttpEvent):
     trailers: http.Headers
 
+    def __init__(self, stream_id: int, trailers: http.Headers):
+        self.stream_id = stream_id
+        self.trailers = trailers
+
 
 @dataclass
 class ResponseTrailers(HttpEvent):
     trailers: http.Headers
 
+    def __init__(self, stream_id: int, trailers: http.Headers):
+        self.stream_id = stream_id
+        self.trailers = trailers
+
 
 @dataclass
 class RequestEndOfMessage(HttpEvent):
@@ -18,8 +18,10 @@ class HttpRequestHook(commands.StartHook):
     """
     The full HTTP request has been read.
 
-    Note: This event fires immediately after requestheaders if the request body is streamed.
-    This ensures that requestheaders -> request -> responseheaders -> response happen in that order.
+    Note: If request streaming is active, this event fires after the entire body has been streamed.
+    HTTP trailers, if present, have not been transmitted to the server yet and can still be modified.
+    Enabling streaming may cause unexpected event sequences: For example, `response` may now occur
+    before `request` because the server replied with "413 Payload Too Large" during upload.
     """
     name = "request"
     flow: http.HTTPFlow
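
With the separate requesttrailers hook removed by this commit, request trailers are already set on the flow when the `request` hook fires and, per the docstring above, have not been forwarded upstream yet. A minimal addon sketch of that usage (the trailer name "x-note" is made up for illustration):

from mitmproxy import http


class EditRequestTrailers:
    def request(self, flow: http.HTTPFlow) -> None:
        # The body has been read (or fully streamed), but trailers have not
        # been sent to the server yet, so edits here still take effect.
        if flow.request.trailers is not None:
            flow.request.trailers["x-note"] = "inspected by mitmproxy"


addons = [EditRequestTrailers()]
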
@@ -28,7 +30,7 @@ class HttpRequestHook(commands.StartHook):
 @dataclass
 class HttpResponseHeadersHook(commands.StartHook):
     """
-    The full HTTP response has been read.
+    HTTP response headers were successfully read. At this point, the body is empty.
     """
     name = "responseheaders"
     flow: http.HTTPFlow
@@ -37,42 +39,15 @@ class HttpResponseHeadersHook(commands.StartHook):
 @dataclass
 class HttpResponseHook(commands.StartHook):
     """
-    HTTP response headers were successfully read. At this point, the body is empty.
+    The full HTTP response has been read.
 
     Note: If response streaming is active, this event fires after the entire body has been streamed.
+    HTTP trailers, if present, have not been transmitted to the client yet and can still be modified.
     """
     name = "response"
     flow: http.HTTPFlow
 
 
-@dataclass
-class HttpRequestTrailersHook(commands.StartHook):
-    """
-    The HTTP request trailers has been read.
-    HTTP trailers are a rarely-used feature in the HTTP specification
-    which allows peers to send additional headers after the message body.
-    This is useful for metadata that is dynamically generated while
-    the message body is sent, for example a digital signature
-    or post-processing status.
-    """
-    name = "requesttrailers"
-    flow: http.HTTPFlow
-
-
-@dataclass
-class HttpResponseTrailersHook(commands.StartHook):
-    """
-    The HTTP response trailers has been read.
-    HTTP trailers are a rarely-used feature in the HTTP specification
-    which allows peers to send additional headers after the message body.
-    This is useful for metadata that is dynamically generated while
-    the message body is sent, for example a digital signature
-    or post-processing status.
-    """
-    name = "responsetrailers"
-    flow: http.HTTPFlow
-
-
 @dataclass
 class HttpErrorHook(commands.StartHook):
     """
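
The response side works the same way: with responsetrailers gone, trailers can be inspected or dropped in the `response` hook before send_response() forwards them to the client. A hedged sketch (the trailer name "x-signature" is only an example):

from mitmproxy import http


class DropResponseTrailers:
    def response(self, flow: http.HTTPFlow) -> None:
        # Trailers have not been transmitted to the client yet at this point.
        if flow.response and flow.response.trailers and "x-signature" in flow.response.trailers:
            del flow.response.trailers["x-signature"]


addons = [DropResponseTrailers()]
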
@@ -51,6 +51,7 @@ class Http2Connection(HttpConnection):
 
     ReceiveProtocolError: Type[Union[RequestProtocolError, ResponseProtocolError]]
     ReceiveData: Type[Union[RequestData, ResponseData]]
+    ReceiveTrailers: Type[Union[RequestTrailers, ResponseTrailers]]
     ReceiveEndOfMessage: Type[Union[RequestEndOfMessage, ResponseEndOfMessage]]
 
     def __init__(self, context: Context, conn: Connection):
@@ -175,6 +176,9 @@ class Http2Connection(HttpConnection):
                 yield from self.protocol_error(f"Received HTTP/2 data frame, expected headers.")
                 return True
             self.h2_conn.acknowledge_received_data(event.flow_controlled_length, event.stream_id)
+        elif isinstance(event, h2.events.TrailersReceived):
+            trailers = http.Headers(event.headers)
+            yield ReceiveHttp(self.ReceiveTrailers(event.stream_id, trailers))
         elif isinstance(event, h2.events.StreamEnded):
             state = self.streams.get(event.stream_id, None)
             if state is StreamState.HEADERS_RECEIVED:
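
For context, hyper-h2 surfaces trailing HEADERS frames as a dedicated TrailersReceived event, which the new branch above maps onto ReceiveTrailers. A standalone sketch of that event flow (read_h2_events is an illustrative helper, not part of mitmproxy):

import h2.connection
import h2.events


def read_h2_events(conn: h2.connection.H2Connection, data: bytes) -> None:
    # receive_data() parses incoming bytes and returns the parsed events;
    # trailers arrive separately from the initial HeadersReceived event.
    for event in conn.receive_data(data):
        if isinstance(event, h2.events.TrailersReceived):
            # event.headers is a list of (name, value) tuples, the same shape
            # that the code above wraps in http.Headers.
            print("trailers on stream", event.stream_id, event.headers)
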
@@ -219,8 +223,6 @@ class Http2Connection(HttpConnection):
             pass
         elif isinstance(event, h2.events.PingAckReceived):
             pass
-        elif isinstance(event, h2.events.TrailersReceived):
-            pass
         elif isinstance(event, h2.events.PushedStreamReceived):
             yield Log("Received HTTP/2 push promise, even though we signalled no support.", "error")
         elif isinstance(event, h2.events.UnknownFrameReceived):
@@ -278,6 +280,7 @@ class Http2Server(Http2Connection):
 
     ReceiveProtocolError = RequestProtocolError
     ReceiveData = RequestData
+    ReceiveTrailers = RequestTrailers
     ReceiveEndOfMessage = RequestEndOfMessage
 
     def __init__(self, context: Context):
@@ -326,10 +329,6 @@ class Http2Server(Http2Connection):
             self.streams[event.stream_id] = StreamState.HEADERS_RECEIVED
             yield ReceiveHttp(RequestHeaders(event.stream_id, request, end_stream=bool(event.stream_ended)))
             return False
-        elif isinstance(event, h2.events.TrailersReceived):
-            trailers = http.Headers(event.headers)
-            yield ReceiveHttp(RequestTrailers(event.stream_id, trailers))
-            return False
         else:
             return (yield from super().handle_h2_event(event))
 
@@ -346,6 +345,7 @@ class Http2Client(Http2Connection):
 
     ReceiveProtocolError = ResponseProtocolError
     ReceiveData = ResponseData
+    ReceiveTrailers = ResponseTrailers
     ReceiveEndOfMessage = ResponseEndOfMessage
 
     our_stream_id: Dict[int, int]
@@ -456,10 +456,6 @@ class Http2Client(Http2Connection):
             self.streams[event.stream_id] = StreamState.HEADERS_RECEIVED
             yield ReceiveHttp(ResponseHeaders(event.stream_id, response, bool(event.stream_ended)))
             return False
-        elif isinstance(event, h2.events.TrailersReceived):
-            trailers = http.Headers(event.headers)
-            yield ReceiveHttp(ResponseTrailers(event.stream_id, trailers))
-            return False
         elif isinstance(event, h2.events.RequestReceived):
             yield from self.protocol_error(f"HTTP/2 protocol error: received request from server")
             return True
@@ -316,8 +316,6 @@ def test_request_streaming(tctx, response):
                                          b"abc")
             << http.HttpRequestHeadersHook(flow)
             >> reply(side_effect=enable_streaming)
-            << http.HttpRequestHook(flow)
-            >> reply()
             << OpenConnection(server)
             >> reply(None)
             << SendData(server, b"POST / HTTP/1.1\r\n"
@@ -330,6 +328,8 @@ def test_request_streaming(tctx, response):
                 playbook
                 >> DataReceived(tctx.client, b"def")
                 << SendData(server, b"DEF")
+                << http.HttpRequestHook(flow)
+                >> reply()
                 >> DataReceived(server, b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
                 << http.HttpResponseHeadersHook(flow)
                 >> reply()
@@ -350,7 +350,9 @@ def test_request_streaming(tctx, response):
                 >> reply()
                 << SendData(tctx.client, b"HTTP/1.1 413 Request Entity Too Large\r\nContent-Length: 0\r\n\r\n")
                 >> DataReceived(tctx.client, b"def")
-                << SendData(server, b"DEF")  # Important: no request hook here!
+                << SendData(server, b"DEF")
+                << http.HttpRequestHook(flow)
+                >> reply()
         )
     elif response == "early close":
         assert (
@@ -705,8 +707,6 @@ def test_http_client_aborts(tctx, stream):
         assert (
                 playbook
                 >> reply(side_effect=enable_streaming)
-                << http.HttpRequestHook(flow)
-                >> reply()
                 << OpenConnection(server)
                 >> reply(None)
                 << SendData(server, b"POST / HTTP/1.1\r\n"
@@ -6,16 +6,16 @@ import hyperframe.frame
 import pytest
 from h2.errors import ErrorCodes
 
+from mitmproxy.connection import ConnectionState, Server
 from mitmproxy.flow import Error
 from mitmproxy.http import HTTPFlow, Headers, Request
 from mitmproxy.net.http import status_codes
-from mitmproxy.proxy.context import Context
-from mitmproxy.proxy.layers.http import HTTPMode
 from mitmproxy.proxy.commands import CloseConnection, OpenConnection, SendData
-from mitmproxy.connection import Server
+from mitmproxy.proxy.context import Context
 from mitmproxy.proxy.events import ConnectionClosed, DataReceived
 from mitmproxy.proxy.layers import http
-from mitmproxy.proxy.layers.http._http2 import split_pseudo_headers, Http2Client
+from mitmproxy.proxy.layers.http import HTTPMode
+from mitmproxy.proxy.layers.http._http2 import Http2Client, split_pseudo_headers
 from test.mitmproxy.proxy.layers.http.hyper_h2_test_helpers import FrameFactory
 from test.mitmproxy.proxy.tutils import Placeholder, Playbook, reply
 
@@ -41,6 +41,16 @@ example_response_trailers = (
 )
 
 
+@pytest.fixture
+def open_h2_server_conn():
+    # this is a bit fake here (port 80, with alpn, but no tls - c'mon),
+    # but we don't want to pollute our tests with TLS handshakes.
+    s = Server(("example.com", 80))
+    s.state = ConnectionState.OPEN
+    s.alpn = b"h2"
+    return s
+
+
 def decode_frames(data: bytes) -> List[hyperframe.frame.Frame]:
     # swallow preamble
     if data.startswith(b"PRI * HTTP/2.0"):
@@ -113,73 +123,92 @@ def test_simple(tctx):
     assert flow().response.text == "Hello, World!"
 
 
-def test_response_trailers(tctx):
+@pytest.mark.parametrize("stream", ["stream", ""])
+def test_response_trailers(tctx: Context, open_h2_server_conn: Server, stream):
     playbook, cff = start_h2_client(tctx)
-    flow = Placeholder(HTTPFlow)
-    server = Placeholder(Server)
-    initial = Placeholder(bytes)
-    assert (
-        playbook
-        >> DataReceived(tctx.client,
-                        cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
-        << http.HttpRequestHeadersHook(flow)
-        >> reply()
-        << http.HttpRequestHook(flow)
-        >> reply()
-        << OpenConnection(server)
-        >> reply(None, side_effect=make_h2)
-        << SendData(server, initial)
-    )
-    frames = decode_frames(initial())
-    assert [type(x) for x in frames] == [
-        hyperframe.frame.SettingsFrame,
-        hyperframe.frame.HeadersFrame,
-    ]
+    tctx.server = open_h2_server_conn
     sff = FrameFactory()
+
+    def enable_streaming(flow: HTTPFlow):
+        flow.response.stream = bool(stream)
+
+    flow = Placeholder(HTTPFlow)
+    (
+        playbook
+        >> DataReceived(tctx.client,
+                        cff.build_headers_frame(example_request_headers, flags=["END_STREAM"]).serialize())
+        << http.HttpRequestHeadersHook(flow)
+        >> reply()
+        << http.HttpRequestHook(flow)
+        >> reply()
+        << SendData(tctx.server, Placeholder(bytes))
+        # a conforming h2 server would send settings first, we disregard this for now.
+        >> DataReceived(tctx.server, sff.build_headers_frame(example_response_headers).serialize() +
+                        sff.build_data_frame(b"Hello, World!").serialize())
+        << http.HttpResponseHeadersHook(flow)
+        >> reply(side_effect=enable_streaming)
+    )
+    if stream:
+        playbook << SendData(
+            tctx.client,
+            cff.build_headers_frame(example_response_headers).serialize() +
+            cff.build_data_frame(b"Hello, World!").serialize()
+        )
     assert (
         playbook
-        # a conforming h2 server would send settings first, we disregard this for now.
-        >> DataReceived(server, sff.build_headers_frame(example_response_headers).serialize())
-        << http.HttpResponseHeadersHook(flow)
-        >> reply()
-        >> DataReceived(server, sff.build_data_frame(b"Hello, World!").serialize())
-        >> DataReceived(server, sff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize())
-        << http.HttpResponseTrailersHook(flow)
-        >> reply()
-        << http.HttpResponseHook(flow)
-        >> reply()
-        << SendData(tctx.client,
-                    cff.build_headers_frame(example_response_headers).serialize() +
-                    cff.build_data_frame(b"Hello, World!").serialize() +
-                    cff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize())
-    )
-    assert flow().request.url == "http://example.com/"
-    assert flow().response.text == "Hello, World!"
+        >> DataReceived(tctx.server, sff.build_headers_frame(example_response_trailers, flags=["END_STREAM"]).serialize())
+        << http.HttpResponseHook(flow)
+    )
+    assert flow().response.trailers
+    del flow().response.trailers["resp-trailer-a"]
+    if stream:
+        assert (
+            playbook
+            >> reply()
+            << SendData(tctx.client,
+                        cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize())
+        )
+    else:
+        assert (
+            playbook
+            >> reply()
+            << SendData(tctx.client,
+                        cff.build_headers_frame(example_response_headers).serialize() +
+                        cff.build_data_frame(b"Hello, World!").serialize() +
+                        cff.build_headers_frame(example_response_trailers[1:], flags=["END_STREAM"]).serialize()))
 
 
-def test_request_trailers(tctx):
+@pytest.mark.parametrize("stream", ["stream", ""])
+def test_request_trailers(tctx: Context, open_h2_server_conn: Server, stream):
     playbook, cff = start_h2_client(tctx)
+    tctx.server = open_h2_server_conn
+
+    def enable_streaming(flow: HTTPFlow):
+        flow.request.stream = bool(stream)
+
     flow = Placeholder(HTTPFlow)
-    server = Placeholder(Server)
-    initial = Placeholder(bytes)
-    assert (
+    server_data1 = Placeholder(bytes)
+    server_data2 = Placeholder(bytes)
+    (
         playbook
         >> DataReceived(tctx.client,
-                        cff.build_headers_frame(example_request_headers).serialize())
-        << http.HttpRequestHeadersHook(flow)
-        >> reply()
-        >> DataReceived(tctx.client, cff.build_data_frame(b"Hello, World!").serialize())
-        >> DataReceived(tctx.client,
-                        cff.build_headers_frame(example_request_trailers, flags=["END_STREAM"]).serialize())
-        << http.HttpRequestTrailersHook(flow)
-        >> reply()
-        << http.HttpRequestHook(flow)
-        >> reply()
-        << OpenConnection(server)
-        >> reply(None, side_effect=make_h2)
-        << SendData(server, initial)
+                        cff.build_headers_frame(example_request_headers).serialize() +
+                        cff.build_data_frame(b"Hello, World!").serialize()
+                        )
+        << http.HttpRequestHeadersHook(flow)
+        >> reply(side_effect=enable_streaming)
     )
-    frames = decode_frames(initial())
+    if stream:
+        playbook << SendData(tctx.server, server_data1)
+    assert (
+        playbook
+        >> DataReceived(tctx.client,
+                        cff.build_headers_frame(example_request_trailers, flags=["END_STREAM"]).serialize())
+        << http.HttpRequestHook(flow)
+        >> reply()
+        << SendData(tctx.server, server_data2)
+    )
+    frames = decode_frames(server_data1.setdefault(b"") + server_data2())
     assert [type(x) for x in frames] == [
         hyperframe.frame.SettingsFrame,
         hyperframe.frame.HeadersFrame,
@@ -248,8 +277,6 @@ def test_http2_client_aborts(tctx, stream, when, how):
         assert (
             playbook
             >> reply(side_effect=enable_request_streaming)
-            << http.HttpRequestHook(flow)
-            >> reply()
             << OpenConnection(server)
             >> reply(None)
             << SendData(server, b"GET / HTTP/1.1\r\n"
@@ -589,9 +616,11 @@ def test_stream_concurrent_get_connection(tctx):
     data = Placeholder(bytes)
 
     assert (playbook
-            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=1).serialize())
+            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers, flags=["END_STREAM"],
+                                                                 stream_id=1).serialize())
             << (o := OpenConnection(server))
-            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers, flags=["END_STREAM"], stream_id=3).serialize())
+            >> DataReceived(tctx.client, cff.build_headers_frame(example_request_headers, flags=["END_STREAM"],
+                                                                 stream_id=3).serialize())
             >> reply(None, to=o, side_effect=make_h2)
             << SendData(server, data)
             )
@@ -279,9 +279,9 @@ def test_fuzz_h2_response_mutations(chunks):
 
 @pytest.mark.parametrize("example", [(
     True, False,
-    ["data_req", "reply_hook_req_headers", "reply_hook_req", "reply_openconn", "data_resp", "data_reqbody",
+    ["data_req", "reply_hook_req_headers", "reply_openconn", "data_resp", "data_reqbody",
      "data_respbody", "err_server_rst", "reply_hook_resp_headers"]),
-    (True, False, ["data_req", "reply_hook_req_headers", "reply_hook_req", "reply_openconn", "err_server_rst",
+    (True, False, ["data_req", "reply_hook_req_headers", "reply_openconn", "err_server_rst",
      "data_reqbody", "reply_hook_error"]),
 ])
 def test_cancel_examples(example):