From 6661770d4eee3eab3305793613586f3684c24ae9 Mon Sep 17 00:00:00 2001
From: Maximilian Hils
Date: Mon, 28 Sep 2015 10:59:10 +0200
Subject: [PATCH] handle Expect: 100-continue header, fix #770

---
 libmproxy/models/__init__.py |  4 ++--
 libmproxy/models/http.py     |  2 ++
 libmproxy/protocol/http.py   | 22 +++++++++++++++++++---
 setup.cfg                    |  3 +++
 test/test_protocol_http.py   | 28 ++++++++++++++++++++++++++++
 5 files changed, 54 insertions(+), 5 deletions(-)

diff --git a/libmproxy/models/__init__.py b/libmproxy/models/__init__.py
index ff1bcbc1d..653b19fd5 100644
--- a/libmproxy/models/__init__.py
+++ b/libmproxy/models/__init__.py
@@ -2,7 +2,7 @@ from __future__ import (absolute_import, print_function, division)
 
 from .http import (
     HTTPFlow, HTTPRequest, HTTPResponse, Headers, decoded,
-    make_error_response, make_connect_request, make_connect_response
+    make_error_response, make_connect_request, make_connect_response, expect_continue_response
 )
 from .connections import ClientConnection, ServerConnection
 from .flow import Flow, Error
@@ -10,7 +10,7 @@ from .flow import Flow, Error
 __all__ = [
     "HTTPFlow", "HTTPRequest", "HTTPResponse", "Headers", "decoded",
     "make_error_response", "make_connect_request",
-    "make_connect_response",
+    "make_connect_response", "expect_continue_response",
     "ClientConnection", "ServerConnection",
     "Flow", "Error",
 ]
diff --git a/libmproxy/models/http.py b/libmproxy/models/http.py
index 5a83cb463..1815d6f55 100644
--- a/libmproxy/models/http.py
+++ b/libmproxy/models/http.py
@@ -562,3 +562,5 @@ def make_connect_response(http_version):
         headers,
         "",
     )
+
+expect_continue_response = HTTPResponse(b"HTTP/1.1", 100, "Continue", Headers(), b"")
\ No newline at end of file
diff --git a/libmproxy/protocol/http.py b/libmproxy/protocol/http.py
index 21c2a72c3..34bccaf83 100644
--- a/libmproxy/protocol/http.py
+++ b/libmproxy/protocol/http.py
@@ -15,7 +15,7 @@ from netlib.http.http2.frame import GoAwayFrame, PriorityFrame, WindowUpdateFram
 from .. import utils
 from ..exceptions import HttpProtocolException, ProtocolException
 from ..models import (
-    HTTPFlow, HTTPRequest, HTTPResponse, make_error_response, make_connect_response, Error
+    HTTPFlow, HTTPRequest, HTTPResponse, make_error_response, make_connect_response, Error, expect_continue_response
 )
 from .base import Layer, Kill
 
@@ -26,10 +26,13 @@ class _HttpLayer(Layer):
     def read_request(self):
         raise NotImplementedError()
 
+    def read_request_body(self, request):
+        raise NotImplementedError()
+
     def send_request(self, request):
         raise NotImplementedError()
 
-    def read_response(self, request_method):
+    def read_response(self, request):
         raise NotImplementedError()
 
     def send_response(self, response):
@@ -78,6 +81,10 @@ class Http1Layer(_StreamingHttpLayer):
         req = http1.read_request(self.client_conn.rfile, body_size_limit=self.config.body_size_limit)
         return HTTPRequest.wrap(req)
 
+    def read_request_body(self, request):
+        expected_size = http1.expected_http_body_size(request)
+        return http1.read_body(self.client_conn.rfile, expected_size, self.config.body_size_limit)
+
     def send_request(self, request):
         self.server_conn.wfile.write(http1.assemble_request(request))
         self.server_conn.wfile.flush()
@@ -299,7 +306,7 @@ class HttpLayer(Layer):
         self.__original_server_conn = self.server_conn
         while True:
             try:
-                request = self.read_request()
+                request = self.get_request_from_client()
                 self.log("request", "debug", [repr(request)])
 
                 # Handle Proxy Authentication
@@ -372,6 +379,14 @@ class HttpLayer(Layer):
             finally:
                 flow.live = False
 
+    def get_request_from_client(self):
+        request = self.read_request()
+        if request.headers.get("expect", "").lower() == "100-continue":
+            self.send_response(expect_continue_response)
+            request.headers.pop("expect")
+            request.body = b"".join(self.read_request_body(request))
+        return request
+
     def send_error_response(self, code, message):
         try:
             response = make_error_response(code, message)
@@ -478,6 +493,7 @@ class HttpLayer(Layer):
         else:
            flow.request.host = self.__original_server_conn.address.host
            flow.request.port = self.__original_server_conn.address.port
+           # TODO: This does not really work if we change the first request and --no-upstream-cert is enabled
            flow.request.scheme = "https" if self.__original_server_conn.tls_established else "http"
 
         request_reply = self.channel.ask("request", flow)
diff --git a/setup.cfg b/setup.cfg
index 4207020ed..4cafebb78 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,3 +6,6 @@ max-complexity = 15
 max-line-length = 80
 exclude = */contrib/*
 ignore = E251,E309
+
+[pytest]
+testpaths = test
\ No newline at end of file
diff --git a/test/test_protocol_http.py b/test/test_protocol_http.py
index 5ddb5b5b8..5943b57f8 100644
--- a/test/test_protocol_http.py
+++ b/test/test_protocol_http.py
@@ -1,7 +1,9 @@
+import socket
 from io import BytesIO
 
 from netlib.exceptions import HttpSyntaxException
 from netlib.http import http1
+from netlib.tcp import TCPClient
 from netlib.tutils import treq, raises
 import tutils
 import tservers
@@ -54,3 +56,29 @@ class TestInvalidRequests(tservers.HTTPProxTest):
         r = p.request("get:/p/200")
         assert r.status_code == 400
         assert "Invalid HTTP request form" in r.body
+
+
+class TestExpectHeader(tservers.HTTPProxTest):
+    def test_simple(self):
+        client = TCPClient(("127.0.0.1", self.proxy.port))
+        client.connect()
+
+        # call pathod server, wait a second to complete the request
+        client.wfile.write(
+            b"POST http://localhost:%d/p/200 HTTP/1.1\r\n"
+            b"Expect: 100-continue\r\n"
+            b"Content-Length: 16\r\n"
+            b"\r\n" % self.server.port
+        )
+        client.wfile.flush()
+
+        assert client.rfile.readline() == "HTTP/1.1 100 Continue\r\n"
+        assert client.rfile.readline() == "\r\n"
+
+        client.wfile.write(b"0123456789abcdef\r\n")
+        client.wfile.flush()
+
+        resp = http1.read_response(client.rfile, treq())
+        assert resp.status_code == 200
+
+        client.finish()
\ No newline at end of file