import urllib, threading, re, logging, socket, sys, base64
from netlib import tcp, http, odict, wsgi, certutils
import netlib.utils
import version, app, language, utils

logger = logging.getLogger('pathod')


class PathodError(Exception): pass


class SSLOptions:
    def __init__(self, certfile=None, keyfile=None, not_after_connect=None, request_client_cert=False):
        self.keyfile = keyfile or utils.data.path("resources/server.key")
        self.certfile = certfile or utils.data.path("resources/server.crt")
        self.cert = certutils.SSLCert.from_pem(file(self.certfile, "rb").read())
        self.not_after_connect = not_after_connect
        self.request_client_cert = request_client_cert
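
# Illustrative sketch, not part of the original source: overriding the bundled
# test certificate and key with your own PEM files. The paths are hypothetical.
#
#   ssloptions = SSLOptions(
#       certfile = "/path/to/server.crt",
#       keyfile = "/path/to/server.key",
#       request_client_cert = True
#   )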


class PathodHandler(tcp.BaseHandler):
    wbufsize = 0
    sni = None

    def info(self, s):
        logger.info("%s:%s: %s"%(self.client_address[0], self.client_address[1], str(s)))

    def handle_sni(self, connection):
        self.sni = connection.get_servername()

    def serve_crafted(self, crafted, request_log):
        c = self.server.check_policy(crafted, self.server.request_settings)
        if c:
            err = language.make_error_response(c)
            language.serve(err, self.wfile, self.server.request_settings)
            log = dict(
                type = "error",
                msg = c
            )
            return False, log

        if self.server.explain and not isinstance(crafted, language.PathodErrorResponse):
            crafted = crafted.freeze(self.server.request_settings, None)
            self.info(">> Spec: %s"%crafted.spec())
        response_log = language.serve(crafted, self.wfile, self.server.request_settings, None)
        log = dict(
            type = "crafted",
            request=request_log,
            response=response_log
        )
        if response_log["disconnect"]:
            return False, log
        return True, log

    def handle_request(self):
        """
            Returns an (again, log) tuple.

            again: True if request handling should continue.
            log: A dictionary, or None
        """
        line = self.rfile.readline()
        if line == "\r\n" or line == "\n": # Possible leftover from previous message
            line = self.rfile.readline()
        if line == "":
            # Normal termination
            return False, None

        m = utils.MemBool()
        if m(http.parse_init_connect(line)):
            headers = http.read_headers(self.rfile)
            self.wfile.write(
                'HTTP/1.1 200 Connection established\r\n' +
                ('Proxy-agent: %s\r\n'%version.NAMEVERSION) +
                '\r\n'
            )
            self.wfile.flush()
            if not self.server.ssloptions.not_after_connect:
                try:
                    self.convert_to_ssl(
                        self.server.ssloptions.cert,
                        self.server.ssloptions.keyfile,
                        handle_sni = self.handle_sni,
                        request_client_cert = self.server.ssloptions.request_client_cert
                    )
                except tcp.NetLibError, v:
                    s = str(v)
                    self.info(s)
                    return False, dict(type = "error", msg = s)
            return True, None
        elif m(http.parse_init_proxy(line)):
            method, _, _, _, path, httpversion = m.v
        elif m(http.parse_init_http(line)):
            method, path, httpversion = m.v
        else:
            s = "Invalid first line: %s"%repr(line)
            self.info(s)
            return False, dict(type = "error", msg = s)

        headers = http.read_headers(self.rfile)
        if headers is None:
            s = "Invalid headers"
            self.info(s)
            return False, dict(type = "error", msg = s)

        clientcert = None
        if self.clientcert:
            clientcert = dict(
                cn = self.clientcert.cn,
                subject = self.clientcert.subject,
                serial = self.clientcert.serial,
                notbefore = self.clientcert.notbefore.isoformat(),
                notafter = self.clientcert.notafter.isoformat(),
                keyinfo = self.clientcert.keyinfo,
            )

        request_log = dict(
            path = path,
            method = method,
            headers = headers.lst,
            httpversion = httpversion,
            sni = self.sni,
            remote_address = self.client_address,
            clientcert = clientcert
        )

        try:
            content = http.read_http_body(
                self.rfile, headers, None, True
            )
        except http.HttpError, s:
            s = str(s)
            self.info(s)
            return False, dict(type = "error", msg = s)

        for i in self.server.anchors:
            if i[0].match(path):
                self.info("crafting anchor: %s"%path)
                aresp = language.parse_response(self.server.request_settings, i[1])
                return self.serve_crafted(aresp, request_log)

        if not self.server.nocraft and path.startswith(self.server.craftanchor):
            spec = urllib.unquote(path)[len(self.server.craftanchor):]
            self.info("crafting spec: %s"%spec)
            try:
                crafted = language.parse_response(self.server.request_settings, spec)
            except language.ParseException, v:
                self.info("Parse error: %s"%v.msg)
                crafted = language.make_error_response(
                    "Parse Error",
                    "Error parsing response spec: %s\n"%v.msg + v.marked()
                )
            return self.serve_crafted(crafted, request_log)
        elif self.server.noweb:
            crafted = language.make_error_response("Access Denied")
            language.serve(crafted, self.wfile, self.server.request_settings)
            return False, dict(type = "error", msg="Access denied: web interface disabled")
        else:
            self.info("app: %s %s"%(method, path))
            cc = wsgi.ClientConn(self.client_address)
            req = wsgi.Request(cc, "http", method, path, headers, content)
            sn = self.connection.getsockname()
            app = wsgi.WSGIAdaptor(
                self.server.app,
                sn[0],
                self.server.port,
                version.NAMEVERSION
            )
            app.serve(req, self.wfile)
            return True, None
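
    # Note on the return contract (see the docstring above): handle_request
    # returns values such as (True, None) to keep reading requests with
    # nothing to log, or (False, dict(type = "error", msg = s)) to log an
    # error and stop the read loop in handle() below.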

    def _log_bytes(self, header, data, hexdump):
        s = []
        if hexdump:
            s.append("%s (hex dump):"%header)
            for line in netlib.utils.hexdump(data):
                s.append("\t%s %s %s"%line)
        else:
            s.append("%s (unprintables escaped):"%header)
            s.append(netlib.utils.cleanBin(data))
        self.info("\n".join(s))

    def handle(self):
        if self.server.ssl:
            try:
                self.convert_to_ssl(
                    self.server.ssloptions.cert,
                    self.server.ssloptions.keyfile,
                    handle_sni = self.handle_sni,
                    request_client_cert = self.server.ssloptions.request_client_cert
                )
            except tcp.NetLibError, v:
                s = str(v)
                self.server.add_log(
                    dict(
                        type = "error",
                        msg = s
                    )
                )
                self.info(s)
                return
        self.settimeout(self.server.timeout)
        while not self.finished:
            if self.server.logreq:
                self.rfile.start_log()
            if self.server.logresp:
                self.wfile.start_log()
            again, log = self.handle_request()
            if log:
                if self.server.logreq:
                    log["request_bytes"] = self.rfile.get_log().encode("string_escape")
                    self._log_bytes("Request", log["request_bytes"], self.server.hexdump)
                if self.server.logresp:
                    log["response_bytes"] = self.wfile.get_log().encode("string_escape")
                    self._log_bytes("Response", log["response_bytes"], self.server.hexdump)
                self.server.add_log(log)
            if not again:
                return


class Pathod(tcp.TCPServer):
    LOGBUF = 500

    def __init__(
        self,
        addr, ssl=False, ssloptions=None, craftanchor="/p/", staticdir=None, anchors=None,
        sizelimit=None, noweb=False, nocraft=False, noapi=False, nohang=False,
        timeout=None, logreq=False, logresp=False, explain=False, hexdump=False
    ):
        """
            addr: (address, port) tuple. If port is 0, a free port will be
            automatically chosen.
            ssloptions: an SSLOptions object.
            craftanchor: string specifying the path under which to anchor response generation.
            staticdir: path to a directory of static resources, or None.
            anchors: A list of (regex, spec) tuples, or None.
            sizelimit: Limit size of served data.
            nocraft: Disable response crafting.
            noapi: Disable the API.
            nohang: Disable pauses.
        """
        tcp.TCPServer.__init__(self, addr)
        self.ssl = ssl
        self.ssloptions = ssloptions or SSLOptions()
        self.staticdir = staticdir
        self.craftanchor = craftanchor
        self.sizelimit = sizelimit
        self.noweb, self.nocraft, self.noapi, self.nohang = noweb, nocraft, noapi, nohang
        self.timeout, self.logreq, self.logresp, self.hexdump = timeout, logreq, logresp, hexdump
        self.explain = explain

        self.app = app.make_app(noapi)
        self.app.config["pathod"] = self
        self.log = []
        self.logid = 0
        # A single lock shared by add_log/clear_log, so concurrent handler
        # threads can't corrupt the log buffer.
        self.loglock = threading.Lock()
        self.anchors = []
        if anchors:
            for i in anchors:
                try:
                    arex = re.compile(i[0])
                except re.error:
                    raise PathodError("Invalid regex in anchor: %s"%i[0])
                # Parse the spec now so errors surface at startup; the raw
                # spec string is stored and re-parsed for each request.
                try:
                    aresp = language.parse_response(self.request_settings, i[1])
                except language.ParseException, v:
                    raise PathodError("Invalid page spec in anchor: '%s', %s"%(i[1], str(v)))
                self.anchors.append((arex, i[1]))
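
    # Illustrative usage sketch, not part of the original source: anchors pair
    # a regular expression with a pathod response spec. The address, the
    # "^/foo$" regex, and the "200:b@100" spec (a 200 response with 100 bytes
    # of body) are example values, not defaults.
    #
    #   pd = Pathod(
    #       ("127.0.0.1", 0),
    #       anchors = [(r"^/foo$", "200:b@100")],
    #       sizelimit = 1024 * 1024
    #   )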

    def check_policy(self, req, settings):
        """
            A policy check that verifies the request size is within limits.
        """
        try:
            l = req.maximum_length(settings)
        except language.FileAccessDenied, v:
            return "File access denied."
        if self.sizelimit and l > self.sizelimit:
            return "Response too large."
        if self.nohang and any([isinstance(i, language.PauseAt) for i in req.actions]):
            return "Pauses have been disabled."
        return False

    @property
    def request_settings(self):
        return dict(
            staticdir = self.staticdir
        )

    def handle_client_connection(self, request, client_address):
        h = PathodHandler(request, client_address, self)
        try:
            h.handle()
            h.finish()
        except tcp.NetLibDisconnect: # pragma: no cover
            h.info("Disconnect")
            self.add_log(
                dict(
                    type = "error",
                    msg = "Disconnect"
                )
            )
            return
        except tcp.NetLibTimeout:
            h.info("Timeout")
            self.add_log(
                dict(
                    type = "timeout",
                )
            )
            return

    def add_log(self, d):
        # Use the lock created in __init__; a lock created per call would not
        # actually serialize concurrent handler threads.
        if not self.noapi:
            with self.loglock:
                d["id"] = self.logid
                self.log.insert(0, d)
                if len(self.log) > self.LOGBUF:
                    self.log.pop()
                self.logid += 1
            return d["id"]

    def clear_log(self):
        with self.loglock:
            self.log = []

    def log_by_id(self, id):
        for i in self.log:
            if i["id"] == id:
                return i

    def get_log(self):
        return self.log
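

# Illustrative usage sketch, not part of the original source: one way to run
# this server in-process, assuming netlib's tcp.TCPServer exposes
# serve_forever() and shutdown() as used by the pathod command-line tool.
#
#   pd = Pathod(("127.0.0.1", 9999))
#   t = threading.Thread(target=pd.serve_forever)
#   t.start()
#   # ...issue requests against http://127.0.0.1:9999/p/<spec>...
#   pd.shutdown()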