import urllib
import threading
import re
import logging
import logging.handlers
import os

from netlib import tcp, http, wsgi, certutils
import netlib.utils

import version, app, language, utils


DEFAULT_CERT_DOMAIN = "pathod.net"
CONFDIR = "~/.mitmproxy"
CERTSTORE_BASENAME = "mitmproxy"
CA_CERT_NAME = "mitmproxy-ca.pem"

logger = logging.getLogger('pathod')

class PathodError(Exception):
    pass


class SSLOptions:
    def __init__(self, confdir=CONFDIR, cn=None, not_after_connect=None,
                 request_client_cert=False, sslversion=tcp.SSLv23_METHOD,
                 ciphers=None, certs=None):
        self.confdir = confdir
        self.cn = cn
        self.certstore = certutils.CertStore.from_store(
            os.path.expanduser(confdir),
            CERTSTORE_BASENAME
        )
        for i in certs or []:
            self.certstore.add_cert_file(*i)
        self.not_after_connect = not_after_connect
        self.request_client_cert = request_client_cert
        self.ciphers = ciphers
        self.sslversion = sslversion

    def get_cert(self, name):
        if self.cn:
            name = self.cn
        elif not name:
            name = DEFAULT_CERT_DOMAIN
        return self.certstore.get_cert(name, [])


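# Handles a single client connection: performs the optional SSL handshake,
# reads requests, and either serves a crafted response (a configured anchor
# or a /p/ spec) or hands the request to the built-in web app.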
class PathodHandler(tcp.BaseHandler):
    wbufsize = 0
    sni = None

    def info(self, s):
        logger.info("%s:%s: %s" % (self.address.host, self.address.port, str(s)))

    def handle_sni(self, connection):
        self.sni = connection.get_servername()

    def serve_crafted(self, crafted):
        c = self.server.check_policy(crafted, self.server.request_settings)
        if c:
            err = language.make_error_response(c)
            language.serve(err, self.wfile, self.server.request_settings)
            log = dict(
                type="error",
                msg=c
            )
            return False, log

        if self.server.explain and not isinstance(crafted, language.PathodErrorResponse):
            crafted = crafted.freeze(self.server.request_settings, None)
            self.info(">> Spec: %s" % crafted.spec())
        response_log = language.serve(crafted, self.wfile, self.server.request_settings, None)
        if response_log["disconnect"]:
            return False, response_log
        return True, response_log

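    # The request line is parsed as one of three forms: a CONNECT proxy
    # tunnel, an absolute-form proxy request, or an ordinary origin-form
    # request. Crafted responses are triggered by anchors or by the craft
    # anchor path, e.g. a GET for /p/200:b@100 serves a 200 response with a
    # 100-byte body.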
    def handle_request(self):
        """
            Returns an (again, log) tuple.

            again: True if request handling should continue.
            log: A dictionary, or None
        """
        line = self.rfile.readline()
        if line == "\r\n" or line == "\n":  # Possible leftover from previous message
            line = self.rfile.readline()
        if line == "":
            # Normal termination
            return False, None

        m = utils.MemBool()
        if m(http.parse_init_connect(line)):
            headers = http.read_headers(self.rfile)
            self.wfile.write(
                'HTTP/1.1 200 Connection established\r\n' +
                ('Proxy-agent: %s\r\n' % version.NAMEVERSION) +
                '\r\n'
            )
            self.wfile.flush()
            if not self.server.ssloptions.not_after_connect:
                try:
                    cert, key, chain_file = self.server.ssloptions.get_cert(m.v[0])
                    self.convert_to_ssl(
                        cert, key,
                        handle_sni=self.handle_sni,
                        request_client_cert=self.server.ssloptions.request_client_cert,
                        cipher_list=self.server.ssloptions.ciphers,
                        method=self.server.ssloptions.sslversion,
                    )
                except tcp.NetLibError, v:
                    s = str(v)
                    self.info(s)
                    return False, dict(type="error", msg=s)
            return True, None
        elif m(http.parse_init_proxy(line)):
            method, _, _, _, path, httpversion = m.v
        elif m(http.parse_init_http(line)):
            method, path, httpversion = m.v
        else:
            s = "Invalid first line: %s" % repr(line)
            self.info(s)
            return False, dict(type="error", msg=s)

        headers = http.read_headers(self.rfile)
        if headers is None:
            s = "Invalid headers"
            self.info(s)
            return False, dict(type="error", msg=s)

        clientcert = None
        if self.clientcert:
            clientcert = dict(
                cn=self.clientcert.cn,
                subject=self.clientcert.subject,
                serial=self.clientcert.serial,
                notbefore=self.clientcert.notbefore.isoformat(),
                notafter=self.clientcert.notafter.isoformat(),
                keyinfo=self.clientcert.keyinfo,
            )

        retlog = dict(
            type="crafted",
            request=dict(
                path=path,
                method=method,
                headers=headers.lst,
                httpversion=httpversion,
                sni=self.sni,
                remote_address=self.address(),
                clientcert=clientcert,
            ),
            cipher=None,
        )
        if self.ssl_established:
            retlog["cipher"] = self.get_current_cipher()

        try:
            content = http.read_http_body(
                self.rfile, headers, None,
                method, None, True
            )
        except http.HttpError, s:
            s = str(s)
            self.info(s)
            return False, dict(type="error", msg=s)

        for i in self.server.anchors:
            if i[0].match(path):
                self.info("crafting anchor: %s" % path)
                aresp = language.parse_response(self.server.request_settings, i[1])
                again, retlog["response"] = self.serve_crafted(aresp)
                return again, retlog

        if not self.server.nocraft and path.startswith(self.server.craftanchor):
            spec = urllib.unquote(path)[len(self.server.craftanchor):]
            self.info("crafting spec: %s" % spec)
            try:
                crafted = language.parse_response(self.server.request_settings, spec)
            except language.ParseException, v:
                self.info("Parse error: %s" % v.msg)
                crafted = language.make_error_response(
                    "Parse Error",
                    "Error parsing response spec: %s\n" % v.msg + v.marked()
                )
            again, retlog["response"] = self.serve_crafted(crafted)
            return again, retlog
        elif self.server.noweb:
            crafted = language.make_error_response("Access Denied")
            language.serve(crafted, self.wfile, self.server.request_settings)
            return False, dict(type="error", msg="Access denied: web interface disabled")
        else:
            self.info("app: %s %s" % (method, path))
            req = wsgi.Request("http", method, path, headers, content)
            flow = wsgi.Flow(self.address, req)
            sn = self.connection.getsockname()
            a = wsgi.WSGIAdaptor(
                self.server.app,
                sn[0],
                self.server.address.port,
                version.NAMEVERSION
            )
            a.serve(flow, self.wfile)
            return True, None

    def _log_bytes(self, header, data, hexdump):
        s = []
        if hexdump:
            s.append("%s (hex dump):" % header)
            for line in netlib.utils.hexdump(data):
                s.append("\t%s %s %s" % line)
        else:
            s.append("%s (unprintables escaped):" % header)
            s.append(netlib.utils.cleanBin(data))
        self.info("\n".join(s))

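    # Main connection loop: optionally wraps the socket in SSL first, then
    # serves requests until the client disconnects, an error occurs, or a
    # crafted response requests a disconnect.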
    def handle(self):
        if self.server.ssl:
            try:
                cert, key, chain_file = self.server.ssloptions.get_cert(None)
                self.convert_to_ssl(
                    cert, key,
                    handle_sni=self.handle_sni,
                    request_client_cert=self.server.ssloptions.request_client_cert,
                    cipher_list=self.server.ssloptions.ciphers,
                    method=self.server.ssloptions.sslversion,
                )
            except tcp.NetLibError, v:
                s = str(v)
                self.server.add_log(
                    dict(
                        type="error",
                        msg=s
                    )
                )
                self.info(s)
                return
        self.settimeout(self.server.timeout)
        while not self.finished:
            if self.server.logreq:
                self.rfile.start_log()
            if self.server.logresp:
                self.wfile.start_log()
            again, log = self.handle_request()
            if log:
                if self.server.logreq:
                    log["request_bytes"] = self.rfile.get_log().encode("string_escape")
                    self._log_bytes("Request", log["request_bytes"], self.server.hexdump)
                if self.server.logresp:
                    log["response_bytes"] = self.wfile.get_log().encode("string_escape")
                    self._log_bytes("Response", log["response_bytes"], self.server.hexdump)
                self.server.add_log(log)
            if not again:
                return


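# The pathod server proper: a TCP server that dispatches each accepted
# connection to a PathodHandler and keeps a bounded in-memory log of what
# was served.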
class Pathod(tcp.TCPServer):
    LOGBUF = 500

    def __init__(
        self, addr, confdir=CONFDIR, ssl=False, ssloptions=None,
        craftanchor="/p/", staticdir=None, anchors=None,
        sizelimit=None, noweb=False, nocraft=False, noapi=False,
        nohang=False, timeout=None, logreq=False, logresp=False,
        explain=False, hexdump=False
    ):
        """
            addr: (address, port) tuple. If port is 0, a free port will be
            automatically chosen.
            ssloptions: an SSLOptions object.
            craftanchor: string specifying the path under which to anchor response generation.
            staticdir: path to a directory of static resources, or None.
            anchors: A list of (regex, spec) tuples, or None.
            sizelimit: Limit size of served data.
            nocraft: Disable response crafting.
            noapi: Disable the API.
            nohang: Disable pauses.
        """
        tcp.TCPServer.__init__(self, addr)
        self.ssl = ssl
        self.ssloptions = ssloptions or SSLOptions()
        self.staticdir = staticdir
        self.craftanchor = craftanchor
        self.sizelimit = sizelimit
        self.noweb, self.nocraft, self.noapi, self.nohang = noweb, nocraft, noapi, nohang
        self.timeout, self.logreq, self.logresp, self.hexdump = timeout, logreq, logresp, hexdump
        self.explain = explain

        self.app = app.make_app(noapi)
        self.app.config["pathod"] = self
        self.log = []
        self.logid = 0
        # Guards self.log and self.logid: handlers may log from other threads.
        self.log_lock = threading.Lock()
        self.anchors = []
        if anchors:
            for i in anchors:
                try:
                    arex = re.compile(i[0])
                except re.error:
                    raise PathodError("Invalid regex in anchor: %s" % i[0])
                try:
                    language.parse_response(self.request_settings, i[1])
                except language.ParseException, v:
                    raise PathodError("Invalid page spec in anchor: '%s', %s" % (i[1], str(v)))
                self.anchors.append((arex, i[1]))

    def check_policy(self, req, settings):
        """
            A policy check that verifies the request size is within limits.
        """
        try:
            l = req.maximum_length(settings)
        except language.FileAccessDenied:
            return "File access denied."
        if self.sizelimit and l > self.sizelimit:
            return "Response too large."
        if self.nohang and any([isinstance(i, language.PauseAt) for i in req.actions]):
            return "Pauses have been disabled."
        return False

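    # Settings passed through to the response language when parsing and
    # serving specs.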
    @property
    def request_settings(self):
        return dict(
            staticdir=self.staticdir
        )

    def handle_client_connection(self, request, client_address):
        h = PathodHandler(request, client_address, self)
        try:
            h.handle()
            h.finish()
        except tcp.NetLibDisconnect:  # pragma: no cover
            h.info("Disconnect")
            self.add_log(
                dict(
                    type="error",
                    msg="Disconnect"
                )
            )
            return
        except tcp.NetLibTimeout:
            h.info("Timeout")
            self.add_log(
                dict(
                    type="timeout",
                )
            )
            return

    def add_log(self, d):
        if not self.noapi:
            with self.log_lock:
                d["id"] = self.logid
                self.log.insert(0, d)
                if len(self.log) > self.LOGBUF:
                    self.log.pop()
                self.logid += 1
            return d["id"]

    def clear_log(self):
        with self.log_lock:
            self.log = []

    def log_by_id(self, id):
        for i in self.log:
            if i["id"] == id:
                return i

    def get_log(self):
        return self.log


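# A minimal sketch of driving this module programmatically (the command-line
# entry point below does the same with more options). The port, anchor regex,
# and response spec here are illustrative values, not defaults:
#
#   pd = Pathod(
#       ("127.0.0.1", 9999),
#       anchors=[("^/foo$", "200:b@1k")]
#   )
#   pd.serve_forever()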
def main(parser, args):
    certs = []
    for i in args.ssl_certs:
        parts = i.split("=", 1)
        if len(parts) == 1:
            parts = ["*", parts[0]]
        parts[1] = os.path.expanduser(parts[1])
        if not os.path.exists(parts[1]):
            parser.error("Certificate file does not exist: %s" % parts[1])
        certs.append(parts)

    ssloptions = SSLOptions(
        cn=args.cn,
        confdir=args.confdir,
        not_after_connect=args.ssl_not_after_connect,
        ciphers=args.ciphers,
        sslversion=utils.SSLVERSIONS[args.sslversion],
        certs=certs
    )

    alst = []
    for i in args.anchors:
        parts = utils.parse_anchor_spec(i)
        if not parts:
            parser.error("Invalid anchor specification: %s" % i)
        alst.append(parts)

    root = logging.getLogger()
    if root.handlers:
        for handler in root.handlers:
            root.removeHandler(handler)

    log = logging.getLogger('pathod')
    log.setLevel(logging.DEBUG)
    fmt = logging.Formatter(
        '%(asctime)s: %(message)s',
        datefmt='%d-%m-%y %H:%M:%S',
    )
    if args.logfile:
        fh = logging.handlers.WatchedFileHandler(args.logfile)
        fh.setFormatter(fmt)
        log.addHandler(fh)
    if not args.daemonize:
        sh = logging.StreamHandler()
        sh.setFormatter(fmt)
        log.addHandler(sh)

    sizelimit = None
    if args.sizelimit:
        try:
            sizelimit = utils.parse_size(args.sizelimit)
        except ValueError, v:
            parser.error(v)

    try:
        pd = Pathod(
            (args.address, args.port),
            craftanchor=args.craftanchor,
            ssl=args.ssl,
            ssloptions=ssloptions,
            staticdir=args.staticdir,
            anchors=alst,
            sizelimit=sizelimit,
            noweb=args.noweb,
            nocraft=args.nocraft,
            noapi=args.noapi,
            nohang=args.nohang,
            timeout=args.timeout,
            logreq=args.logreq,
            logresp=args.logresp,
            hexdump=args.hexdump,
            explain=args.explain,
        )
    except PathodError, v:
        parser.error(str(v))
    except language.FileAccessDenied, v:
        parser.error("%s You probably want to pass a -d argument." % str(v))

    try:
        print "%s listening on %s:%s" % (
            version.NAMEVERSION,
            pd.address.host,
            pd.address.port
        )
        pd.serve_forever()
    except KeyboardInterrupt:
        pass