From 7440232f6036bb29d1c142cd32a9bb27458b30d1 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 09:20:44 +1300 Subject: netlib.version -> mitmproxy.version --- docs/conf.py | 6 +++--- examples/har_dump.py | 2 +- mitmproxy/addons/wsgiapp.py | 2 +- mitmproxy/flow.py | 2 +- mitmproxy/http.py | 2 +- mitmproxy/io_compat.py | 3 ++- mitmproxy/tools/cmdline.py | 2 +- mitmproxy/tools/console/help.py | 2 +- mitmproxy/tools/web/app.py | 2 +- mitmproxy/version.py | 4 ++++ netlib/debug.py | 2 +- netlib/version.py | 4 ---- pathod/pathoc_cmdline.py | 2 +- pathod/pathod.py | 2 +- pathod/pathod_cmdline.py | 2 +- pathod/protocols/http.py | 2 +- setup.py | 2 +- 17 files changed, 22 insertions(+), 21 deletions(-) create mode 100644 mitmproxy/version.py delete mode 100644 netlib/version.py diff --git a/docs/conf.py b/docs/conf.py index e1cbc497..ce942aa9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,7 +5,7 @@ import subprocess import sys sys.path.insert(0, os.path.abspath('..')) -import netlib.version +from mitmproxy import version extensions = [ @@ -47,9 +47,9 @@ author = u'The mitmproxy project' # built documents. # # The short X.Y version. -version = netlib.version.VERSION +version = version.VERSION # The full version, including alpha/beta/rc tags. -release = netlib.version.VERSION +release = version.VERSION # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/examples/har_dump.py b/examples/har_dump.py index 95090edb..deed2e70 100644 --- a/examples/har_dump.py +++ b/examples/har_dump.py @@ -14,7 +14,7 @@ import pytz import mitmproxy -from netlib import version +from mitmproxy import version from netlib import strutils from netlib.http import cookies diff --git a/mitmproxy/addons/wsgiapp.py b/mitmproxy/addons/wsgiapp.py index d83a1e2e..0879a87b 100644 --- a/mitmproxy/addons/wsgiapp.py +++ b/mitmproxy/addons/wsgiapp.py @@ -2,7 +2,7 @@ from mitmproxy import ctx from mitmproxy import exceptions from netlib import wsgi -from netlib import version +from mitmproxy import version class WSGIApp: diff --git a/mitmproxy/flow.py b/mitmproxy/flow.py index 13b852ef..2fb7ace0 100644 --- a/mitmproxy/flow.py +++ b/mitmproxy/flow.py @@ -4,8 +4,8 @@ import uuid from mitmproxy import stateobject from mitmproxy import connections +from mitmproxy import version -from netlib import version from typing import Optional # noqa diff --git a/mitmproxy/http.py b/mitmproxy/http.py index 7fe70f9b..4474ca3b 100644 --- a/mitmproxy/http.py +++ b/mitmproxy/http.py @@ -2,7 +2,7 @@ import cgi from mitmproxy import flow from netlib import http -from netlib import version +from mitmproxy import version from netlib import tcp diff --git a/mitmproxy/io_compat.py b/mitmproxy/io_compat.py index fc190e2f..7f8f41b3 100644 --- a/mitmproxy/io_compat.py +++ b/mitmproxy/io_compat.py @@ -4,7 +4,8 @@ This module handles the import of mitmproxy flows generated by old versions. 
from typing import Any -from netlib import version, strutils +from mitmproxy import version +from netlib import strutils def convert_011_012(data): diff --git a/mitmproxy/tools/cmdline.py b/mitmproxy/tools/cmdline.py index ddc4a5ce..41770631 100644 --- a/mitmproxy/tools/cmdline.py +++ b/mitmproxy/tools/cmdline.py @@ -7,7 +7,7 @@ from mitmproxy import options from mitmproxy import platform from netlib import human from netlib import tcp -from netlib import version +from mitmproxy import version class ParseException(Exception): diff --git a/mitmproxy/tools/console/help.py b/mitmproxy/tools/console/help.py index 752ebf00..dda8bfbc 100644 --- a/mitmproxy/tools/console/help.py +++ b/mitmproxy/tools/console/help.py @@ -6,7 +6,7 @@ from mitmproxy import flowfilter from mitmproxy.tools.console import common from mitmproxy.tools.console import signals -from netlib import version +from mitmproxy import version footer = [ ("heading", 'mitmproxy {} (Python {}) '.format(version.VERSION, platform.python_version())), diff --git a/mitmproxy/tools/web/app.py b/mitmproxy/tools/web/app.py index 89b42186..41701d36 100644 --- a/mitmproxy/tools/web/app.py +++ b/mitmproxy/tools/web/app.py @@ -15,7 +15,7 @@ from mitmproxy import flow from mitmproxy import http from mitmproxy import contentviews from mitmproxy import io -from netlib import version +from mitmproxy import version def convert_flow_to_json_dict(flow: flow.Flow) -> dict: diff --git a/mitmproxy/version.py b/mitmproxy/version.py new file mode 100644 index 00000000..cb670642 --- /dev/null +++ b/mitmproxy/version.py @@ -0,0 +1,4 @@ +IVERSION = (0, 19) +VERSION = ".".join(str(i) for i in IVERSION) +PATHOD = "pathod " + VERSION +MITMPROXY = "mitmproxy " + VERSION diff --git a/netlib/debug.py b/netlib/debug.py index f1b3d792..147fe4b1 100644 --- a/netlib/debug.py +++ b/netlib/debug.py @@ -6,7 +6,7 @@ import signal import platform import traceback -from netlib import version +from mitmproxy import version from OpenSSL import SSL diff --git a/netlib/version.py b/netlib/version.py deleted file mode 100644 index cb670642..00000000 --- a/netlib/version.py +++ /dev/null @@ -1,4 +0,0 @@ -IVERSION = (0, 19) -VERSION = ".".join(str(i) for i in IVERSION) -PATHOD = "pathod " + VERSION -MITMPROXY = "mitmproxy " + VERSION diff --git a/pathod/pathoc_cmdline.py b/pathod/pathoc_cmdline.py index ab330505..2b4b9f9c 100644 --- a/pathod/pathoc_cmdline.py +++ b/pathod/pathoc_cmdline.py @@ -4,7 +4,7 @@ import os import os.path from netlib import tcp -from netlib import version +from mitmproxy import version from netlib.http import user_agents from . import pathoc, language diff --git a/pathod/pathod.py b/pathod/pathod.py index 0f659d40..3692ceff 100644 --- a/pathod/pathod.py +++ b/pathod/pathod.py @@ -7,7 +7,7 @@ import threading from netlib import tcp from netlib import certutils from netlib import websockets -from netlib import version +from mitmproxy import version import urllib from netlib.exceptions import HttpException, HttpReadDisconnect, TcpTimeout, TcpDisconnect, \ diff --git a/pathod/pathod_cmdline.py b/pathod/pathod_cmdline.py index 3531b8f3..e467ef30 100644 --- a/pathod/pathod_cmdline.py +++ b/pathod/pathod_cmdline.py @@ -6,7 +6,7 @@ import re from netlib import tcp from netlib import human -from netlib import version +from mitmproxy import version from . 
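Aside (not part of the diff): a minimal sketch of how callers consume the relocated version module once this commit applies. The constants are exactly those defined in the new mitmproxy/version.py above; the printed strings assume IVERSION stays at (0, 19).

    # Illustrative usage of mitmproxy.version after the rename.
    from mitmproxy import version

    assert version.VERSION == ".".join(str(i) for i in version.IVERSION)
    print(version.MITMPROXY)  # "mitmproxy 0.19"
    print(version.PATHOD)     # "pathod 0.19"
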
import pathod diff --git a/pathod/protocols/http.py b/pathod/protocols/http.py index 17930320..0822e864 100644 --- a/pathod/protocols/http.py +++ b/pathod/protocols/http.py @@ -1,4 +1,4 @@ -from netlib import version +from mitmproxy import version from netlib.exceptions import TlsException from netlib.http import http1 from .. import language diff --git a/setup.py b/setup.py index f856ff7a..9981ea6c 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages from codecs import open import os -from netlib import version +from mitmproxy import version # Based on https://github.com/pypa/sampleproject/blob/master/setup.py # and https://python-packaging-user-guide.readthedocs.org/ -- cgit v1.2.3 From 069119364d6490e52ba26f2d8001c6b2bf50ab7b Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 09:35:55 +1300 Subject: Create mitmproxy.utils hierarchy - Add mitproxy.utils.lrucache, mitproxy.utils.data --- mitmproxy/addons/onboardingapp/app.py | 6 ++--- mitmproxy/addons/termlog.py | 4 ++-- mitmproxy/log.py | 4 ++++ mitmproxy/tools/console/common.py | 4 ++-- mitmproxy/tools/console/flowview.py | 4 ++-- mitmproxy/tools/console/master.py | 4 ++-- mitmproxy/utils.py | 40 --------------------------------- mitmproxy/utils/data.py | 33 +++++++++++++++++++++++++++ mitmproxy/utils/lrucache.py | 32 ++++++++++++++++++++++++++ netlib/tutils.py | 6 +++-- netlib/utils.py | 30 ------------------------- pathod/utils.py | 5 +++-- test/mitmproxy/test_examples.py | 5 ++--- test/mitmproxy/test_utils.py | 42 ----------------------------------- test/mitmproxy/test_utils_data.py | 7 ++++++ test/mitmproxy/test_utils_lrucache.py | 34 ++++++++++++++++++++++++++++ test/mitmproxy/tutils.py | 3 ++- test/pathod/tutils.py | 5 +++-- 18 files changed, 135 insertions(+), 133 deletions(-) delete mode 100644 mitmproxy/utils.py create mode 100644 mitmproxy/utils/data.py create mode 100644 mitmproxy/utils/lrucache.py delete mode 100644 test/mitmproxy/test_utils.py create mode 100644 test/mitmproxy/test_utils_data.py create mode 100644 test/mitmproxy/test_utils_lrucache.py diff --git a/mitmproxy/addons/onboardingapp/app.py b/mitmproxy/addons/onboardingapp/app.py index 9e07b75f..50b52214 100644 --- a/mitmproxy/addons/onboardingapp/app.py +++ b/mitmproxy/addons/onboardingapp/app.py @@ -4,11 +4,11 @@ import tornado.template import tornado.web import tornado.wsgi -from mitmproxy import utils +from mitmproxy.utils import data from mitmproxy.proxy import config from mitmproxy.addons import wsgiapp -loader = tornado.template.Loader(utils.pkg_data.path("addons/onboardingapp/templates")) +loader = tornado.template.Loader(data.pkg_data.path("addons/onboardingapp/templates")) class Adapter(tornado.wsgi.WSGIAdapter): @@ -86,7 +86,7 @@ application = tornado.web.Application( r"/static/(.*)", tornado.web.StaticFileHandler, { - "path": utils.pkg_data.path("addons/onboardingapp/static") + "path": data.pkg_data.path("addons/onboardingapp/static") } ), ], diff --git a/mitmproxy/addons/termlog.py b/mitmproxy/addons/termlog.py index 50c32044..05be32d0 100644 --- a/mitmproxy/addons/termlog.py +++ b/mitmproxy/addons/termlog.py @@ -1,6 +1,6 @@ import click -from mitmproxy import utils +from mitmproxy import log class TermLog: @@ -11,7 +11,7 @@ class TermLog: self.options = options def log(self, e): - if self.options.verbosity >= utils.log_tier(e.level): + if self.options.verbosity >= log.log_tier(e.level): click.secho( e.msg, file=self.options.tfile, diff --git a/mitmproxy/log.py b/mitmproxy/log.py index 
8c28a9b1..c2456cf1 100644 --- a/mitmproxy/log.py +++ b/mitmproxy/log.py @@ -38,3 +38,7 @@ class Log: def __call__(self, text, level="info"): self.master.add_log(text, level) + + +def log_tier(level): + return dict(error=0, warn=1, info=2, debug=3).get(level) diff --git a/mitmproxy/tools/console/common.py b/mitmproxy/tools/console/common.py index d10d9321..dc4cfe18 100644 --- a/mitmproxy/tools/console/common.py +++ b/mitmproxy/tools/console/common.py @@ -7,7 +7,7 @@ import urwid import urwid.util import netlib -from mitmproxy import utils +from mitmproxy.utils import lrucache from mitmproxy.tools.console import signals from mitmproxy import export from netlib import human @@ -325,7 +325,7 @@ def export_to_clip_or_file(key, scope, flow, writer): else: # other keys writer(exporter(flow)) -flowcache = utils.LRUCache(800) +flowcache = lrucache.LRUCache(800) def raw_format_flow(f): diff --git a/mitmproxy/tools/console/flowview.py b/mitmproxy/tools/console/flowview.py index 64caf474..afebf44e 100644 --- a/mitmproxy/tools/console/flowview.py +++ b/mitmproxy/tools/console/flowview.py @@ -8,7 +8,7 @@ from typing import Optional, Union # noqa from mitmproxy import contentviews from mitmproxy import http -from mitmproxy import utils +from mitmproxy.utils import lrucache from mitmproxy.tools.console import common from mitmproxy.tools.console import flowdetailview from mitmproxy.tools.console import grideditor @@ -121,7 +121,7 @@ class FlowViewHeader(urwid.WidgetWrap): ) -cache = utils.LRUCache(200) +cache = lrucache.LRUCache(200) TAB_REQ = 0 TAB_RESP = 1 diff --git a/mitmproxy/tools/console/master.py b/mitmproxy/tools/console/master.py index b3fd0adb..3cc721b2 100644 --- a/mitmproxy/tools/console/master.py +++ b/mitmproxy/tools/console/master.py @@ -21,7 +21,7 @@ from mitmproxy import exceptions from mitmproxy import master from mitmproxy import io from mitmproxy import flowfilter -from mitmproxy import utils +from mitmproxy import log from mitmproxy.addons import state import mitmproxy.options from mitmproxy.tools.console import flowlist @@ -266,7 +266,7 @@ class ConsoleMaster(master.Master): ) def sig_add_log(self, sender, e, level): - if self.options.verbosity < utils.log_tier(level): + if self.options.verbosity < log.log_tier(level): return if level in ("error", "warn"): diff --git a/mitmproxy/utils.py b/mitmproxy/utils.py deleted file mode 100644 index fb2effd8..00000000 --- a/mitmproxy/utils.py +++ /dev/null @@ -1,40 +0,0 @@ -import netlib.utils - - -pkg_data = netlib.utils.Data(__name__) - - -class LRUCache: - - """ - A simple LRU cache for generated values. - """ - - def __init__(self, size=100): - self.size = size - self.cache = {} - self.cacheList = [] - - def get(self, gen, *args): - """ - gen: A (presumably expensive) generator function. The identity of - gen is NOT taken into account by the cache. - *args: A list of immutable arguments, used to establish identiy by - *the cache, and passed to gen to generate values. 
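Aside (not part of the diff): the log_tier helper added to mitmproxy/log.py above maps a level name to a numeric tier so addons can compare it against the configured verbosity, as TermLog and ConsoleMaster now do. A minimal sketch; the sample verbosity value is invented for illustration:

    from mitmproxy import log

    assert log.log_tier("error") == 0
    assert log.log_tier("debug") == 3
    assert log.log_tier("bogus") is None   # unknown levels map to None

    verbosity = 2                          # i.e. "info"
    if verbosity >= log.log_tier("warn"):  # True: warnings are shown
        pass
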
- """ - if args in self.cache: - self.cacheList.remove(args) - self.cacheList.insert(0, args) - return self.cache[args] - else: - ret = gen(*args) - self.cacheList.insert(0, args) - self.cache[args] = ret - if len(self.cacheList) > self.size: - d = self.cacheList.pop() - self.cache.pop(d) - return ret - - -def log_tier(level): - return dict(error=0, warn=1, info=2, debug=3).get(level) diff --git a/mitmproxy/utils/data.py b/mitmproxy/utils/data.py new file mode 100644 index 00000000..2e68d184 --- /dev/null +++ b/mitmproxy/utils/data.py @@ -0,0 +1,33 @@ +import os.path +import importlib +import inspect + + +class Data: + + def __init__(self, name): + m = importlib.import_module(name) + dirname = os.path.dirname(inspect.getsourcefile(m)) + self.dirname = os.path.abspath(dirname) + + def push(self, subpath): + """ + Change the data object to a path relative to the module. + """ + self.dirname = os.path.join(self.dirname, subpath) + return self + + def path(self, path): + """ + Returns a path to the package data housed at 'path' under this + module.Path can be a path to a file, or to a directory. + + This function will raise ValueError if the path does not exist. + """ + fullpath = os.path.join(self.dirname, path) + if not os.path.exists(fullpath): + raise ValueError("dataPath: %s does not exist." % fullpath) + return fullpath + + +pkg_data = Data(__name__).push("..") diff --git a/mitmproxy/utils/lrucache.py b/mitmproxy/utils/lrucache.py new file mode 100644 index 00000000..7ad2b7f5 --- /dev/null +++ b/mitmproxy/utils/lrucache.py @@ -0,0 +1,32 @@ + + +class LRUCache: + + """ + A simple LRU cache for generated values. + """ + + def __init__(self, size=100): + self.size = size + self.cache = {} + self.cacheList = [] + + def get(self, gen, *args): + """ + gen: A (presumably expensive) generator function. The identity of + gen is NOT taken into account by the cache. + *args: A list of immutable arguments, used to establish identiy by + *the cache, and passed to gen to generate values. + """ + if args in self.cache: + self.cacheList.remove(args) + self.cacheList.insert(0, args) + return self.cache[args] + else: + ret = gen(*args) + self.cacheList.insert(0, args) + self.cache[args] = ret + if len(self.cacheList) > self.size: + d = self.cacheList.pop() + self.cache.pop(d) + return ret diff --git a/netlib/tutils.py b/netlib/tutils.py index d22fdd1c..6fa2d7b6 100644 --- a/netlib/tutils.py +++ b/netlib/tutils.py @@ -6,7 +6,9 @@ import shutil from contextlib import contextmanager import sys -from netlib import utils, tcp, http +from mitmproxy.utils import data +from netlib import tcp +from netlib import http def treader(bytes): @@ -87,7 +89,7 @@ class RaisesContext: return True -test_data = utils.Data(__name__) +test_data = data.Data(__name__) # FIXME: Temporary workaround during repo merge. test_data.dirname = os.path.join(test_data.dirname, "..", "test", "netlib") diff --git a/netlib/utils.py b/netlib/utils.py index 8cd9ba6e..12b94d74 100644 --- a/netlib/utils.py +++ b/netlib/utils.py @@ -1,7 +1,4 @@ -import os.path import re -import importlib -import inspect def setbit(byte, offset, value): @@ -48,33 +45,6 @@ class BiDi: return self.values.get(n, default) -class Data: - - def __init__(self, name): - m = importlib.import_module(name) - dirname = os.path.dirname(inspect.getsourcefile(m)) - self.dirname = os.path.abspath(dirname) - - def push(self, subpath): - """ - Change the data object to a path relative to the module. 
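Aside (not part of the diff): mitmproxy.utils.lrucache carries the LRUCache code verbatim from the deleted mitmproxy/utils.py, and mitmproxy.utils.data takes Data/pkg_data over from netlib.utils; only the import paths change. A minimal sketch of the cache, using a toy generator function invented for illustration:

    from mitmproxy.utils import lrucache

    cache = lrucache.LRUCache(2)

    def expensive(x):
        print("computing", x)
        return x * 2

    assert cache.get(expensive, 10) == 20  # computed, prints once
    assert cache.get(expensive, 10) == 20  # served from the cache
    cache.get(expensive, 11)
    cache.get(expensive, 12)               # size 2 exceeded: the entry for 10 is evicted
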
- """ - self.dirname = os.path.join(self.dirname, subpath) - return self - - def path(self, path): - """ - Returns a path to the package data housed at 'path' under this - module.Path can be a path to a file, or to a directory. - - This function will raise ValueError if the path does not exist. - """ - fullpath = os.path.join(self.dirname, path) - if not os.path.exists(fullpath): - raise ValueError("dataPath: %s does not exist." % fullpath) - return fullpath - - _label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? Date: Thu, 20 Oct 2016 09:45:18 +1300 Subject: netlib.human -> mitmproxy.utils.human --- mitmproxy/addons/dumper.py | 2 +- mitmproxy/tools/cmdline.py | 2 +- mitmproxy/tools/console/common.py | 2 +- mitmproxy/tools/console/flowdetailview.py | 2 +- mitmproxy/tools/console/statusbar.py | 2 +- mitmproxy/utils/human.py | 64 +++++++++++++++++++++++++++++++ netlib/http/response.py | 2 +- netlib/human.py | 64 ------------------------------- netlib/websockets/frame.py | 2 +- pathod/language/base.py | 2 +- pathod/log.py | 3 +- pathod/pathod_cmdline.py | 2 +- test/mitmproxy/test_utils_human.py | 46 ++++++++++++++++++++++ test/mitmproxy/test_utils_lrucache.py | 4 +- test/netlib/test_human.py | 45 ---------------------- 15 files changed, 123 insertions(+), 121 deletions(-) create mode 100644 mitmproxy/utils/human.py delete mode 100644 netlib/human.py create mode 100644 test/mitmproxy/test_utils_human.py delete mode 100644 test/netlib/test_human.py diff --git a/mitmproxy/addons/dumper.py b/mitmproxy/addons/dumper.py index 04dfb42c..013fa337 100644 --- a/mitmproxy/addons/dumper.py +++ b/mitmproxy/addons/dumper.py @@ -8,7 +8,7 @@ from mitmproxy import contentviews from mitmproxy import ctx from mitmproxy import exceptions from mitmproxy import flowfilter -from netlib import human +from mitmproxy.utils import human from netlib import strutils diff --git a/mitmproxy/tools/cmdline.py b/mitmproxy/tools/cmdline.py index 41770631..b83694ef 100644 --- a/mitmproxy/tools/cmdline.py +++ b/mitmproxy/tools/cmdline.py @@ -5,7 +5,7 @@ from mitmproxy import exceptions from mitmproxy import flowfilter from mitmproxy import options from mitmproxy import platform -from netlib import human +from mitmproxy.utils import human from netlib import tcp from mitmproxy import version diff --git a/mitmproxy/tools/console/common.py b/mitmproxy/tools/console/common.py index dc4cfe18..53411d17 100644 --- a/mitmproxy/tools/console/common.py +++ b/mitmproxy/tools/console/common.py @@ -10,7 +10,7 @@ import netlib from mitmproxy.utils import lrucache from mitmproxy.tools.console import signals from mitmproxy import export -from netlib import human +from mitmproxy.utils import human try: import pyperclip diff --git a/mitmproxy/tools/console/flowdetailview.py b/mitmproxy/tools/console/flowdetailview.py index f13f9a1d..7591c3d1 100644 --- a/mitmproxy/tools/console/flowdetailview.py +++ b/mitmproxy/tools/console/flowdetailview.py @@ -1,7 +1,7 @@ import urwid from mitmproxy.tools.console import common, searchable -from netlib import human +from mitmproxy.utils import human def maybe_timestamp(base, attr): diff --git a/mitmproxy/tools/console/statusbar.py b/mitmproxy/tools/console/statusbar.py index 99f73727..ac41b213 100644 --- a/mitmproxy/tools/console/statusbar.py +++ b/mitmproxy/tools/console/statusbar.py @@ -6,7 +6,7 @@ import netlib.http.url from mitmproxy.tools.console import common from mitmproxy.tools.console import pathedit from mitmproxy.tools.console import signals -from netlib import human +from mitmproxy.utils import human 
class ActionBar(urwid.WidgetWrap): diff --git a/mitmproxy/utils/human.py b/mitmproxy/utils/human.py new file mode 100644 index 00000000..72e96d30 --- /dev/null +++ b/mitmproxy/utils/human.py @@ -0,0 +1,64 @@ +import datetime +import time + + +SIZE_TABLE = [ + ("b", 1024 ** 0), + ("k", 1024 ** 1), + ("m", 1024 ** 2), + ("g", 1024 ** 3), + ("t", 1024 ** 4), +] + +SIZE_UNITS = dict(SIZE_TABLE) + + +def pretty_size(size): + for bottom, top in zip(SIZE_TABLE, SIZE_TABLE[1:]): + if bottom[1] <= size < top[1]: + suf = bottom[0] + lim = bottom[1] + x = round(size / lim, 2) + if x == int(x): + x = int(x) + return str(x) + suf + return "%s%s" % (size, SIZE_TABLE[0][0]) + + +def parse_size(s): + try: + return int(s) + except ValueError: + pass + for i in SIZE_UNITS.keys(): + if s.endswith(i): + try: + return int(s[:-1]) * SIZE_UNITS[i] + except ValueError: + break + raise ValueError("Invalid size specification.") + + +def pretty_duration(secs): + formatters = [ + (100, "{:.0f}s"), + (10, "{:2.1f}s"), + (1, "{:1.2f}s"), + ] + + for limit, formatter in formatters: + if secs >= limit: + return formatter.format(secs) + # less than 1 sec + return "{:.0f}ms".format(secs * 1000) + + +def format_timestamp(s): + s = time.localtime(s) + d = datetime.datetime.fromtimestamp(time.mktime(s)) + return d.strftime("%Y-%m-%d %H:%M:%S") + + +def format_timestamp_with_milli(s): + d = datetime.datetime.fromtimestamp(s) + return d.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] diff --git a/netlib/http/response.py b/netlib/http/response.py index a8b48be0..12dba92a 100644 --- a/netlib/http/response.py +++ b/netlib/http/response.py @@ -1,6 +1,6 @@ import time from email.utils import parsedate_tz, formatdate, mktime_tz -from netlib import human +from mitmproxy.utils import human from netlib import multidict from netlib.http import cookies from netlib.http import headers as nheaders diff --git a/netlib/human.py b/netlib/human.py deleted file mode 100644 index 72e96d30..00000000 --- a/netlib/human.py +++ /dev/null @@ -1,64 +0,0 @@ -import datetime -import time - - -SIZE_TABLE = [ - ("b", 1024 ** 0), - ("k", 1024 ** 1), - ("m", 1024 ** 2), - ("g", 1024 ** 3), - ("t", 1024 ** 4), -] - -SIZE_UNITS = dict(SIZE_TABLE) - - -def pretty_size(size): - for bottom, top in zip(SIZE_TABLE, SIZE_TABLE[1:]): - if bottom[1] <= size < top[1]: - suf = bottom[0] - lim = bottom[1] - x = round(size / lim, 2) - if x == int(x): - x = int(x) - return str(x) + suf - return "%s%s" % (size, SIZE_TABLE[0][0]) - - -def parse_size(s): - try: - return int(s) - except ValueError: - pass - for i in SIZE_UNITS.keys(): - if s.endswith(i): - try: - return int(s[:-1]) * SIZE_UNITS[i] - except ValueError: - break - raise ValueError("Invalid size specification.") - - -def pretty_duration(secs): - formatters = [ - (100, "{:.0f}s"), - (10, "{:2.1f}s"), - (1, "{:1.2f}s"), - ] - - for limit, formatter in formatters: - if secs >= limit: - return formatter.format(secs) - # less than 1 sec - return "{:.0f}ms".format(secs * 1000) - - -def format_timestamp(s): - s = time.localtime(s) - d = datetime.datetime.fromtimestamp(time.mktime(s)) - return d.strftime("%Y-%m-%d %H:%M:%S") - - -def format_timestamp_with_milli(s): - d = datetime.datetime.fromtimestamp(s) - return d.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] diff --git a/netlib/websockets/frame.py b/netlib/websockets/frame.py index b58fa289..02d74112 100644 --- a/netlib/websockets/frame.py +++ b/netlib/websockets/frame.py @@ -5,7 +5,7 @@ import io from netlib import tcp from netlib import strutils from netlib import utils -from netlib 
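Aside (not part of the diff): mitmproxy.utils.human is a straight move of netlib/human.py; the expected values below mirror the assertions in test/mitmproxy/test_utils_human.py added in this commit. format_timestamp renders in local time, so its exact output varies by timezone:

    from mitmproxy.utils import human

    assert human.parse_size("1k") == 1024
    assert human.pretty_size(1024 * 1024) == "1m"
    assert human.pretty_duration(0.123) == "123ms"
    print(human.format_timestamp(0))  # local-time string, e.g. "1970-01-01 01:00:00"
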
import human +from mitmproxy.utils import human from .masker import Masker diff --git a/pathod/language/base.py b/pathod/language/base.py index 11f0899d..7410bbd4 100644 --- a/pathod/language/base.py +++ b/pathod/language/base.py @@ -5,7 +5,7 @@ import functools import pyparsing as pp from netlib import strutils -from netlib import human +from mitmproxy.utils import human from . import generators, exceptions diff --git a/pathod/log.py b/pathod/log.py index 37100bb1..0d1bca41 100644 --- a/pathod/log.py +++ b/pathod/log.py @@ -1,6 +1,7 @@ import time -from netlib import strutils, human +from netlib import strutils +from mitmproxy.utils import human def write_raw(fp, lines, timestamp=True): diff --git a/pathod/pathod_cmdline.py b/pathod/pathod_cmdline.py index e467ef30..8d6ee2b6 100644 --- a/pathod/pathod_cmdline.py +++ b/pathod/pathod_cmdline.py @@ -5,7 +5,7 @@ import os.path import re from netlib import tcp -from netlib import human +from mitmproxy.utils import human from mitmproxy import version from . import pathod diff --git a/test/mitmproxy/test_utils_human.py b/test/mitmproxy/test_utils_human.py new file mode 100644 index 00000000..08609887 --- /dev/null +++ b/test/mitmproxy/test_utils_human.py @@ -0,0 +1,46 @@ +import time +from mitmproxy.utils import human +from netlib import tutils + + +def test_format_timestamp(): + assert human.format_timestamp(time.time()) + + +def test_format_timestamp_with_milli(): + assert human.format_timestamp_with_milli(time.time()) + + +def test_parse_size(): + assert human.parse_size("0") == 0 + assert human.parse_size("0b") == 0 + assert human.parse_size("1") == 1 + assert human.parse_size("1k") == 1024 + assert human.parse_size("1m") == 1024**2 + assert human.parse_size("1g") == 1024**3 + tutils.raises(ValueError, human.parse_size, "1f") + tutils.raises(ValueError, human.parse_size, "ak") + + +def test_pretty_size(): + assert human.pretty_size(0) == "0b" + assert human.pretty_size(100) == "100b" + assert human.pretty_size(1024) == "1k" + assert human.pretty_size(1024 + (1024 / 2.0)) == "1.5k" + assert human.pretty_size(1024 * 1024) == "1m" + assert human.pretty_size(10 * 1024 * 1024) == "10m" + + +def test_pretty_duration(): + assert human.pretty_duration(0.00001) == "0ms" + assert human.pretty_duration(0.0001) == "0ms" + assert human.pretty_duration(0.001) == "1ms" + assert human.pretty_duration(0.01) == "10ms" + assert human.pretty_duration(0.1) == "100ms" + assert human.pretty_duration(1) == "1.00s" + assert human.pretty_duration(10) == "10.0s" + assert human.pretty_duration(100) == "100s" + assert human.pretty_duration(1000) == "1000s" + assert human.pretty_duration(10000) == "10000s" + assert human.pretty_duration(1.123) == "1.12s" + assert human.pretty_duration(0.123) == "123ms" diff --git a/test/mitmproxy/test_utils_lrucache.py b/test/mitmproxy/test_utils_lrucache.py index f75fb5e8..07b96b4d 100644 --- a/test/mitmproxy/test_utils_lrucache.py +++ b/test/mitmproxy/test_utils_lrucache.py @@ -1,8 +1,8 @@ -from mitmproxy import utils +from mitmproxy.utils import lrucache def test_LRUCache(): - cache = utils.LRUCache(2) + cache = lrucache.LRUCache(2) class Foo: ran = False diff --git a/test/netlib/test_human.py b/test/netlib/test_human.py deleted file mode 100644 index bb97dc54..00000000 --- a/test/netlib/test_human.py +++ /dev/null @@ -1,45 +0,0 @@ -import time -from netlib import human, tutils - - -def test_format_timestamp(): - assert human.format_timestamp(time.time()) - - -def test_format_timestamp_with_milli(): - assert 
human.format_timestamp_with_milli(time.time()) - - -def test_parse_size(): - assert human.parse_size("0") == 0 - assert human.parse_size("0b") == 0 - assert human.parse_size("1") == 1 - assert human.parse_size("1k") == 1024 - assert human.parse_size("1m") == 1024**2 - assert human.parse_size("1g") == 1024**3 - tutils.raises(ValueError, human.parse_size, "1f") - tutils.raises(ValueError, human.parse_size, "ak") - - -def test_pretty_size(): - assert human.pretty_size(0) == "0b" - assert human.pretty_size(100) == "100b" - assert human.pretty_size(1024) == "1k" - assert human.pretty_size(1024 + (1024 / 2.0)) == "1.5k" - assert human.pretty_size(1024 * 1024) == "1m" - assert human.pretty_size(10 * 1024 * 1024) == "10m" - - -def test_pretty_duration(): - assert human.pretty_duration(0.00001) == "0ms" - assert human.pretty_duration(0.0001) == "0ms" - assert human.pretty_duration(0.001) == "1ms" - assert human.pretty_duration(0.01) == "10ms" - assert human.pretty_duration(0.1) == "100ms" - assert human.pretty_duration(1) == "1.00s" - assert human.pretty_duration(10) == "10.0s" - assert human.pretty_duration(100) == "100s" - assert human.pretty_duration(1000) == "1000s" - assert human.pretty_duration(10000) == "10000s" - assert human.pretty_duration(1.123) == "1.12s" - assert human.pretty_duration(0.123) == "123ms" -- cgit v1.2.3 From f45f4e677e8cddba8160d1e4e02ca8a4515e3456 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 10:11:58 +1300 Subject: netlib.strutils -> mitmproxy.utils.strutils --- examples/custom_contentviews.py | 2 +- examples/har_dump.py | 2 +- examples/tcp_message.py | 2 +- mitmproxy/addons/dumper.py | 2 +- mitmproxy/addons/serverplayback.py | 2 +- mitmproxy/contentviews.py | 2 +- mitmproxy/flowfilter.py | 2 +- mitmproxy/io_compat.py | 2 +- mitmproxy/proxy/config.py | 2 +- mitmproxy/proxy/protocol/websockets.py | 2 +- mitmproxy/tools/console/grideditor/col_bytes.py | 2 +- mitmproxy/tools/console/master.py | 4 +- mitmproxy/utils/strutils.py | 142 ++++++++++++++++++++++++ netlib/http/headers.py | 2 +- netlib/http/message.py | 4 +- netlib/http/request.py | 2 +- netlib/strutils.py | 142 ------------------------ netlib/tcp.py | 2 +- netlib/websockets/frame.py | 2 +- netlib/websockets/utils.py | 3 +- netlib/wsgi.py | 4 +- pathod/language/base.py | 2 +- pathod/language/message.py | 2 +- pathod/language/websockets.py | 2 +- pathod/log.py | 2 +- pathod/pathoc.py | 2 +- test/mitmproxy/test_utils_strutils.py | 96 ++++++++++++++++ test/netlib/test_strutils.py | 95 ---------------- 28 files changed, 269 insertions(+), 261 deletions(-) create mode 100644 mitmproxy/utils/strutils.py delete mode 100644 netlib/strutils.py create mode 100644 test/mitmproxy/test_utils_strutils.py delete mode 100644 test/netlib/test_strutils.py diff --git a/examples/custom_contentviews.py b/examples/custom_contentviews.py index 5a63e2a0..3558eaca 100644 --- a/examples/custom_contentviews.py +++ b/examples/custom_contentviews.py @@ -2,7 +2,7 @@ import string import lxml.html import lxml.etree from mitmproxy import contentviews -from netlib import strutils +from mitmproxy.utils import strutils class ViewPigLatin(contentviews.View): diff --git a/examples/har_dump.py b/examples/har_dump.py index deed2e70..d01e6cdd 100644 --- a/examples/har_dump.py +++ b/examples/har_dump.py @@ -15,7 +15,7 @@ import pytz import mitmproxy from mitmproxy import version -from netlib import strutils +from mitmproxy.utils import strutils from netlib.http import cookies HAR = {} diff --git a/examples/tcp_message.py 
b/examples/tcp_message.py index b431c23f..d7c9c42e 100644 --- a/examples/tcp_message.py +++ b/examples/tcp_message.py @@ -8,7 +8,7 @@ tcp_message Inline Script Hook API Demonstration example cmdline invocation: mitmdump -T --host --tcp ".*" -q -s examples/tcp_message.py """ -from netlib import strutils +from mitmproxy.utils import strutils def tcp_message(tcp_msg): diff --git a/mitmproxy/addons/dumper.py b/mitmproxy/addons/dumper.py index 013fa337..d690c000 100644 --- a/mitmproxy/addons/dumper.py +++ b/mitmproxy/addons/dumper.py @@ -9,7 +9,7 @@ from mitmproxy import ctx from mitmproxy import exceptions from mitmproxy import flowfilter from mitmproxy.utils import human -from netlib import strutils +from mitmproxy.utils import strutils def indent(n, text): diff --git a/mitmproxy/addons/serverplayback.py b/mitmproxy/addons/serverplayback.py index 1161ce23..0b52918c 100644 --- a/mitmproxy/addons/serverplayback.py +++ b/mitmproxy/addons/serverplayback.py @@ -1,7 +1,7 @@ import urllib import hashlib -from netlib import strutils +from mitmproxy.utils import strutils from mitmproxy import exceptions from mitmproxy import ctx from mitmproxy import io diff --git a/mitmproxy/contentviews.py b/mitmproxy/contentviews.py index 9a70b104..07bf09f5 100644 --- a/mitmproxy/contentviews.py +++ b/mitmproxy/contentviews.py @@ -35,7 +35,7 @@ from mitmproxy import exceptions from mitmproxy.contrib.wbxml import ASCommandResponse from netlib import http from netlib import multidict -from netlib import strutils +from mitmproxy.utils import strutils from netlib.http import url try: diff --git a/mitmproxy/flowfilter.py b/mitmproxy/flowfilter.py index f1454fd1..ee5224c6 100644 --- a/mitmproxy/flowfilter.py +++ b/mitmproxy/flowfilter.py @@ -40,7 +40,7 @@ from mitmproxy import http from mitmproxy import tcp from mitmproxy import flow -from netlib import strutils +from mitmproxy.utils import strutils import pyparsing as pp from typing import Callable diff --git a/mitmproxy/io_compat.py b/mitmproxy/io_compat.py index 7f8f41b3..68c747ea 100644 --- a/mitmproxy/io_compat.py +++ b/mitmproxy/io_compat.py @@ -5,7 +5,7 @@ This module handles the import of mitmproxy flows generated by old versions. 
from typing import Any from mitmproxy import version -from netlib import strutils +from mitmproxy.utils import strutils def convert_011_012(data): diff --git a/mitmproxy/proxy/config.py b/mitmproxy/proxy/config.py index 201dfdf7..a6fc739b 100644 --- a/mitmproxy/proxy/config.py +++ b/mitmproxy/proxy/config.py @@ -4,7 +4,7 @@ import os import re from typing import Any -from netlib import strutils +from mitmproxy.utils import strutils from OpenSSL import SSL, crypto diff --git a/mitmproxy/proxy/protocol/websockets.py b/mitmproxy/proxy/protocol/websockets.py index 816ec92d..636748a1 100644 --- a/mitmproxy/proxy/protocol/websockets.py +++ b/mitmproxy/proxy/protocol/websockets.py @@ -4,7 +4,7 @@ import struct from OpenSSL import SSL from mitmproxy import exceptions from mitmproxy.proxy.protocol import base -from netlib import strutils +from mitmproxy.utils import strutils from netlib import tcp from netlib import websockets diff --git a/mitmproxy/tools/console/grideditor/col_bytes.py b/mitmproxy/tools/console/grideditor/col_bytes.py index c951ce44..f580e947 100644 --- a/mitmproxy/tools/console/grideditor/col_bytes.py +++ b/mitmproxy/tools/console/grideditor/col_bytes.py @@ -4,7 +4,7 @@ from typing import Callable, Optional import urwid from mitmproxy.tools.console import signals from mitmproxy.tools.console.grideditor import base -from netlib import strutils +from mitmproxy.utils import strutils def read_file(filename: str, callback: Callable[..., None], escaped: bool) -> Optional[str]: diff --git a/mitmproxy/tools/console/master.py b/mitmproxy/tools/console/master.py index 3cc721b2..7ff0026e 100644 --- a/mitmproxy/tools/console/master.py +++ b/mitmproxy/tools/console/master.py @@ -35,7 +35,9 @@ from mitmproxy.tools.console import signals from mitmproxy.tools.console import statusbar from mitmproxy.tools.console import window from mitmproxy.flowfilter import FMarked -from netlib import tcp, strutils +from mitmproxy.utils import strutils + +from netlib import tcp EVENTLOG_SIZE = 500 diff --git a/mitmproxy/utils/strutils.py b/mitmproxy/utils/strutils.py new file mode 100644 index 00000000..57cfbc79 --- /dev/null +++ b/mitmproxy/utils/strutils.py @@ -0,0 +1,142 @@ +import re +import codecs + + +def always_bytes(unicode_or_bytes, *encode_args): + if isinstance(unicode_or_bytes, str): + return unicode_or_bytes.encode(*encode_args) + elif isinstance(unicode_or_bytes, bytes) or unicode_or_bytes is None: + return unicode_or_bytes + else: + raise TypeError("Expected str or bytes, but got {}.".format(type(unicode_or_bytes).__name__)) + + +def native(s, *encoding_opts): + """ + Convert :py:class:`bytes` or :py:class:`unicode` to the native + :py:class:`str` type, using latin1 encoding if conversion is necessary. + + https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types + """ + if not isinstance(s, (bytes, str)): + raise TypeError("%r is neither bytes nor unicode" % s) + if isinstance(s, bytes): + return s.decode(*encoding_opts) + return s + + +# Translate control characters to "safe" characters. This implementation initially +# replaced them with the matching control pictures (http://unicode.org/charts/PDF/U2400.pdf), +# but that turned out to render badly with monospace fonts. We are back to "." therefore. 
+_control_char_trans = { + x: ord(".") # x + 0x2400 for unicode control group pictures + for x in range(32) +} +_control_char_trans[127] = ord(".") # 0x2421 +_control_char_trans_newline = _control_char_trans.copy() +for x in ("\r", "\n", "\t"): + del _control_char_trans_newline[ord(x)] + + +_control_char_trans = str.maketrans(_control_char_trans) +_control_char_trans_newline = str.maketrans(_control_char_trans_newline) + + +def escape_control_characters(text: str, keep_spacing=True) -> str: + """ + Replace all unicode C1 control characters from the given text with a single "." + + Args: + keep_spacing: If True, tabs and newlines will not be replaced. + """ + if not isinstance(text, str): + raise ValueError("text type must be unicode but is {}".format(type(text).__name__)) + + trans = _control_char_trans_newline if keep_spacing else _control_char_trans + return text.translate(trans) + + +def bytes_to_escaped_str(data, keep_spacing=False, escape_single_quotes=False): + """ + Take bytes and return a safe string that can be displayed to the user. + + Single quotes are always escaped, double quotes are never escaped: + "'" + bytes_to_escaped_str(...) + "'" + gives a valid Python string. + + Args: + keep_spacing: If True, tabs and newlines will not be escaped. + """ + + if not isinstance(data, bytes): + raise ValueError("data must be bytes, but is {}".format(data.__class__.__name__)) + # We always insert a double-quote here so that we get a single-quoted string back + # https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their + ret = repr(b'"' + data).lstrip("b")[2:-1] + if not escape_single_quotes: + ret = re.sub(r"(? bool: + if not s or len(s) == 0: + return False + + return sum( + i < 9 or 13 < i < 32 or 126 < i + for i in s[:100] + ) / len(s[:100]) > 0.3 + + +def is_xml(s: bytes) -> bool: + return s.strip().startswith(b"<") + + +def clean_hanging_newline(t): + """ + Many editors will silently add a newline to the final line of a + document (I'm looking at you, Vim). This function fixes this common + problem at the risk of removing a hanging newline in the rare cases + where the user actually intends it. 
+ """ + if t and t[-1] == "\n": + return t[:-1] + return t + + +def hexdump(s): + """ + Returns: + A generator of (offset, hex, str) tuples + """ + for i in range(0, len(s), 16): + offset = "{:0=10x}".format(i) + part = s[i:i + 16] + x = " ".join("{:0=2x}".format(i) for i in part) + x = x.ljust(47) # 16*2 + 15 + part_repr = native(escape_control_characters( + part.decode("ascii", "replace").replace(u"\ufffd", u"."), + False + )) + yield (offset, x, part_repr) diff --git a/netlib/http/headers.py b/netlib/http/headers.py index 39673f1a..6c30d278 100644 --- a/netlib/http/headers.py +++ b/netlib/http/headers.py @@ -2,7 +2,7 @@ import re import collections from netlib import multidict -from netlib import strutils +from mitmproxy.utils import strutils # See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/ diff --git a/netlib/http/message.py b/netlib/http/message.py index 1980b0ab..133a53ce 100644 --- a/netlib/http/message.py +++ b/netlib/http/message.py @@ -2,7 +2,9 @@ import re import warnings from typing import Optional -from netlib import encoding, strutils, basetypes +from mitmproxy.utils import strutils +from netlib import encoding +from netlib import basetypes from netlib.http import headers diff --git a/netlib/http/request.py b/netlib/http/request.py index dd6f4164..3479fa4c 100644 --- a/netlib/http/request.py +++ b/netlib/http/request.py @@ -2,7 +2,7 @@ import re import urllib from netlib import multidict -from netlib import strutils +from mitmproxy.utils import strutils from netlib.http import multipart from netlib.http import cookies from netlib.http import headers as nheaders diff --git a/netlib/strutils.py b/netlib/strutils.py deleted file mode 100644 index 57cfbc79..00000000 --- a/netlib/strutils.py +++ /dev/null @@ -1,142 +0,0 @@ -import re -import codecs - - -def always_bytes(unicode_or_bytes, *encode_args): - if isinstance(unicode_or_bytes, str): - return unicode_or_bytes.encode(*encode_args) - elif isinstance(unicode_or_bytes, bytes) or unicode_or_bytes is None: - return unicode_or_bytes - else: - raise TypeError("Expected str or bytes, but got {}.".format(type(unicode_or_bytes).__name__)) - - -def native(s, *encoding_opts): - """ - Convert :py:class:`bytes` or :py:class:`unicode` to the native - :py:class:`str` type, using latin1 encoding if conversion is necessary. - - https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types - """ - if not isinstance(s, (bytes, str)): - raise TypeError("%r is neither bytes nor unicode" % s) - if isinstance(s, bytes): - return s.decode(*encoding_opts) - return s - - -# Translate control characters to "safe" characters. This implementation initially -# replaced them with the matching control pictures (http://unicode.org/charts/PDF/U2400.pdf), -# but that turned out to render badly with monospace fonts. We are back to "." therefore. -_control_char_trans = { - x: ord(".") # x + 0x2400 for unicode control group pictures - for x in range(32) -} -_control_char_trans[127] = ord(".") # 0x2421 -_control_char_trans_newline = _control_char_trans.copy() -for x in ("\r", "\n", "\t"): - del _control_char_trans_newline[ord(x)] - - -_control_char_trans = str.maketrans(_control_char_trans) -_control_char_trans_newline = str.maketrans(_control_char_trans_newline) - - -def escape_control_characters(text: str, keep_spacing=True) -> str: - """ - Replace all unicode C1 control characters from the given text with a single "." - - Args: - keep_spacing: If True, tabs and newlines will not be replaced. 
- """ - if not isinstance(text, str): - raise ValueError("text type must be unicode but is {}".format(type(text).__name__)) - - trans = _control_char_trans_newline if keep_spacing else _control_char_trans - return text.translate(trans) - - -def bytes_to_escaped_str(data, keep_spacing=False, escape_single_quotes=False): - """ - Take bytes and return a safe string that can be displayed to the user. - - Single quotes are always escaped, double quotes are never escaped: - "'" + bytes_to_escaped_str(...) + "'" - gives a valid Python string. - - Args: - keep_spacing: If True, tabs and newlines will not be escaped. - """ - - if not isinstance(data, bytes): - raise ValueError("data must be bytes, but is {}".format(data.__class__.__name__)) - # We always insert a double-quote here so that we get a single-quoted string back - # https://stackoverflow.com/questions/29019340/why-does-python-use-different-quotes-for-representing-strings-depending-on-their - ret = repr(b'"' + data).lstrip("b")[2:-1] - if not escape_single_quotes: - ret = re.sub(r"(? bool: - if not s or len(s) == 0: - return False - - return sum( - i < 9 or 13 < i < 32 or 126 < i - for i in s[:100] - ) / len(s[:100]) > 0.3 - - -def is_xml(s: bytes) -> bool: - return s.strip().startswith(b"<") - - -def clean_hanging_newline(t): - """ - Many editors will silently add a newline to the final line of a - document (I'm looking at you, Vim). This function fixes this common - problem at the risk of removing a hanging newline in the rare cases - where the user actually intends it. - """ - if t and t[-1] == "\n": - return t[:-1] - return t - - -def hexdump(s): - """ - Returns: - A generator of (offset, hex, str) tuples - """ - for i in range(0, len(s), 16): - offset = "{:0=10x}".format(i) - part = s[i:i + 16] - x = " ".join("{:0=2x}".format(i) for i in part) - x = x.ljust(47) # 16*2 + 15 - part_repr = native(escape_control_characters( - part.decode("ascii", "replace").replace(u"\ufffd", u"."), - False - )) - yield (offset, x, part_repr) diff --git a/netlib/tcp.py b/netlib/tcp.py index aeb1d447..aed79388 100644 --- a/netlib/tcp.py +++ b/netlib/tcp.py @@ -10,7 +10,7 @@ import binascii from typing import Optional # noqa -from netlib import strutils +from mitmproxy.utils import strutils import certifi from backports import ssl_match_hostname diff --git a/netlib/websockets/frame.py b/netlib/websockets/frame.py index 02d74112..e022a95c 100644 --- a/netlib/websockets/frame.py +++ b/netlib/websockets/frame.py @@ -3,7 +3,7 @@ import struct import io from netlib import tcp -from netlib import strutils +from mitmproxy.utils import strutils from netlib import utils from mitmproxy.utils import human from .masker import Masker diff --git a/netlib/websockets/utils.py b/netlib/websockets/utils.py index fdec074e..98043662 100644 --- a/netlib/websockets/utils.py +++ b/netlib/websockets/utils.py @@ -8,7 +8,8 @@ import base64 import hashlib import os -from netlib import http, strutils +from netlib import http +from mitmproxy.utils import strutils MAGIC = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' VERSION = "13" diff --git a/netlib/wsgi.py b/netlib/wsgi.py index 11e4aba9..5a54cd70 100644 --- a/netlib/wsgi.py +++ b/netlib/wsgi.py @@ -3,7 +3,9 @@ import traceback import urllib import io -from netlib import http, tcp, strutils +from netlib import http +from netlib import tcp +from mitmproxy.utils import strutils class ClientConn: diff --git a/pathod/language/base.py b/pathod/language/base.py index 7410bbd4..44a888c0 100644 --- a/pathod/language/base.py +++ 
b/pathod/language/base.py @@ -4,7 +4,7 @@ import abc import functools import pyparsing as pp -from netlib import strutils +from mitmproxy.utils import strutils from mitmproxy.utils import human from . import generators, exceptions diff --git a/pathod/language/message.py b/pathod/language/message.py index 03b4a2cf..6cdaaa0b 100644 --- a/pathod/language/message.py +++ b/pathod/language/message.py @@ -1,6 +1,6 @@ import abc from . import actions, exceptions -from netlib import strutils +from mitmproxy.utils import strutils LOG_TRUNCATE = 1024 diff --git a/pathod/language/websockets.py b/pathod/language/websockets.py index 417944af..d2291f82 100644 --- a/pathod/language/websockets.py +++ b/pathod/language/websockets.py @@ -1,7 +1,7 @@ import random import string import netlib.websockets -from netlib import strutils +from mitmproxy.utils import strutils import pyparsing as pp from . import base, generators, actions, message diff --git a/pathod/log.py b/pathod/log.py index 0d1bca41..4e5f355f 100644 --- a/pathod/log.py +++ b/pathod/log.py @@ -1,6 +1,6 @@ import time -from netlib import strutils +from mitmproxy.utils import strutils from mitmproxy.utils import human diff --git a/pathod/pathoc.py b/pathod/pathoc.py index e9fa5c43..0cf08a60 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -12,7 +12,7 @@ import OpenSSL.crypto import logging from netlib.tutils import treq -from netlib import strutils +from mitmproxy.utils import strutils from netlib import tcp, certutils, websockets, socks from netlib import exceptions from netlib.http import http1 diff --git a/test/mitmproxy/test_utils_strutils.py b/test/mitmproxy/test_utils_strutils.py new file mode 100644 index 00000000..2843688f --- /dev/null +++ b/test/mitmproxy/test_utils_strutils.py @@ -0,0 +1,96 @@ +from mitmproxy.utils import strutils +from netlib import tutils + + +def test_always_bytes(): + assert strutils.always_bytes(bytes(bytearray(range(256)))) == bytes(bytearray(range(256))) + assert strutils.always_bytes("foo") == b"foo" + with tutils.raises(ValueError): + strutils.always_bytes(u"\u2605", "ascii") + with tutils.raises(TypeError): + strutils.always_bytes(42, "ascii") + + +def test_native(): + with tutils.raises(TypeError): + strutils.native(42) + assert strutils.native(u"foo") == u"foo" + assert strutils.native(b"foo") == u"foo" + + +def test_escape_control_characters(): + assert strutils.escape_control_characters(u"one") == u"one" + assert strutils.escape_control_characters(u"\00ne") == u".ne" + assert strutils.escape_control_characters(u"\nne") == u"\nne" + assert strutils.escape_control_characters(u"\nne", False) == u".ne" + assert strutils.escape_control_characters(u"\u2605") == u"\u2605" + assert ( + strutils.escape_control_characters(bytes(bytearray(range(128))).decode()) == + u'.........\t\n..\r.................. !"#$%&\'()*+,-./0123456789:;<' + u'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.' + ) + assert ( + strutils.escape_control_characters(bytes(bytearray(range(128))).decode(), False) == + u'................................ !"#$%&\'()*+,-./0123456789:;<' + u'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.' 
+ ) + + with tutils.raises(ValueError): + strutils.escape_control_characters(b"foo") + + +def test_bytes_to_escaped_str(): + assert strutils.bytes_to_escaped_str(b"foo") == "foo" + assert strutils.bytes_to_escaped_str(b"\b") == r"\x08" + assert strutils.bytes_to_escaped_str(br"&!?=\)") == r"&!?=\\)" + assert strutils.bytes_to_escaped_str(b'\xc3\xbc') == r"\xc3\xbc" + assert strutils.bytes_to_escaped_str(b"'") == r"'" + assert strutils.bytes_to_escaped_str(b'"') == r'"' + + assert strutils.bytes_to_escaped_str(b"'", escape_single_quotes=True) == r"\'" + assert strutils.bytes_to_escaped_str(b'"', escape_single_quotes=True) == r'"' + + assert strutils.bytes_to_escaped_str(b"\r\n\t") == "\\r\\n\\t" + assert strutils.bytes_to_escaped_str(b"\r\n\t", True) == "\r\n\t" + + assert strutils.bytes_to_escaped_str(b"\n", True) == "\n" + assert strutils.bytes_to_escaped_str(b"\\n", True) == "\\ \\ n".replace(" ", "") + assert strutils.bytes_to_escaped_str(b"\\\n", True) == "\\ \\ \n".replace(" ", "") + assert strutils.bytes_to_escaped_str(b"\\\\n", True) == "\\ \\ \\ \\ n".replace(" ", "") + + with tutils.raises(ValueError): + strutils.bytes_to_escaped_str(u"such unicode") + + +def test_escaped_str_to_bytes(): + assert strutils.escaped_str_to_bytes("foo") == b"foo" + assert strutils.escaped_str_to_bytes("\x08") == b"\b" + assert strutils.escaped_str_to_bytes("&!?=\\\\)") == br"&!?=\)" + assert strutils.escaped_str_to_bytes(u"\\x08") == b"\b" + assert strutils.escaped_str_to_bytes(u"&!?=\\\\)") == br"&!?=\)" + assert strutils.escaped_str_to_bytes(u"\u00fc") == b'\xc3\xbc' + + with tutils.raises(ValueError): + strutils.escaped_str_to_bytes(b"very byte") + + +def test_is_mostly_bin(): + assert not strutils.is_mostly_bin(b"foo\xFF") + assert strutils.is_mostly_bin(b"foo" + b"\xFF" * 10) + assert not strutils.is_mostly_bin("") + + +def test_is_xml(): + assert not strutils.is_xml(b"foo") + assert strutils.is_xml(b"?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.' - ) - assert ( - strutils.escape_control_characters(bytes(bytearray(range(128))).decode(), False) == - u'................................ !"#$%&\'()*+,-./0123456789:;<' - u'=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.' 
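Aside (not part of the diff): mitmproxy.utils.strutils is likewise a verbatim move of netlib/strutils.py; the values below are taken from the new test_utils_strutils.py above, and the hexdump input is an arbitrary example:

    from mitmproxy.utils import strutils

    assert strutils.always_bytes("foo") == b"foo"
    assert strutils.bytes_to_escaped_str(b"\xc3\xbc") == r"\xc3\xbc"
    assert strutils.escaped_str_to_bytes("\\x08") == b"\b"
    assert strutils.escape_control_characters("\x00ne") == ".ne"

    for offset, hexed, text in strutils.hexdump(b"hello world"):
        print(offset, hexed, text)
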
- ) - - with tutils.raises(ValueError): - strutils.escape_control_characters(b"foo") - - -def test_bytes_to_escaped_str(): - assert strutils.bytes_to_escaped_str(b"foo") == "foo" - assert strutils.bytes_to_escaped_str(b"\b") == r"\x08" - assert strutils.bytes_to_escaped_str(br"&!?=\)") == r"&!?=\\)" - assert strutils.bytes_to_escaped_str(b'\xc3\xbc') == r"\xc3\xbc" - assert strutils.bytes_to_escaped_str(b"'") == r"'" - assert strutils.bytes_to_escaped_str(b'"') == r'"' - - assert strutils.bytes_to_escaped_str(b"'", escape_single_quotes=True) == r"\'" - assert strutils.bytes_to_escaped_str(b'"', escape_single_quotes=True) == r'"' - - assert strutils.bytes_to_escaped_str(b"\r\n\t") == "\\r\\n\\t" - assert strutils.bytes_to_escaped_str(b"\r\n\t", True) == "\r\n\t" - - assert strutils.bytes_to_escaped_str(b"\n", True) == "\n" - assert strutils.bytes_to_escaped_str(b"\\n", True) == "\\ \\ n".replace(" ", "") - assert strutils.bytes_to_escaped_str(b"\\\n", True) == "\\ \\ \n".replace(" ", "") - assert strutils.bytes_to_escaped_str(b"\\\\n", True) == "\\ \\ \\ \\ n".replace(" ", "") - - with tutils.raises(ValueError): - strutils.bytes_to_escaped_str(u"such unicode") - - -def test_escaped_str_to_bytes(): - assert strutils.escaped_str_to_bytes("foo") == b"foo" - assert strutils.escaped_str_to_bytes("\x08") == b"\b" - assert strutils.escaped_str_to_bytes("&!?=\\\\)") == br"&!?=\)" - assert strutils.escaped_str_to_bytes(u"\\x08") == b"\b" - assert strutils.escaped_str_to_bytes(u"&!?=\\\\)") == br"&!?=\)" - assert strutils.escaped_str_to_bytes(u"\u00fc") == b'\xc3\xbc' - - with tutils.raises(ValueError): - strutils.escaped_str_to_bytes(b"very byte") - - -def test_is_mostly_bin(): - assert not strutils.is_mostly_bin(b"foo\xFF") - assert strutils.is_mostly_bin(b"foo" + b"\xFF" * 10) - assert not strutils.is_mostly_bin("") - - -def test_is_xml(): - assert not strutils.is_xml(b"foo") - assert strutils.is_xml(b" Date: Thu, 20 Oct 2016 10:22:23 +1300 Subject: mitmproxy.types.[basethread,multidict,serializable] --- mitmproxy/contentviews.py | 2 +- mitmproxy/master.py | 2 +- mitmproxy/proxy/protocol/http2.py | 2 +- mitmproxy/proxy/protocol/http_replay.py | 2 +- mitmproxy/script/concurrent.py | 2 +- mitmproxy/stateobject.py | 4 +- mitmproxy/tcp.py | 4 +- mitmproxy/types/__init__.py | 0 mitmproxy/types/basethread.py | 14 ++ mitmproxy/types/multidict.py | 298 ++++++++++++++++++++++++++++++ mitmproxy/types/serializable.py | 32 ++++ mitmproxy/utils/__init__.py | 0 netlib/basethread.py | 14 -- netlib/basetypes.py | 32 ---- netlib/certutils.py | 4 +- netlib/http/cookies.py | 2 +- netlib/http/headers.py | 2 +- netlib/http/message.py | 6 +- netlib/http/request.py | 2 +- netlib/http/response.py | 2 +- netlib/multidict.py | 298 ------------------------------ netlib/tcp.py | 6 +- pathod/pathoc.py | 2 +- pathod/test.py | 2 +- test/mitmproxy/test_contentview.py | 2 +- test/mitmproxy/test_types_multidict.py | 247 +++++++++++++++++++++++++ test/mitmproxy/test_types_serializable.py | 28 +++ test/netlib/test_basetypes.py | 28 --- test/netlib/test_multidict.py | 247 ------------------------- 29 files changed, 643 insertions(+), 643 deletions(-) create mode 100644 mitmproxy/types/__init__.py create mode 100644 mitmproxy/types/basethread.py create mode 100644 mitmproxy/types/multidict.py create mode 100644 mitmproxy/types/serializable.py create mode 100644 mitmproxy/utils/__init__.py delete mode 100644 netlib/basethread.py delete mode 100644 netlib/basetypes.py delete mode 100644 netlib/multidict.py create mode 100644 
test/mitmproxy/test_types_multidict.py create mode 100644 test/mitmproxy/test_types_serializable.py delete mode 100644 test/netlib/test_basetypes.py delete mode 100644 test/netlib/test_multidict.py diff --git a/mitmproxy/contentviews.py b/mitmproxy/contentviews.py index 07bf09f5..a171f36b 100644 --- a/mitmproxy/contentviews.py +++ b/mitmproxy/contentviews.py @@ -34,7 +34,7 @@ from PIL import Image from mitmproxy import exceptions from mitmproxy.contrib.wbxml import ASCommandResponse from netlib import http -from netlib import multidict +from mitmproxy.types import multidict from mitmproxy.utils import strutils from netlib.http import url diff --git a/mitmproxy/master.py b/mitmproxy/master.py index 1fc00112..2e57e57d 100644 --- a/mitmproxy/master.py +++ b/mitmproxy/master.py @@ -14,7 +14,7 @@ from mitmproxy import http from mitmproxy import log from mitmproxy import io from mitmproxy.proxy.protocol import http_replay -from netlib import basethread +from mitmproxy.types import basethread import netlib.http from . import ctx as mitmproxy_ctx diff --git a/mitmproxy/proxy/protocol/http2.py b/mitmproxy/proxy/protocol/http2.py index cbd8b34c..93ac51bc 100644 --- a/mitmproxy/proxy/protocol/http2.py +++ b/mitmproxy/proxy/protocol/http2.py @@ -15,7 +15,7 @@ from mitmproxy.proxy.protocol import base from mitmproxy.proxy.protocol import http as httpbase import netlib.http from netlib import tcp -from netlib import basethread +from mitmproxy.types import basethread from netlib.http import http2 diff --git a/mitmproxy/proxy/protocol/http_replay.py b/mitmproxy/proxy/protocol/http_replay.py index bf0697be..eef5a109 100644 --- a/mitmproxy/proxy/protocol/http_replay.py +++ b/mitmproxy/proxy/protocol/http_replay.py @@ -8,7 +8,7 @@ from mitmproxy import http from mitmproxy import flow from mitmproxy import connections from netlib.http import http1 -from netlib import basethread +from mitmproxy.types import basethread # TODO: Doesn't really belong into mitmproxy.proxy.protocol... diff --git a/mitmproxy/script/concurrent.py b/mitmproxy/script/concurrent.py index dc72e5b7..2fd7ad8d 100644 --- a/mitmproxy/script/concurrent.py +++ b/mitmproxy/script/concurrent.py @@ -4,7 +4,7 @@ offload computations from mitmproxy's main master thread. """ from mitmproxy import events -from netlib import basethread +from mitmproxy.types import basethread class ScriptThread(basethread.BaseThread): diff --git a/mitmproxy/stateobject.py b/mitmproxy/stateobject.py index f4415ecf..1ab744a5 100644 --- a/mitmproxy/stateobject.py +++ b/mitmproxy/stateobject.py @@ -1,7 +1,7 @@ from typing import Any from typing import List -import netlib.basetypes +from mitmproxy.types import serializable def _is_list(cls): @@ -10,7 +10,7 @@ def _is_list(cls): return issubclass(cls, List) or is_list_bugfix -class StateObject(netlib.basetypes.Serializable): +class StateObject(serializable.Serializable): """ An object with serializable state. 
diff --git a/mitmproxy/tcp.py b/mitmproxy/tcp.py index af54c9d4..d73be98d 100644 --- a/mitmproxy/tcp.py +++ b/mitmproxy/tcp.py @@ -2,11 +2,11 @@ import time from typing import List -import netlib.basetypes from mitmproxy import flow +from mitmproxy.types import serializable -class TCPMessage(netlib.basetypes.Serializable): +class TCPMessage(serializable.Serializable): def __init__(self, from_client, content, timestamp=None): self.content = content diff --git a/mitmproxy/types/__init__.py b/mitmproxy/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mitmproxy/types/basethread.py b/mitmproxy/types/basethread.py new file mode 100644 index 00000000..a3c81d19 --- /dev/null +++ b/mitmproxy/types/basethread.py @@ -0,0 +1,14 @@ +import time +import threading + + +class BaseThread(threading.Thread): + def __init__(self, name, *args, **kwargs): + super().__init__(name=name, *args, **kwargs) + self._thread_started = time.time() + + def _threadinfo(self): + return "%s - age: %is" % ( + self.name, + int(time.time() - self._thread_started) + ) diff --git a/mitmproxy/types/multidict.py b/mitmproxy/types/multidict.py new file mode 100644 index 00000000..d351e48b --- /dev/null +++ b/mitmproxy/types/multidict.py @@ -0,0 +1,298 @@ +from abc import ABCMeta, abstractmethod + + +try: + from collections.abc import MutableMapping +except ImportError: # pragma: no cover + from collections import MutableMapping # Workaround for Python < 3.3 + +from mitmproxy.types import serializable + + +class _MultiDict(MutableMapping, serializable.Serializable, metaclass=ABCMeta): + def __repr__(self): + fields = ( + repr(field) + for field in self.fields + ) + return "{cls}[{fields}]".format( + cls=type(self).__name__, + fields=", ".join(fields) + ) + + @staticmethod + @abstractmethod + def _reduce_values(values): + """ + If a user accesses multidict["foo"], this method + reduces all values for "foo" to a single value that is returned. + For example, HTTP headers are folded, whereas we will just take + the first cookie we found with that name. + """ + + @staticmethod + @abstractmethod + def _kconv(key): + """ + This method converts a key to its canonical representation. + For example, HTTP headers are case-insensitive, so this method returns key.lower(). + """ + + def __getitem__(self, key): + values = self.get_all(key) + if not values: + raise KeyError(key) + return self._reduce_values(values) + + def __setitem__(self, key, value): + self.set_all(key, [value]) + + def __delitem__(self, key): + if key not in self: + raise KeyError(key) + key = self._kconv(key) + self.fields = tuple( + field for field in self.fields + if key != self._kconv(field[0]) + ) + + def __iter__(self): + seen = set() + for key, _ in self.fields: + key_kconv = self._kconv(key) + if key_kconv not in seen: + seen.add(key_kconv) + yield key + + def __len__(self): + return len(set(self._kconv(key) for key, _ in self.fields)) + + def __eq__(self, other): + if isinstance(other, MultiDict): + return self.fields == other.fields + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def get_all(self, key): + """ + Return the list of all values for a given key. + If that key is not in the MultiDict, the return value will be an empty list. + """ + key = self._kconv(key) + return [ + value + for k, value in self.fields + if self._kconv(k) == key + ] + + def set_all(self, key, values): + """ + Remove the old values for a key and add new ones. 
+ """ + key_kconv = self._kconv(key) + + new_fields = [] + for field in self.fields: + if self._kconv(field[0]) == key_kconv: + if values: + new_fields.append( + (field[0], values.pop(0)) + ) + else: + new_fields.append(field) + while values: + new_fields.append( + (key, values.pop(0)) + ) + self.fields = tuple(new_fields) + + def add(self, key, value): + """ + Add an additional value for the given key at the bottom. + """ + self.insert(len(self.fields), key, value) + + def insert(self, index, key, value): + """ + Insert an additional value for the given key at the specified position. + """ + item = (key, value) + self.fields = self.fields[:index] + (item,) + self.fields[index:] + + def keys(self, multi=False): + """ + Get all keys. + + Args: + multi(bool): + If True, one key per value will be returned. + If False, duplicate keys will only be returned once. + """ + return ( + k + for k, _ in self.items(multi) + ) + + def values(self, multi=False): + """ + Get all values. + + Args: + multi(bool): + If True, all values will be returned. + If False, only the first value per key will be returned. + """ + return ( + v + for _, v in self.items(multi) + ) + + def items(self, multi=False): + """ + Get all (key, value) tuples. + + Args: + multi(bool): + If True, all (key, value) pairs will be returned + If False, only the first (key, value) pair per unique key will be returned. + """ + if multi: + return self.fields + else: + return super().items() + + def collect(self): + """ + Returns a list of (key, value) tuples, where values are either + singular if there is only one matching item for a key, or a list + if there are more than one. The order of the keys matches the order + in the underlying fields list. + """ + coll = [] + for key in self: + values = self.get_all(key) + if len(values) == 1: + coll.append([key, values[0]]) + else: + coll.append([key, values]) + return coll + + def to_dict(self): + """ + Get the MultiDict as a plain Python dict. + Keys with multiple values are returned as lists. + + Example: + + .. code-block:: python + + # Simple dict with duplicate values. + >>> d = MultiDict([("name", "value"), ("a", False), ("a", 42)]) + >>> d.to_dict() + { + "name": "value", + "a": [False, 42] + } + """ + return { + k: v for k, v in self.collect() + } + + def get_state(self): + return self.fields + + def set_state(self, state): + self.fields = tuple(tuple(x) for x in state) + + @classmethod + def from_state(cls, state): + return cls(state) + + +class MultiDict(_MultiDict): + def __init__(self, fields=()): + super().__init__() + self.fields = tuple( + tuple(i) for i in fields + ) + + @staticmethod + def _reduce_values(values): + return values[0] + + @staticmethod + def _kconv(key): + return key + + +class ImmutableMultiDict(MultiDict, metaclass=ABCMeta): + def _immutable(self, *_): + raise TypeError('{} objects are immutable'.format(self.__class__.__name__)) + + __delitem__ = set_all = insert = _immutable + + def __hash__(self): + return hash(self.fields) + + def with_delitem(self, key): + """ + Returns: + An updated ImmutableMultiDict. The original object will not be modified. + """ + ret = self.copy() + # FIXME: This is filthy... + super(ImmutableMultiDict, ret).__delitem__(key) + return ret + + def with_set_all(self, key, values): + """ + Returns: + An updated ImmutableMultiDict. The original object will not be modified. + """ + ret = self.copy() + # FIXME: This is filthy... 
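# Editor's aside, not part of the patch: a short usage sketch of the classes
# defined above. Subclasses such as HTTP headers customise the _kconv and
# _reduce_values hooks documented earlier; the plain classes below just use
# the defaults (case-sensitive keys, first-value reduction).
from mitmproxy.types import multidict

md = multidict.MultiDict([("a", "1"), ("b", "2"), ("a", "3")])
assert md["a"] == "1"                        # _reduce_values picks the first value
assert md.get_all("a") == ["1", "3"]         # every value for the key, in order
md.set_all("a", ["x"])                       # collapses both "a" entries in place
assert md.to_dict() == {"a": "x", "b": "2"}

imd = multidict.ImmutableMultiDict([("foo", "bar")])
imd2 = imd.with_insert(0, "baz", "qux")      # with_* helpers return an updated copy
assert imd.fields == (("foo", "bar"),)       # the original is untouched
assert imd2.fields == (("baz", "qux"), ("foo", "bar"))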
+ super(ImmutableMultiDict, ret).set_all(key, values) + return ret + + def with_insert(self, index, key, value): + """ + Returns: + An updated ImmutableMultiDict. The original object will not be modified. + """ + ret = self.copy() + # FIXME: This is filthy... + super(ImmutableMultiDict, ret).insert(index, key, value) + return ret + + +class MultiDictView(_MultiDict): + """ + The MultiDictView provides the MultiDict interface over calculated data. + The view itself contains no state - data is retrieved from the parent on + request, and stored back to the parent on change. + """ + def __init__(self, getter, setter): + self._getter = getter + self._setter = setter + super().__init__() + + @staticmethod + def _kconv(key): + # All request-attributes are case-sensitive. + return key + + @staticmethod + def _reduce_values(values): + # We just return the first element if + # multiple elements exist with the same key. + return values[0] + + @property + def fields(self): + return self._getter() + + @fields.setter + def fields(self, value): + self._setter(value) diff --git a/mitmproxy/types/serializable.py b/mitmproxy/types/serializable.py new file mode 100644 index 00000000..49892ffc --- /dev/null +++ b/mitmproxy/types/serializable.py @@ -0,0 +1,32 @@ +import abc + + +class Serializable(metaclass=abc.ABCMeta): + """ + Abstract Base Class that defines an API to save an object's state and restore it later on. + """ + + @classmethod + @abc.abstractmethod + def from_state(cls, state): + """ + Create a new object from the given state. + """ + raise NotImplementedError() + + @abc.abstractmethod + def get_state(self): + """ + Retrieve object state. + """ + raise NotImplementedError() + + @abc.abstractmethod + def set_state(self, state): + """ + Set object state to the given state. + """ + raise NotImplementedError() + + def copy(self): + return self.from_state(self.get_state()) diff --git a/mitmproxy/utils/__init__.py b/mitmproxy/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/netlib/basethread.py b/netlib/basethread.py deleted file mode 100644 index a3c81d19..00000000 --- a/netlib/basethread.py +++ /dev/null @@ -1,14 +0,0 @@ -import time -import threading - - -class BaseThread(threading.Thread): - def __init__(self, name, *args, **kwargs): - super().__init__(name=name, *args, **kwargs) - self._thread_started = time.time() - - def _threadinfo(self): - return "%s - age: %is" % ( - self.name, - int(time.time() - self._thread_started) - ) diff --git a/netlib/basetypes.py b/netlib/basetypes.py deleted file mode 100644 index 49892ffc..00000000 --- a/netlib/basetypes.py +++ /dev/null @@ -1,32 +0,0 @@ -import abc - - -class Serializable(metaclass=abc.ABCMeta): - """ - Abstract Base Class that defines an API to save an object's state and restore it later on. - """ - - @classmethod - @abc.abstractmethod - def from_state(cls, state): - """ - Create a new object from the given state. - """ - raise NotImplementedError() - - @abc.abstractmethod - def get_state(self): - """ - Retrieve object state. - """ - raise NotImplementedError() - - @abc.abstractmethod - def set_state(self, state): - """ - Set object state to the given state. 
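# Editor's aside, not part of the patch: MultiDictView, added above, keeps no
# state of its own; it reads and writes through the getter/setter it is given.
# A tiny sketch with an invented Parent object holding the real data:
from mitmproxy.types import multidict

class Parent:
    def __init__(self):
        self.data = ()

p = Parent()
view = multidict.MultiDictView(
    lambda: p.data,                               # getter: where fields really live
    lambda value: setattr(p, "data", value),      # setter: write changes back
)
view["host"] = "example.com"
assert p.data == (("host", "example.com"),)       # the change landed on the parent
assert view["host"] == "example.com"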
- """ - raise NotImplementedError() - - def copy(self): - return self.from_state(self.get_state()) diff --git a/netlib/certutils.py b/netlib/certutils.py index 6a97f99e..9cb8a40e 100644 --- a/netlib/certutils.py +++ b/netlib/certutils.py @@ -10,7 +10,7 @@ from pyasn1.codec.der.decoder import decode from pyasn1.error import PyAsn1Error import OpenSSL -from netlib import basetypes +from mitmproxy.types import serializable # Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815 @@ -373,7 +373,7 @@ class _GeneralNames(univ.SequenceOf): constraint.ValueSizeConstraint(1, 1024) -class SSLCert(basetypes.Serializable): +class SSLCert(serializable.Serializable): def __init__(self, cert): """ diff --git a/netlib/http/cookies.py b/netlib/http/cookies.py index cb816ca0..9f32fa5e 100644 --- a/netlib/http/cookies.py +++ b/netlib/http/cookies.py @@ -3,7 +3,7 @@ import email.utils import re import time -from netlib import multidict +from mitmproxy.types import multidict """ A flexible module for cookie parsing and manipulation. diff --git a/netlib/http/headers.py b/netlib/http/headers.py index 6c30d278..8fc0cd43 100644 --- a/netlib/http/headers.py +++ b/netlib/http/headers.py @@ -1,7 +1,7 @@ import re import collections -from netlib import multidict +from mitmproxy.types import multidict from mitmproxy.utils import strutils # See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/ diff --git a/netlib/http/message.py b/netlib/http/message.py index 133a53ce..62c3aa38 100644 --- a/netlib/http/message.py +++ b/netlib/http/message.py @@ -4,7 +4,7 @@ from typing import Optional from mitmproxy.utils import strutils from netlib import encoding -from netlib import basetypes +from mitmproxy.types import serializable from netlib.http import headers @@ -17,7 +17,7 @@ def _always_bytes(x): return strutils.always_bytes(x, "utf-8", "surrogateescape") -class MessageData(basetypes.Serializable): +class MessageData(serializable.Serializable): def __eq__(self, other): if isinstance(other, MessageData): return self.__dict__ == other.__dict__ @@ -43,7 +43,7 @@ class MessageData(basetypes.Serializable): return cls(**state) -class Message(basetypes.Serializable): +class Message(serializable.Serializable): def __eq__(self, other): if isinstance(other, Message): return self.data == other.data diff --git a/netlib/http/request.py b/netlib/http/request.py index 3479fa4c..16b0c986 100644 --- a/netlib/http/request.py +++ b/netlib/http/request.py @@ -1,7 +1,7 @@ import re import urllib -from netlib import multidict +from mitmproxy.types import multidict from mitmproxy.utils import strutils from netlib.http import multipart from netlib.http import cookies diff --git a/netlib/http/response.py b/netlib/http/response.py index 12dba92a..4d1d5d24 100644 --- a/netlib/http/response.py +++ b/netlib/http/response.py @@ -1,7 +1,7 @@ import time from email.utils import parsedate_tz, formatdate, mktime_tz from mitmproxy.utils import human -from netlib import multidict +from mitmproxy.types import multidict from netlib.http import cookies from netlib.http import headers as nheaders from netlib.http import message diff --git a/netlib/multidict.py b/netlib/multidict.py deleted file mode 100644 index 191d1cc6..00000000 --- a/netlib/multidict.py +++ /dev/null @@ -1,298 +0,0 @@ -from abc import ABCMeta, abstractmethod - - -try: - from collections.abc import MutableMapping -except ImportError: # pragma: no cover - from collections import MutableMapping # Workaround for Python < 3.3 - -from netlib import 
basetypes - - -class _MultiDict(MutableMapping, basetypes.Serializable, metaclass=ABCMeta): - def __repr__(self): - fields = ( - repr(field) - for field in self.fields - ) - return "{cls}[{fields}]".format( - cls=type(self).__name__, - fields=", ".join(fields) - ) - - @staticmethod - @abstractmethod - def _reduce_values(values): - """ - If a user accesses multidict["foo"], this method - reduces all values for "foo" to a single value that is returned. - For example, HTTP headers are folded, whereas we will just take - the first cookie we found with that name. - """ - - @staticmethod - @abstractmethod - def _kconv(key): - """ - This method converts a key to its canonical representation. - For example, HTTP headers are case-insensitive, so this method returns key.lower(). - """ - - def __getitem__(self, key): - values = self.get_all(key) - if not values: - raise KeyError(key) - return self._reduce_values(values) - - def __setitem__(self, key, value): - self.set_all(key, [value]) - - def __delitem__(self, key): - if key not in self: - raise KeyError(key) - key = self._kconv(key) - self.fields = tuple( - field for field in self.fields - if key != self._kconv(field[0]) - ) - - def __iter__(self): - seen = set() - for key, _ in self.fields: - key_kconv = self._kconv(key) - if key_kconv not in seen: - seen.add(key_kconv) - yield key - - def __len__(self): - return len(set(self._kconv(key) for key, _ in self.fields)) - - def __eq__(self, other): - if isinstance(other, MultiDict): - return self.fields == other.fields - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def get_all(self, key): - """ - Return the list of all values for a given key. - If that key is not in the MultiDict, the return value will be an empty list. - """ - key = self._kconv(key) - return [ - value - for k, value in self.fields - if self._kconv(k) == key - ] - - def set_all(self, key, values): - """ - Remove the old values for a key and add new ones. - """ - key_kconv = self._kconv(key) - - new_fields = [] - for field in self.fields: - if self._kconv(field[0]) == key_kconv: - if values: - new_fields.append( - (field[0], values.pop(0)) - ) - else: - new_fields.append(field) - while values: - new_fields.append( - (key, values.pop(0)) - ) - self.fields = tuple(new_fields) - - def add(self, key, value): - """ - Add an additional value for the given key at the bottom. - """ - self.insert(len(self.fields), key, value) - - def insert(self, index, key, value): - """ - Insert an additional value for the given key at the specified position. - """ - item = (key, value) - self.fields = self.fields[:index] + (item,) + self.fields[index:] - - def keys(self, multi=False): - """ - Get all keys. - - Args: - multi(bool): - If True, one key per value will be returned. - If False, duplicate keys will only be returned once. - """ - return ( - k - for k, _ in self.items(multi) - ) - - def values(self, multi=False): - """ - Get all values. - - Args: - multi(bool): - If True, all values will be returned. - If False, only the first value per key will be returned. - """ - return ( - v - for _, v in self.items(multi) - ) - - def items(self, multi=False): - """ - Get all (key, value) tuples. - - Args: - multi(bool): - If True, all (key, value) pairs will be returned - If False, only the first (key, value) pair per unique key will be returned. 
- """ - if multi: - return self.fields - else: - return super().items() - - def collect(self): - """ - Returns a list of (key, value) tuples, where values are either - singular if there is only one matching item for a key, or a list - if there are more than one. The order of the keys matches the order - in the underlying fields list. - """ - coll = [] - for key in self: - values = self.get_all(key) - if len(values) == 1: - coll.append([key, values[0]]) - else: - coll.append([key, values]) - return coll - - def to_dict(self): - """ - Get the MultiDict as a plain Python dict. - Keys with multiple values are returned as lists. - - Example: - - .. code-block:: python - - # Simple dict with duplicate values. - >>> d = MultiDict([("name", "value"), ("a", False), ("a", 42)]) - >>> d.to_dict() - { - "name": "value", - "a": [False, 42] - } - """ - return { - k: v for k, v in self.collect() - } - - def get_state(self): - return self.fields - - def set_state(self, state): - self.fields = tuple(tuple(x) for x in state) - - @classmethod - def from_state(cls, state): - return cls(state) - - -class MultiDict(_MultiDict): - def __init__(self, fields=()): - super().__init__() - self.fields = tuple( - tuple(i) for i in fields - ) - - @staticmethod - def _reduce_values(values): - return values[0] - - @staticmethod - def _kconv(key): - return key - - -class ImmutableMultiDict(MultiDict, metaclass=ABCMeta): - def _immutable(self, *_): - raise TypeError('{} objects are immutable'.format(self.__class__.__name__)) - - __delitem__ = set_all = insert = _immutable - - def __hash__(self): - return hash(self.fields) - - def with_delitem(self, key): - """ - Returns: - An updated ImmutableMultiDict. The original object will not be modified. - """ - ret = self.copy() - # FIXME: This is filthy... - super(ImmutableMultiDict, ret).__delitem__(key) - return ret - - def with_set_all(self, key, values): - """ - Returns: - An updated ImmutableMultiDict. The original object will not be modified. - """ - ret = self.copy() - # FIXME: This is filthy... - super(ImmutableMultiDict, ret).set_all(key, values) - return ret - - def with_insert(self, index, key, value): - """ - Returns: - An updated ImmutableMultiDict. The original object will not be modified. - """ - ret = self.copy() - # FIXME: This is filthy... - super(ImmutableMultiDict, ret).insert(index, key, value) - return ret - - -class MultiDictView(_MultiDict): - """ - The MultiDictView provides the MultiDict interface over calculated data. - The view itself contains no state - data is retrieved from the parent on - request, and stored back to the parent on change. - """ - def __init__(self, getter, setter): - self._getter = getter - self._setter = setter - super().__init__() - - @staticmethod - def _kconv(key): - # All request-attributes are case-sensitive. - return key - - @staticmethod - def _reduce_values(values): - # We just return the first element if - # multiple elements exist with the same key. 
- return values[0] - - @property - def fields(self): - return self._getter() - - @fields.setter - def fields(self, value): - self._setter(value) diff --git a/netlib/tcp.py b/netlib/tcp.py index aed79388..4fde657f 100644 --- a/netlib/tcp.py +++ b/netlib/tcp.py @@ -19,9 +19,9 @@ from OpenSSL import SSL from netlib import certutils from netlib import version_check -from netlib import basetypes +from mitmproxy.types import serializable from netlib import exceptions -from netlib import basethread +from mitmproxy.types import basethread # This is a rather hackish way to make sure that # the latest version of pyOpenSSL is actually installed. @@ -292,7 +292,7 @@ class Reader(_FileLike): raise NotImplementedError("Can only peek into (pyOpenSSL) sockets") -class Address(basetypes.Serializable): +class Address(serializable.Serializable): """ This class wraps an IPv4/IPv6 tuple to provide named attributes and diff --git a/pathod/pathoc.py b/pathod/pathoc.py index 0cf08a60..caa9accb 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -16,7 +16,7 @@ from mitmproxy.utils import strutils from netlib import tcp, certutils, websockets, socks from netlib import exceptions from netlib.http import http1 -from netlib import basethread +from mitmproxy.types import basethread from . import log, language from .protocols import http2 diff --git a/pathod/test.py b/pathod/test.py index c92cc50b..b819d723 100644 --- a/pathod/test.py +++ b/pathod/test.py @@ -3,7 +3,7 @@ import time import queue from . import pathod -from netlib import basethread +from mitmproxy.types import basethread class Daemon: diff --git a/test/mitmproxy/test_contentview.py b/test/mitmproxy/test_contentview.py index d63ee50e..f113e294 100644 --- a/test/mitmproxy/test_contentview.py +++ b/test/mitmproxy/test_contentview.py @@ -2,7 +2,7 @@ import mock from mitmproxy.exceptions import ContentViewException from netlib.http import Headers from netlib.http import url -from netlib import multidict +from mitmproxy.types import multidict import mitmproxy.contentviews as cv from . 
import tutils diff --git a/test/mitmproxy/test_types_multidict.py b/test/mitmproxy/test_types_multidict.py new file mode 100644 index 00000000..ada33bf7 --- /dev/null +++ b/test/mitmproxy/test_types_multidict.py @@ -0,0 +1,247 @@ +from netlib import tutils +from mitmproxy.types import multidict + + +class _TMulti: + @staticmethod + def _kconv(key): + return key.lower() + + +class TMultiDict(_TMulti, multidict.MultiDict): + pass + + +class TImmutableMultiDict(_TMulti, multidict.ImmutableMultiDict): + pass + + +class TestMultiDict: + @staticmethod + def _multi(): + return TMultiDict(( + ("foo", "bar"), + ("bar", "baz"), + ("Bar", "bam") + )) + + def test_init(self): + md = TMultiDict() + assert len(md) == 0 + + md = TMultiDict([("foo", "bar")]) + assert len(md) == 1 + assert md.fields == (("foo", "bar"),) + + def test_repr(self): + assert repr(self._multi()) == ( + "TMultiDict[('foo', 'bar'), ('bar', 'baz'), ('Bar', 'bam')]" + ) + + def test_getitem(self): + md = TMultiDict([("foo", "bar")]) + assert "foo" in md + assert "Foo" in md + assert md["foo"] == "bar" + + with tutils.raises(KeyError): + assert md["bar"] + + md_multi = TMultiDict( + [("foo", "a"), ("foo", "b")] + ) + assert md_multi["foo"] == "a" + + def test_setitem(self): + md = TMultiDict() + md["foo"] = "bar" + assert md.fields == (("foo", "bar"),) + + md["foo"] = "baz" + assert md.fields == (("foo", "baz"),) + + md["bar"] = "bam" + assert md.fields == (("foo", "baz"), ("bar", "bam")) + + def test_delitem(self): + md = self._multi() + del md["foo"] + assert "foo" not in md + assert "bar" in md + + with tutils.raises(KeyError): + del md["foo"] + + del md["bar"] + assert md.fields == () + + def test_iter(self): + md = self._multi() + assert list(md.__iter__()) == ["foo", "bar"] + + def test_len(self): + md = TMultiDict() + assert len(md) == 0 + + md = self._multi() + assert len(md) == 2 + + def test_eq(self): + assert TMultiDict() == TMultiDict() + assert not (TMultiDict() == 42) + + md1 = self._multi() + md2 = self._multi() + assert md1 == md2 + md1.fields = md1.fields[1:] + md1.fields[:1] + assert not (md1 == md2) + + def test_ne(self): + assert not TMultiDict() != TMultiDict() + assert TMultiDict() != self._multi() + assert TMultiDict() != 42 + + def test_hash(self): + """ + If a class defines mutable objects and implements an __eq__() method, + it should not implement __hash__(), since the implementation of hashable + collections requires that a key's hash value is immutable. 
+ """ + with tutils.raises(TypeError): + assert hash(TMultiDict()) + + def test_get_all(self): + md = self._multi() + assert md.get_all("foo") == ["bar"] + assert md.get_all("bar") == ["baz", "bam"] + assert md.get_all("baz") == [] + + def test_set_all(self): + md = TMultiDict() + md.set_all("foo", ["bar", "baz"]) + assert md.fields == (("foo", "bar"), ("foo", "baz")) + + md = TMultiDict(( + ("a", "b"), + ("x", "x"), + ("c", "d"), + ("X", "X"), + ("e", "f"), + )) + md.set_all("x", ["1", "2", "3"]) + assert md.fields == ( + ("a", "b"), + ("x", "1"), + ("c", "d"), + ("X", "2"), + ("e", "f"), + ("x", "3"), + ) + md.set_all("x", ["4"]) + assert md.fields == ( + ("a", "b"), + ("x", "4"), + ("c", "d"), + ("e", "f"), + ) + + def test_add(self): + md = self._multi() + md.add("foo", "foo") + assert md.fields == ( + ("foo", "bar"), + ("bar", "baz"), + ("Bar", "bam"), + ("foo", "foo") + ) + + def test_insert(self): + md = TMultiDict([("b", "b")]) + md.insert(0, "a", "a") + md.insert(2, "c", "c") + assert md.fields == (("a", "a"), ("b", "b"), ("c", "c")) + + def test_keys(self): + md = self._multi() + assert list(md.keys()) == ["foo", "bar"] + assert list(md.keys(multi=True)) == ["foo", "bar", "Bar"] + + def test_values(self): + md = self._multi() + assert list(md.values()) == ["bar", "baz"] + assert list(md.values(multi=True)) == ["bar", "baz", "bam"] + + def test_items(self): + md = self._multi() + assert list(md.items()) == [("foo", "bar"), ("bar", "baz")] + assert list(md.items(multi=True)) == [("foo", "bar"), ("bar", "baz"), ("Bar", "bam")] + + def test_to_dict(self): + md = self._multi() + assert md.to_dict() == { + "foo": "bar", + "bar": ["baz", "bam"] + } + + def test_state(self): + md = self._multi() + assert len(md.get_state()) == 3 + assert md == TMultiDict.from_state(md.get_state()) + + md2 = TMultiDict() + assert md != md2 + md2.set_state(md.get_state()) + assert md == md2 + + +class TestImmutableMultiDict: + def test_modify(self): + md = TImmutableMultiDict() + with tutils.raises(TypeError): + md["foo"] = "bar" + + with tutils.raises(TypeError): + del md["foo"] + + with tutils.raises(TypeError): + md.add("foo", "bar") + + def test_hash(self): + assert hash(TImmutableMultiDict()) + + def test_with_delitem(self): + md = TImmutableMultiDict([("foo", "bar")]) + assert md.with_delitem("foo").fields == () + assert md.fields == (("foo", "bar"),) + + def test_with_set_all(self): + md = TImmutableMultiDict() + assert md.with_set_all("foo", ["bar"]).fields == (("foo", "bar"),) + assert md.fields == () + + def test_with_insert(self): + md = TImmutableMultiDict() + assert md.with_insert(0, "foo", "bar").fields == (("foo", "bar"),) + + +class TParent: + def __init__(self): + self.vals = tuple() + + def setter(self, vals): + self.vals = vals + + def getter(self): + return self.vals + + +class TestMultiDictView: + def test_modify(self): + p = TParent() + tv = multidict.MultiDictView(p.getter, p.setter) + assert len(tv) == 0 + tv["a"] = "b" + assert p.vals == (("a", "b"),) + tv["c"] = "b" + assert p.vals == (("a", "b"), ("c", "b")) + assert tv["a"] == "b" diff --git a/test/mitmproxy/test_types_serializable.py b/test/mitmproxy/test_types_serializable.py new file mode 100644 index 00000000..dd4a3778 --- /dev/null +++ b/test/mitmproxy/test_types_serializable.py @@ -0,0 +1,28 @@ +from mitmproxy.types import serializable + + +class SerializableDummy(serializable.Serializable): + def __init__(self, i): + self.i = i + + def get_state(self): + return self.i + + def set_state(self, i): + self.i = i + + def 
from_state(self, state): + return type(self)(state) + + +class TestSerializable: + + def test_copy(self): + a = SerializableDummy(42) + assert a.i == 42 + b = a.copy() + assert b.i == 42 + + a.set_state(1) + assert a.i == 1 + assert b.i == 42 diff --git a/test/netlib/test_basetypes.py b/test/netlib/test_basetypes.py deleted file mode 100644 index aa415784..00000000 --- a/test/netlib/test_basetypes.py +++ /dev/null @@ -1,28 +0,0 @@ -from netlib import basetypes - - -class SerializableDummy(basetypes.Serializable): - def __init__(self, i): - self.i = i - - def get_state(self): - return self.i - - def set_state(self, i): - self.i = i - - def from_state(self, state): - return type(self)(state) - - -class TestSerializable: - - def test_copy(self): - a = SerializableDummy(42) - assert a.i == 42 - b = a.copy() - assert b.i == 42 - - a.set_state(1) - assert a.i == 1 - assert b.i == 42 diff --git a/test/netlib/test_multidict.py b/test/netlib/test_multidict.py deleted file mode 100644 index a9523fd9..00000000 --- a/test/netlib/test_multidict.py +++ /dev/null @@ -1,247 +0,0 @@ -from netlib import tutils -from netlib.multidict import MultiDict, ImmutableMultiDict, MultiDictView - - -class _TMulti: - @staticmethod - def _kconv(key): - return key.lower() - - -class TMultiDict(_TMulti, MultiDict): - pass - - -class TImmutableMultiDict(_TMulti, ImmutableMultiDict): - pass - - -class TestMultiDict: - @staticmethod - def _multi(): - return TMultiDict(( - ("foo", "bar"), - ("bar", "baz"), - ("Bar", "bam") - )) - - def test_init(self): - md = TMultiDict() - assert len(md) == 0 - - md = TMultiDict([("foo", "bar")]) - assert len(md) == 1 - assert md.fields == (("foo", "bar"),) - - def test_repr(self): - assert repr(self._multi()) == ( - "TMultiDict[('foo', 'bar'), ('bar', 'baz'), ('Bar', 'bam')]" - ) - - def test_getitem(self): - md = TMultiDict([("foo", "bar")]) - assert "foo" in md - assert "Foo" in md - assert md["foo"] == "bar" - - with tutils.raises(KeyError): - assert md["bar"] - - md_multi = TMultiDict( - [("foo", "a"), ("foo", "b")] - ) - assert md_multi["foo"] == "a" - - def test_setitem(self): - md = TMultiDict() - md["foo"] = "bar" - assert md.fields == (("foo", "bar"),) - - md["foo"] = "baz" - assert md.fields == (("foo", "baz"),) - - md["bar"] = "bam" - assert md.fields == (("foo", "baz"), ("bar", "bam")) - - def test_delitem(self): - md = self._multi() - del md["foo"] - assert "foo" not in md - assert "bar" in md - - with tutils.raises(KeyError): - del md["foo"] - - del md["bar"] - assert md.fields == () - - def test_iter(self): - md = self._multi() - assert list(md.__iter__()) == ["foo", "bar"] - - def test_len(self): - md = TMultiDict() - assert len(md) == 0 - - md = self._multi() - assert len(md) == 2 - - def test_eq(self): - assert TMultiDict() == TMultiDict() - assert not (TMultiDict() == 42) - - md1 = self._multi() - md2 = self._multi() - assert md1 == md2 - md1.fields = md1.fields[1:] + md1.fields[:1] - assert not (md1 == md2) - - def test_ne(self): - assert not TMultiDict() != TMultiDict() - assert TMultiDict() != self._multi() - assert TMultiDict() != 42 - - def test_hash(self): - """ - If a class defines mutable objects and implements an __eq__() method, - it should not implement __hash__(), since the implementation of hashable - collections requires that a key's hash value is immutable. 
- """ - with tutils.raises(TypeError): - assert hash(TMultiDict()) - - def test_get_all(self): - md = self._multi() - assert md.get_all("foo") == ["bar"] - assert md.get_all("bar") == ["baz", "bam"] - assert md.get_all("baz") == [] - - def test_set_all(self): - md = TMultiDict() - md.set_all("foo", ["bar", "baz"]) - assert md.fields == (("foo", "bar"), ("foo", "baz")) - - md = TMultiDict(( - ("a", "b"), - ("x", "x"), - ("c", "d"), - ("X", "X"), - ("e", "f"), - )) - md.set_all("x", ["1", "2", "3"]) - assert md.fields == ( - ("a", "b"), - ("x", "1"), - ("c", "d"), - ("X", "2"), - ("e", "f"), - ("x", "3"), - ) - md.set_all("x", ["4"]) - assert md.fields == ( - ("a", "b"), - ("x", "4"), - ("c", "d"), - ("e", "f"), - ) - - def test_add(self): - md = self._multi() - md.add("foo", "foo") - assert md.fields == ( - ("foo", "bar"), - ("bar", "baz"), - ("Bar", "bam"), - ("foo", "foo") - ) - - def test_insert(self): - md = TMultiDict([("b", "b")]) - md.insert(0, "a", "a") - md.insert(2, "c", "c") - assert md.fields == (("a", "a"), ("b", "b"), ("c", "c")) - - def test_keys(self): - md = self._multi() - assert list(md.keys()) == ["foo", "bar"] - assert list(md.keys(multi=True)) == ["foo", "bar", "Bar"] - - def test_values(self): - md = self._multi() - assert list(md.values()) == ["bar", "baz"] - assert list(md.values(multi=True)) == ["bar", "baz", "bam"] - - def test_items(self): - md = self._multi() - assert list(md.items()) == [("foo", "bar"), ("bar", "baz")] - assert list(md.items(multi=True)) == [("foo", "bar"), ("bar", "baz"), ("Bar", "bam")] - - def test_to_dict(self): - md = self._multi() - assert md.to_dict() == { - "foo": "bar", - "bar": ["baz", "bam"] - } - - def test_state(self): - md = self._multi() - assert len(md.get_state()) == 3 - assert md == TMultiDict.from_state(md.get_state()) - - md2 = TMultiDict() - assert md != md2 - md2.set_state(md.get_state()) - assert md == md2 - - -class TestImmutableMultiDict: - def test_modify(self): - md = TImmutableMultiDict() - with tutils.raises(TypeError): - md["foo"] = "bar" - - with tutils.raises(TypeError): - del md["foo"] - - with tutils.raises(TypeError): - md.add("foo", "bar") - - def test_hash(self): - assert hash(TImmutableMultiDict()) - - def test_with_delitem(self): - md = TImmutableMultiDict([("foo", "bar")]) - assert md.with_delitem("foo").fields == () - assert md.fields == (("foo", "bar"),) - - def test_with_set_all(self): - md = TImmutableMultiDict() - assert md.with_set_all("foo", ["bar"]).fields == (("foo", "bar"),) - assert md.fields == () - - def test_with_insert(self): - md = TImmutableMultiDict() - assert md.with_insert(0, "foo", "bar").fields == (("foo", "bar"),) - - -class TParent: - def __init__(self): - self.vals = tuple() - - def setter(self, vals): - self.vals = vals - - def getter(self): - return self.vals - - -class TestMultiDictView: - def test_modify(self): - p = TParent() - tv = MultiDictView(p.getter, p.setter) - assert len(tv) == 0 - tv["a"] = "b" - assert p.vals == (("a", "b"),) - tv["c"] = "b" - assert p.vals == (("a", "b"), ("c", "b")) - assert tv["a"] == "b" -- cgit v1.2.3 From a684585e7cf7099ea61f23b3176b492883f19f00 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 10:25:36 +1300 Subject: netlib.debug -> mitmproxy.utils.debug --- mitmproxy/tools/main.py | 2 +- mitmproxy/utils/debug.py | 120 +++++++++++++++++++++++++++++++++++++ netlib/debug.py | 120 ------------------------------------- test/mitmproxy/test_utils_debug.py | 23 +++++++ test/netlib/test_debug.py | 23 ------- 5 files changed, 144 
insertions(+), 144 deletions(-) create mode 100644 mitmproxy/utils/debug.py delete mode 100644 netlib/debug.py create mode 100644 test/mitmproxy/test_utils_debug.py delete mode 100644 test/netlib/test_debug.py diff --git a/mitmproxy/tools/main.py b/mitmproxy/tools/main.py index 5d4e8dc9..f3526de9 100644 --- a/mitmproxy/tools/main.py +++ b/mitmproxy/tools/main.py @@ -7,7 +7,7 @@ from mitmproxy import exceptions from mitmproxy.proxy import config from mitmproxy.proxy import server from netlib import version_check -from netlib import debug +from mitmproxy.utils import debug def assert_utf8_env(): diff --git a/mitmproxy/utils/debug.py b/mitmproxy/utils/debug.py new file mode 100644 index 00000000..147fe4b1 --- /dev/null +++ b/mitmproxy/utils/debug.py @@ -0,0 +1,120 @@ +import gc +import os +import sys +import threading +import signal +import platform +import traceback + +from mitmproxy import version + +from OpenSSL import SSL + + +def sysinfo(): + data = [ + "Mitmproxy version: %s" % version.VERSION, + "Python version: %s" % platform.python_version(), + "Platform: %s" % platform.platform(), + "SSL version: %s" % SSL.SSLeay_version(SSL.SSLEAY_VERSION).decode(), + ] + d = platform.linux_distribution() + t = "Linux distro: %s %s %s" % d + if d[0]: # pragma: no-cover + data.append(t) + + d = platform.mac_ver() + t = "Mac version: %s %s %s" % d + if d[0]: # pragma: no-cover + data.append(t) + + d = platform.win32_ver() + t = "Windows version: %s %s %s %s" % d + if d[0]: # pragma: no-cover + data.append(t) + + return "\n".join(data) + + +def dump_info(signal=None, frame=None, file=sys.stdout, testing=False): # pragma: no cover + print("****************************************************", file=file) + print("Summary", file=file) + print("=======", file=file) + + try: + import psutil + except: + print("(psutil not installed, skipping some debug info)", file=file) + else: + p = psutil.Process() + print("num threads: ", p.num_threads(), file=file) + if hasattr(p, "num_fds"): + print("num fds: ", p.num_fds(), file=file) + print("memory: ", p.memory_info(), file=file) + + print(file=file) + print("Files", file=file) + print("=====", file=file) + for i in p.open_files(): + print(i, file=file) + + print(file=file) + print("Connections", file=file) + print("===========", file=file) + for i in p.connections(): + print(i, file=file) + + print(file=file) + print("Threads", file=file) + print("=======", file=file) + bthreads = [] + for i in threading.enumerate(): + if hasattr(i, "_threadinfo"): + bthreads.append(i) + else: + print(i.name, file=file) + bthreads.sort(key=lambda x: x._thread_started) + for i in bthreads: + print(i._threadinfo(), file=file) + + print(file=file) + print("Memory", file=file) + print("=======", file=file) + gc.collect() + d = {} + for i in gc.get_objects(): + t = str(type(i)) + if "mitmproxy" in t or "netlib" in t: + d[t] = d.setdefault(t, 0) + 1 + itms = list(d.items()) + itms.sort(key=lambda x: x[1]) + for i in itms[-20:]: + print(i[1], i[0], file=file) + print("****************************************************", file=file) + + if not testing: + sys.exit(1) + + +def dump_stacks(signal=None, frame=None, file=sys.stdout, testing=False): + id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) + code = [] + for threadId, stack in sys._current_frames().items(): + code.append( + "\n# Thread: %s(%d)" % ( + id2name.get(threadId, ""), threadId + ) + ) + for filename, lineno, name, line in traceback.extract_stack(stack): + code.append('File: "%s", line %d, in %s' % 
(filename, lineno, name)) + if line: + code.append(" %s" % (line.strip())) + print("\n".join(code), file=file) + if not testing: + sys.exit(1) + + +def register_info_dumpers(): + if os.name != "nt": + signal.signal(signal.SIGUSR1, dump_info) + signal.signal(signal.SIGUSR2, dump_stacks) diff --git a/netlib/debug.py b/netlib/debug.py deleted file mode 100644 index 147fe4b1..00000000 --- a/netlib/debug.py +++ /dev/null @@ -1,120 +0,0 @@ -import gc -import os -import sys -import threading -import signal -import platform -import traceback - -from mitmproxy import version - -from OpenSSL import SSL - - -def sysinfo(): - data = [ - "Mitmproxy version: %s" % version.VERSION, - "Python version: %s" % platform.python_version(), - "Platform: %s" % platform.platform(), - "SSL version: %s" % SSL.SSLeay_version(SSL.SSLEAY_VERSION).decode(), - ] - d = platform.linux_distribution() - t = "Linux distro: %s %s %s" % d - if d[0]: # pragma: no-cover - data.append(t) - - d = platform.mac_ver() - t = "Mac version: %s %s %s" % d - if d[0]: # pragma: no-cover - data.append(t) - - d = platform.win32_ver() - t = "Windows version: %s %s %s %s" % d - if d[0]: # pragma: no-cover - data.append(t) - - return "\n".join(data) - - -def dump_info(signal=None, frame=None, file=sys.stdout, testing=False): # pragma: no cover - print("****************************************************", file=file) - print("Summary", file=file) - print("=======", file=file) - - try: - import psutil - except: - print("(psutil not installed, skipping some debug info)", file=file) - else: - p = psutil.Process() - print("num threads: ", p.num_threads(), file=file) - if hasattr(p, "num_fds"): - print("num fds: ", p.num_fds(), file=file) - print("memory: ", p.memory_info(), file=file) - - print(file=file) - print("Files", file=file) - print("=====", file=file) - for i in p.open_files(): - print(i, file=file) - - print(file=file) - print("Connections", file=file) - print("===========", file=file) - for i in p.connections(): - print(i, file=file) - - print(file=file) - print("Threads", file=file) - print("=======", file=file) - bthreads = [] - for i in threading.enumerate(): - if hasattr(i, "_threadinfo"): - bthreads.append(i) - else: - print(i.name, file=file) - bthreads.sort(key=lambda x: x._thread_started) - for i in bthreads: - print(i._threadinfo(), file=file) - - print(file=file) - print("Memory", file=file) - print("=======", file=file) - gc.collect() - d = {} - for i in gc.get_objects(): - t = str(type(i)) - if "mitmproxy" in t or "netlib" in t: - d[t] = d.setdefault(t, 0) + 1 - itms = list(d.items()) - itms.sort(key=lambda x: x[1]) - for i in itms[-20:]: - print(i[1], i[0], file=file) - print("****************************************************", file=file) - - if not testing: - sys.exit(1) - - -def dump_stacks(signal=None, frame=None, file=sys.stdout, testing=False): - id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) - code = [] - for threadId, stack in sys._current_frames().items(): - code.append( - "\n# Thread: %s(%d)" % ( - id2name.get(threadId, ""), threadId - ) - ) - for filename, lineno, name, line in traceback.extract_stack(stack): - code.append('File: "%s", line %d, in %s' % (filename, lineno, name)) - if line: - code.append(" %s" % (line.strip())) - print("\n".join(code), file=file) - if not testing: - sys.exit(1) - - -def register_info_dumpers(): - if os.name != "nt": - signal.signal(signal.SIGUSR1, dump_info) - signal.signal(signal.SIGUSR2, dump_stacks) diff --git a/test/mitmproxy/test_utils_debug.py 
b/test/mitmproxy/test_utils_debug.py new file mode 100644 index 00000000..9acf8192 --- /dev/null +++ b/test/mitmproxy/test_utils_debug.py @@ -0,0 +1,23 @@ +import io + +from mitmproxy.utils import debug + + +def test_dump_info(): + cs = io.StringIO() + debug.dump_info(None, None, file=cs, testing=True) + assert cs.getvalue() + + +def test_dump_stacks(): + cs = io.StringIO() + debug.dump_stacks(None, None, file=cs, testing=True) + assert cs.getvalue() + + +def test_sysinfo(): + assert debug.sysinfo() + + +def test_register_info_dumpers(): + debug.register_info_dumpers() diff --git a/test/netlib/test_debug.py b/test/netlib/test_debug.py deleted file mode 100644 index bdb85c9e..00000000 --- a/test/netlib/test_debug.py +++ /dev/null @@ -1,23 +0,0 @@ -import io - -from netlib import debug - - -def test_dump_info(): - cs = io.StringIO() - debug.dump_info(None, None, file=cs, testing=True) - assert cs.getvalue() - - -def test_dump_stacks(): - cs = io.StringIO() - debug.dump_stacks(None, None, file=cs, testing=True) - assert cs.getvalue() - - -def test_sysinfo(): - assert debug.sysinfo() - - -def test_register_info_dumpers(): - debug.register_info_dumpers() -- cgit v1.2.3 From 95551265852e6ff05ab5e5204e1a919f66fa4eae Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 10:32:09 +1300 Subject: netlib.utils.BiDi -> mitmproxy.types.bidi.BiDi --- mitmproxy/types/bidi.py | 29 +++++++++++++++++++++++++++++ netlib/socks.py | 13 +++++++------ netlib/utils.py | 29 ----------------------------- netlib/websockets/frame.py | 5 +++-- pathod/protocols/http2.py | 4 ++-- test/mitmproxy/test_types_bidi.py | 11 +++++++++++ test/netlib/test_utils.py | 11 +---------- 7 files changed, 53 insertions(+), 49 deletions(-) create mode 100644 mitmproxy/types/bidi.py create mode 100644 test/mitmproxy/test_types_bidi.py diff --git a/mitmproxy/types/bidi.py b/mitmproxy/types/bidi.py new file mode 100644 index 00000000..0982a34a --- /dev/null +++ b/mitmproxy/types/bidi.py @@ -0,0 +1,29 @@ + + +class BiDi: + + """ + A wee utility class for keeping bi-directional mappings, like field + constants in protocols. 
Names are attributes on the object, dict-like + access maps values to names: + + CONST = BiDi(a=1, b=2) + assert CONST.a == 1 + assert CONST.get_name(1) == "a" + """ + + def __init__(self, **kwargs): + self.names = kwargs + self.values = {} + for k, v in kwargs.items(): + self.values[v] = k + if len(self.names) != len(self.values): + raise ValueError("Duplicate values not allowed.") + + def __getattr__(self, k): + if k in self.names: + return self.names[k] + raise AttributeError("No such attribute: %s", k) + + def get_name(self, n, default=None): + return self.values.get(n, default) diff --git a/netlib/socks.py b/netlib/socks.py index 9f1adb98..30fe1c9d 100644 --- a/netlib/socks.py +++ b/netlib/socks.py @@ -3,6 +3,7 @@ import array import ipaddress from netlib import tcp, utils +from mitmproxy.types import bidi class SocksError(Exception): @@ -10,24 +11,24 @@ class SocksError(Exception): super().__init__(message) self.code = code -VERSION = utils.BiDi( +VERSION = bidi.BiDi( SOCKS4=0x04, SOCKS5=0x05 ) -CMD = utils.BiDi( +CMD = bidi.BiDi( CONNECT=0x01, BIND=0x02, UDP_ASSOCIATE=0x03 ) -ATYP = utils.BiDi( +ATYP = bidi.BiDi( IPV4_ADDRESS=0x01, DOMAINNAME=0x03, IPV6_ADDRESS=0x04 ) -REP = utils.BiDi( +REP = bidi.BiDi( SUCCEEDED=0x00, GENERAL_SOCKS_SERVER_FAILURE=0x01, CONNECTION_NOT_ALLOWED_BY_RULESET=0x02, @@ -39,14 +40,14 @@ REP = utils.BiDi( ADDRESS_TYPE_NOT_SUPPORTED=0x08, ) -METHOD = utils.BiDi( +METHOD = bidi.BiDi( NO_AUTHENTICATION_REQUIRED=0x00, GSSAPI=0x01, USERNAME_PASSWORD=0x02, NO_ACCEPTABLE_METHODS=0xFF ) -USERNAME_PASSWORD_VERSION = utils.BiDi( +USERNAME_PASSWORD_VERSION = bidi.BiDi( DEFAULT=0x01 ) diff --git a/netlib/utils.py b/netlib/utils.py index 12b94d74..779eaa27 100644 --- a/netlib/utils.py +++ b/netlib/utils.py @@ -16,35 +16,6 @@ def getbit(byte, offset): return bool(byte & mask) -class BiDi: - - """ - A wee utility class for keeping bi-directional mappings, like field - constants in protocols. Names are attributes on the object, dict-like - access maps values to names: - - CONST = BiDi(a=1, b=2) - assert CONST.a == 1 - assert CONST.get_name(1) == "a" - """ - - def __init__(self, **kwargs): - self.names = kwargs - self.values = {} - for k, v in kwargs.items(): - self.values[v] = k - if len(self.names) != len(self.values): - raise ValueError("Duplicate values not allowed.") - - def __getattr__(self, k): - if k in self.names: - return self.names[k] - raise AttributeError("No such attribute: %s", k) - - def get_name(self, n, default=None): - return self.values.get(n, default) - - _label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? 
Date: Thu, 20 Oct 2016 10:38:57 +1300 Subject: netlib.utils.[get,set]bit -> mitmproxy.utils.bits --- mitmproxy/tools/main.py | 2 +- mitmproxy/utils/bits.py | 15 +++++++++++ mitmproxy/utils/version_check.py | 43 ++++++++++++++++++++++++++++++ netlib/tcp.py | 2 +- netlib/utils.py | 16 ----------- netlib/version_check.py | 43 ------------------------------ netlib/websockets/frame.py | 22 +++++++-------- test/mitmproxy/test_utils_version_check.py | 25 +++++++++++++++++ test/netlib/test_version_check.py | 25 ----------------- 9 files changed, 96 insertions(+), 97 deletions(-) create mode 100644 mitmproxy/utils/bits.py create mode 100644 mitmproxy/utils/version_check.py delete mode 100644 netlib/version_check.py create mode 100644 test/mitmproxy/test_utils_version_check.py delete mode 100644 test/netlib/test_version_check.py diff --git a/mitmproxy/tools/main.py b/mitmproxy/tools/main.py index f3526de9..9fce9234 100644 --- a/mitmproxy/tools/main.py +++ b/mitmproxy/tools/main.py @@ -6,7 +6,7 @@ from mitmproxy.tools import cmdline from mitmproxy import exceptions from mitmproxy.proxy import config from mitmproxy.proxy import server -from netlib import version_check +from mitmproxy.utils import version_check from mitmproxy.utils import debug diff --git a/mitmproxy/utils/bits.py b/mitmproxy/utils/bits.py new file mode 100644 index 00000000..ec0d40ef --- /dev/null +++ b/mitmproxy/utils/bits.py @@ -0,0 +1,15 @@ + + +def setbit(byte, offset, value): + """ + Set a bit in a byte to 1 if value is truthy, 0 if not. + """ + if value: + return byte | (1 << offset) + else: + return byte & ~(1 << offset) + + +def getbit(byte, offset): + mask = 1 << offset + return bool(byte & mask) diff --git a/mitmproxy/utils/version_check.py b/mitmproxy/utils/version_check.py new file mode 100644 index 00000000..547c031c --- /dev/null +++ b/mitmproxy/utils/version_check.py @@ -0,0 +1,43 @@ +""" +Having installed a wrong version of pyOpenSSL or netlib is unfortunately a +very common source of error. Check before every start that both versions +are somewhat okay. +""" +import sys +import inspect +import os.path + +import OpenSSL + +PYOPENSSL_MIN_VERSION = (0, 15) + + +def check_pyopenssl_version(min_version=PYOPENSSL_MIN_VERSION, fp=sys.stderr): + min_version_str = u".".join(str(x) for x in min_version) + try: + v = tuple(int(x) for x in OpenSSL.__version__.split(".")[:2]) + except ValueError: + print( + u"Cannot parse pyOpenSSL version: {}" + u"mitmproxy requires pyOpenSSL {} or greater.".format( + OpenSSL.__version__, min_version_str + ), + file=fp + ) + return + if v < min_version: + print( + u"You are using an outdated version of pyOpenSSL: " + u"mitmproxy requires pyOpenSSL {} or greater.".format(min_version_str), + file=fp + ) + # Some users apparently have multiple versions of pyOpenSSL installed. + # Report which one we got. 
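# Editor's aside, not part of the patch: a quick round trip through the
# mitmproxy.utils.bits helpers introduced earlier in this commit.
from mitmproxy.utils import bits

byte = bits.setbit(0b0000, 2, True)      # -> 0b0100
assert bits.getbit(byte, 2)
byte = bits.setbit(byte, 2, False)       # -> 0b0000
assert not bits.getbit(byte, 2)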
+ pyopenssl_path = os.path.dirname(inspect.getfile(OpenSSL)) + print( + u"Your pyOpenSSL {} installation is located at {}".format( + OpenSSL.__version__, pyopenssl_path + ), + file=fp + ) + sys.exit(1) diff --git a/netlib/tcp.py b/netlib/tcp.py index 4fde657f..4379c9b5 100644 --- a/netlib/tcp.py +++ b/netlib/tcp.py @@ -18,7 +18,7 @@ import OpenSSL from OpenSSL import SSL from netlib import certutils -from netlib import version_check +from mitmproxy.utils import version_check from mitmproxy.types import serializable from netlib import exceptions from mitmproxy.types import basethread diff --git a/netlib/utils.py b/netlib/utils.py index 779eaa27..7b007cb5 100644 --- a/netlib/utils.py +++ b/netlib/utils.py @@ -1,21 +1,5 @@ import re - -def setbit(byte, offset, value): - """ - Set a bit in a byte to 1 if value is truthy, 0 if not. - """ - if value: - return byte | (1 << offset) - else: - return byte & ~(1 << offset) - - -def getbit(byte, offset): - mask = 1 << offset - return bool(byte & mask) - - _label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? 125 indicates you need to read more bytes diff --git a/test/mitmproxy/test_utils_version_check.py b/test/mitmproxy/test_utils_version_check.py new file mode 100644 index 00000000..5c8d8c8c --- /dev/null +++ b/test/mitmproxy/test_utils_version_check.py @@ -0,0 +1,25 @@ +import io +import mock +from mitmproxy.utils import version_check + + +@mock.patch("sys.exit") +def test_check_pyopenssl_version(sexit): + fp = io.StringIO() + version_check.check_pyopenssl_version(fp=fp) + assert not fp.getvalue() + assert not sexit.called + + version_check.check_pyopenssl_version((9999,), fp=fp) + assert "outdated" in fp.getvalue() + assert sexit.called + + +@mock.patch("sys.exit") +@mock.patch("OpenSSL.__version__") +def test_unparseable_pyopenssl_version(version, sexit): + version.split.return_value = ["foo", "bar"] + fp = io.StringIO() + version_check.check_pyopenssl_version(fp=fp) + assert "Cannot parse" in fp.getvalue() + assert not sexit.called diff --git a/test/netlib/test_version_check.py b/test/netlib/test_version_check.py deleted file mode 100644 index 01820dad..00000000 --- a/test/netlib/test_version_check.py +++ /dev/null @@ -1,25 +0,0 @@ -import io -import mock -from netlib import version_check - - -@mock.patch("sys.exit") -def test_check_pyopenssl_version(sexit): - fp = io.StringIO() - version_check.check_pyopenssl_version(fp=fp) - assert not fp.getvalue() - assert not sexit.called - - version_check.check_pyopenssl_version((9999,), fp=fp) - assert "outdated" in fp.getvalue() - assert sexit.called - - -@mock.patch("sys.exit") -@mock.patch("OpenSSL.__version__") -def test_unparseable_pyopenssl_version(version, sexit): - version.split.return_value = ["foo", "bar"] - fp = io.StringIO() - version_check.check_pyopenssl_version(fp=fp) - assert "Cannot parse" in fp.getvalue() - assert not sexit.called -- cgit v1.2.3 From 9870844b38c84e7446b15909758497cecb26301e Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 10:46:47 +1300 Subject: netlib.utils -> netlib.check Now only contains is_valid_[host,port] Intermediate step - this will be in mitproxy.net soon. 
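Editor's aside, not part of the patch: after this commit, netlib.check carries only the two hostname/port validators shown below. A minimal usage sketch, assuming the module layout this commit creates:

from netlib import check

assert check.is_valid_host(b"example.com")
assert not check.is_valid_host(b"x" * 300)    # rejected: longer than 255 bytes
assert check.is_valid_port(8080)
assert not check.is_valid_port(70000)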
--- mitmproxy/proxy/protocol/tls.py | 4 ++-- netlib/check.py | 22 ++++++++++++++++++++++ netlib/http/http1/read.py | 4 ++-- netlib/http/url.py | 6 +++--- netlib/socks.py | 5 +++-- netlib/utils.py | 22 ---------------------- test/mitmproxy/test_flow.py | 2 +- test/mitmproxy/tutils.py | 1 - test/netlib/test_check.py | 10 ++++++++++ test/netlib/test_utils.py | 10 ---------- 10 files changed, 43 insertions(+), 43 deletions(-) create mode 100644 netlib/check.py delete mode 100644 netlib/utils.py create mode 100644 test/netlib/test_check.py delete mode 100644 test/netlib/test_utils.py diff --git a/mitmproxy/proxy/protocol/tls.py b/mitmproxy/proxy/protocol/tls.py index 940ab9ea..7b6b506c 100644 --- a/mitmproxy/proxy/protocol/tls.py +++ b/mitmproxy/proxy/protocol/tls.py @@ -7,7 +7,7 @@ import netlib.exceptions from mitmproxy import exceptions from mitmproxy.contrib.tls import _constructs from mitmproxy.proxy.protocol import base -from netlib import utils +from netlib import check # taken from https://testssl.sh/openssl-rfc.mappping.html @@ -265,7 +265,7 @@ class TlsClientHello: extension.type == 0x00 and len(extension.server_names) == 1 and extension.server_names[0].type == 0 and - utils.is_valid_host(extension.server_names[0].name) + check.is_valid_host(extension.server_names[0].name) ) if is_valid_sni_extension: return extension.server_names[0].name.decode("idna") diff --git a/netlib/check.py b/netlib/check.py new file mode 100644 index 00000000..7b007cb5 --- /dev/null +++ b/netlib/check.py @@ -0,0 +1,22 @@ +import re + +_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? bool: + """ + Checks if a hostname is valid. + """ + try: + host.decode("idna") + except ValueError: + return False + if len(host) > 255: + return False + if host and host[-1:] == b".": + host = host[:-1] + return all(_label_valid.match(x) for x in host.split(b".")) + + +def is_valid_port(port): + return 0 <= port <= 65535 diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py index 4c00a96a..89b73c5a 100644 --- a/netlib/http/http1/read.py +++ b/netlib/http/http1/read.py @@ -6,7 +6,7 @@ from netlib.http import request from netlib.http import response from netlib.http import headers from netlib.http import url -from netlib import utils +from netlib import check from netlib import exceptions @@ -274,7 +274,7 @@ def _parse_authority_form(hostport): try: host, port = hostport.split(b":") port = int(port) - if not utils.is_valid_host(host) or not utils.is_valid_port(port): + if not check.is_valid_host(host) or not check.is_valid_port(port): raise ValueError() except ValueError: raise exceptions.HttpSyntaxException("Invalid host specification: {}".format(hostport)) diff --git a/netlib/http/url.py b/netlib/http/url.py index 67e22efa..3ca58120 100644 --- a/netlib/http/url.py +++ b/netlib/http/url.py @@ -2,7 +2,7 @@ import urllib from typing import Sequence from typing import Tuple -from netlib import utils +from netlib import check # PY2 workaround @@ -62,9 +62,9 @@ def parse(url): if not full_path.startswith(b"/"): full_path = b"/" + full_path - if not utils.is_valid_host(host): + if not check.is_valid_host(host): raise ValueError("Invalid Host") - if not utils.is_valid_port(port): + if not check.is_valid_port(port): raise ValueError("Invalid Port") return parsed.scheme, host, port, full_path diff --git a/netlib/socks.py b/netlib/socks.py index 30fe1c9d..377308a8 100644 --- a/netlib/socks.py +++ b/netlib/socks.py @@ -2,7 +2,8 @@ import struct import array import ipaddress -from netlib import tcp, utils +from netlib import 
tcp +from netlib import check from mitmproxy.types import bidi @@ -204,7 +205,7 @@ class Message: elif atyp == ATYP.DOMAINNAME: length, = struct.unpack("!B", f.safe_read(1)) host = f.safe_read(length) - if not utils.is_valid_host(host): + if not check.is_valid_host(host): raise SocksError(REP.GENERAL_SOCKS_SERVER_FAILURE, "Invalid hostname: %s" % host) host = host.decode("idna") use_ipv6 = False diff --git a/netlib/utils.py b/netlib/utils.py deleted file mode 100644 index 7b007cb5..00000000 --- a/netlib/utils.py +++ /dev/null @@ -1,22 +0,0 @@ -import re - -_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? bool: - """ - Checks if a hostname is valid. - """ - try: - host.decode("idna") - except ValueError: - return False - if len(host) > 255: - return False - if host and host[-1:] == b".": - host = host[:-1] - return all(_label_valid.match(x) for x in host.split(b".")) - - -def is_valid_port(port): - return 0 <= port <= 65535 diff --git a/test/mitmproxy/test_flow.py b/test/mitmproxy/test_flow.py index 9ebcca34..9b7e7395 100644 --- a/test/mitmproxy/test_flow.py +++ b/test/mitmproxy/test_flow.py @@ -1,7 +1,7 @@ import mock import io -import netlib.utils +import netlib.tutils from netlib.http import Headers import mitmproxy.io from mitmproxy import flowfilter, options diff --git a/test/mitmproxy/tutils.py b/test/mitmproxy/tutils.py index 71dd20a4..aa70c0e8 100644 --- a/test/mitmproxy/tutils.py +++ b/test/mitmproxy/tutils.py @@ -9,7 +9,6 @@ from unittest.case import SkipTest import io -import netlib.utils import netlib.tutils from mitmproxy import controller from mitmproxy import connections diff --git a/test/netlib/test_check.py b/test/netlib/test_check.py new file mode 100644 index 00000000..6a1388f4 --- /dev/null +++ b/test/netlib/test_check.py @@ -0,0 +1,10 @@ +# coding=utf-8 + +from netlib import check + + +def test_is_valid_host(): + assert not check.is_valid_host(b"") + assert check.is_valid_host(b"one.two") + assert not check.is_valid_host(b"one" * 255) + assert check.is_valid_host(b"one.two.") diff --git a/test/netlib/test_utils.py b/test/netlib/test_utils.py deleted file mode 100644 index db33fa31..00000000 --- a/test/netlib/test_utils.py +++ /dev/null @@ -1,10 +0,0 @@ -# coding=utf-8 - -from netlib import utils - - -def test_is_valid_host(): - assert not utils.is_valid_host(b"") - assert utils.is_valid_host(b"one.two") - assert not utils.is_valid_host(b"one" * 255) - assert utils.is_valid_host(b"one.two.") -- cgit v1.2.3 From f964d49853a3f0d22e0f6d4cff7cfbc49008e40e Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:02:52 +1300 Subject: netlib.certutils -> mitmproxy.certs --- mitmproxy/certs.py | 481 ++++++++++++++++++++++++++++++++++++++++++ mitmproxy/connections.py | 6 +- mitmproxy/proxy/config.py | 4 +- netlib/certutils.py | 481 ------------------------------------------ netlib/tcp.py | 12 +- pathod/pathoc.py | 12 +- pathod/pathod.py | 22 +- test/mitmproxy/test_certs.py | 181 ++++++++++++++++ test/mitmproxy/test_server.py | 33 +-- test/netlib/test_certutils.py | 180 ---------------- test/netlib/test_tcp.py | 53 ++--- 11 files changed, 738 insertions(+), 727 deletions(-) create mode 100644 mitmproxy/certs.py delete mode 100644 netlib/certutils.py create mode 100644 test/mitmproxy/test_certs.py delete mode 100644 test/netlib/test_certutils.py diff --git a/mitmproxy/certs.py b/mitmproxy/certs.py new file mode 100644 index 00000000..9cb8a40e --- /dev/null +++ b/mitmproxy/certs.py @@ -0,0 +1,481 @@ +import os +import ssl +import time +import datetime +import ipaddress 
+ +import sys +from pyasn1.type import univ, constraint, char, namedtype, tag +from pyasn1.codec.der.decoder import decode +from pyasn1.error import PyAsn1Error +import OpenSSL + +from mitmproxy.types import serializable + +# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815 + +DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3 +# Generated with "openssl dhparam". It's too slow to generate this on startup. +DEFAULT_DHPARAM = b""" +-----BEGIN DH PARAMETERS----- +MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 +O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv +j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ +Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB +chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC +ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq +o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX +IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv +A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 +6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I +rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= +-----END DH PARAMETERS----- +""" + + +def create_ca(o, cn, exp): + key = OpenSSL.crypto.PKey() + key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048) + cert = OpenSSL.crypto.X509() + cert.set_serial_number(int(time.time() * 10000)) + cert.set_version(2) + cert.get_subject().CN = cn + cert.get_subject().O = o + cert.gmtime_adj_notBefore(-3600 * 48) + cert.gmtime_adj_notAfter(exp) + cert.set_issuer(cert.get_subject()) + cert.set_pubkey(key) + cert.add_extensions([ + OpenSSL.crypto.X509Extension( + b"basicConstraints", + True, + b"CA:TRUE" + ), + OpenSSL.crypto.X509Extension( + b"nsCertType", + False, + b"sslCA" + ), + OpenSSL.crypto.X509Extension( + b"extendedKeyUsage", + False, + b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC" + ), + OpenSSL.crypto.X509Extension( + b"keyUsage", + True, + b"keyCertSign, cRLSign" + ), + OpenSSL.crypto.X509Extension( + b"subjectKeyIdentifier", + False, + b"hash", + subject=cert + ), + ]) + cert.sign(key, "sha256") + return key, cert + + +def dummy_cert(privkey, cacert, commonname, sans): + """ + Generates a dummy certificate. + + privkey: CA private key + cacert: CA certificate + commonname: Common name for the generated certificate. + sans: A list of Subject Alternate Names. + + Returns cert if operation succeeded, None if not. + """ + ss = [] + for i in sans: + try: + ipaddress.ip_address(i.decode("ascii")) + except ValueError: + ss.append(b"DNS: %s" % i) + else: + ss.append(b"IP: %s" % i) + ss = b", ".join(ss) + + cert = OpenSSL.crypto.X509() + cert.gmtime_adj_notBefore(-3600 * 48) + cert.gmtime_adj_notAfter(DEFAULT_EXP) + cert.set_issuer(cacert.get_subject()) + if commonname is not None: + cert.get_subject().CN = commonname + cert.set_serial_number(int(time.time() * 10000)) + if ss: + cert.set_version(2) + cert.add_extensions( + [OpenSSL.crypto.X509Extension(b"subjectAltName", False, ss)]) + cert.set_pubkey(cacert.get_pubkey()) + cert.sign(privkey, "sha256") + return SSLCert(cert) + + +# DNTree did not pass TestCertStore.test_sans_change and is temporarily replaced by a simple dict. +# +# class _Node(UserDict.UserDict): +# def __init__(self): +# UserDict.UserDict.__init__(self) +# self.value = None +# +# +# class DNTree: +# """ +# Domain store that knows about wildcards. 
DNS wildcards are very +# restricted - the only valid variety is an asterisk on the left-most +# domain component, i.e.: +# +# *.foo.com +# """ +# def __init__(self): +# self.d = _Node() +# +# def add(self, dn, cert): +# parts = dn.split(".") +# parts.reverse() +# current = self.d +# for i in parts: +# current = current.setdefault(i, _Node()) +# current.value = cert +# +# def get(self, dn): +# parts = dn.split(".") +# current = self.d +# for i in reversed(parts): +# if i in current: +# current = current[i] +# elif "*" in current: +# return current["*"].value +# else: +# return None +# return current.value + + +class CertStoreEntry: + + def __init__(self, cert, privatekey, chain_file): + self.cert = cert + self.privatekey = privatekey + self.chain_file = chain_file + + +class CertStore: + + """ + Implements an in-memory certificate store. + """ + STORE_CAP = 100 + + def __init__( + self, + default_privatekey, + default_ca, + default_chain_file, + dhparams): + self.default_privatekey = default_privatekey + self.default_ca = default_ca + self.default_chain_file = default_chain_file + self.dhparams = dhparams + self.certs = dict() + self.expire_queue = [] + + def expire(self, entry): + self.expire_queue.append(entry) + if len(self.expire_queue) > self.STORE_CAP: + d = self.expire_queue.pop(0) + for k, v in list(self.certs.items()): + if v == d: + del self.certs[k] + + @staticmethod + def load_dhparam(path): + + # netlib<=0.10 doesn't generate a dhparam file. + # Create it now if neccessary. + if not os.path.exists(path): + with open(path, "wb") as f: + f.write(DEFAULT_DHPARAM) + + bio = OpenSSL.SSL._lib.BIO_new_file(path.encode(sys.getfilesystemencoding()), b"r") + if bio != OpenSSL.SSL._ffi.NULL: + bio = OpenSSL.SSL._ffi.gc(bio, OpenSSL.SSL._lib.BIO_free) + dh = OpenSSL.SSL._lib.PEM_read_bio_DHparams( + bio, + OpenSSL.SSL._ffi.NULL, + OpenSSL.SSL._ffi.NULL, + OpenSSL.SSL._ffi.NULL) + dh = OpenSSL.SSL._ffi.gc(dh, OpenSSL.SSL._lib.DH_free) + return dh + + @classmethod + def from_store(cls, path, basename): + ca_path = os.path.join(path, basename + "-ca.pem") + if not os.path.exists(ca_path): + key, ca = cls.create_store(path, basename) + else: + with open(ca_path, "rb") as f: + raw = f.read() + ca = OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_PEM, + raw) + key = OpenSSL.crypto.load_privatekey( + OpenSSL.crypto.FILETYPE_PEM, + raw) + dh_path = os.path.join(path, basename + "-dhparam.pem") + dh = cls.load_dhparam(dh_path) + return cls(key, ca, ca_path, dh) + + @staticmethod + def create_store(path, basename, o=None, cn=None, expiry=DEFAULT_EXP): + if not os.path.exists(path): + os.makedirs(path) + + o = o or basename + cn = cn or basename + + key, ca = create_ca(o=o, cn=cn, exp=expiry) + # Dump the CA plus private key + with open(os.path.join(path, basename + "-ca.pem"), "wb") as f: + f.write( + OpenSSL.crypto.dump_privatekey( + OpenSSL.crypto.FILETYPE_PEM, + key)) + f.write( + OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_PEM, + ca)) + + # Dump the certificate in PEM format + with open(os.path.join(path, basename + "-ca-cert.pem"), "wb") as f: + f.write( + OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_PEM, + ca)) + + # Create a .cer file with the same contents for Android + with open(os.path.join(path, basename + "-ca-cert.cer"), "wb") as f: + f.write( + OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_PEM, + ca)) + + # Dump the certificate in PKCS12 format for Windows devices + with open(os.path.join(path, basename + "-ca-cert.p12"), "wb") as 
f: + p12 = OpenSSL.crypto.PKCS12() + p12.set_certificate(ca) + p12.set_privatekey(key) + f.write(p12.export()) + + with open(os.path.join(path, basename + "-dhparam.pem"), "wb") as f: + f.write(DEFAULT_DHPARAM) + + return key, ca + + def add_cert_file(self, spec, path): + with open(path, "rb") as f: + raw = f.read() + cert = SSLCert( + OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_PEM, + raw)) + try: + privatekey = OpenSSL.crypto.load_privatekey( + OpenSSL.crypto.FILETYPE_PEM, + raw) + except Exception: + privatekey = self.default_privatekey + self.add_cert( + CertStoreEntry(cert, privatekey, path), + spec + ) + + def add_cert(self, entry, *names): + """ + Adds a cert to the certstore. We register the CN in the cert plus + any SANs, and also the list of names provided as an argument. + """ + if entry.cert.cn: + self.certs[entry.cert.cn] = entry + for i in entry.cert.altnames: + self.certs[i] = entry + for i in names: + self.certs[i] = entry + + @staticmethod + def asterisk_forms(dn): + if dn is None: + return [] + parts = dn.split(b".") + parts.reverse() + curr_dn = b"" + dn_forms = [b"*"] + for part in parts[:-1]: + curr_dn = b"." + part + curr_dn # .example.com + dn_forms.append(b"*" + curr_dn) # *.example.com + if parts[-1] != b"*": + dn_forms.append(parts[-1] + curr_dn) + return dn_forms + + def get_cert(self, commonname, sans): + """ + Returns an (cert, privkey, cert_chain) tuple. + + commonname: Common name for the generated certificate. Must be a + valid, plain-ASCII, IDNA-encoded domain name. + + sans: A list of Subject Alternate Names. + """ + + potential_keys = self.asterisk_forms(commonname) + for s in sans: + potential_keys.extend(self.asterisk_forms(s)) + potential_keys.append((commonname, tuple(sans))) + + name = next( + filter(lambda key: key in self.certs, potential_keys), + None + ) + if name: + entry = self.certs[name] + else: + entry = CertStoreEntry( + cert=dummy_cert( + self.default_privatekey, + self.default_ca, + commonname, + sans), + privatekey=self.default_privatekey, + chain_file=self.default_chain_file) + self.certs[(commonname, tuple(sans))] = entry + self.expire(entry) + + return entry.cert, entry.privatekey, entry.chain_file + + +class _GeneralName(univ.Choice): + # We are only interested in dNSNames. We use a default handler to ignore + # other types. + # TODO: We should also handle iPAddresses. + componentType = namedtype.NamedTypes( + namedtype.NamedType('dNSName', char.IA5String().subtype( + implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2) + ) + ), + ) + + +class _GeneralNames(univ.SequenceOf): + componentType = _GeneralName() + sizeSpec = univ.SequenceOf.sizeSpec + \ + constraint.ValueSizeConstraint(1, 1024) + + +class SSLCert(serializable.Serializable): + + def __init__(self, cert): + """ + Returns a (common name, [subject alternative names]) tuple. 
+ """ + self.x509 = cert + + def __eq__(self, other): + return self.digest("sha256") == other.digest("sha256") + + def __ne__(self, other): + return not self.__eq__(other) + + def get_state(self): + return self.to_pem() + + def set_state(self, state): + self.x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, state) + + @classmethod + def from_state(cls, state): + return cls.from_pem(state) + + @classmethod + def from_pem(cls, txt): + x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, txt) + return cls(x509) + + @classmethod + def from_der(cls, der): + pem = ssl.DER_cert_to_PEM_cert(der) + return cls.from_pem(pem) + + def to_pem(self): + return OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_PEM, + self.x509) + + def digest(self, name): + return self.x509.digest(name) + + @property + def issuer(self): + return self.x509.get_issuer().get_components() + + @property + def notbefore(self): + t = self.x509.get_notBefore() + return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ") + + @property + def notafter(self): + t = self.x509.get_notAfter() + return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ") + + @property + def has_expired(self): + return self.x509.has_expired() + + @property + def subject(self): + return self.x509.get_subject().get_components() + + @property + def serial(self): + return self.x509.get_serial_number() + + @property + def keyinfo(self): + pk = self.x509.get_pubkey() + types = { + OpenSSL.crypto.TYPE_RSA: "RSA", + OpenSSL.crypto.TYPE_DSA: "DSA", + } + return ( + types.get(pk.type(), "UNKNOWN"), + pk.bits() + ) + + @property + def cn(self): + c = None + for i in self.subject: + if i[0] == b"CN": + c = i[1] + return c + + @property + def altnames(self): + """ + Returns: + All DNS altnames. + """ + # tcp.TCPClient.convert_to_ssl assumes that this property only contains DNS altnames for hostname verification. 
+ altnames = [] + for i in range(self.x509.get_extension_count()): + ext = self.x509.get_extension(i) + if ext.get_short_name() == b"subjectAltName": + try: + dec = decode(ext.get_data(), asn1Spec=_GeneralNames()) + except PyAsn1Error: + continue + for i in dec[0]: + altnames.append(i[0].asOctets()) + return altnames diff --git a/mitmproxy/connections.py b/mitmproxy/connections.py index bf7a12aa..6b39ac20 100644 --- a/mitmproxy/connections.py +++ b/mitmproxy/connections.py @@ -4,7 +4,7 @@ import copy import os from mitmproxy import stateobject -from netlib import certutils +from mitmproxy import certs from netlib import tcp @@ -57,7 +57,7 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject): _stateobject_attributes = dict( address=tcp.Address, ssl_established=bool, - clientcert=certutils.SSLCert, + clientcert=certs.SSLCert, timestamp_start=float, timestamp_ssl_setup=float, timestamp_end=float, @@ -151,7 +151,7 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject): ip_address=tcp.Address, source_address=tcp.Address, ssl_established=bool, - cert=certutils.SSLCert, + cert=certs.SSLCert, sni=str, timestamp_start=float, timestamp_tcp_setup=float, diff --git a/mitmproxy/proxy/config.py b/mitmproxy/proxy/config.py index a6fc739b..86b68ee5 100644 --- a/mitmproxy/proxy/config.py +++ b/mitmproxy/proxy/config.py @@ -10,7 +10,7 @@ from OpenSSL import SSL, crypto from mitmproxy import exceptions from mitmproxy import options as moptions -from netlib import certutils +from mitmproxy import certs from netlib import tcp from netlib.http import authentication from netlib.http import url @@ -106,7 +106,7 @@ class ProxyConfig: "Certificate Authority parent directory does not exist: %s" % os.path.dirname(options.cadir) ) - self.certstore = certutils.CertStore.from_store( + self.certstore = certs.CertStore.from_store( certstore_path, CONF_BASENAME ) diff --git a/netlib/certutils.py b/netlib/certutils.py deleted file mode 100644 index 9cb8a40e..00000000 --- a/netlib/certutils.py +++ /dev/null @@ -1,481 +0,0 @@ -import os -import ssl -import time -import datetime -import ipaddress - -import sys -from pyasn1.type import univ, constraint, char, namedtype, tag -from pyasn1.codec.der.decoder import decode -from pyasn1.error import PyAsn1Error -import OpenSSL - -from mitmproxy.types import serializable - -# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815 - -DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3 -# Generated with "openssl dhparam". It's too slow to generate this on startup. 
-DEFAULT_DHPARAM = b""" ------BEGIN DH PARAMETERS----- -MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 -O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv -j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ -Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB -chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC -ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq -o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX -IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv -A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 -6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I -rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= ------END DH PARAMETERS----- -""" - - -def create_ca(o, cn, exp): - key = OpenSSL.crypto.PKey() - key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048) - cert = OpenSSL.crypto.X509() - cert.set_serial_number(int(time.time() * 10000)) - cert.set_version(2) - cert.get_subject().CN = cn - cert.get_subject().O = o - cert.gmtime_adj_notBefore(-3600 * 48) - cert.gmtime_adj_notAfter(exp) - cert.set_issuer(cert.get_subject()) - cert.set_pubkey(key) - cert.add_extensions([ - OpenSSL.crypto.X509Extension( - b"basicConstraints", - True, - b"CA:TRUE" - ), - OpenSSL.crypto.X509Extension( - b"nsCertType", - False, - b"sslCA" - ), - OpenSSL.crypto.X509Extension( - b"extendedKeyUsage", - False, - b"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC" - ), - OpenSSL.crypto.X509Extension( - b"keyUsage", - True, - b"keyCertSign, cRLSign" - ), - OpenSSL.crypto.X509Extension( - b"subjectKeyIdentifier", - False, - b"hash", - subject=cert - ), - ]) - cert.sign(key, "sha256") - return key, cert - - -def dummy_cert(privkey, cacert, commonname, sans): - """ - Generates a dummy certificate. - - privkey: CA private key - cacert: CA certificate - commonname: Common name for the generated certificate. - sans: A list of Subject Alternate Names. - - Returns cert if operation succeeded, None if not. - """ - ss = [] - for i in sans: - try: - ipaddress.ip_address(i.decode("ascii")) - except ValueError: - ss.append(b"DNS: %s" % i) - else: - ss.append(b"IP: %s" % i) - ss = b", ".join(ss) - - cert = OpenSSL.crypto.X509() - cert.gmtime_adj_notBefore(-3600 * 48) - cert.gmtime_adj_notAfter(DEFAULT_EXP) - cert.set_issuer(cacert.get_subject()) - if commonname is not None: - cert.get_subject().CN = commonname - cert.set_serial_number(int(time.time() * 10000)) - if ss: - cert.set_version(2) - cert.add_extensions( - [OpenSSL.crypto.X509Extension(b"subjectAltName", False, ss)]) - cert.set_pubkey(cacert.get_pubkey()) - cert.sign(privkey, "sha256") - return SSLCert(cert) - - -# DNTree did not pass TestCertStore.test_sans_change and is temporarily replaced by a simple dict. -# -# class _Node(UserDict.UserDict): -# def __init__(self): -# UserDict.UserDict.__init__(self) -# self.value = None -# -# -# class DNTree: -# """ -# Domain store that knows about wildcards. 
DNS wildcards are very -# restricted - the only valid variety is an asterisk on the left-most -# domain component, i.e.: -# -# *.foo.com -# """ -# def __init__(self): -# self.d = _Node() -# -# def add(self, dn, cert): -# parts = dn.split(".") -# parts.reverse() -# current = self.d -# for i in parts: -# current = current.setdefault(i, _Node()) -# current.value = cert -# -# def get(self, dn): -# parts = dn.split(".") -# current = self.d -# for i in reversed(parts): -# if i in current: -# current = current[i] -# elif "*" in current: -# return current["*"].value -# else: -# return None -# return current.value - - -class CertStoreEntry: - - def __init__(self, cert, privatekey, chain_file): - self.cert = cert - self.privatekey = privatekey - self.chain_file = chain_file - - -class CertStore: - - """ - Implements an in-memory certificate store. - """ - STORE_CAP = 100 - - def __init__( - self, - default_privatekey, - default_ca, - default_chain_file, - dhparams): - self.default_privatekey = default_privatekey - self.default_ca = default_ca - self.default_chain_file = default_chain_file - self.dhparams = dhparams - self.certs = dict() - self.expire_queue = [] - - def expire(self, entry): - self.expire_queue.append(entry) - if len(self.expire_queue) > self.STORE_CAP: - d = self.expire_queue.pop(0) - for k, v in list(self.certs.items()): - if v == d: - del self.certs[k] - - @staticmethod - def load_dhparam(path): - - # netlib<=0.10 doesn't generate a dhparam file. - # Create it now if neccessary. - if not os.path.exists(path): - with open(path, "wb") as f: - f.write(DEFAULT_DHPARAM) - - bio = OpenSSL.SSL._lib.BIO_new_file(path.encode(sys.getfilesystemencoding()), b"r") - if bio != OpenSSL.SSL._ffi.NULL: - bio = OpenSSL.SSL._ffi.gc(bio, OpenSSL.SSL._lib.BIO_free) - dh = OpenSSL.SSL._lib.PEM_read_bio_DHparams( - bio, - OpenSSL.SSL._ffi.NULL, - OpenSSL.SSL._ffi.NULL, - OpenSSL.SSL._ffi.NULL) - dh = OpenSSL.SSL._ffi.gc(dh, OpenSSL.SSL._lib.DH_free) - return dh - - @classmethod - def from_store(cls, path, basename): - ca_path = os.path.join(path, basename + "-ca.pem") - if not os.path.exists(ca_path): - key, ca = cls.create_store(path, basename) - else: - with open(ca_path, "rb") as f: - raw = f.read() - ca = OpenSSL.crypto.load_certificate( - OpenSSL.crypto.FILETYPE_PEM, - raw) - key = OpenSSL.crypto.load_privatekey( - OpenSSL.crypto.FILETYPE_PEM, - raw) - dh_path = os.path.join(path, basename + "-dhparam.pem") - dh = cls.load_dhparam(dh_path) - return cls(key, ca, ca_path, dh) - - @staticmethod - def create_store(path, basename, o=None, cn=None, expiry=DEFAULT_EXP): - if not os.path.exists(path): - os.makedirs(path) - - o = o or basename - cn = cn or basename - - key, ca = create_ca(o=o, cn=cn, exp=expiry) - # Dump the CA plus private key - with open(os.path.join(path, basename + "-ca.pem"), "wb") as f: - f.write( - OpenSSL.crypto.dump_privatekey( - OpenSSL.crypto.FILETYPE_PEM, - key)) - f.write( - OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_PEM, - ca)) - - # Dump the certificate in PEM format - with open(os.path.join(path, basename + "-ca-cert.pem"), "wb") as f: - f.write( - OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_PEM, - ca)) - - # Create a .cer file with the same contents for Android - with open(os.path.join(path, basename + "-ca-cert.cer"), "wb") as f: - f.write( - OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_PEM, - ca)) - - # Dump the certificate in PKCS12 format for Windows devices - with open(os.path.join(path, basename + "-ca-cert.p12"), "wb") as 
f: - p12 = OpenSSL.crypto.PKCS12() - p12.set_certificate(ca) - p12.set_privatekey(key) - f.write(p12.export()) - - with open(os.path.join(path, basename + "-dhparam.pem"), "wb") as f: - f.write(DEFAULT_DHPARAM) - - return key, ca - - def add_cert_file(self, spec, path): - with open(path, "rb") as f: - raw = f.read() - cert = SSLCert( - OpenSSL.crypto.load_certificate( - OpenSSL.crypto.FILETYPE_PEM, - raw)) - try: - privatekey = OpenSSL.crypto.load_privatekey( - OpenSSL.crypto.FILETYPE_PEM, - raw) - except Exception: - privatekey = self.default_privatekey - self.add_cert( - CertStoreEntry(cert, privatekey, path), - spec - ) - - def add_cert(self, entry, *names): - """ - Adds a cert to the certstore. We register the CN in the cert plus - any SANs, and also the list of names provided as an argument. - """ - if entry.cert.cn: - self.certs[entry.cert.cn] = entry - for i in entry.cert.altnames: - self.certs[i] = entry - for i in names: - self.certs[i] = entry - - @staticmethod - def asterisk_forms(dn): - if dn is None: - return [] - parts = dn.split(b".") - parts.reverse() - curr_dn = b"" - dn_forms = [b"*"] - for part in parts[:-1]: - curr_dn = b"." + part + curr_dn # .example.com - dn_forms.append(b"*" + curr_dn) # *.example.com - if parts[-1] != b"*": - dn_forms.append(parts[-1] + curr_dn) - return dn_forms - - def get_cert(self, commonname, sans): - """ - Returns an (cert, privkey, cert_chain) tuple. - - commonname: Common name for the generated certificate. Must be a - valid, plain-ASCII, IDNA-encoded domain name. - - sans: A list of Subject Alternate Names. - """ - - potential_keys = self.asterisk_forms(commonname) - for s in sans: - potential_keys.extend(self.asterisk_forms(s)) - potential_keys.append((commonname, tuple(sans))) - - name = next( - filter(lambda key: key in self.certs, potential_keys), - None - ) - if name: - entry = self.certs[name] - else: - entry = CertStoreEntry( - cert=dummy_cert( - self.default_privatekey, - self.default_ca, - commonname, - sans), - privatekey=self.default_privatekey, - chain_file=self.default_chain_file) - self.certs[(commonname, tuple(sans))] = entry - self.expire(entry) - - return entry.cert, entry.privatekey, entry.chain_file - - -class _GeneralName(univ.Choice): - # We are only interested in dNSNames. We use a default handler to ignore - # other types. - # TODO: We should also handle iPAddresses. - componentType = namedtype.NamedTypes( - namedtype.NamedType('dNSName', char.IA5String().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2) - ) - ), - ) - - -class _GeneralNames(univ.SequenceOf): - componentType = _GeneralName() - sizeSpec = univ.SequenceOf.sizeSpec + \ - constraint.ValueSizeConstraint(1, 1024) - - -class SSLCert(serializable.Serializable): - - def __init__(self, cert): - """ - Returns a (common name, [subject alternative names]) tuple. 
- """ - self.x509 = cert - - def __eq__(self, other): - return self.digest("sha256") == other.digest("sha256") - - def __ne__(self, other): - return not self.__eq__(other) - - def get_state(self): - return self.to_pem() - - def set_state(self, state): - self.x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, state) - - @classmethod - def from_state(cls, state): - return cls.from_pem(state) - - @classmethod - def from_pem(cls, txt): - x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, txt) - return cls(x509) - - @classmethod - def from_der(cls, der): - pem = ssl.DER_cert_to_PEM_cert(der) - return cls.from_pem(pem) - - def to_pem(self): - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_PEM, - self.x509) - - def digest(self, name): - return self.x509.digest(name) - - @property - def issuer(self): - return self.x509.get_issuer().get_components() - - @property - def notbefore(self): - t = self.x509.get_notBefore() - return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ") - - @property - def notafter(self): - t = self.x509.get_notAfter() - return datetime.datetime.strptime(t.decode("ascii"), "%Y%m%d%H%M%SZ") - - @property - def has_expired(self): - return self.x509.has_expired() - - @property - def subject(self): - return self.x509.get_subject().get_components() - - @property - def serial(self): - return self.x509.get_serial_number() - - @property - def keyinfo(self): - pk = self.x509.get_pubkey() - types = { - OpenSSL.crypto.TYPE_RSA: "RSA", - OpenSSL.crypto.TYPE_DSA: "DSA", - } - return ( - types.get(pk.type(), "UNKNOWN"), - pk.bits() - ) - - @property - def cn(self): - c = None - for i in self.subject: - if i[0] == b"CN": - c = i[1] - return c - - @property - def altnames(self): - """ - Returns: - All DNS altnames. - """ - # tcp.TCPClient.convert_to_ssl assumes that this property only contains DNS altnames for hostname verification. - altnames = [] - for i in range(self.x509.get_extension_count()): - ext = self.x509.get_extension(i) - if ext.get_short_name() == b"subjectAltName": - try: - dec = decode(ext.get_data(), asn1Spec=_GeneralNames()) - except PyAsn1Error: - continue - for i in dec[0]: - altnames.append(i[0].asOctets()) - return altnames diff --git a/netlib/tcp.py b/netlib/tcp.py index 4379c9b5..6e323957 100644 --- a/netlib/tcp.py +++ b/netlib/tcp.py @@ -17,7 +17,7 @@ from backports import ssl_match_hostname import OpenSSL from OpenSSL import SSL -from netlib import certutils +from mitmproxy import certs from mitmproxy.utils import version_check from mitmproxy.types import serializable from netlib import exceptions @@ -685,11 +685,11 @@ class TCPClient(_Connection): if verification_mode == SSL.VERIFY_PEER and self.ssl_verification_error: raise self.ssl_verification_error - self.cert = certutils.SSLCert(self.connection.get_peer_certificate()) + self.cert = certs.SSLCert(self.connection.get_peer_certificate()) # Keep all server certificates in a list for i in self.connection.get_peer_cert_chain(): - self.server_certs.append(certutils.SSLCert(i)) + self.server_certs.append(certs.SSLCert(i)) # Validate TLS Hostname try: @@ -782,7 +782,7 @@ class BaseHandler(_Connection): extra_chain_certs=None, **sslctx_kwargs): """ - cert: A certutils.SSLCert object or the path to a certificate + cert: A certs.SSLCert object or the path to a certificate chain file. handle_sni: SNI handler, should take a connection object. 
Server @@ -810,7 +810,7 @@ class BaseHandler(_Connection): context = self._create_ssl_context(ca_pemfile=chain_file, **sslctx_kwargs) context.use_privatekey(key) - if isinstance(cert, certutils.SSLCert): + if isinstance(cert, certs.SSLCert): context.use_certificate(cert.x509) else: context.use_certificate_chain_file(cert) @@ -825,7 +825,7 @@ class BaseHandler(_Connection): if request_client_cert: def save_cert(conn_, cert, errno_, depth_, preverify_ok_): - self.clientcert = certutils.SSLCert(cert) + self.clientcert = certs.SSLCert(cert) # Return true to prevent cert verification error return True context.set_verify(SSL.VERIFY_PEER, save_cert) diff --git a/pathod/pathoc.py b/pathod/pathoc.py index caa9accb..39dedf05 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -13,13 +13,17 @@ import logging from netlib.tutils import treq from mitmproxy.utils import strutils -from netlib import tcp, certutils, websockets, socks +from netlib import tcp +from mitmproxy import certs +from netlib import websockets +from netlib import socks from netlib import exceptions from netlib.http import http1 from mitmproxy.types import basethread -from . import log, language -from .protocols import http2 +from pathod import log +from pathod import language +from pathod.protocols import http2 logging.getLogger("hpack").setLevel(logging.WARNING) @@ -76,7 +80,7 @@ class SSLInfo: } t = types.get(pk.type(), "Uknown") parts.append("\tPubkey: %s bit %s" % (pk.bits(), t)) - s = certutils.SSLCert(i) + s = certs.SSLCert(i) if s.altnames: parts.append("\tSANs: %s" % " ".join(strutils.native(n, "utf8") for n in s.altnames)) return "\n".join(parts) diff --git a/pathod/pathod.py b/pathod/pathod.py index 3692ceff..5d951350 100644 --- a/pathod/pathod.py +++ b/pathod/pathod.py @@ -5,15 +5,17 @@ import sys import threading from netlib import tcp -from netlib import certutils +from mitmproxy import certs as mcerts from netlib import websockets from mitmproxy import version import urllib -from netlib.exceptions import HttpException, HttpReadDisconnect, TcpTimeout, TcpDisconnect, \ - TlsException +from netlib import exceptions -from . 
import language, utils, log, protocols +from pathod import language +from pathod import utils +from pathod import log +from pathod import protocols DEFAULT_CERT_DOMAIN = b"pathod.net" @@ -52,7 +54,7 @@ class SSLOptions: self.ssl_options = ssl_options self.ciphers = ciphers self.alpn_select = alpn_select - self.certstore = certutils.CertStore.from_store( + self.certstore = mcerts.CertStore.from_store( os.path.expanduser(confdir), CERTSTORE_BASENAME ) @@ -128,9 +130,9 @@ class PathodHandler(tcp.BaseHandler): with logger.ctx() as lg: try: req = self.protocol.read_request(self.rfile) - except HttpReadDisconnect: + except exceptions.HttpReadDisconnect: return None, None - except HttpException as s: + except exceptions.HttpException as s: s = str(s) lg(s) return None, dict(type="error", msg=s) @@ -252,7 +254,7 @@ class PathodHandler(tcp.BaseHandler): options=self.server.ssloptions.ssl_options, alpn_select=self.server.ssloptions.alpn_select, ) - except TlsException as v: + except exceptions.TlsException as v: s = str(v) self.server.add_log( dict( @@ -384,7 +386,7 @@ class Pathod(tcp.TCPServer): try: h.handle() h.finish() - except TcpDisconnect: # pragma: no cover + except exceptions.TcpDisconnect: # pragma: no cover log.write_raw(self.logfp, "Disconnect") self.add_log( dict( @@ -393,7 +395,7 @@ class Pathod(tcp.TCPServer): ) ) return - except TcpTimeout: + except exceptions.TcpTimeout: log.write_raw(self.logfp, "Timeout") self.add_log( dict( diff --git a/test/mitmproxy/test_certs.py b/test/mitmproxy/test_certs.py new file mode 100644 index 00000000..35407fd6 --- /dev/null +++ b/test/mitmproxy/test_certs.py @@ -0,0 +1,181 @@ +import os +from mitmproxy import certs +from netlib import tutils + +# class TestDNTree: +# def test_simple(self): +# d = certs.DNTree() +# d.add("foo.com", "foo") +# d.add("bar.com", "bar") +# assert d.get("foo.com") == "foo" +# assert d.get("bar.com") == "bar" +# assert not d.get("oink.com") +# assert not d.get("oink") +# assert not d.get("") +# assert not d.get("oink.oink") +# +# d.add("*.match.org", "match") +# assert not d.get("match.org") +# assert d.get("foo.match.org") == "match" +# assert d.get("foo.foo.match.org") == "match" +# +# def test_wildcard(self): +# d = certs.DNTree() +# d.add("foo.com", "foo") +# assert not d.get("*.foo.com") +# d.add("*.foo.com", "wild") +# +# d = certs.DNTree() +# d.add("*", "foo") +# assert d.get("foo.com") == "foo" +# assert d.get("*.foo.com") == "foo" +# assert d.get("com") == "foo" + + +class TestCertStore: + + def test_create_explicit(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + assert ca.get_cert(b"foo", []) + + ca2 = certs.CertStore.from_store(d, "test") + assert ca2.get_cert(b"foo", []) + + assert ca.default_ca.get_serial_number() == ca2.default_ca.get_serial_number() + + def test_create_no_common_name(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + assert ca.get_cert(None, [])[0].cn is None + + def test_create_tmp(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + assert ca.get_cert(b"foo.com", []) + assert ca.get_cert(b"foo.com", []) + assert ca.get_cert(b"*.foo.com", []) + + r = ca.get_cert(b"*.foo.com", []) + assert r[1] == ca.default_privatekey + + def test_sans(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + c1 = ca.get_cert(b"foo.com", [b"*.bar.com"]) + ca.get_cert(b"foo.bar.com", []) + # assert c1 == c2 + c3 = ca.get_cert(b"bar.com", []) + assert not c1 == c3 + + def 
test_sans_change(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + ca.get_cert(b"foo.com", [b"*.bar.com"]) + cert, key, chain_file = ca.get_cert(b"foo.bar.com", [b"*.baz.com"]) + assert b"*.baz.com" in cert.altnames + + def test_expire(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + ca.STORE_CAP = 3 + ca.get_cert(b"one.com", []) + ca.get_cert(b"two.com", []) + ca.get_cert(b"three.com", []) + + assert (b"one.com", ()) in ca.certs + assert (b"two.com", ()) in ca.certs + assert (b"three.com", ()) in ca.certs + + ca.get_cert(b"one.com", []) + + assert (b"one.com", ()) in ca.certs + assert (b"two.com", ()) in ca.certs + assert (b"three.com", ()) in ca.certs + + ca.get_cert(b"four.com", []) + + assert (b"one.com", ()) not in ca.certs + assert (b"two.com", ()) in ca.certs + assert (b"three.com", ()) in ca.certs + assert (b"four.com", ()) in ca.certs + + def test_overrides(self): + with tutils.tmpdir() as d: + ca1 = certs.CertStore.from_store(os.path.join(d, "ca1"), "test") + ca2 = certs.CertStore.from_store(os.path.join(d, "ca2"), "test") + assert not ca1.default_ca.get_serial_number( + ) == ca2.default_ca.get_serial_number() + + dc = ca2.get_cert(b"foo.com", [b"sans.example.com"]) + dcp = os.path.join(d, "dc") + f = open(dcp, "wb") + f.write(dc[0].to_pem()) + f.close() + ca1.add_cert_file(b"foo.com", dcp) + + ret = ca1.get_cert(b"foo.com", []) + assert ret[0].serial == dc[0].serial + + +class TestDummyCert: + + def test_with_ca(self): + with tutils.tmpdir() as d: + ca = certs.CertStore.from_store(d, "test") + r = certs.dummy_cert( + ca.default_privatekey, + ca.default_ca, + b"foo.com", + [b"one.com", b"two.com", b"*.three.com"] + ) + assert r.cn == b"foo.com" + + r = certs.dummy_cert( + ca.default_privatekey, + ca.default_ca, + None, + [] + ) + assert r.cn is None + + +class TestSSLCert: + + def test_simple(self): + with open(tutils.test_data.path("data/text_cert"), "rb") as f: + d = f.read() + c1 = certs.SSLCert.from_pem(d) + assert c1.cn == b"google.com" + assert len(c1.altnames) == 436 + + with open(tutils.test_data.path("data/text_cert_2"), "rb") as f: + d = f.read() + c2 = certs.SSLCert.from_pem(d) + assert c2.cn == b"www.inode.co.nz" + assert len(c2.altnames) == 2 + assert c2.digest("sha1") + assert c2.notbefore + assert c2.notafter + assert c2.subject + assert c2.keyinfo == ("RSA", 2048) + assert c2.serial + assert c2.issuer + assert c2.to_pem() + assert c2.has_expired is not None + + assert not c1 == c2 + assert c1 != c2 + + def test_err_broken_sans(self): + with open(tutils.test_data.path("data/text_cert_weird1"), "rb") as f: + d = f.read() + c = certs.SSLCert.from_pem(d) + # This breaks unless we ignore a decoding error. 
+ assert c.altnames is not None + + def test_der(self): + with open(tutils.test_data.path("data/dercert"), "rb") as f: + d = f.read() + s = certs.SSLCert.from_der(d) + assert s.cn diff --git a/test/mitmproxy/test_server.py b/test/mitmproxy/test_server.py index 93a82954..cadc67a8 100644 --- a/test/mitmproxy/test_server.py +++ b/test/mitmproxy/test_server.py @@ -9,13 +9,16 @@ from mitmproxy.addons import script from mitmproxy import http from mitmproxy.proxy.config import HostMatcher, parse_server_spec import netlib.http -from netlib import tcp, socks -from netlib.certutils import SSLCert -from netlib.exceptions import HttpReadDisconnect, HttpException -from netlib.http import authentication, http1 +from netlib import tcp +from netlib import socks +from mitmproxy import certs +from netlib import exceptions +from netlib.http import authentication +from netlib.http import http1 from netlib.tcp import Address from netlib.tutils import raises -from pathod import pathoc, pathod +from pathod import pathoc +from pathod import pathod from . import tutils, tservers @@ -144,9 +147,9 @@ class TcpMixin: # Test that we get the original SSL cert if self.ssl: - i_cert = SSLCert(i.sslinfo.certchain[0]) - i2_cert = SSLCert(i2.sslinfo.certchain[0]) - n_cert = SSLCert(n.sslinfo.certchain[0]) + i_cert = certs.SSLCert(i.sslinfo.certchain[0]) + i2_cert = certs.SSLCert(i2.sslinfo.certchain[0]) + n_cert = certs.SSLCert(n.sslinfo.certchain[0]) assert i_cert == i2_cert assert i_cert != n_cert @@ -156,7 +159,7 @@ class TcpMixin: # mitmproxy responds with bad gateway assert self.pathod(spec).status_code == 502 self._ignore_on() - with raises(HttpException): + with raises(exceptions.HttpException): self.pathod(spec) # pathoc tries to parse answer as HTTP self._ignore_off() @@ -190,9 +193,9 @@ class TcpMixin: # Test that we get the original SSL cert if self.ssl: - i_cert = SSLCert(i.sslinfo.certchain[0]) - i2_cert = SSLCert(i2.sslinfo.certchain[0]) - n_cert = SSLCert(n.sslinfo.certchain[0]) + i_cert = certs.SSLCert(i.sslinfo.certchain[0]) + i2_cert = certs.SSLCert(i2.sslinfo.certchain[0]) + n_cert = certs.SSLCert(n.sslinfo.certchain[0]) assert i_cert == i2_cert == n_cert @@ -830,7 +833,7 @@ class TestKillRequest(tservers.HTTPProxyTest): masterclass = MasterKillRequest def test_kill(self): - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): self.pathod("200") # Nothing should have hit the server assert not self.server.last_log() @@ -847,7 +850,7 @@ class TestKillResponse(tservers.HTTPProxyTest): masterclass = MasterKillResponse def test_kill(self): - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): self.pathod("200") # The server should have seen a request assert self.server.last_log() @@ -1050,7 +1053,7 @@ class AddUpstreamCertsToClientChainMixin: def test_add_upstream_certs_to_client_chain(self): with open(self.servercert, "rb") as f: d = f.read() - upstreamCert = SSLCert.from_pem(d) + upstreamCert = certs.SSLCert.from_pem(d) p = self.pathoc() with p.connect(): upstream_cert_found_in_client_chain = False diff --git a/test/netlib/test_certutils.py b/test/netlib/test_certutils.py deleted file mode 100644 index cf9a671b..00000000 --- a/test/netlib/test_certutils.py +++ /dev/null @@ -1,180 +0,0 @@ -import os -from netlib import certutils, tutils - -# class TestDNTree: -# def test_simple(self): -# d = certutils.DNTree() -# d.add("foo.com", "foo") -# d.add("bar.com", "bar") -# assert d.get("foo.com") == "foo" -# assert d.get("bar.com") == "bar" -# assert not 
d.get("oink.com") -# assert not d.get("oink") -# assert not d.get("") -# assert not d.get("oink.oink") -# -# d.add("*.match.org", "match") -# assert not d.get("match.org") -# assert d.get("foo.match.org") == "match" -# assert d.get("foo.foo.match.org") == "match" -# -# def test_wildcard(self): -# d = certutils.DNTree() -# d.add("foo.com", "foo") -# assert not d.get("*.foo.com") -# d.add("*.foo.com", "wild") -# -# d = certutils.DNTree() -# d.add("*", "foo") -# assert d.get("foo.com") == "foo" -# assert d.get("*.foo.com") == "foo" -# assert d.get("com") == "foo" - - -class TestCertStore: - - def test_create_explicit(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - assert ca.get_cert(b"foo", []) - - ca2 = certutils.CertStore.from_store(d, "test") - assert ca2.get_cert(b"foo", []) - - assert ca.default_ca.get_serial_number() == ca2.default_ca.get_serial_number() - - def test_create_no_common_name(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - assert ca.get_cert(None, [])[0].cn is None - - def test_create_tmp(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - assert ca.get_cert(b"foo.com", []) - assert ca.get_cert(b"foo.com", []) - assert ca.get_cert(b"*.foo.com", []) - - r = ca.get_cert(b"*.foo.com", []) - assert r[1] == ca.default_privatekey - - def test_sans(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - c1 = ca.get_cert(b"foo.com", [b"*.bar.com"]) - ca.get_cert(b"foo.bar.com", []) - # assert c1 == c2 - c3 = ca.get_cert(b"bar.com", []) - assert not c1 == c3 - - def test_sans_change(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - ca.get_cert(b"foo.com", [b"*.bar.com"]) - cert, key, chain_file = ca.get_cert(b"foo.bar.com", [b"*.baz.com"]) - assert b"*.baz.com" in cert.altnames - - def test_expire(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - ca.STORE_CAP = 3 - ca.get_cert(b"one.com", []) - ca.get_cert(b"two.com", []) - ca.get_cert(b"three.com", []) - - assert (b"one.com", ()) in ca.certs - assert (b"two.com", ()) in ca.certs - assert (b"three.com", ()) in ca.certs - - ca.get_cert(b"one.com", []) - - assert (b"one.com", ()) in ca.certs - assert (b"two.com", ()) in ca.certs - assert (b"three.com", ()) in ca.certs - - ca.get_cert(b"four.com", []) - - assert (b"one.com", ()) not in ca.certs - assert (b"two.com", ()) in ca.certs - assert (b"three.com", ()) in ca.certs - assert (b"four.com", ()) in ca.certs - - def test_overrides(self): - with tutils.tmpdir() as d: - ca1 = certutils.CertStore.from_store(os.path.join(d, "ca1"), "test") - ca2 = certutils.CertStore.from_store(os.path.join(d, "ca2"), "test") - assert not ca1.default_ca.get_serial_number( - ) == ca2.default_ca.get_serial_number() - - dc = ca2.get_cert(b"foo.com", [b"sans.example.com"]) - dcp = os.path.join(d, "dc") - f = open(dcp, "wb") - f.write(dc[0].to_pem()) - f.close() - ca1.add_cert_file(b"foo.com", dcp) - - ret = ca1.get_cert(b"foo.com", []) - assert ret[0].serial == dc[0].serial - - -class TestDummyCert: - - def test_with_ca(self): - with tutils.tmpdir() as d: - ca = certutils.CertStore.from_store(d, "test") - r = certutils.dummy_cert( - ca.default_privatekey, - ca.default_ca, - b"foo.com", - [b"one.com", b"two.com", b"*.three.com"] - ) - assert r.cn == b"foo.com" - - r = certutils.dummy_cert( - ca.default_privatekey, - ca.default_ca, - None, - [] - ) - assert r.cn is None - - -class TestSSLCert: - - 
def test_simple(self): - with open(tutils.test_data.path("data/text_cert"), "rb") as f: - d = f.read() - c1 = certutils.SSLCert.from_pem(d) - assert c1.cn == b"google.com" - assert len(c1.altnames) == 436 - - with open(tutils.test_data.path("data/text_cert_2"), "rb") as f: - d = f.read() - c2 = certutils.SSLCert.from_pem(d) - assert c2.cn == b"www.inode.co.nz" - assert len(c2.altnames) == 2 - assert c2.digest("sha1") - assert c2.notbefore - assert c2.notafter - assert c2.subject - assert c2.keyinfo == ("RSA", 2048) - assert c2.serial - assert c2.issuer - assert c2.to_pem() - assert c2.has_expired is not None - - assert not c1 == c2 - assert c1 != c2 - - def test_err_broken_sans(self): - with open(tutils.test_data.path("data/text_cert_weird1"), "rb") as f: - d = f.read() - c = certutils.SSLCert.from_pem(d) - # This breaks unless we ignore a decoding error. - assert c.altnames is not None - - def test_der(self): - with open(tutils.test_data.path("data/dercert"), "rb") as f: - d = f.read() - s = certutils.SSLCert.from_der(d) - assert s.cn diff --git a/test/netlib/test_tcp.py b/test/netlib/test_tcp.py index 797a5a04..2c1b92dc 100644 --- a/test/netlib/test_tcp.py +++ b/test/netlib/test_tcp.py @@ -9,9 +9,10 @@ import mock from OpenSSL import SSL -from netlib import tcp, certutils, tutils -from netlib.exceptions import InvalidCertificateException, TcpReadIncomplete, TlsException, \ - TcpTimeout, TcpDisconnect, TcpException, NetlibException +from mitmproxy import certs +from netlib import tcp +from netlib import tutils +from netlib import exceptions from . import tservers @@ -108,7 +109,7 @@ class TestServerBind(tservers.ServerTestBase): with c.connect(): assert c.rfile.readline() == str(("127.0.0.1", random_port)).encode() return - except TcpException: # port probably already in use + except exceptions.TcpException: # port probably already in use pass @@ -155,7 +156,7 @@ class TestFinishFail(tservers.ServerTestBase): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): c.wfile.write(b"foo\n") - c.wfile.flush = mock.Mock(side_effect=TcpDisconnect) + c.wfile.flush = mock.Mock(side_effect=exceptions.TcpDisconnect) c.finish() @@ -195,7 +196,7 @@ class TestSSLv3Only(tservers.ServerTestBase): def test_failure(self): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): - tutils.raises(TlsException, c.convert_to_ssl, sni="foo.com") + tutils.raises(exceptions.TlsException, c.convert_to_ssl, sni="foo.com") class TestSSLUpstreamCertVerificationWBadServerCert(tservers.ServerTestBase): @@ -236,7 +237,7 @@ class TestSSLUpstreamCertVerificationWBadServerCert(tservers.ServerTestBase): def test_mode_strict_should_fail(self): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): - with tutils.raises(InvalidCertificateException): + with tutils.raises(exceptions.InvalidCertificateException): c.convert_to_ssl( sni="example.mitmproxy.org", verify_options=SSL.VERIFY_PEER, @@ -261,7 +262,7 @@ class TestSSLUpstreamCertVerificationWBadHostname(tservers.ServerTestBase): def test_should_fail_without_sni(self): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): - with tutils.raises(TlsException): + with tutils.raises(exceptions.TlsException): c.convert_to_ssl( verify_options=SSL.VERIFY_PEER, ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") @@ -270,7 +271,7 @@ class TestSSLUpstreamCertVerificationWBadHostname(tservers.ServerTestBase): def test_should_fail(self): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): - with 
tutils.raises(InvalidCertificateException): + with tutils.raises(exceptions.InvalidCertificateException): c.convert_to_ssl( sni="mitmproxy.org", verify_options=SSL.VERIFY_PEER, @@ -348,7 +349,7 @@ class TestSSLClientCert(tservers.ServerTestBase): c = tcp.TCPClient(("127.0.0.1", self.port)) with c.connect(): tutils.raises( - TlsException, + exceptions.TlsException, c.convert_to_ssl, cert=tutils.test_data.path("data/clientcert/make") ) @@ -454,7 +455,7 @@ class TestSSLDisconnect(tservers.ServerTestBase): # Excercise SSL.ZeroReturnError c.rfile.read(10) c.close() - tutils.raises(TcpDisconnect, c.wfile.write, b"foo") + tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") tutils.raises(queue.Empty, self.q.get_nowait) @@ -469,7 +470,7 @@ class TestSSLHardDisconnect(tservers.ServerTestBase): # Exercise SSL.SysCallError c.rfile.read(10) c.close() - tutils.raises(TcpDisconnect, c.wfile.write, b"foo") + tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") class TestDisconnect(tservers.ServerTestBase): @@ -492,7 +493,7 @@ class TestServerTimeOut(tservers.ServerTestBase): self.settimeout(0.01) try: self.rfile.read(10) - except TcpTimeout: + except exceptions.TcpTimeout: self.timeout = True def test_timeout(self): @@ -510,7 +511,7 @@ class TestTimeOut(tservers.ServerTestBase): with c.connect(): c.settimeout(0.1) assert c.gettimeout() == 0.1 - tutils.raises(TcpTimeout, c.rfile.read, 10) + tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) class TestALPNClient(tservers.ServerTestBase): @@ -562,13 +563,13 @@ class TestSSLTimeOut(tservers.ServerTestBase): with c.connect(): c.convert_to_ssl() c.settimeout(0.1) - tutils.raises(TcpTimeout, c.rfile.read, 10) + tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) class TestDHParams(tservers.ServerTestBase): handler = HangHandler ssl = dict( - dhparams=certutils.CertStore.load_dhparam( + dhparams=certs.CertStore.load_dhparam( tutils.test_data.path("data/dhparam.pem"), ), cipher_list="DHE-RSA-AES256-SHA" @@ -584,7 +585,7 @@ class TestDHParams(tservers.ServerTestBase): def test_create_dhparams(self): with tutils.tmpdir() as d: filename = os.path.join(d, "dhparam.pem") - certutils.CertStore.load_dhparam(filename) + certs.CertStore.load_dhparam(filename) assert os.path.exists(filename) @@ -592,7 +593,7 @@ class TestTCPClient: def test_conerr(self): c = tcp.TCPClient(("127.0.0.1", 0)) - tutils.raises(TcpException, c.connect) + tutils.raises(exceptions.TcpException, c.connect) class TestFileLike: @@ -661,7 +662,7 @@ class TestFileLike: o = mock.MagicMock() o.flush = mock.MagicMock(side_effect=socket.error) s.o = o - tutils.raises(TcpDisconnect, s.flush) + tutils.raises(exceptions.TcpDisconnect, s.flush) def test_reader_read_error(self): s = BytesIO(b"foobar\nfoobar") @@ -669,7 +670,7 @@ class TestFileLike: o = mock.MagicMock() o.read = mock.MagicMock(side_effect=socket.error) s.o = o - tutils.raises(TcpDisconnect, s.read, 10) + tutils.raises(exceptions.TcpDisconnect, s.read, 10) def test_reset_timestamps(self): s = BytesIO(b"foobar\nfoobar") @@ -700,24 +701,24 @@ class TestFileLike: s = mock.MagicMock() s.read = mock.MagicMock(side_effect=SSL.Error()) s = tcp.Reader(s) - tutils.raises(TlsException, s.read, 1) + tutils.raises(exceptions.TlsException, s.read, 1) def test_read_syscall_ssl_error(self): s = mock.MagicMock() s.read = mock.MagicMock(side_effect=SSL.SysCallError()) s = tcp.Reader(s) - tutils.raises(TlsException, s.read, 1) + tutils.raises(exceptions.TlsException, s.read, 1) def test_reader_readline_disconnect(self): o = 
mock.MagicMock() o.read = mock.MagicMock(side_effect=socket.error) s = tcp.Reader(o) - tutils.raises(TcpDisconnect, s.readline, 10) + tutils.raises(exceptions.TcpDisconnect, s.readline, 10) def test_reader_incomplete_error(self): s = BytesIO(b"foobar") s = tcp.Reader(s) - tutils.raises(TcpReadIncomplete, s.safe_read, 10) + tutils.raises(exceptions.TcpReadIncomplete, s.safe_read, 10) class TestPeek(tservers.ServerTestBase): @@ -738,11 +739,11 @@ class TestPeek(tservers.ServerTestBase): assert c.rfile.readline() == testval c.close() - with tutils.raises(NetlibException): + with tutils.raises(exceptions.NetlibException): if c.rfile.peek(1) == b"": # Workaround for Python 2 on Unix: # Peeking a closed connection does not raise an exception here. - raise NetlibException() + raise exceptions.NetlibException() class TestPeekSSL(TestPeek): -- cgit v1.2.3 From 301d52d9d05f2c5f074fe68c73acc1c32e518020 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:06:57 +1300 Subject: netlib.encoding -> netlib.http.encoding Encoding is highly specific to http, and only used within this module. --- netlib/encoding.py | 175 -------------------------------------- netlib/http/encoding.py | 175 ++++++++++++++++++++++++++++++++++++++ netlib/http/message.py | 2 +- test/netlib/http/test_encoding.py | 73 ++++++++++++++++ test/netlib/test_encoding.py | 72 ---------------- 5 files changed, 249 insertions(+), 248 deletions(-) delete mode 100644 netlib/encoding.py create mode 100644 netlib/http/encoding.py create mode 100644 test/netlib/http/test_encoding.py delete mode 100644 test/netlib/test_encoding.py diff --git a/netlib/encoding.py b/netlib/encoding.py deleted file mode 100644 index e123a033..00000000 --- a/netlib/encoding.py +++ /dev/null @@ -1,175 +0,0 @@ -""" -Utility functions for decoding response bodies. -""" - -import codecs -import collections -from io import BytesIO - -import gzip -import zlib -import brotli - -from typing import Union - - -# We have a shared single-element cache for encoding and decoding. -# This is quite useful in practice, e.g. -# flow.request.content = flow.request.content.replace(b"foo", b"bar") -# does not require an .encode() call if content does not contain b"foo" -CachedDecode = collections.namedtuple("CachedDecode", "encoded encoding errors decoded") -_cache = CachedDecode(None, None, None, None) - - -def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: - """ - Decode the given input object - - Returns: - The decoded value - - Raises: - ValueError, if decoding fails. - """ - if len(encoded) == 0: - return encoded - - global _cache - cached = ( - isinstance(encoded, bytes) and - _cache.encoded == encoded and - _cache.encoding == encoding and - _cache.errors == errors - ) - if cached: - return _cache.decoded - try: - try: - decoded = custom_decode[encoding](encoded) - except KeyError: - decoded = codecs.decode(encoded, encoding, errors) - if encoding in ("gzip", "deflate", "br"): - _cache = CachedDecode(encoded, encoding, errors, decoded) - return decoded - except TypeError: - raise - except Exception as e: - raise ValueError("{} when decoding {} with {}: {}".format( - type(e).__name__, - repr(encoded)[:10], - repr(encoding), - repr(e), - )) - - -def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: - """ - Encode the given input object - - Returns: - The encoded value - - Raises: - ValueError, if encoding fails. 
- """ - if len(decoded) == 0: - return decoded - - global _cache - cached = ( - isinstance(decoded, bytes) and - _cache.decoded == decoded and - _cache.encoding == encoding and - _cache.errors == errors - ) - if cached: - return _cache.encoded - try: - try: - value = decoded - if isinstance(value, str): - value = decoded.encode() - encoded = custom_encode[encoding](value) - except KeyError: - encoded = codecs.encode(decoded, encoding, errors) - if encoding in ("gzip", "deflate", "br"): - _cache = CachedDecode(encoded, encoding, errors, decoded) - return encoded - except TypeError: - raise - except Exception as e: - raise ValueError("{} when encoding {} with {}: {}".format( - type(e).__name__, - repr(decoded)[:10], - repr(encoding), - repr(e), - )) - - -def identity(content): - """ - Returns content unchanged. Identity is the default value of - Accept-Encoding headers. - """ - return content - - -def decode_gzip(content): - gfile = gzip.GzipFile(fileobj=BytesIO(content)) - return gfile.read() - - -def encode_gzip(content): - s = BytesIO() - gf = gzip.GzipFile(fileobj=s, mode='wb') - gf.write(content) - gf.close() - return s.getvalue() - - -def decode_brotli(content): - return brotli.decompress(content) - - -def encode_brotli(content): - return brotli.compress(content) - - -def decode_deflate(content): - """ - Returns decompressed data for DEFLATE. Some servers may respond with - compressed data without a zlib header or checksum. An undocumented - feature of zlib permits the lenient decompression of data missing both - values. - - http://bugs.python.org/issue5784 - """ - try: - return zlib.decompress(content) - except zlib.error: - return zlib.decompress(content, -15) - - -def encode_deflate(content): - """ - Returns compressed content, always including zlib header and checksum. - """ - return zlib.compress(content) - - -custom_decode = { - "none": identity, - "identity": identity, - "gzip": decode_gzip, - "deflate": decode_deflate, - "br": decode_brotli, -} -custom_encode = { - "none": identity, - "identity": identity, - "gzip": encode_gzip, - "deflate": encode_deflate, - "br": encode_brotli, -} - -__all__ = ["encode", "decode"] diff --git a/netlib/http/encoding.py b/netlib/http/encoding.py new file mode 100644 index 00000000..e123a033 --- /dev/null +++ b/netlib/http/encoding.py @@ -0,0 +1,175 @@ +""" +Utility functions for decoding response bodies. +""" + +import codecs +import collections +from io import BytesIO + +import gzip +import zlib +import brotli + +from typing import Union + + +# We have a shared single-element cache for encoding and decoding. +# This is quite useful in practice, e.g. +# flow.request.content = flow.request.content.replace(b"foo", b"bar") +# does not require an .encode() call if content does not contain b"foo" +CachedDecode = collections.namedtuple("CachedDecode", "encoded encoding errors decoded") +_cache = CachedDecode(None, None, None, None) + + +def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: + """ + Decode the given input object + + Returns: + The decoded value + + Raises: + ValueError, if decoding fails. 
+ """ + if len(encoded) == 0: + return encoded + + global _cache + cached = ( + isinstance(encoded, bytes) and + _cache.encoded == encoded and + _cache.encoding == encoding and + _cache.errors == errors + ) + if cached: + return _cache.decoded + try: + try: + decoded = custom_decode[encoding](encoded) + except KeyError: + decoded = codecs.decode(encoded, encoding, errors) + if encoding in ("gzip", "deflate", "br"): + _cache = CachedDecode(encoded, encoding, errors, decoded) + return decoded + except TypeError: + raise + except Exception as e: + raise ValueError("{} when decoding {} with {}: {}".format( + type(e).__name__, + repr(encoded)[:10], + repr(encoding), + repr(e), + )) + + +def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: + """ + Encode the given input object + + Returns: + The encoded value + + Raises: + ValueError, if encoding fails. + """ + if len(decoded) == 0: + return decoded + + global _cache + cached = ( + isinstance(decoded, bytes) and + _cache.decoded == decoded and + _cache.encoding == encoding and + _cache.errors == errors + ) + if cached: + return _cache.encoded + try: + try: + value = decoded + if isinstance(value, str): + value = decoded.encode() + encoded = custom_encode[encoding](value) + except KeyError: + encoded = codecs.encode(decoded, encoding, errors) + if encoding in ("gzip", "deflate", "br"): + _cache = CachedDecode(encoded, encoding, errors, decoded) + return encoded + except TypeError: + raise + except Exception as e: + raise ValueError("{} when encoding {} with {}: {}".format( + type(e).__name__, + repr(decoded)[:10], + repr(encoding), + repr(e), + )) + + +def identity(content): + """ + Returns content unchanged. Identity is the default value of + Accept-Encoding headers. + """ + return content + + +def decode_gzip(content): + gfile = gzip.GzipFile(fileobj=BytesIO(content)) + return gfile.read() + + +def encode_gzip(content): + s = BytesIO() + gf = gzip.GzipFile(fileobj=s, mode='wb') + gf.write(content) + gf.close() + return s.getvalue() + + +def decode_brotli(content): + return brotli.decompress(content) + + +def encode_brotli(content): + return brotli.compress(content) + + +def decode_deflate(content): + """ + Returns decompressed data for DEFLATE. Some servers may respond with + compressed data without a zlib header or checksum. An undocumented + feature of zlib permits the lenient decompression of data missing both + values. + + http://bugs.python.org/issue5784 + """ + try: + return zlib.decompress(content) + except zlib.error: + return zlib.decompress(content, -15) + + +def encode_deflate(content): + """ + Returns compressed content, always including zlib header and checksum. 
+ """ + return zlib.compress(content) + + +custom_decode = { + "none": identity, + "identity": identity, + "gzip": decode_gzip, + "deflate": decode_deflate, + "br": decode_brotli, +} +custom_encode = { + "none": identity, + "identity": identity, + "gzip": encode_gzip, + "deflate": encode_deflate, + "br": encode_brotli, +} + +__all__ = ["encode", "decode"] diff --git a/netlib/http/message.py b/netlib/http/message.py index 62c3aa38..772a124e 100644 --- a/netlib/http/message.py +++ b/netlib/http/message.py @@ -3,7 +3,7 @@ import warnings from typing import Optional from mitmproxy.utils import strutils -from netlib import encoding +from netlib.http import encoding from mitmproxy.types import serializable from netlib.http import headers diff --git a/test/netlib/http/test_encoding.py b/test/netlib/http/test_encoding.py new file mode 100644 index 00000000..681f9bfc --- /dev/null +++ b/test/netlib/http/test_encoding.py @@ -0,0 +1,73 @@ +import mock +import pytest + +from netlib.http import encoding +from netlib import tutils + + +@pytest.mark.parametrize("encoder", [ + 'identity', + 'none', +]) +def test_identity(encoder): + assert b"string" == encoding.decode(b"string", encoder) + assert b"string" == encoding.encode(b"string", encoder) + with tutils.raises(ValueError): + encoding.encode(b"string", "nonexistent encoding") + + +@pytest.mark.parametrize("encoder", [ + 'gzip', + 'br', + 'deflate', +]) +def test_encoders(encoder): + assert "" == encoding.decode("", encoder) + assert b"" == encoding.decode(b"", encoder) + + assert "string" == encoding.decode( + encoding.encode( + "string", + encoder + ), + encoder + ) + assert b"string" == encoding.decode( + encoding.encode( + b"string", + encoder + ), + encoder + ) + + with tutils.raises(ValueError): + encoding.decode(b"foobar", encoder) + + +def test_cache(): + decode_gzip = mock.MagicMock() + decode_gzip.return_value = b"decoded" + encode_gzip = mock.MagicMock() + encode_gzip.return_value = b"encoded" + + with mock.patch.dict(encoding.custom_decode, gzip=decode_gzip): + with mock.patch.dict(encoding.custom_encode, gzip=encode_gzip): + assert encoding.decode(b"encoded", "gzip") == b"decoded" + assert decode_gzip.call_count == 1 + + # should be cached + assert encoding.decode(b"encoded", "gzip") == b"decoded" + assert decode_gzip.call_count == 1 + + # the other way around as well + assert encoding.encode(b"decoded", "gzip") == b"encoded" + assert encode_gzip.call_count == 0 + + # different encoding + decode_gzip.return_value = b"bar" + assert encoding.encode(b"decoded", "deflate") != b"decoded" + assert encode_gzip.call_count == 0 + + # This is not in the cache anymore + assert encoding.encode(b"decoded", "gzip") == b"encoded" + assert encode_gzip.call_count == 1 diff --git a/test/netlib/test_encoding.py b/test/netlib/test_encoding.py deleted file mode 100644 index e1175ef0..00000000 --- a/test/netlib/test_encoding.py +++ /dev/null @@ -1,72 +0,0 @@ -import mock -import pytest - -from netlib import encoding, tutils - - -@pytest.mark.parametrize("encoder", [ - 'identity', - 'none', -]) -def test_identity(encoder): - assert b"string" == encoding.decode(b"string", encoder) - assert b"string" == encoding.encode(b"string", encoder) - with tutils.raises(ValueError): - encoding.encode(b"string", "nonexistent encoding") - - -@pytest.mark.parametrize("encoder", [ - 'gzip', - 'br', - 'deflate', -]) -def test_encoders(encoder): - assert "" == encoding.decode("", encoder) - assert b"" == encoding.decode(b"", encoder) - - assert "string" == encoding.decode( - 
encoding.encode( - "string", - encoder - ), - encoder - ) - assert b"string" == encoding.decode( - encoding.encode( - b"string", - encoder - ), - encoder - ) - - with tutils.raises(ValueError): - encoding.decode(b"foobar", encoder) - - -def test_cache(): - decode_gzip = mock.MagicMock() - decode_gzip.return_value = b"decoded" - encode_gzip = mock.MagicMock() - encode_gzip.return_value = b"encoded" - - with mock.patch.dict(encoding.custom_decode, gzip=decode_gzip): - with mock.patch.dict(encoding.custom_encode, gzip=encode_gzip): - assert encoding.decode(b"encoded", "gzip") == b"decoded" - assert decode_gzip.call_count == 1 - - # should be cached - assert encoding.decode(b"encoded", "gzip") == b"decoded" - assert decode_gzip.call_count == 1 - - # the other way around as well - assert encoding.encode(b"decoded", "gzip") == b"encoded" - assert encode_gzip.call_count == 0 - - # different encoding - decode_gzip.return_value = b"bar" - assert encoding.encode(b"decoded", "deflate") != b"decoded" - assert encode_gzip.call_count == 0 - - # This is not in the cache anymore - assert encoding.encode(b"decoded", "gzip") == b"encoded" - assert encode_gzip.call_count == 1 -- cgit v1.2.3 From 01a449b5cb1106a867a6b73cd4877e9b2ec68171 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:27:05 +1300 Subject: netlib.exceptions.* -> mitmproxy.exceptions --- mitmproxy/addons/streambodies.py | 2 +- mitmproxy/exceptions.py | 62 +++++++++++++++++++++++++++++- mitmproxy/proxy/modes/socks_proxy.py | 3 +- mitmproxy/proxy/protocol/base.py | 3 +- mitmproxy/proxy/protocol/http.py | 19 +++++---- mitmproxy/proxy/protocol/http2.py | 1 - mitmproxy/proxy/protocol/http_replay.py | 3 +- mitmproxy/proxy/protocol/rawtcp.py | 3 +- mitmproxy/proxy/protocol/tls.py | 7 ++-- mitmproxy/proxy/protocol/websockets.py | 3 +- mitmproxy/proxy/root_context.py | 3 +- mitmproxy/proxy/server.py | 3 +- netlib/exceptions.py | 59 ---------------------------- netlib/http/http1/assemble.py | 2 +- netlib/http/http1/read.py | 2 +- netlib/http/http2/framereader.py | 4 +- netlib/tcp.py | 2 +- pathod/language/writer.py | 4 +- pathod/pathoc.py | 2 +- pathod/pathod.py | 2 +- pathod/protocols/http.py | 4 +- pathod/protocols/websockets.py | 6 +-- test/mitmproxy/protocol/test_http2.py | 28 +++++++------- test/mitmproxy/protocol/test_websockets.py | 11 +++--- test/mitmproxy/test_proxy.py | 4 +- test/mitmproxy/test_server.py | 2 +- test/mitmproxy/tservers.py | 7 ++-- test/netlib/http/http1/test_assemble.py | 6 +-- test/netlib/http/http1/test_read.py | 55 +++++++++++++------------- test/netlib/test_tcp.py | 2 +- test/pathod/test_log.py | 4 +- test/pathod/test_pathoc.py | 4 +- test/pathod/test_pathod.py | 10 ++--- test/pathod/test_protocols_http2.py | 4 +- 34 files changed, 164 insertions(+), 172 deletions(-) delete mode 100644 netlib/exceptions.py diff --git a/mitmproxy/addons/streambodies.py b/mitmproxy/addons/streambodies.py index b3e5d6b2..bd8958b0 100644 --- a/mitmproxy/addons/streambodies.py +++ b/mitmproxy/addons/streambodies.py @@ -1,5 +1,5 @@ from netlib.http import http1 -from netlib import exceptions +from mitmproxy import exceptions from mitmproxy import ctx diff --git a/mitmproxy/exceptions.py b/mitmproxy/exceptions.py index 64cc457a..82022d31 100644 --- a/mitmproxy/exceptions.py +++ b/mitmproxy/exceptions.py @@ -1,7 +1,10 @@ """ We try to be very hygienic regarding the exceptions we throw: -Every Exception mitmproxy raises shall be a subclass of ProxyException. 
+- Every exception that might be externally visible to users shall be a subclass + of ProxyException.p +- Every exception in the base net module shall be a subclass + of NetlibException, and will not be propagated directly to users. See also: http://lucumr.pocoo.org/2014/10/16/on-error-handling/ """ @@ -100,3 +103,60 @@ class AddonError(Exception): class AddonHalt(Exception): pass + + +""" + Every net Exception raised shall be a subclass of NetlibException. +""" + + +class NetlibException(Exception): + """ + Base class for all exceptions thrown by netlib. + """ + def __init__(self, message=None): + super().__init__(message) + + +class Disconnect: + """Immediate EOF""" + + +class HttpException(NetlibException): + pass + + +class HttpReadDisconnect(HttpException, Disconnect): + pass + + +class HttpSyntaxException(HttpException): + pass + + +class TcpException(NetlibException): + pass + + +class TcpDisconnect(TcpException, Disconnect): + pass + + +class TcpReadIncomplete(TcpException): + pass + + +class TcpTimeout(TcpException): + pass + + +class TlsException(NetlibException): + pass + + +class InvalidCertificateException(TlsException): + pass + + +class Timeout(TcpException): + pass diff --git a/mitmproxy/proxy/modes/socks_proxy.py b/mitmproxy/proxy/modes/socks_proxy.py index 4f8473bd..175453b0 100644 --- a/mitmproxy/proxy/modes/socks_proxy.py +++ b/mitmproxy/proxy/modes/socks_proxy.py @@ -1,4 +1,3 @@ -import netlib.exceptions from mitmproxy import exceptions from mitmproxy.proxy import protocol from netlib import socks @@ -48,7 +47,7 @@ class Socks5Proxy(protocol.Layer, protocol.ServerConnectionMixin): connect_reply.to_file(self.client_conn.wfile) self.client_conn.wfile.flush() - except (socks.SocksError, netlib.exceptions.TcpException) as e: + except (socks.SocksError, exceptions.TcpException) as e: raise exceptions.Socks5ProtocolException("SOCKS5 mode failure: %s" % repr(e)) self.server_conn.address = connect_request.addr diff --git a/mitmproxy/proxy/protocol/base.py b/mitmproxy/proxy/protocol/base.py index 00d50721..97e90051 100644 --- a/mitmproxy/proxy/protocol/base.py +++ b/mitmproxy/proxy/protocol/base.py @@ -1,4 +1,3 @@ -import netlib.exceptions from mitmproxy import exceptions from mitmproxy import connections @@ -177,7 +176,7 @@ class ServerConnectionMixin: self.channel.ask("serverconnect", self.server_conn) try: self.server_conn.connect() - except netlib.exceptions.TcpException as e: + except exceptions.TcpException as e: raise exceptions.ProtocolException( "Server connection to {} failed: {}".format( repr(self.server_conn.address), str(e) diff --git a/mitmproxy/proxy/protocol/http.py b/mitmproxy/proxy/protocol/http.py index ec018f89..6bc06502 100644 --- a/mitmproxy/proxy/protocol/http.py +++ b/mitmproxy/proxy/protocol/http.py @@ -1,5 +1,4 @@ import h2.exceptions -import netlib.exceptions import time import traceback from mitmproxy import exceptions @@ -46,7 +45,7 @@ class _HttpTransmissionLayer(base.Layer): def send_response(self, response): if response.data.content is None: - raise netlib.exceptions.HttpException("Cannot assemble flow with missing content") + raise exceptions.HttpException("Cannot assemble flow with missing content") self.send_response_headers(response) self.send_response_body(response, [response.data.content]) @@ -146,10 +145,10 @@ class HttpLayer(base.Layer): request = self.get_request_from_client(f) # Make sure that the incoming request matches our expectations self.validate_request(request) - except netlib.exceptions.HttpReadDisconnect: + except 
exceptions.HttpReadDisconnect: # don't throw an error for disconnects that happen before/between requests. return - except netlib.exceptions.HttpException as e: + except exceptions.HttpException as e: # We optimistically guess there might be an HTTP client on the # other end self.send_error_response(400, repr(e)) @@ -173,7 +172,7 @@ class HttpLayer(base.Layer): if self.mode == "regular" and request.first_line_format == "authority": self.handle_regular_mode_connect(request) return - except (exceptions.ProtocolException, netlib.exceptions.NetlibException) as e: + except (exceptions.ProtocolException, exceptions.NetlibException) as e: # HTTPS tasting means that ordinary errors like resolution and # connection errors can happen here. self.send_error_response(502, repr(e)) @@ -224,7 +223,7 @@ class HttpLayer(base.Layer): self.handle_upstream_mode_connect(f.request.copy()) return - except (exceptions.ProtocolException, netlib.exceptions.NetlibException) as e: + except (exceptions.ProtocolException, exceptions.NetlibException) as e: self.send_error_response(502, repr(e)) if not f.response: f.error = flow.Error(str(e)) @@ -254,7 +253,7 @@ class HttpLayer(base.Layer): try: response = http.make_error_response(code, message, headers) self.send_response(response) - except (netlib.exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException): + except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException): self.log(traceback.format_exc(), "debug") def change_upstream_proxy_server(self, address): @@ -300,7 +299,7 @@ class HttpLayer(base.Layer): try: get_response() - except netlib.exceptions.NetlibException as e: + except exceptions.NetlibException as e: self.log( "server communication error: %s" % repr(e), level="debug" @@ -396,7 +395,7 @@ class HttpLayer(base.Layer): def validate_request(self, request): if request.first_line_format == "absolute" and request.scheme != "http": - raise netlib.exceptions.HttpException("Invalid request scheme: %s" % request.scheme) + raise exceptions.HttpException("Invalid request scheme: %s" % request.scheme) expected_request_forms = { "regular": ("authority", "absolute",), @@ -409,7 +408,7 @@ class HttpLayer(base.Layer): err_message = "Invalid HTTP request form (expected: %s, got: %s)" % ( " or ".join(allowed_request_forms), request.first_line_format ) - raise netlib.exceptions.HttpException(err_message) + raise exceptions.HttpException(err_message) if self.mode == "regular" and request.first_line_format == "absolute": request.first_line_format = "relative" diff --git a/mitmproxy/proxy/protocol/http2.py b/mitmproxy/proxy/protocol/http2.py index 93ac51bc..d0b0720d 100644 --- a/mitmproxy/proxy/protocol/http2.py +++ b/mitmproxy/proxy/protocol/http2.py @@ -8,7 +8,6 @@ from h2 import connection from h2 import events import queue -import netlib.exceptions from mitmproxy import exceptions from mitmproxy import http from mitmproxy.proxy.protocol import base diff --git a/mitmproxy/proxy/protocol/http_replay.py b/mitmproxy/proxy/protocol/http_replay.py index eef5a109..7efb0782 100644 --- a/mitmproxy/proxy/protocol/http_replay.py +++ b/mitmproxy/proxy/protocol/http_replay.py @@ -1,6 +1,5 @@ import traceback -import netlib.exceptions from mitmproxy import log from mitmproxy import controller from mitmproxy import exceptions @@ -97,7 +96,7 @@ class RequestReplayThread(basethread.BaseThread): response_reply = self.channel.ask("response", self.f) if response_reply == exceptions.Kill: raise exceptions.Kill() - except 
(exceptions.ReplayException, netlib.exceptions.NetlibException) as e: + except (exceptions.ReplayException, exceptions.NetlibException) as e: self.f.error = flow.Error(str(e)) if self.channel: self.channel.ask("error", self.f) diff --git a/mitmproxy/proxy/protocol/rawtcp.py b/mitmproxy/proxy/protocol/rawtcp.py index 513b90b3..d117fb41 100644 --- a/mitmproxy/proxy/protocol/rawtcp.py +++ b/mitmproxy/proxy/protocol/rawtcp.py @@ -2,7 +2,6 @@ import socket from OpenSSL import SSL -import netlib.exceptions import netlib.tcp from mitmproxy import tcp from mitmproxy import flow @@ -56,7 +55,7 @@ class RawTCPLayer(base.Layer): self.channel.ask("tcp_message", f) dst.sendall(tcp_message.content) - except (socket.error, netlib.exceptions.TcpException, SSL.Error) as e: + except (socket.error, exceptions.TcpException, SSL.Error) as e: if not self.ignore: f.error = flow.Error("TCP connection closed unexpectedly: {}".format(repr(e))) self.channel.tell("tcp_error", f) diff --git a/mitmproxy/proxy/protocol/tls.py b/mitmproxy/proxy/protocol/tls.py index 7b6b506c..b106aa0c 100644 --- a/mitmproxy/proxy/protocol/tls.py +++ b/mitmproxy/proxy/protocol/tls.py @@ -3,7 +3,6 @@ from typing import Optional # noqa from typing import Union import construct -import netlib.exceptions from mitmproxy import exceptions from mitmproxy.contrib.tls import _constructs from mitmproxy.proxy.protocol import base @@ -484,7 +483,7 @@ class TlsLayer(base.Layer): # The reason for this might be difficult to find, so we try to peek here to see if it # raises ann error. self.client_conn.rfile.peek(1) - except netlib.exceptions.TlsException as e: + except exceptions.TlsException as e: raise exceptions.ClientHandshakeException( "Cannot establish TLS with client (sni: {sni}): {e}".format( sni=self._client_hello.sni, e=repr(e) @@ -528,9 +527,9 @@ class TlsLayer(base.Layer): if tls_cert_err is not None: self.log(str(tls_cert_err), "warn") self.log("Ignoring server verification error, continuing with connection", "warn") - except netlib.exceptions.InvalidCertificateException as e: + except exceptions.InvalidCertificateException as e: raise exceptions.InvalidServerCertificate(str(e)) - except netlib.exceptions.TlsException as e: + except exceptions.TlsException as e: raise exceptions.TlsProtocolException( "Cannot establish TLS with {address} (sni: {sni}): {e}".format( address=repr(self.server_conn.address), diff --git a/mitmproxy/proxy/protocol/websockets.py b/mitmproxy/proxy/protocol/websockets.py index 636748a1..f84a1dc5 100644 --- a/mitmproxy/proxy/protocol/websockets.py +++ b/mitmproxy/proxy/protocol/websockets.py @@ -1,4 +1,3 @@ -import netlib.exceptions import socket import struct from OpenSSL import SSL @@ -105,7 +104,7 @@ class WebSocketsLayer(base.Layer): if not self._handle_frame(frame, source_conn, other_conn, is_server): return - except (socket.error, netlib.exceptions.TcpException, SSL.Error) as e: + except (socket.error, exceptions.TcpException, SSL.Error) as e: self.log("WebSockets connection closed unexpectedly by {}: {}".format( "server" if is_server else "client", repr(e)), "info") except Exception as e: # pragma: no cover diff --git a/mitmproxy/proxy/root_context.py b/mitmproxy/proxy/root_context.py index 6a99d9cf..eacf7e0b 100644 --- a/mitmproxy/proxy/root_context.py +++ b/mitmproxy/proxy/root_context.py @@ -1,4 +1,3 @@ -import netlib.exceptions from mitmproxy import log from mitmproxy import exceptions from mitmproxy.proxy import protocol @@ -43,7 +42,7 @@ class RootContext: def _next_layer(self, top_layer): try: d = 
top_layer.client_conn.rfile.peek(3) - except netlib.exceptions.TcpException as e: + except exceptions.TcpException as e: raise exceptions.ProtocolException(str(e)) client_tls = protocol.is_tls_record_magic(d) diff --git a/mitmproxy/proxy/server.py b/mitmproxy/proxy/server.py index b876f9ce..8472660a 100644 --- a/mitmproxy/proxy/server.py +++ b/mitmproxy/proxy/server.py @@ -2,7 +2,6 @@ import socket import sys import traceback -import netlib.exceptions from mitmproxy import exceptions from mitmproxy import connections from mitmproxy import http @@ -138,7 +137,7 @@ class ConnectionHandler: try: error_response = http.make_error_response(502, repr(e)) self.client_conn.send(http1.assemble_response(error_response)) - except netlib.exceptions.TcpException: + except exceptions.TcpException: pass except Exception: self.log(traceback.format_exc(), "error") diff --git a/netlib/exceptions.py b/netlib/exceptions.py deleted file mode 100644 index d0b15d27..00000000 --- a/netlib/exceptions.py +++ /dev/null @@ -1,59 +0,0 @@ -""" -We try to be very hygienic regarding the exceptions we throw: -Every Exception netlib raises shall be a subclass of NetlibException. - - -See also: http://lucumr.pocoo.org/2014/10/16/on-error-handling/ -""" - - -class NetlibException(Exception): - """ - Base class for all exceptions thrown by netlib. - """ - def __init__(self, message=None): - super().__init__(message) - - -class Disconnect: - """Immediate EOF""" - - -class HttpException(NetlibException): - pass - - -class HttpReadDisconnect(HttpException, Disconnect): - pass - - -class HttpSyntaxException(HttpException): - pass - - -class TcpException(NetlibException): - pass - - -class TcpDisconnect(TcpException, Disconnect): - pass - - -class TcpReadIncomplete(TcpException): - pass - - -class TcpTimeout(TcpException): - pass - - -class TlsException(NetlibException): - pass - - -class InvalidCertificateException(TlsException): - pass - - -class Timeout(TcpException): - pass diff --git a/netlib/http/http1/assemble.py b/netlib/http/http1/assemble.py index 3d65da34..e0a91ad8 100644 --- a/netlib/http/http1/assemble.py +++ b/netlib/http/http1/assemble.py @@ -1,5 +1,5 @@ import netlib.http.url -from netlib import exceptions +from mitmproxy import exceptions def assemble_request(request): diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py index 89b73c5a..e6b22863 100644 --- a/netlib/http/http1/read.py +++ b/netlib/http/http1/read.py @@ -7,7 +7,7 @@ from netlib.http import response from netlib.http import headers from netlib.http import url from netlib import check -from netlib import exceptions +from mitmproxy import exceptions def get_header_tokens(headers, key): diff --git a/netlib/http/http2/framereader.py b/netlib/http/http2/framereader.py index 8b7cfffb..6a164919 100644 --- a/netlib/http/http2/framereader.py +++ b/netlib/http/http2/framereader.py @@ -1,7 +1,7 @@ import codecs import hyperframe -from ...exceptions import HttpException +from mitmproxy import exceptions def read_raw_frame(rfile): @@ -9,7 +9,7 @@ def read_raw_frame(rfile): length = int(codecs.encode(header[:3], 'hex_codec'), 16) if length == 4740180: - raise HttpException("Length field looks more like HTTP/1.1:\n{}".format(rfile.read(-1))) + raise exceptions.HttpException("Length field looks more like HTTP/1.1:\n{}".format(rfile.read(-1))) body = rfile.safe_read(length) return [header, body] diff --git a/netlib/tcp.py b/netlib/tcp.py index 6e323957..ac368a9c 100644 --- a/netlib/tcp.py +++ b/netlib/tcp.py @@ -20,7 +20,7 @@ from OpenSSL import SSL from 
mitmproxy import certs from mitmproxy.utils import version_check from mitmproxy.types import serializable -from netlib import exceptions +from mitmproxy import exceptions from mitmproxy.types import basethread # This is a rather hackish way to make sure that diff --git a/pathod/language/writer.py b/pathod/language/writer.py index b8081989..ac0f44da 100644 --- a/pathod/language/writer.py +++ b/pathod/language/writer.py @@ -1,5 +1,5 @@ import time -from netlib.exceptions import TcpDisconnect +from mitmproxy import exceptions BLOCKSIZE = 1024 # It's not clear what the upper limit for time.sleep is. It's lower than the @@ -62,5 +62,5 @@ def write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE): return True elif a[1] == "inject": send_chunk(fp, a[2], blocksize, 0, len(a[2])) - except TcpDisconnect: # pragma: no cover + except exceptions.TcpDisconnect: # pragma: no cover return True diff --git a/pathod/pathoc.py b/pathod/pathoc.py index 39dedf05..b67f6ee2 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -17,7 +17,7 @@ from netlib import tcp from mitmproxy import certs from netlib import websockets from netlib import socks -from netlib import exceptions +from mitmproxy import exceptions from netlib.http import http1 from mitmproxy.types import basethread diff --git a/pathod/pathod.py b/pathod/pathod.py index 5d951350..746998c5 100644 --- a/pathod/pathod.py +++ b/pathod/pathod.py @@ -10,7 +10,7 @@ from netlib import websockets from mitmproxy import version import urllib -from netlib import exceptions +from mitmproxy import exceptions from pathod import language from pathod import utils diff --git a/pathod/protocols/http.py b/pathod/protocols/http.py index 0822e864..a20a58a1 100644 --- a/pathod/protocols/http.py +++ b/pathod/protocols/http.py @@ -1,5 +1,5 @@ from mitmproxy import version -from netlib.exceptions import TlsException +from mitmproxy import exceptions from netlib.http import http1 from .. import language @@ -37,7 +37,7 @@ class HTTPProtocol: options=self.pathod_handler.server.ssloptions.ssl_options, alpn_select=self.pathod_handler.server.ssloptions.alpn_select, ) - except TlsException as v: + except exceptions.TlsException as v: s = str(v) lg(s) return None, dict(type="error", msg=s) diff --git a/pathod/protocols/websockets.py b/pathod/protocols/websockets.py index df83461a..585a48e3 100644 --- a/pathod/protocols/websockets.py +++ b/pathod/protocols/websockets.py @@ -1,8 +1,8 @@ import time from netlib import websockets -from .. import language -from netlib.exceptions import NetlibException +from pathod import language +from mitmproxy import exceptions class WebsocketsProtocol: @@ -16,7 +16,7 @@ class WebsocketsProtocol: started = time.time() try: frm = websockets.Frame.from_file(self.pathod_handler.rfile) - except NetlibException as e: + except exceptions.NetlibException as e: lg("Error reading websocket frame: %s" % e) return None, None ended = time.time() diff --git a/test/mitmproxy/protocol/test_http2.py b/test/mitmproxy/protocol/test_http2.py index a2efdc47..b624489f 100644 --- a/test/mitmproxy/protocol/test_http2.py +++ b/test/mitmproxy/protocol/test_http2.py @@ -13,7 +13,7 @@ from mitmproxy.proxy.config import ProxyConfig import netlib from ...netlib import tservers as netlib_tservers -from netlib.exceptions import HttpException +from mitmproxy import exceptions from netlib.http import http1, http2 from .. 
import tservers @@ -61,10 +61,10 @@ class _Http2ServerBase(netlib_tservers.ServerTestBase): try: raw = b''.join(http2.read_raw_frame(self.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False - except netlib.exceptions.TcpDisconnect: + except exceptions.TcpDisconnect: break except: print(traceback.format_exc()) @@ -77,7 +77,7 @@ class _Http2ServerBase(netlib_tservers.ServerTestBase): if not self.server.handle_server_event(event, h2_conn, self.rfile, self.wfile): done = True break - except netlib.exceptions.TcpDisconnect: + except exceptions.TcpDisconnect: done = True except: done = True @@ -252,7 +252,7 @@ class TestSimple(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -329,7 +329,7 @@ class TestRequestWithPriority(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -368,7 +368,7 @@ class TestRequestWithPriority(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -440,7 +440,7 @@ class TestPriority(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -517,7 +517,7 @@ class TestPriorityWithExistingStream(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -568,7 +568,7 @@ class TestStreamResetFromServer(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -618,7 +618,7 @@ class TestBodySizeLimit(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -703,7 +703,7 @@ class TestPushPromise(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False except: @@ -756,7 +756,7 @@ class TestPushPromise(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) events = h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False @@ -816,7 +816,7 @@ class TestConnectionLost(_Http2Test): try: raw = b''.join(http2.read_raw_frame(client.rfile)) h2_conn.receive_data(raw) - except HttpException: + except exceptions.HttpException: print(traceback.format_exc()) assert False except: diff --git a/test/mitmproxy/protocol/test_websockets.py b/test/mitmproxy/protocol/test_websockets.py index ef520d87..f22e95d5 100644 --- a/test/mitmproxy/protocol/test_websockets.py +++ b/test/mitmproxy/protocol/test_websockets.py @@ -4,6 +4,7 @@ import tempfile import traceback from mitmproxy import options +from mitmproxy import exceptions 
from mitmproxy.proxy.config import ProxyConfig import netlib @@ -249,7 +250,7 @@ class TestClose(_WebSocketsTest): wfile.write(bytes(frame)) wfile.flush() - with pytest.raises(netlib.exceptions.TcpDisconnect): + with pytest.raises(exceptions.TcpDisconnect): websockets.Frame.from_file(rfile) def test_close(self): @@ -258,7 +259,7 @@ class TestClose(_WebSocketsTest): client.wfile.write(bytes(websockets.Frame(fin=1, opcode=websockets.OPCODE.CLOSE))) client.wfile.flush() - with pytest.raises(netlib.exceptions.TcpDisconnect): + with pytest.raises(exceptions.TcpDisconnect): websockets.Frame.from_file(client.rfile) def test_close_payload_1(self): @@ -267,7 +268,7 @@ class TestClose(_WebSocketsTest): client.wfile.write(bytes(websockets.Frame(fin=1, opcode=websockets.OPCODE.CLOSE, payload=b'\00\42'))) client.wfile.flush() - with pytest.raises(netlib.exceptions.TcpDisconnect): + with pytest.raises(exceptions.TcpDisconnect): websockets.Frame.from_file(client.rfile) def test_close_payload_2(self): @@ -276,7 +277,7 @@ class TestClose(_WebSocketsTest): client.wfile.write(bytes(websockets.Frame(fin=1, opcode=websockets.OPCODE.CLOSE, payload=b'\00\42foobar'))) client.wfile.flush() - with pytest.raises(netlib.exceptions.TcpDisconnect): + with pytest.raises(exceptions.TcpDisconnect): websockets.Frame.from_file(client.rfile) @@ -290,7 +291,7 @@ class TestInvalidFrame(_WebSocketsTest): def test_invalid_frame(self): client = self._setup_connection() - # with pytest.raises(netlib.exceptions.TcpDisconnect): + # with pytest.raises(exceptions.TcpDisconnect): frame = websockets.Frame.from_file(client.rfile) assert frame.header.opcode == 15 assert frame.payload == b'foobar' diff --git a/test/mitmproxy/test_proxy.py b/test/mitmproxy/test_proxy.py index 7d401184..c0d978d2 100644 --- a/test/mitmproxy/test_proxy.py +++ b/test/mitmproxy/test_proxy.py @@ -8,7 +8,7 @@ from mitmproxy.proxy import ProxyConfig from mitmproxy import connections from mitmproxy.proxy.server import DummyServer, ProxyServer, ConnectionHandler from mitmproxy.proxy import config -from netlib.exceptions import TcpDisconnect +from mitmproxy import exceptions from pathod import test from netlib.http import http1 from . 
import tutils @@ -40,7 +40,7 @@ class TestServerConnection: sc.connect() sc.connection = mock.Mock() sc.connection.recv = mock.Mock(return_value=False) - sc.connection.flush = mock.Mock(side_effect=TcpDisconnect) + sc.connection.flush = mock.Mock(side_effect=exceptions.TcpDisconnect) sc.finish() self.d.shutdown() diff --git a/test/mitmproxy/test_server.py b/test/mitmproxy/test_server.py index cadc67a8..79fd6f86 100644 --- a/test/mitmproxy/test_server.py +++ b/test/mitmproxy/test_server.py @@ -12,7 +12,7 @@ import netlib.http from netlib import tcp from netlib import socks from mitmproxy import certs -from netlib import exceptions +from mitmproxy import exceptions from netlib.http import authentication from netlib.http import http1 from netlib.tcp import Address diff --git a/test/mitmproxy/tservers.py b/test/mitmproxy/tservers.py index 1243bca0..e07102e1 100644 --- a/test/mitmproxy/tservers.py +++ b/test/mitmproxy/tservers.py @@ -10,8 +10,9 @@ from mitmproxy import master from mitmproxy.addons import state import pathod.test import pathod.pathoc -from mitmproxy import controller, options -import netlib.exceptions +from mitmproxy import controller +from mitmproxy import options +from mitmproxy import exceptions class TestMaster(master.Master): @@ -98,7 +99,7 @@ class ProxyTestBase: def teardown(self): try: self.server.wait_for_silence() - except netlib.exceptions.Timeout: + except exceptions.Timeout: # FIXME: Track down the Windows sync issues if sys.platform != "win32": raise diff --git a/test/netlib/http/http1/test_assemble.py b/test/netlib/http/http1/test_assemble.py index 5d7e007e..d5a5e5fb 100644 --- a/test/netlib/http/http1/test_assemble.py +++ b/test/netlib/http/http1/test_assemble.py @@ -1,4 +1,4 @@ -from netlib.exceptions import HttpException +from mitmproxy import exceptions from netlib.http import Headers from netlib.http.http1.assemble import ( assemble_request, assemble_request_head, assemble_response, @@ -18,7 +18,7 @@ def test_assemble_request(): b"content" ) - with raises(HttpException): + with raises(exceptions.HttpException): assemble_request(treq(content=None)) @@ -39,7 +39,7 @@ def test_assemble_response(): b"message" ) - with raises(HttpException): + with raises(exceptions.HttpException): assemble_response(tresp(content=None)) diff --git a/test/netlib/http/http1/test_read.py b/test/netlib/http/http1/test_read.py index f25cd3e2..9777e2e2 100644 --- a/test/netlib/http/http1/test_read.py +++ b/test/netlib/http/http1/test_read.py @@ -2,7 +2,7 @@ from io import BytesIO from mock import Mock import pytest -from netlib.exceptions import HttpException, HttpSyntaxException, HttpReadDisconnect, TcpDisconnect +from mitmproxy import exceptions from netlib.http import Headers from netlib.http.http1.read import ( read_request, read_response, read_request_head, @@ -11,7 +11,6 @@ from netlib.http.http1.read import ( _read_headers, _read_chunked, get_header_tokens ) from netlib.tutils import treq, tresp, raises -from netlib import exceptions def test_get_header_tokens(): @@ -117,12 +116,12 @@ class TestReadBody: def test_known_size_limit(self): rfile = BytesIO(b"foobar") - with raises(HttpException): + with raises(exceptions.HttpException): b"".join(read_body(rfile, 3, 2)) def test_known_size_too_short(self): rfile = BytesIO(b"foo") - with raises(HttpException): + with raises(exceptions.HttpException): b"".join(read_body(rfile, 6)) def test_unknown_size(self): @@ -132,7 +131,7 @@ class TestReadBody: def test_unknown_size_limit(self): rfile = BytesIO(b"foobar") - with 
raises(HttpException): + with raises(exceptions.HttpException): b"".join(read_body(rfile, -1, 3)) def test_max_chunk_size(self): @@ -186,7 +185,7 @@ def test_expected_http_body_size(): # explicit length for val in (b"foo", b"-7"): - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): expected_http_body_size( treq(headers=Headers(content_length=val)) ) @@ -210,13 +209,13 @@ def test_get_first_line(): rfile = BytesIO(b"\r\nfoo\r\nbar") assert _get_first_line(rfile) == b"foo" - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): rfile = BytesIO(b"") _get_first_line(rfile) - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): rfile = Mock() - rfile.readline.side_effect = TcpDisconnect + rfile.readline.side_effect = exceptions.TcpDisconnect _get_first_line(rfile) @@ -233,23 +232,23 @@ def test_read_request_line(): assert (t(b"GET http://foo:42/bar HTTP/1.1") == ("absolute", b"GET", b"http", b"foo", 42, b"/bar", b"HTTP/1.1")) - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): t(b"GET / WTF/1.1") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): t(b"this is not http") - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): t(b"") def test_parse_authority_form(): assert _parse_authority_form(b"foo:42") == (b"foo", 42) - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _parse_authority_form(b"foo") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _parse_authority_form(b"foo:bar") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _parse_authority_form(b"foo:99999999") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _parse_authority_form(b"f\x00oo:80") @@ -263,14 +262,14 @@ def test_read_response_line(): # https://github.com/mitmproxy/mitmproxy/issues/784 assert t(b"HTTP/1.1 200 Non-Autoris\xc3\xa9") == (b"HTTP/1.1", 200, b"Non-Autoris\xc3\xa9") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): assert t(b"HTTP/1.1") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): t(b"HTTP/1.1 OK OK") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): t(b"WTF/1.1 200 OK") - with raises(HttpReadDisconnect): + with raises(exceptions.HttpReadDisconnect): t(b"") @@ -279,11 +278,11 @@ def test_check_http_version(): _check_http_version(b"HTTP/1.0") _check_http_version(b"HTTP/1.1") _check_http_version(b"HTTP/2.0") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _check_http_version(b"WTF/1.0") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _check_http_version(b"HTTP/1.10") - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): _check_http_version(b"HTTP/1.b") @@ -322,17 +321,17 @@ class TestReadHeaders: def test_read_continued_err(self): data = b"\tfoo: bar\r\n" - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): self._read(data) def test_read_err(self): data = b"foo" - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): self._read(data) def test_read_empty_name(self): data = b":foo" - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): self._read(data) def test_read_empty_value(self): @@ -346,7 +345,7 @@ def test_read_chunked(): 
req.headers["Transfer-Encoding"] = "chunked" data = b"1\r\na\r\n0\r\n" - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): b"".join(_read_chunked(BytesIO(data))) data = b"1\r\na\r\n0\r\n\r\n" @@ -364,7 +363,7 @@ def test_read_chunked(): b"".join(_read_chunked(BytesIO(data))) data = b"foo\r\nfoo" - with raises(HttpSyntaxException): + with raises(exceptions.HttpSyntaxException): b"".join(_read_chunked(BytesIO(data))) data = b"5\r\naaaaa\r\n0\r\n\r\n" diff --git a/test/netlib/test_tcp.py b/test/netlib/test_tcp.py index 2c1b92dc..d61e1d91 100644 --- a/test/netlib/test_tcp.py +++ b/test/netlib/test_tcp.py @@ -12,7 +12,7 @@ from OpenSSL import SSL from mitmproxy import certs from netlib import tcp from netlib import tutils -from netlib import exceptions +from mitmproxy import exceptions from . import tservers diff --git a/test/pathod/test_log.py b/test/pathod/test_log.py index deb0f613..8890e7d9 100644 --- a/test/pathod/test_log.py +++ b/test/pathod/test_log.py @@ -1,7 +1,7 @@ import io from pathod import log -from netlib.exceptions import TcpDisconnect +from mitmproxy import exceptions class DummyIO(io.StringIO): @@ -20,6 +20,6 @@ def test_disconnect(): try: with l.ctx() as lg: lg("Test") - except TcpDisconnect: + except exceptions.TcpDisconnect: pass assert "Test" in outf.getvalue() diff --git a/test/pathod/test_pathoc.py b/test/pathod/test_pathoc.py index f9670d73..d26eb15d 100644 --- a/test/pathod/test_pathoc.py +++ b/test/pathod/test_pathoc.py @@ -3,9 +3,9 @@ from mock import Mock from netlib import http from netlib import tcp -from netlib.exceptions import NetlibException from netlib.http import http1 from netlib.tutils import raises +from mitmproxy import exceptions from pathod import pathoc, language from pathod.protocols.http2 import HTTP2StateProtocol @@ -36,7 +36,7 @@ class PathocTestDaemon(tutils.DaemonTests): r = r.freeze(language.Settings()) try: c.request(r) - except NetlibException: + except exceptions.NetlibException: pass self.d.wait_for_silence() return s.getvalue() diff --git a/test/pathod/test_pathod.py b/test/pathod/test_pathod.py index 89d7c562..402cd638 100644 --- a/test/pathod/test_pathod.py +++ b/test/pathod/test_pathod.py @@ -2,7 +2,7 @@ import io from pathod import pathod from netlib import tcp -from netlib.exceptions import HttpException, TlsException +from mitmproxy import exceptions from . 
import tutils @@ -157,7 +157,7 @@ class CommonTests(tutils.DaemonTests): def test_invalid_content_length(self): tutils.raises( - HttpException, + exceptions.HttpException, self.pathoc, ["get:/:h'content-length'='foo'"] ) @@ -166,7 +166,7 @@ class CommonTests(tutils.DaemonTests): assert "Unparseable Content Length" in l["msg"] def test_invalid_headers(self): - tutils.raises(HttpException, self.pathoc, ["get:/:h'\t'='foo'"]) + tutils.raises(exceptions.HttpException, self.pathoc, ["get:/:h'\t'='foo'"]) l = self.d.last_log() assert l["type"] == "error" assert "Invalid headers" in l["msg"] @@ -225,7 +225,7 @@ class TestDaemon(CommonTests): def test_connect_err(self): tutils.raises( - HttpException, + exceptions.HttpException, self.pathoc, [r"get:'http://foo.com/p/202':da"], connect_to=("localhost", self.d.port) @@ -241,7 +241,7 @@ class TestDaemonSSL(CommonTests): c.wbufsize = 0 with c.connect(): c.wfile.write(b"\0\0\0\0") - tutils.raises(TlsException, c.convert_to_ssl) + tutils.raises(exceptions.TlsException, c.convert_to_ssl) l = self.d.last_log() assert l["type"] == "error" assert "SSL" in l["msg"] diff --git a/test/pathod/test_protocols_http2.py b/test/pathod/test_protocols_http2.py index 7300cc1d..bb69bd10 100644 --- a/test/pathod/test_protocols_http2.py +++ b/test/pathod/test_protocols_http2.py @@ -4,8 +4,8 @@ import codecs import hyperframe from netlib import tcp, http from netlib.tutils import raises -from netlib.exceptions import TcpDisconnect from netlib.http import http2 +from mitmproxy import exceptions from ..netlib import tservers as netlib_tservers @@ -132,7 +132,7 @@ class TestPerformServerConnectionPreface(netlib_tservers.ServerTestBase): protocol.perform_server_connection_preface() assert protocol.connection_preface_performed - with raises(TcpDisconnect): + with raises(exceptions.TcpDisconnect): protocol.perform_server_connection_preface(force=True) -- cgit v1.2.3 From 9491d8589a265be33a5d6fac92c1213fd5b719d4 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:29:53 +1300 Subject: Improve exception hierarchy ProxyException -> MitmproxyException NetlibException inherits from MitmproxyException --- mitmproxy/exceptions.py | 30 +++++++++++++++--------------- mitmproxy/proxy/protocol/rawtcp.py | 1 + 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/mitmproxy/exceptions.py b/mitmproxy/exceptions.py index 82022d31..a7ecf17f 100644 --- a/mitmproxy/exceptions.py +++ b/mitmproxy/exceptions.py @@ -2,7 +2,7 @@ We try to be very hygienic regarding the exceptions we throw: - Every exception that might be externally visible to users shall be a subclass - of ProxyException.p + of MitmproxyException.p - Every exception in the base net module shall be a subclass of NetlibException, and will not be propagated directly to users. @@ -10,7 +10,7 @@ See also: http://lucumr.pocoo.org/2014/10/16/on-error-handling/ """ -class ProxyException(Exception): +class MitmproxyException(Exception): """ Base class for all exceptions thrown by mitmproxy. @@ -20,7 +20,7 @@ class ProxyException(Exception): super().__init__(message) -class Kill(ProxyException): +class Kill(MitmproxyException): """ Signal that both client and server connection(s) should be killed immediately. @@ -28,7 +28,7 @@ class Kill(ProxyException): pass -class ProtocolException(ProxyException): +class ProtocolException(MitmproxyException): """ ProtocolExceptions are caused by invalid user input, unavailable network resources, or other events that are outside of our influence. 
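With NetlibException re-rooted under MitmproxyException, net-layer failures and user-visible errors share one base class, and the catch-and-translate pattern used throughout the proxy layers in this series needs only names from mitmproxy.exceptions. A condensed sketch of that pattern (connect_upstream is a hypothetical helper, not part of this diff):

    from mitmproxy import exceptions

    def connect_upstream(server_conn):
        try:
            server_conn.connect()
        except exceptions.TcpException as e:
            # A net-layer error is surfaced as a user-visible protocol error; both
            # now descend from MitmproxyException, so a single top-level handler
            # can also catch anything that slips through untranslated.
            raise exceptions.ProtocolException("connection failed: {}".format(e))
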
@@ -69,48 +69,48 @@ class Http2ZombieException(ProtocolException): pass -class ServerException(ProxyException): +class ServerException(MitmproxyException): pass -class ContentViewException(ProxyException): +class ContentViewException(MitmproxyException): pass -class ReplayException(ProxyException): +class ReplayException(MitmproxyException): pass -class FlowReadException(ProxyException): +class FlowReadException(MitmproxyException): pass -class ControlException(ProxyException): +class ControlException(MitmproxyException): pass -class SetServerNotAllowedException(ProxyException): +class SetServerNotAllowedException(MitmproxyException): pass -class OptionsError(Exception): +class OptionsError(MitmproxyException): pass -class AddonError(Exception): +class AddonError(MitmproxyException): pass -class AddonHalt(Exception): +class AddonHalt(MitmproxyException): pass """ - Every net Exception raised shall be a subclass of NetlibException. + Net-layer exceptions """ -class NetlibException(Exception): +class NetlibException(MitmproxyException): """ Base class for all exceptions thrown by netlib. """ diff --git a/mitmproxy/proxy/protocol/rawtcp.py b/mitmproxy/proxy/protocol/rawtcp.py index d117fb41..8230c50b 100644 --- a/mitmproxy/proxy/protocol/rawtcp.py +++ b/mitmproxy/proxy/protocol/rawtcp.py @@ -5,6 +5,7 @@ from OpenSSL import SSL import netlib.tcp from mitmproxy import tcp from mitmproxy import flow +from mitmproxy import exceptions from mitmproxy.proxy.protocol import base -- cgit v1.2.3 From 853e03a5e753354fad3a3fa5384ef3a09384ef43 Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:42:55 +1300 Subject: netlib.tutils -> mitmproxy.test.tutils There's a LOT more to be done refactoring our different conflicting test utils. --- mitmproxy/test/tutils.py | 130 ++++++++++++++++++++++++++ netlib/tutils.py | 132 --------------------------- pathod/pathoc.py | 2 +- test/mitmproxy/addons/test_dumper.py | 10 +- test/mitmproxy/addons/test_serverplayback.py | 6 +- test/mitmproxy/addons/test_stickycookie.py | 2 +- test/mitmproxy/console/test_master.py | 12 +-- test/mitmproxy/mastertest.py | 6 +- test/mitmproxy/protocol/test_http1.py | 2 +- test/mitmproxy/test_certs.py | 2 +- test/mitmproxy/test_contentview.py | 4 +- test/mitmproxy/test_controller.py | 2 +- test/mitmproxy/test_examples.py | 2 +- test/mitmproxy/test_flow.py | 28 +++--- test/mitmproxy/test_flow_export.py | 8 +- test/mitmproxy/test_optmanager.py | 2 +- test/mitmproxy/test_server.py | 8 +- test/mitmproxy/test_types_bidi.py | 2 +- test/mitmproxy/test_types_multidict.py | 2 +- test/mitmproxy/test_utils_human.py | 2 +- test/mitmproxy/test_utils_strutils.py | 2 +- test/mitmproxy/tutils.py | 8 +- test/netlib/http/http1/test_assemble.py | 2 +- test/netlib/http/http1/test_read.py | 2 +- test/netlib/http/test_authentication.py | 2 +- test/netlib/http/test_cookies.py | 2 +- test/netlib/http/test_encoding.py | 2 +- test/netlib/http/test_headers.py | 2 +- test/netlib/http/test_message.py | 58 ++++++------ test/netlib/http/test_request.py | 2 +- test/netlib/http/test_response.py | 2 +- test/netlib/http/test_url.py | 2 +- test/netlib/test_socks.py | 4 +- test/netlib/test_tcp.py | 2 +- test/netlib/tservers.py | 2 +- test/netlib/websockets/test_frame.py | 2 +- test/pathod/test_pathoc.py | 2 +- test/pathod/test_protocols_http2.py | 2 +- test/pathod/tutils.py | 2 +- 39 files changed, 233 insertions(+), 233 deletions(-) create mode 100644 mitmproxy/test/tutils.py delete mode 100644 netlib/tutils.py diff --git a/mitmproxy/test/tutils.py 
b/mitmproxy/test/tutils.py new file mode 100644 index 00000000..fc7c0eb9 --- /dev/null +++ b/mitmproxy/test/tutils.py @@ -0,0 +1,130 @@ +from io import BytesIO +import tempfile +import os +import time +import shutil +from contextlib import contextmanager +import sys + +from mitmproxy.utils import data +from netlib import tcp +from netlib import http + + +def treader(bytes): + """ + Construct a tcp.Read object from bytes. + """ + fp = BytesIO(bytes) + return tcp.Reader(fp) + + +@contextmanager +def tmpdir(*args, **kwargs): + orig_workdir = os.getcwd() + temp_workdir = tempfile.mkdtemp(*args, **kwargs) + os.chdir(temp_workdir) + + yield temp_workdir + + os.chdir(orig_workdir) + shutil.rmtree(temp_workdir) + + +def _check_exception(expected, actual, exc_tb): + if isinstance(expected, str): + if expected.lower() not in str(actual).lower(): + raise AssertionError( + "Expected %s, but caught %s" % ( + repr(expected), repr(actual) + ) + ) + else: + if not isinstance(actual, expected): + raise AssertionError( + "Expected %s, but caught %s %s" % ( + expected.__name__, actual.__class__.__name__, repr(actual) + ) + ) + + +def raises(expected_exception, obj=None, *args, **kwargs): + """ + Assert that a callable raises a specified exception. + + :exc An exception class or a string. If a class, assert that an + exception of this type is raised. If a string, assert that the string + occurs in the string representation of the exception, based on a + case-insenstivie match. + + :obj A callable object. + + :args Arguments to be passsed to the callable. + + :kwargs Arguments to be passed to the callable. + """ + if obj is None: + return RaisesContext(expected_exception) + else: + try: + ret = obj(*args, **kwargs) + except Exception as actual: + _check_exception(expected_exception, actual, sys.exc_info()[2]) + else: + raise AssertionError("No exception raised. Return value: {}".format(ret)) + + +class RaisesContext: + def __init__(self, expected_exception): + self.expected_exception = expected_exception + + def __enter__(self): + return + + def __exit__(self, exc_type, exc_val, exc_tb): + if not exc_type: + raise AssertionError("No exception raised.") + else: + _check_exception(self.expected_exception, exc_val, exc_tb) + return True + + +test_data = data.Data(__name__).push("../../test/netlib") + + +def treq(**kwargs): + """ + Returns: + netlib.http.Request + """ + default = dict( + first_line_format="relative", + method=b"GET", + scheme=b"http", + host=b"address", + port=22, + path=b"/path", + http_version=b"HTTP/1.1", + headers=http.Headers(((b"header", b"qvalue"), (b"content-length", b"7"))), + content=b"content" + ) + default.update(kwargs) + return http.Request(**default) + + +def tresp(**kwargs): + """ + Returns: + netlib.http.Response + """ + default = dict( + http_version=b"HTTP/1.1", + status_code=200, + reason=b"OK", + headers=http.Headers(((b"header-response", b"svalue"), (b"content-length", b"7"))), + content=b"message", + timestamp_start=time.time(), + timestamp_end=time.time(), + ) + default.update(kwargs) + return http.Response(**default) diff --git a/netlib/tutils.py b/netlib/tutils.py deleted file mode 100644 index 6fa2d7b6..00000000 --- a/netlib/tutils.py +++ /dev/null @@ -1,132 +0,0 @@ -from io import BytesIO -import tempfile -import os -import time -import shutil -from contextlib import contextmanager -import sys - -from mitmproxy.utils import data -from netlib import tcp -from netlib import http - - -def treader(bytes): - """ - Construct a tcp.Read object from bytes. 
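The new mitmproxy.test.tutils shown above keeps the same helpers as the deleted netlib module; only the import path changes. A minimal sketch of the relocated fixtures and the raises() context manager in use (all names taken from the new file):

    from mitmproxy.test import tutils

    req = tutils.treq(content=b"payload")     # throwaway netlib.http.Request fixture
    resp = tutils.tresp(status_code=404)      # throwaway netlib.http.Response fixture

    with tutils.raises(ValueError):           # passes because int("x") raises ValueError
        int("x")
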
- """ - fp = BytesIO(bytes) - return tcp.Reader(fp) - - -@contextmanager -def tmpdir(*args, **kwargs): - orig_workdir = os.getcwd() - temp_workdir = tempfile.mkdtemp(*args, **kwargs) - os.chdir(temp_workdir) - - yield temp_workdir - - os.chdir(orig_workdir) - shutil.rmtree(temp_workdir) - - -def _check_exception(expected, actual, exc_tb): - if isinstance(expected, str): - if expected.lower() not in str(actual).lower(): - raise AssertionError( - "Expected %s, but caught %s" % ( - repr(expected), repr(actual) - ) - ) - else: - if not isinstance(actual, expected): - raise AssertionError( - "Expected %s, but caught %s %s" % ( - expected.__name__, actual.__class__.__name__, repr(actual) - ) - ) - - -def raises(expected_exception, obj=None, *args, **kwargs): - """ - Assert that a callable raises a specified exception. - - :exc An exception class or a string. If a class, assert that an - exception of this type is raised. If a string, assert that the string - occurs in the string representation of the exception, based on a - case-insenstivie match. - - :obj A callable object. - - :args Arguments to be passsed to the callable. - - :kwargs Arguments to be passed to the callable. - """ - if obj is None: - return RaisesContext(expected_exception) - else: - try: - ret = obj(*args, **kwargs) - except Exception as actual: - _check_exception(expected_exception, actual, sys.exc_info()[2]) - else: - raise AssertionError("No exception raised. Return value: {}".format(ret)) - - -class RaisesContext: - def __init__(self, expected_exception): - self.expected_exception = expected_exception - - def __enter__(self): - return - - def __exit__(self, exc_type, exc_val, exc_tb): - if not exc_type: - raise AssertionError("No exception raised.") - else: - _check_exception(self.expected_exception, exc_val, exc_tb) - return True - - -test_data = data.Data(__name__) -# FIXME: Temporary workaround during repo merge. 
-test_data.dirname = os.path.join(test_data.dirname, "..", "test", "netlib") - - -def treq(**kwargs): - """ - Returns: - netlib.http.Request - """ - default = dict( - first_line_format="relative", - method=b"GET", - scheme=b"http", - host=b"address", - port=22, - path=b"/path", - http_version=b"HTTP/1.1", - headers=http.Headers(((b"header", b"qvalue"), (b"content-length", b"7"))), - content=b"content" - ) - default.update(kwargs) - return http.Request(**default) - - -def tresp(**kwargs): - """ - Returns: - netlib.http.Response - """ - default = dict( - http_version=b"HTTP/1.1", - status_code=200, - reason=b"OK", - headers=http.Headers(((b"header-response", b"svalue"), (b"content-length", b"7"))), - content=b"message", - timestamp_start=time.time(), - timestamp_end=time.time(), - ) - default.update(kwargs) - return http.Response(**default) diff --git a/pathod/pathoc.py b/pathod/pathoc.py index b67f6ee2..0978277a 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -11,7 +11,7 @@ import time import OpenSSL.crypto import logging -from netlib.tutils import treq +from mitmproxy.test.tutils import treq from mitmproxy.utils import strutils from netlib import tcp from mitmproxy import certs diff --git a/test/mitmproxy/addons/test_dumper.py b/test/mitmproxy/addons/test_dumper.py index e49f91bc..5f1b2dbd 100644 --- a/test/mitmproxy/addons/test_dumper.py +++ b/test/mitmproxy/addons/test_dumper.py @@ -7,7 +7,7 @@ from mitmproxy import exceptions from mitmproxy.tools import dump from mitmproxy import http from mitmproxy import proxy -import netlib.tutils +import mitmproxy.test.tutils import mock @@ -38,11 +38,11 @@ class TestDumper(mastertest.MasterTest): sio = io.StringIO() d.configure(dump.Options(tfile = sio, flow_detail = 4), updated) flow = tutils.tflow() - flow.request = netlib.tutils.treq() + flow.request = mitmproxy.test.tutils.treq() flow.request.stickycookie = True flow.client_conn = mock.MagicMock() flow.client_conn.address.host = "foo" - flow.response = netlib.tutils.tresp(content=None) + flow.response = mitmproxy.test.tutils.tresp(content=None) flow.response.is_replay = True flow.response.status_code = 300 d.response(flow) @@ -50,7 +50,7 @@ class TestDumper(mastertest.MasterTest): sio = io.StringIO() d.configure(dump.Options(tfile = sio, flow_detail = 4), updated) - flow = tutils.tflow(resp=netlib.tutils.tresp(content=b"{")) + flow = tutils.tflow(resp=mitmproxy.test.tutils.tresp(content=b"{")) flow.response.headers["content-type"] = "application/json" flow.response.status_code = 400 d.response(flow) @@ -60,7 +60,7 @@ class TestDumper(mastertest.MasterTest): d.configure(dump.Options(tfile = sio), updated) flow = tutils.tflow() flow.request.content = None - flow.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + flow.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) flow.response.content = None d.response(flow) assert "content missing" in sio.getvalue() diff --git a/test/mitmproxy/addons/test_serverplayback.py b/test/mitmproxy/addons/test_serverplayback.py index 649b3c22..c8dd8704 100644 --- a/test/mitmproxy/addons/test_serverplayback.py +++ b/test/mitmproxy/addons/test_serverplayback.py @@ -1,6 +1,6 @@ from .. 
import tutils, mastertest -import netlib.tutils +import mitmproxy.test.tutils from mitmproxy.addons import serverplayback from mitmproxy import options from mitmproxy import proxy @@ -244,7 +244,7 @@ class TestServerPlayback: m.addons.add(s) f = tutils.tflow() - f.response = netlib.tutils.tresp(content=f.request.content) + f.response = mitmproxy.test.tutils.tresp(content=f.request.content) s.load([f, f]) tf = tutils.tflow() @@ -273,7 +273,7 @@ class TestServerPlayback: m.addons.add(s) f = tutils.tflow() - f.response = netlib.tutils.tresp(content=f.request.content) + f.response = mitmproxy.test.tutils.tresp(content=f.request.content) s.load([f]) f = tutils.tflow() diff --git a/test/mitmproxy/addons/test_stickycookie.py b/test/mitmproxy/addons/test_stickycookie.py index 29c9e198..28c35b28 100644 --- a/test/mitmproxy/addons/test_stickycookie.py +++ b/test/mitmproxy/addons/test_stickycookie.py @@ -3,7 +3,7 @@ from mitmproxy.addons import stickycookie from mitmproxy import master from mitmproxy import options from mitmproxy import proxy -from netlib import tutils as ntutils +from mitmproxy.test import tutils as ntutils def test_domain_match(): diff --git a/test/mitmproxy/console/test_master.py b/test/mitmproxy/console/test_master.py index da29a2b1..6d4bb7f9 100644 --- a/test/mitmproxy/console/test_master.py +++ b/test/mitmproxy/console/test_master.py @@ -1,6 +1,6 @@ import gc -import netlib.tutils +import mitmproxy.test.tutils from mitmproxy.tools import console from mitmproxy import proxy from mitmproxy.tools.console import common @@ -59,13 +59,13 @@ class TestConsoleState: def _add_response(self, state): f = self._add_request(state) - f.response = netlib.tutils.tresp() + f.response = mitmproxy.test.tutils.tresp() state.update_flow(f) def test_add_response(self): c = console.master.ConsoleState() f = self._add_request(c) - f.response = netlib.tutils.tresp() + f.response = mitmproxy.test.tutils.tresp() c.focus = None c.update_flow(f) @@ -127,12 +127,12 @@ class TestMaster(mastertest.MasterTest): def test_intercept(self): """regression test for https://github.com/mitmproxy/mitmproxy/issues/1605""" m = self.mkmaster(intercept="~b bar") - f = tutils.tflow(req=netlib.tutils.treq(content=b"foo")) + f = tutils.tflow(req=mitmproxy.test.tutils.treq(content=b"foo")) m.request(f) assert not m.state.flows[0].intercepted - f = tutils.tflow(req=netlib.tutils.treq(content=b"bar")) + f = tutils.tflow(req=mitmproxy.test.tutils.treq(content=b"bar")) m.request(f) assert m.state.flows[1].intercepted - f = tutils.tflow(resp=netlib.tutils.tresp(content=b"bar")) + f = tutils.tflow(resp=mitmproxy.test.tutils.tresp(content=b"bar")) m.request(f) assert m.state.flows[2].intercepted diff --git a/test/mitmproxy/mastertest.py b/test/mitmproxy/mastertest.py index 0b57c2d4..cdfb5ad5 100644 --- a/test/mitmproxy/mastertest.py +++ b/test/mitmproxy/mastertest.py @@ -1,7 +1,7 @@ import contextlib from . 
import tutils -import netlib.tutils +import mitmproxy.test.tutils from mitmproxy import master from mitmproxy import io @@ -17,13 +17,13 @@ class TestMaster: class MasterTest: def cycle(self, master, content): - f = tutils.tflow(req=netlib.tutils.treq(content=content)) + f = tutils.tflow(req=mitmproxy.test.tutils.treq(content=content)) master.clientconnect(f.client_conn) master.serverconnect(f.server_conn) master.request(f) if not f.error: f.response = http.HTTPResponse.wrap( - netlib.tutils.tresp(content=content) + mitmproxy.test.tutils.tresp(content=content) ) master.response(f) master.clientdisconnect(f) diff --git a/test/mitmproxy/protocol/test_http1.py b/test/mitmproxy/protocol/test_http1.py index 8701c8e6..d18ff411 100644 --- a/test/mitmproxy/protocol/test_http1.py +++ b/test/mitmproxy/protocol/test_http1.py @@ -1,6 +1,6 @@ from netlib.http import http1 from netlib.tcp import TCPClient -from netlib.tutils import treq +from mitmproxy.test.tutils import treq from .. import tutils, tservers diff --git a/test/mitmproxy/test_certs.py b/test/mitmproxy/test_certs.py index 35407fd6..2e706fa6 100644 --- a/test/mitmproxy/test_certs.py +++ b/test/mitmproxy/test_certs.py @@ -1,6 +1,6 @@ import os from mitmproxy import certs -from netlib import tutils +from mitmproxy.test import tutils # class TestDNTree: # def test_simple(self): diff --git a/test/mitmproxy/test_contentview.py b/test/mitmproxy/test_contentview.py index f113e294..2c5e1c85 100644 --- a/test/mitmproxy/test_contentview.py +++ b/test/mitmproxy/test_contentview.py @@ -6,7 +6,7 @@ from mitmproxy.types import multidict import mitmproxy.contentviews as cv from . import tutils -import netlib.tutils +import mitmproxy.test.tutils try: import pyamf @@ -232,7 +232,7 @@ def test_get_content_view(): def test_get_message_content_view(): - r = netlib.tutils.treq() + r = mitmproxy.test.tutils.treq() desc, lines, err = cv.get_message_content_view(cv.get("Raw"), r) assert desc == "Raw" diff --git a/test/mitmproxy/test_controller.py b/test/mitmproxy/test_controller.py index 071638a9..3bcb7013 100644 --- a/test/mitmproxy/test_controller.py +++ b/test/mitmproxy/test_controller.py @@ -9,7 +9,7 @@ import queue from mitmproxy.exceptions import Kill, ControlException from mitmproxy import proxy from mitmproxy import master -from netlib.tutils import raises +from mitmproxy.test.tutils import raises class TMsg: diff --git a/test/mitmproxy/test_examples.py b/test/mitmproxy/test_examples.py index 60d4a1a5..48193fe7 100644 --- a/test/mitmproxy/test_examples.py +++ b/test/mitmproxy/test_examples.py @@ -9,7 +9,7 @@ from mitmproxy.addons import script from mitmproxy.utils import data from mitmproxy import master -from netlib import tutils as netutils +from mitmproxy.test import tutils as netutils from netlib.http import Headers from netlib.http import cookies diff --git a/test/mitmproxy/test_flow.py b/test/mitmproxy/test_flow.py index 9b7e7395..14899be1 100644 --- a/test/mitmproxy/test_flow.py +++ b/test/mitmproxy/test_flow.py @@ -1,7 +1,7 @@ import mock import io -import netlib.tutils +import mitmproxy.test.tutils from netlib.http import Headers import mitmproxy.io from mitmproxy import flowfilter, options @@ -59,7 +59,7 @@ class TestHTTPFlow: def test_backup(self): f = tutils.tflow() - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) f.request.content = b"foo" assert not f.modified() f.backup() @@ -212,7 +212,7 @@ class TestState: assert c.add_flow(newf) assert c.active_flow_count() == 
2 - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) assert c.update_flow(f) assert c.flow_count() == 2 assert c.active_flow_count() == 1 @@ -220,7 +220,7 @@ class TestState: assert not c.update_flow(None) assert c.active_flow_count() == 1 - newf.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + newf.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) assert c.update_flow(newf) assert c.active_flow_count() == 0 @@ -252,7 +252,7 @@ class TestState: c.set_view_filter("~s") assert c.filter_txt == "~s" assert len(c.view) == 0 - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) c.update_flow(f) assert len(c.view) == 1 c.set_view_filter(None) @@ -284,7 +284,7 @@ class TestState: def _add_response(self, state): f = tutils.tflow() state.add_flow(f) - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) state.update_flow(f) def _add_error(self, state): @@ -444,11 +444,11 @@ class TestFlowMaster: fm.addons.add(s) f = tutils.tflow(req=None) fm.clientconnect(f.client_conn) - f.request = http.HTTPRequest.wrap(netlib.tutils.treq()) + f.request = http.HTTPRequest.wrap(mitmproxy.test.tutils.treq()) fm.request(f) assert s.flow_count() == 1 - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) fm.response(f) assert s.flow_count() == 1 @@ -473,7 +473,7 @@ class TestRequest: assert r.get_state() == r2.get_state() def test_get_url(self): - r = http.HTTPRequest.wrap(netlib.tutils.treq()) + r = http.HTTPRequest.wrap(mitmproxy.test.tutils.treq()) assert r.url == "http://address:22/path" @@ -494,7 +494,7 @@ class TestRequest: assert r.pretty_url == "https://foo.com:22/path" def test_replace(self): - r = http.HTTPRequest.wrap(netlib.tutils.treq()) + r = http.HTTPRequest.wrap(mitmproxy.test.tutils.treq()) r.path = "path/foo" r.headers["Foo"] = "fOo" r.content = b"afoob" @@ -504,7 +504,7 @@ class TestRequest: assert r.headers["boo"] == "boo" def test_constrain_encoding(self): - r = http.HTTPRequest.wrap(netlib.tutils.treq()) + r = http.HTTPRequest.wrap(mitmproxy.test.tutils.treq()) r.headers["accept-encoding"] = "gzip, oink" r.constrain_encoding() assert "oink" not in r.headers["accept-encoding"] @@ -514,7 +514,7 @@ class TestRequest: assert "oink" not in r.headers["accept-encoding"] def test_get_content_type(self): - resp = http.HTTPResponse.wrap(netlib.tutils.tresp()) + resp = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) resp.headers = Headers(content_type="text/plain") assert resp.headers["content-type"] == "text/plain" @@ -528,7 +528,7 @@ class TestResponse: assert resp2.get_state() == resp.get_state() def test_replace(self): - r = http.HTTPResponse.wrap(netlib.tutils.tresp()) + r = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) r.headers["Foo"] = "fOo" r.content = b"afoob" assert r.replace("foo(?i)", "boo") == 3 @@ -536,7 +536,7 @@ class TestResponse: assert r.headers["boo"] == "boo" def test_get_content_type(self): - resp = http.HTTPResponse.wrap(netlib.tutils.tresp()) + resp = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) resp.headers = Headers(content_type="text/plain") assert resp.headers["content-type"] == "text/plain" diff --git a/test/mitmproxy/test_flow_export.py b/test/mitmproxy/test_flow_export.py index aafd5a1c..8ef2b7ee 100644 --- 
a/test/mitmproxy/test_flow_export.py +++ b/test/mitmproxy/test_flow_export.py @@ -1,6 +1,6 @@ import re -import netlib.tutils +import mitmproxy.test.tutils from netlib.http import Headers from mitmproxy import export # heh from . import tutils @@ -20,15 +20,15 @@ def python_equals(testdata, text): def req_get(): - return netlib.tutils.treq(method=b'GET', content=b'', path=b"/path?a=foo&a=bar&b=baz") + return mitmproxy.test.tutils.treq(method=b'GET', content=b'', path=b"/path?a=foo&a=bar&b=baz") def req_post(): - return netlib.tutils.treq(method=b'POST', headers=()) + return mitmproxy.test.tutils.treq(method=b'POST', headers=()) def req_patch(): - return netlib.tutils.treq(method=b'PATCH', path=b"/path?query=param") + return mitmproxy.test.tutils.treq(method=b'PATCH', path=b"/path?query=param") class TestExportCurlCommand: diff --git a/test/mitmproxy/test_optmanager.py b/test/mitmproxy/test_optmanager.py index 1d677bd3..3c845707 100644 --- a/test/mitmproxy/test_optmanager.py +++ b/test/mitmproxy/test_optmanager.py @@ -2,7 +2,7 @@ import copy from mitmproxy import optmanager from mitmproxy import exceptions -from netlib import tutils +from mitmproxy.test import tutils class TO(optmanager.OptManager): diff --git a/test/mitmproxy/test_server.py b/test/mitmproxy/test_server.py index 79fd6f86..f7f13443 100644 --- a/test/mitmproxy/test_server.py +++ b/test/mitmproxy/test_server.py @@ -2,7 +2,7 @@ import os import socket import time -import netlib.tutils +import mitmproxy.test.tutils from mitmproxy import controller from mitmproxy import options from mitmproxy.addons import script @@ -16,7 +16,7 @@ from mitmproxy import exceptions from netlib.http import authentication from netlib.http import http1 from netlib.tcp import Address -from netlib.tutils import raises +from mitmproxy.test.tutils import raises from pathod import pathoc from pathod import pathod @@ -794,7 +794,7 @@ class TestStreamRequest(tservers.HTTPProxyTest): class MasterFakeResponse(tservers.TestMaster): @controller.handler def request(self, f): - f.response = http.HTTPResponse.wrap(netlib.tutils.tresp()) + f.response = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) class TestFakeResponse(tservers.HTTPProxyTest): @@ -873,7 +873,7 @@ class MasterIncomplete(tservers.TestMaster): @controller.handler def request(self, f): - resp = http.HTTPResponse.wrap(netlib.tutils.tresp()) + resp = http.HTTPResponse.wrap(mitmproxy.test.tutils.tresp()) resp.content = None f.response = resp diff --git a/test/mitmproxy/test_types_bidi.py b/test/mitmproxy/test_types_bidi.py index a012c568..0494ac9d 100644 --- a/test/mitmproxy/test_types_bidi.py +++ b/test/mitmproxy/test_types_bidi.py @@ -1,5 +1,5 @@ from mitmproxy.types import bidi -from netlib import tutils +from mitmproxy.test import tutils def test_bidi(): diff --git a/test/mitmproxy/test_types_multidict.py b/test/mitmproxy/test_types_multidict.py index ada33bf7..d566905c 100644 --- a/test/mitmproxy/test_types_multidict.py +++ b/test/mitmproxy/test_types_multidict.py @@ -1,4 +1,4 @@ -from netlib import tutils +from mitmproxy.test import tutils from mitmproxy.types import multidict diff --git a/test/mitmproxy/test_utils_human.py b/test/mitmproxy/test_utils_human.py index 08609887..443c8f66 100644 --- a/test/mitmproxy/test_utils_human.py +++ b/test/mitmproxy/test_utils_human.py @@ -1,6 +1,6 @@ import time from mitmproxy.utils import human -from netlib import tutils +from mitmproxy.test import tutils def test_format_timestamp(): diff --git a/test/mitmproxy/test_utils_strutils.py 
b/test/mitmproxy/test_utils_strutils.py index 2843688f..d4c2883c 100644 --- a/test/mitmproxy/test_utils_strutils.py +++ b/test/mitmproxy/test_utils_strutils.py @@ -1,5 +1,5 @@ from mitmproxy.utils import strutils -from netlib import tutils +from mitmproxy.test import tutils def test_always_bytes(): diff --git a/test/mitmproxy/tutils.py b/test/mitmproxy/tutils.py index aa70c0e8..c83223f6 100644 --- a/test/mitmproxy/tutils.py +++ b/test/mitmproxy/tutils.py @@ -9,7 +9,7 @@ from unittest.case import SkipTest import io -import netlib.tutils +import mitmproxy.test.tutils from mitmproxy import controller from mitmproxy import connections from mitmproxy import flow @@ -102,9 +102,9 @@ def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None): if server_conn is True: server_conn = tserver_conn() if req is True: - req = netlib.tutils.treq() + req = mitmproxy.test.tutils.treq() if resp is True: - resp = netlib.tutils.tresp() + resp = mitmproxy.test.tutils.tresp() if err is True: err = terr() @@ -197,7 +197,7 @@ class MockParser(argparse.ArgumentParser): raise Exception(message) -raises = netlib.tutils.raises +raises = mitmproxy.test.tutils.raises @contextmanager diff --git a/test/netlib/http/http1/test_assemble.py b/test/netlib/http/http1/test_assemble.py index d5a5e5fb..dac5fdad 100644 --- a/test/netlib/http/http1/test_assemble.py +++ b/test/netlib/http/http1/test_assemble.py @@ -5,7 +5,7 @@ from netlib.http.http1.assemble import ( assemble_response_head, _assemble_request_line, _assemble_request_headers, _assemble_response_headers, assemble_body) -from netlib.tutils import treq, raises, tresp +from mitmproxy.test.tutils import treq, raises, tresp def test_assemble_request(): diff --git a/test/netlib/http/http1/test_read.py b/test/netlib/http/http1/test_read.py index 9777e2e2..eb96968c 100644 --- a/test/netlib/http/http1/test_read.py +++ b/test/netlib/http/http1/test_read.py @@ -10,7 +10,7 @@ from netlib.http.http1.read import ( _read_request_line, _parse_authority_form, _read_response_line, _check_http_version, _read_headers, _read_chunked, get_header_tokens ) -from netlib.tutils import treq, tresp, raises +from mitmproxy.test.tutils import treq, tresp, raises def test_get_header_tokens(): diff --git a/test/netlib/http/test_authentication.py b/test/netlib/http/test_authentication.py index 95d72447..5e04bbc5 100644 --- a/test/netlib/http/test_authentication.py +++ b/test/netlib/http/test_authentication.py @@ -1,6 +1,6 @@ import binascii -from netlib import tutils +from mitmproxy.test import tutils from netlib.http import authentication, Headers diff --git a/test/netlib/http/test_cookies.py b/test/netlib/http/test_cookies.py index 80ffb0a6..ca10a69c 100644 --- a/test/netlib/http/test_cookies.py +++ b/test/netlib/http/test_cookies.py @@ -1,7 +1,7 @@ import time from netlib.http import cookies -from netlib.tutils import raises +from mitmproxy.test.tutils import raises import mock diff --git a/test/netlib/http/test_encoding.py b/test/netlib/http/test_encoding.py index 681f9bfc..89600709 100644 --- a/test/netlib/http/test_encoding.py +++ b/test/netlib/http/test_encoding.py @@ -2,7 +2,7 @@ import mock import pytest from netlib.http import encoding -from netlib import tutils +from mitmproxy.test import tutils @pytest.mark.parametrize("encoder", [ diff --git a/test/netlib/http/test_headers.py b/test/netlib/http/test_headers.py index 63f16897..cac77d57 100644 --- a/test/netlib/http/test_headers.py +++ b/test/netlib/http/test_headers.py @@ -1,7 +1,7 @@ import collections from 
netlib.http.headers import Headers, parse_content_type, assemble_content_type -from netlib.tutils import raises +from mitmproxy.test.tutils import raises class TestHeaders: diff --git a/test/netlib/http/test_message.py b/test/netlib/http/test_message.py index 8374f8f6..2bc8824f 100644 --- a/test/netlib/http/test_message.py +++ b/test/netlib/http/test_message.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -from netlib.tutils import tresp -from netlib import http, tutils +from mitmproxy.test import tutils +from netlib import http def _test_passthrough_attr(message, attr): @@ -37,12 +37,12 @@ def _test_decoded_attr(message, attr): class TestMessageData: def test_eq_ne(self): - data = tresp(timestamp_start=42, timestamp_end=42).data - same = tresp(timestamp_start=42, timestamp_end=42).data + data = tutils.tresp(timestamp_start=42, timestamp_end=42).data + same = tutils.tresp(timestamp_start=42, timestamp_end=42).data assert data == same assert not data != same - other = tresp(content=b"foo").data + other = tutils.tresp(content=b"foo").data assert not data == other assert data != other @@ -52,28 +52,28 @@ class TestMessageData: class TestMessage: def test_init(self): - resp = tresp() + resp = tutils.tresp() assert resp.data def test_eq_ne(self): - resp = tresp(timestamp_start=42, timestamp_end=42) - same = tresp(timestamp_start=42, timestamp_end=42) + resp = tutils.tresp(timestamp_start=42, timestamp_end=42) + same = tutils.tresp(timestamp_start=42, timestamp_end=42) assert resp == same assert not resp != same - other = tresp(timestamp_start=0, timestamp_end=0) + other = tutils.tresp(timestamp_start=0, timestamp_end=0) assert not resp == other assert resp != other assert resp != 0 def test_serializable(self): - resp = tresp() + resp = tutils.tresp() resp2 = http.Response.from_state(resp.get_state()) assert resp == resp2 def test_content_length_update(self): - resp = tresp() + resp = tutils.tresp() resp.content = b"foo" assert resp.data.content == b"foo" assert resp.headers["content-length"] == "3" @@ -85,19 +85,19 @@ class TestMessage: assert resp.headers["content-length"] == "0" def test_headers(self): - _test_passthrough_attr(tresp(), "headers") + _test_passthrough_attr(tutils.tresp(), "headers") def test_timestamp_start(self): - _test_passthrough_attr(tresp(), "timestamp_start") + _test_passthrough_attr(tutils.tresp(), "timestamp_start") def test_timestamp_end(self): - _test_passthrough_attr(tresp(), "timestamp_end") + _test_passthrough_attr(tutils.tresp(), "timestamp_end") def test_http_version(self): - _test_decoded_attr(tresp(), "http_version") + _test_decoded_attr(tutils.tresp(), "http_version") def test_replace(self): - r = tresp() + r = tutils.tresp() r.content = b"foofootoo" r.replace(b"foo", "gg") assert r.content == b"ggggtoo" @@ -109,7 +109,7 @@ class TestMessage: class TestMessageContentEncoding: def test_simple(self): - r = tresp() + r = tutils.tresp() assert r.raw_content == b"message" assert "content-encoding" not in r.headers r.encode("gzip") @@ -120,7 +120,7 @@ class TestMessageContentEncoding: assert r.raw_content != b"message" def test_modify(self): - r = tresp() + r = tutils.tresp() assert "content-encoding" not in r.headers r.encode("gzip") @@ -133,7 +133,7 @@ class TestMessageContentEncoding: r.content = u"foo" def test_unknown_ce(self): - r = tresp() + r = tutils.tresp() r.headers["content-encoding"] = "zopfli" r.raw_content = b"foo" with tutils.raises(ValueError): @@ -142,7 +142,7 @@ class TestMessageContentEncoding: assert r.get_content(strict=False) == b"foo" def 
test_cannot_decode(self): - r = tresp() + r = tutils.tresp() r.encode("gzip") r.raw_content = b"foo" with tutils.raises(ValueError): @@ -160,7 +160,7 @@ class TestMessageContentEncoding: assert "content-encoding" not in r.headers def test_none(self): - r = tresp(content=None) + r = tutils.tresp(content=None) assert r.content is None r.content = b"foo" assert r.content is not None @@ -168,7 +168,7 @@ class TestMessageContentEncoding: assert r.content is None def test_cannot_encode(self): - r = tresp() + r = tutils.tresp() r.encode("gzip") r.content = None assert r.headers["content-encoding"] @@ -187,7 +187,7 @@ class TestMessageContentEncoding: class TestMessageText: def test_simple(self): - r = tresp(content=b'\xfc') + r = tutils.tresp(content=b'\xfc') assert r.raw_content == b"\xfc" assert r.content == b"\xfc" assert r.text == u"ü" @@ -204,12 +204,12 @@ class TestMessageText: assert r.text == u"ü" def test_guess_json(self): - r = tresp(content=b'"\xc3\xbc"') + r = tutils.tresp(content=b'"\xc3\xbc"') r.headers["content-type"] = "application/json" assert r.text == u'"ü"' def test_none(self): - r = tresp(content=None) + r = tutils.tresp(content=None) assert r.text is None r.text = u"foo" assert r.text is not None @@ -217,7 +217,7 @@ class TestMessageText: assert r.text is None def test_modify(self): - r = tresp() + r = tutils.tresp() r.text = u"ü" assert r.raw_content == b"\xfc" @@ -228,7 +228,7 @@ class TestMessageText: assert r.headers["content-length"] == "2" def test_unknown_ce(self): - r = tresp() + r = tutils.tresp() r.headers["content-type"] = "text/html; charset=wtf" r.raw_content = b"foo" with tutils.raises(ValueError): @@ -236,7 +236,7 @@ class TestMessageText: assert r.get_text(strict=False) == u"foo" def test_cannot_decode(self): - r = tresp() + r = tutils.tresp() r.headers["content-type"] = "text/html; charset=utf8" r.raw_content = b"\xFF" with tutils.raises(ValueError): @@ -245,7 +245,7 @@ class TestMessageText: assert r.get_text(strict=False) == '\udcff' def test_cannot_encode(self): - r = tresp() + r = tutils.tresp() r.content = None assert "content-type" not in r.headers assert r.raw_content is None diff --git a/test/netlib/http/test_request.py b/test/netlib/http/test_request.py index 336dc86d..ecfc1ba6 100644 --- a/test/netlib/http/test_request.py +++ b/test/netlib/http/test_request.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from netlib.http import Headers -from netlib.tutils import treq, raises +from mitmproxy.test.tutils import treq, raises from .test_message import _test_decoded_attr, _test_passthrough_attr diff --git a/test/netlib/http/test_response.py b/test/netlib/http/test_response.py index 725f2b33..4a6fac62 100644 --- a/test/netlib/http/test_response.py +++ b/test/netlib/http/test_response.py @@ -5,7 +5,7 @@ import time from netlib.http import Headers from netlib.http import Response from netlib.http.cookies import CookieAttrs -from netlib.tutils import raises, tresp +from mitmproxy.test.tutils import raises, tresp from .test_message import _test_passthrough_attr, _test_decoded_attr diff --git a/test/netlib/http/test_url.py b/test/netlib/http/test_url.py index 631ed8a9..7cea6c58 100644 --- a/test/netlib/http/test_url.py +++ b/test/netlib/http/test_url.py @@ -1,4 +1,4 @@ -from netlib import tutils +from mitmproxy.test import tutils from netlib.http import url diff --git a/test/netlib/test_socks.py b/test/netlib/test_socks.py index 17e08054..0603f34b 100644 --- a/test/netlib/test_socks.py +++ b/test/netlib/test_socks.py @@ -1,6 +1,8 @@ import ipaddress from io import 
BytesIO -from netlib import socks, tcp, tutils +from netlib import socks +from netlib import tcp +from mitmproxy.test import tutils def test_client_greeting(): diff --git a/test/netlib/test_tcp.py b/test/netlib/test_tcp.py index d61e1d91..594ee21c 100644 --- a/test/netlib/test_tcp.py +++ b/test/netlib/test_tcp.py @@ -11,7 +11,7 @@ from OpenSSL import SSL from mitmproxy import certs from netlib import tcp -from netlib import tutils +from mitmproxy.test import tutils from mitmproxy import exceptions from . import tservers diff --git a/test/netlib/tservers.py b/test/netlib/tservers.py index b344e25c..2fae8ba6 100644 --- a/test/netlib/tservers.py +++ b/test/netlib/tservers.py @@ -4,7 +4,7 @@ import io import OpenSSL from netlib import tcp -from netlib import tutils +from mitmproxy.test import tutils class _ServerThread(threading.Thread): diff --git a/test/netlib/websockets/test_frame.py b/test/netlib/websockets/test_frame.py index a039dcb1..3b7c9ed4 100644 --- a/test/netlib/websockets/test_frame.py +++ b/test/netlib/websockets/test_frame.py @@ -3,7 +3,7 @@ import codecs import pytest from netlib import websockets -from netlib import tutils +from mitmproxy.test import tutils class TestFrameHeader: diff --git a/test/pathod/test_pathoc.py b/test/pathod/test_pathoc.py index d26eb15d..98dc9825 100644 --- a/test/pathod/test_pathoc.py +++ b/test/pathod/test_pathoc.py @@ -4,7 +4,7 @@ from mock import Mock from netlib import http from netlib import tcp from netlib.http import http1 -from netlib.tutils import raises +from mitmproxy.test.tutils import raises from mitmproxy import exceptions from pathod import pathoc, language diff --git a/test/pathod/test_protocols_http2.py b/test/pathod/test_protocols_http2.py index bb69bd10..a7303115 100644 --- a/test/pathod/test_protocols_http2.py +++ b/test/pathod/test_protocols_http2.py @@ -3,7 +3,7 @@ import codecs import hyperframe from netlib import tcp, http -from netlib.tutils import raises +from mitmproxy.test.tutils import raises from netlib.http import http2 from mitmproxy import exceptions diff --git a/test/pathod/tutils.py b/test/pathod/tutils.py index 171d97a4..16dec187 100644 --- a/test/pathod/tutils.py +++ b/test/pathod/tutils.py @@ -8,7 +8,7 @@ import urllib from mitmproxy.utils import data from netlib import tcp -from netlib import tutils +from mitmproxy.test import tutils from pathod import language from pathod import pathoc -- cgit v1.2.3 From 8430f857b504a3e7406dc36e54dc32783569d0dd Mon Sep 17 00:00:00 2001 From: Aldo Cortesi Date: Thu, 20 Oct 2016 11:56:38 +1300 Subject: The final piece: netlib -> mitproxy.net --- .appveyor.yml | 2 +- .travis.yml | 2 +- MANIFEST.in | 3 +- README.rst | 7 +- docs/conf.py | 5 +- docs/dev/testing.rst | 2 +- docs/modd.conf | 2 +- examples/har_dump.py | 2 +- mitmproxy/addons/stickycookie.py | 2 +- mitmproxy/addons/streambodies.py | 2 +- mitmproxy/addons/wsgiapp.py | 2 +- mitmproxy/certs.py | 2 +- mitmproxy/connections.py | 2 +- mitmproxy/contentviews.py | 4 +- mitmproxy/exceptions.py | 2 +- mitmproxy/export.py | 6 +- mitmproxy/http.py | 12 +- mitmproxy/master.py | 4 +- mitmproxy/net/__init__.py | 0 mitmproxy/net/check.py | 22 + mitmproxy/net/http/__init__.py | 15 + mitmproxy/net/http/authentication.py | 176 ++++ mitmproxy/net/http/cookies.py | 384 ++++++++ mitmproxy/net/http/encoding.py | 175 ++++ mitmproxy/net/http/headers.py | 221 +++++ mitmproxy/net/http/http1/__init__.py | 24 + mitmproxy/net/http/http1/assemble.py | 100 +++ mitmproxy/net/http/http1/read.py | 377 ++++++++ mitmproxy/net/http/http2/__init__.py | 8 + 
mitmproxy/net/http/http2/framereader.py | 25 + mitmproxy/net/http/http2/utils.py | 37 + mitmproxy/net/http/message.py | 300 +++++++ mitmproxy/net/http/multipart.py | 32 + mitmproxy/net/http/request.py | 405 +++++++++ mitmproxy/net/http/response.py | 192 ++++ mitmproxy/net/http/status_codes.py | 104 +++ mitmproxy/net/http/url.py | 127 +++ mitmproxy/net/http/user_agents.py | 50 ++ mitmproxy/net/socks.py | 234 +++++ mitmproxy/net/tcp.py | 989 +++++++++++++++++++++ mitmproxy/net/websockets/__init__.py | 35 + mitmproxy/net/websockets/frame.py | 274 ++++++ mitmproxy/net/websockets/masker.py | 25 + mitmproxy/net/websockets/utils.py | 90 ++ mitmproxy/net/wsgi.py | 166 ++++ mitmproxy/proxy/config.py | 6 +- mitmproxy/proxy/modes/socks_proxy.py | 2 +- mitmproxy/proxy/protocol/http.py | 10 +- mitmproxy/proxy/protocol/http1.py | 2 +- mitmproxy/proxy/protocol/http2.py | 12 +- mitmproxy/proxy/protocol/http_replay.py | 2 +- mitmproxy/proxy/protocol/rawtcp.py | 4 +- mitmproxy/proxy/protocol/tls.py | 2 +- mitmproxy/proxy/protocol/websockets.py | 4 +- mitmproxy/proxy/server.py | 4 +- mitmproxy/test/tutils.py | 10 +- mitmproxy/tools/cmdline.py | 2 +- mitmproxy/tools/console/common.py | 6 +- mitmproxy/tools/console/flowlist.py | 4 +- mitmproxy/tools/console/flowview.py | 4 +- mitmproxy/tools/console/grideditor/col_subgrid.py | 2 +- mitmproxy/tools/console/grideditor/editors.py | 2 +- mitmproxy/tools/console/master.py | 2 +- mitmproxy/tools/console/statusbar.py | 4 +- mitmproxy/tools/dump.py | 2 +- mitmproxy/tools/web/master.py | 2 +- mitmproxy/utils/debug.py | 2 +- mitmproxy/utils/version_check.py | 5 +- netlib/__init__.py | 0 netlib/check.py | 22 - netlib/http/__init__.py | 15 - netlib/http/authentication.py | 176 ---- netlib/http/cookies.py | 384 -------- netlib/http/encoding.py | 175 ---- netlib/http/headers.py | 221 ----- netlib/http/http1/__init__.py | 24 - netlib/http/http1/assemble.py | 100 --- netlib/http/http1/read.py | 377 -------- netlib/http/http2/__init__.py | 8 - netlib/http/http2/framereader.py | 25 - netlib/http/http2/utils.py | 37 - netlib/http/message.py | 300 ------- netlib/http/multipart.py | 32 - netlib/http/request.py | 405 --------- netlib/http/response.py | 192 ---- netlib/http/status_codes.py | 104 --- netlib/http/url.py | 127 --- netlib/http/user_agents.py | 50 -- netlib/socks.py | 234 ----- netlib/tcp.py | 989 --------------------- netlib/websockets/__init__.py | 35 - netlib/websockets/frame.py | 274 ------ netlib/websockets/masker.py | 25 - netlib/websockets/utils.py | 90 -- netlib/wsgi.py | 166 ---- pathod/language/http.py | 12 +- pathod/language/http2.py | 4 +- pathod/language/websockets.py | 18 +- pathod/pathoc.py | 8 +- pathod/pathoc_cmdline.py | 4 +- pathod/pathod.py | 4 +- pathod/pathod_cmdline.py | 2 +- pathod/protocols/http.py | 2 +- pathod/protocols/http2.py | 22 +- pathod/protocols/websockets.py | 2 +- release/README.mkd | 2 +- release/rtool.py | 2 +- setup.py | 1 - test/mitmproxy/net/__init__.py | 0 test/mitmproxy/net/data/clientcert/.gitignore | 3 + test/mitmproxy/net/data/clientcert/client.cnf | 5 + test/mitmproxy/net/data/clientcert/client.pem | 42 + test/mitmproxy/net/data/clientcert/make | 8 + test/mitmproxy/net/data/dercert | Bin 0 -> 1838 bytes test/mitmproxy/net/data/dhparam.pem | 13 + test/mitmproxy/net/data/htpasswd | 1 + test/mitmproxy/net/data/server.crt | 14 + test/mitmproxy/net/data/server.key | 15 + test/mitmproxy/net/data/text_cert | 145 +++ test/mitmproxy/net/data/text_cert_2 | 39 + test/mitmproxy/net/data/text_cert_weird1 | 31 + 
.../net/data/verificationcerts/9da13359.0 | 21 + .../net/data/verificationcerts/generate.py | 66 ++ .../net/data/verificationcerts/self-signed.crt | 19 + .../net/data/verificationcerts/self-signed.key | 27 + .../net/data/verificationcerts/trusted-leaf.crt | 18 + .../net/data/verificationcerts/trusted-leaf.key | 27 + .../net/data/verificationcerts/trusted-root.crt | 21 + .../net/data/verificationcerts/trusted-root.key | 27 + .../net/data/verificationcerts/trusted-root.srl | 1 + test/mitmproxy/net/http/__init__.py | 0 test/mitmproxy/net/http/http1/__init__.py | 0 test/mitmproxy/net/http/http1/test_assemble.py | 101 +++ test/mitmproxy/net/http/http1/test_read.py | 371 ++++++++ test/mitmproxy/net/http/http2/__init__.py | 0 test/mitmproxy/net/http/http2/test_framereader.py | 1 + test/mitmproxy/net/http/test_authentication.py | 122 +++ test/mitmproxy/net/http/test_cookies.py | 365 ++++++++ test/mitmproxy/net/http/test_encoding.py | 73 ++ test/mitmproxy/net/http/test_headers.py | 106 +++ test/mitmproxy/net/http/test_message.py | 271 ++++++ test/mitmproxy/net/http/test_multipart.py | 24 + test/mitmproxy/net/http/test_request.py | 271 ++++++ test/mitmproxy/net/http/test_response.py | 145 +++ test/mitmproxy/net/http/test_status_codes.py | 6 + test/mitmproxy/net/http/test_url.py | 102 +++ test/mitmproxy/net/http/test_user_agents.py | 6 + test/mitmproxy/net/test_check.py | 10 + test/mitmproxy/net/test_imports.py | 1 + test/mitmproxy/net/test_socks.py | 189 ++++ test/mitmproxy/net/test_tcp.py | 802 +++++++++++++++++ test/mitmproxy/net/test_wsgi.py | 106 +++ test/mitmproxy/net/tools/getcertnames | 27 + test/mitmproxy/net/tservers.py | 113 +++ test/mitmproxy/net/websockets/__init__.py | 0 test/mitmproxy/net/websockets/test_frame.py | 164 ++++ test/mitmproxy/net/websockets/test_masker.py | 23 + test/mitmproxy/net/websockets/test_utils.py | 105 +++ test/mitmproxy/protocol/test_http1.py | 4 +- test/mitmproxy/protocol/test_http2.py | 16 +- test/mitmproxy/protocol/test_websockets.py | 14 +- test/mitmproxy/test_contentview.py | 4 +- test/mitmproxy/test_custom_contentview.py | 2 +- test/mitmproxy/test_examples.py | 4 +- test/mitmproxy/test_flow.py | 2 +- test/mitmproxy/test_flow_export.py | 2 +- test/mitmproxy/test_proxy.py | 2 +- test/mitmproxy/test_server.py | 18 +- test/netlib/__init__.py | 0 test/netlib/data/clientcert/.gitignore | 3 - test/netlib/data/clientcert/client.cnf | 5 - test/netlib/data/clientcert/client.pem | 42 - test/netlib/data/clientcert/make | 8 - test/netlib/data/dercert | Bin 1838 -> 0 bytes test/netlib/data/dhparam.pem | 13 - test/netlib/data/htpasswd | 1 - test/netlib/data/server.crt | 14 - test/netlib/data/server.key | 15 - test/netlib/data/text_cert | 145 --- test/netlib/data/text_cert_2 | 39 - test/netlib/data/text_cert_weird1 | 31 - test/netlib/data/verificationcerts/9da13359.0 | 21 - test/netlib/data/verificationcerts/generate.py | 66 -- test/netlib/data/verificationcerts/self-signed.crt | 19 - test/netlib/data/verificationcerts/self-signed.key | 27 - .../netlib/data/verificationcerts/trusted-leaf.crt | 18 - .../netlib/data/verificationcerts/trusted-leaf.key | 27 - .../netlib/data/verificationcerts/trusted-root.crt | 21 - .../netlib/data/verificationcerts/trusted-root.key | 27 - .../netlib/data/verificationcerts/trusted-root.srl | 1 - test/netlib/http/__init__.py | 0 test/netlib/http/http1/__init__.py | 0 test/netlib/http/http1/test_assemble.py | 101 --- test/netlib/http/http1/test_read.py | 371 -------- test/netlib/http/http2/__init__.py | 0 
test/netlib/http/http2/test_framereader.py | 1 - test/netlib/http/test_authentication.py | 122 --- test/netlib/http/test_cookies.py | 365 -------- test/netlib/http/test_encoding.py | 73 -- test/netlib/http/test_headers.py | 106 --- test/netlib/http/test_message.py | 271 ------ test/netlib/http/test_multipart.py | 24 - test/netlib/http/test_request.py | 271 ------ test/netlib/http/test_response.py | 145 --- test/netlib/http/test_status_codes.py | 6 - test/netlib/http/test_url.py | 102 --- test/netlib/http/test_user_agents.py | 6 - test/netlib/test_check.py | 10 - test/netlib/test_imports.py | 1 - test/netlib/test_socks.py | 189 ---- test/netlib/test_tcp.py | 802 ----------------- test/netlib/test_wsgi.py | 106 --- test/netlib/tools/getcertnames | 27 - test/netlib/tservers.py | 113 --- test/netlib/websockets/__init__.py | 0 test/netlib/websockets/test_frame.py | 164 ---- test/netlib/websockets/test_masker.py | 23 - test/netlib/websockets/test_utils.py | 105 --- test/pathod/test_language_http2.py | 4 +- test/pathod/test_language_websocket.py | 12 +- test/pathod/test_pathoc.py | 6 +- test/pathod/test_pathod.py | 2 +- test/pathod/test_protocols_http2.py | 26 +- test/pathod/tutils.py | 2 +- tox.ini | 2 +- 225 files changed, 8812 insertions(+), 8819 deletions(-) create mode 100644 mitmproxy/net/__init__.py create mode 100644 mitmproxy/net/check.py create mode 100644 mitmproxy/net/http/__init__.py create mode 100644 mitmproxy/net/http/authentication.py create mode 100644 mitmproxy/net/http/cookies.py create mode 100644 mitmproxy/net/http/encoding.py create mode 100644 mitmproxy/net/http/headers.py create mode 100644 mitmproxy/net/http/http1/__init__.py create mode 100644 mitmproxy/net/http/http1/assemble.py create mode 100644 mitmproxy/net/http/http1/read.py create mode 100644 mitmproxy/net/http/http2/__init__.py create mode 100644 mitmproxy/net/http/http2/framereader.py create mode 100644 mitmproxy/net/http/http2/utils.py create mode 100644 mitmproxy/net/http/message.py create mode 100644 mitmproxy/net/http/multipart.py create mode 100644 mitmproxy/net/http/request.py create mode 100644 mitmproxy/net/http/response.py create mode 100644 mitmproxy/net/http/status_codes.py create mode 100644 mitmproxy/net/http/url.py create mode 100644 mitmproxy/net/http/user_agents.py create mode 100644 mitmproxy/net/socks.py create mode 100644 mitmproxy/net/tcp.py create mode 100644 mitmproxy/net/websockets/__init__.py create mode 100644 mitmproxy/net/websockets/frame.py create mode 100644 mitmproxy/net/websockets/masker.py create mode 100644 mitmproxy/net/websockets/utils.py create mode 100644 mitmproxy/net/wsgi.py delete mode 100644 netlib/__init__.py delete mode 100644 netlib/check.py delete mode 100644 netlib/http/__init__.py delete mode 100644 netlib/http/authentication.py delete mode 100644 netlib/http/cookies.py delete mode 100644 netlib/http/encoding.py delete mode 100644 netlib/http/headers.py delete mode 100644 netlib/http/http1/__init__.py delete mode 100644 netlib/http/http1/assemble.py delete mode 100644 netlib/http/http1/read.py delete mode 100644 netlib/http/http2/__init__.py delete mode 100644 netlib/http/http2/framereader.py delete mode 100644 netlib/http/http2/utils.py delete mode 100644 netlib/http/message.py delete mode 100644 netlib/http/multipart.py delete mode 100644 netlib/http/request.py delete mode 100644 netlib/http/response.py delete mode 100644 netlib/http/status_codes.py delete mode 100644 netlib/http/url.py delete mode 100644 netlib/http/user_agents.py delete mode 100644 
netlib/socks.py delete mode 100644 netlib/tcp.py delete mode 100644 netlib/websockets/__init__.py delete mode 100644 netlib/websockets/frame.py delete mode 100644 netlib/websockets/masker.py delete mode 100644 netlib/websockets/utils.py delete mode 100644 netlib/wsgi.py create mode 100644 test/mitmproxy/net/__init__.py create mode 100644 test/mitmproxy/net/data/clientcert/.gitignore create mode 100644 test/mitmproxy/net/data/clientcert/client.cnf create mode 100644 test/mitmproxy/net/data/clientcert/client.pem create mode 100644 test/mitmproxy/net/data/clientcert/make create mode 100644 test/mitmproxy/net/data/dercert create mode 100644 test/mitmproxy/net/data/dhparam.pem create mode 100644 test/mitmproxy/net/data/htpasswd create mode 100644 test/mitmproxy/net/data/server.crt create mode 100644 test/mitmproxy/net/data/server.key create mode 100644 test/mitmproxy/net/data/text_cert create mode 100644 test/mitmproxy/net/data/text_cert_2 create mode 100644 test/mitmproxy/net/data/text_cert_weird1 create mode 100644 test/mitmproxy/net/data/verificationcerts/9da13359.0 create mode 100644 test/mitmproxy/net/data/verificationcerts/generate.py create mode 100644 test/mitmproxy/net/data/verificationcerts/self-signed.crt create mode 100644 test/mitmproxy/net/data/verificationcerts/self-signed.key create mode 100644 test/mitmproxy/net/data/verificationcerts/trusted-leaf.crt create mode 100644 test/mitmproxy/net/data/verificationcerts/trusted-leaf.key create mode 100644 test/mitmproxy/net/data/verificationcerts/trusted-root.crt create mode 100644 test/mitmproxy/net/data/verificationcerts/trusted-root.key create mode 100644 test/mitmproxy/net/data/verificationcerts/trusted-root.srl create mode 100644 test/mitmproxy/net/http/__init__.py create mode 100644 test/mitmproxy/net/http/http1/__init__.py create mode 100644 test/mitmproxy/net/http/http1/test_assemble.py create mode 100644 test/mitmproxy/net/http/http1/test_read.py create mode 100644 test/mitmproxy/net/http/http2/__init__.py create mode 100644 test/mitmproxy/net/http/http2/test_framereader.py create mode 100644 test/mitmproxy/net/http/test_authentication.py create mode 100644 test/mitmproxy/net/http/test_cookies.py create mode 100644 test/mitmproxy/net/http/test_encoding.py create mode 100644 test/mitmproxy/net/http/test_headers.py create mode 100644 test/mitmproxy/net/http/test_message.py create mode 100644 test/mitmproxy/net/http/test_multipart.py create mode 100644 test/mitmproxy/net/http/test_request.py create mode 100644 test/mitmproxy/net/http/test_response.py create mode 100644 test/mitmproxy/net/http/test_status_codes.py create mode 100644 test/mitmproxy/net/http/test_url.py create mode 100644 test/mitmproxy/net/http/test_user_agents.py create mode 100644 test/mitmproxy/net/test_check.py create mode 100644 test/mitmproxy/net/test_imports.py create mode 100644 test/mitmproxy/net/test_socks.py create mode 100644 test/mitmproxy/net/test_tcp.py create mode 100644 test/mitmproxy/net/test_wsgi.py create mode 100644 test/mitmproxy/net/tools/getcertnames create mode 100644 test/mitmproxy/net/tservers.py create mode 100644 test/mitmproxy/net/websockets/__init__.py create mode 100644 test/mitmproxy/net/websockets/test_frame.py create mode 100644 test/mitmproxy/net/websockets/test_masker.py create mode 100644 test/mitmproxy/net/websockets/test_utils.py delete mode 100644 test/netlib/__init__.py delete mode 100644 test/netlib/data/clientcert/.gitignore delete mode 100644 test/netlib/data/clientcert/client.cnf delete mode 100644 
test/netlib/data/clientcert/client.pem delete mode 100644 test/netlib/data/clientcert/make delete mode 100644 test/netlib/data/dercert delete mode 100644 test/netlib/data/dhparam.pem delete mode 100644 test/netlib/data/htpasswd delete mode 100644 test/netlib/data/server.crt delete mode 100644 test/netlib/data/server.key delete mode 100644 test/netlib/data/text_cert delete mode 100644 test/netlib/data/text_cert_2 delete mode 100644 test/netlib/data/text_cert_weird1 delete mode 100644 test/netlib/data/verificationcerts/9da13359.0 delete mode 100644 test/netlib/data/verificationcerts/generate.py delete mode 100644 test/netlib/data/verificationcerts/self-signed.crt delete mode 100644 test/netlib/data/verificationcerts/self-signed.key delete mode 100644 test/netlib/data/verificationcerts/trusted-leaf.crt delete mode 100644 test/netlib/data/verificationcerts/trusted-leaf.key delete mode 100644 test/netlib/data/verificationcerts/trusted-root.crt delete mode 100644 test/netlib/data/verificationcerts/trusted-root.key delete mode 100644 test/netlib/data/verificationcerts/trusted-root.srl delete mode 100644 test/netlib/http/__init__.py delete mode 100644 test/netlib/http/http1/__init__.py delete mode 100644 test/netlib/http/http1/test_assemble.py delete mode 100644 test/netlib/http/http1/test_read.py delete mode 100644 test/netlib/http/http2/__init__.py delete mode 100644 test/netlib/http/http2/test_framereader.py delete mode 100644 test/netlib/http/test_authentication.py delete mode 100644 test/netlib/http/test_cookies.py delete mode 100644 test/netlib/http/test_encoding.py delete mode 100644 test/netlib/http/test_headers.py delete mode 100644 test/netlib/http/test_message.py delete mode 100644 test/netlib/http/test_multipart.py delete mode 100644 test/netlib/http/test_request.py delete mode 100644 test/netlib/http/test_response.py delete mode 100644 test/netlib/http/test_status_codes.py delete mode 100644 test/netlib/http/test_url.py delete mode 100644 test/netlib/http/test_user_agents.py delete mode 100644 test/netlib/test_check.py delete mode 100644 test/netlib/test_imports.py delete mode 100644 test/netlib/test_socks.py delete mode 100644 test/netlib/test_tcp.py delete mode 100644 test/netlib/test_wsgi.py delete mode 100644 test/netlib/tools/getcertnames delete mode 100644 test/netlib/tservers.py delete mode 100644 test/netlib/websockets/__init__.py delete mode 100644 test/netlib/websockets/test_frame.py delete mode 100644 test/netlib/websockets/test_masker.py delete mode 100644 test/netlib/websockets/test_utils.py diff --git a/.appveyor.yml b/.appveyor.yml index 38868628..746e0fd1 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -23,7 +23,7 @@ install: - "pip install -U tox" test_script: - - ps: "tox -- --cov netlib --cov mitmproxy --cov pathod -v" + - ps: "tox -- --cov mitmproxy --cov pathod -v" deploy_script: ps: | diff --git a/.travis.yml b/.travis.yml index d160a96e..d7cb7027 100644 --- a/.travis.yml +++ b/.travis.yml @@ -49,7 +49,7 @@ install: fi - pip install tox -script: tox -- --cov netlib --cov mitmproxy --cov pathod -v +script: tox -- --cov mitmproxy --cov pathod -v after_success: - | diff --git a/MANIFEST.in b/MANIFEST.in index 440a14ed..404936e8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,3 @@ graft mitmproxy graft pathod -graft netlib -recursive-exclude * *.pyc *.pyo *.swo *.swp *.map \ No newline at end of file +recursive-exclude * *.pyc *.pyo *.swo *.swp *.map diff --git a/README.rst b/README.rst index 146a2c43..cdeada2d 100644 --- a/README.rst +++ b/README.rst @@ -3,8 
+3,7 @@ mitmproxy |travis| |appveyor| |coverage| |latest_release| |python_versions| -This repository contains the **mitmproxy** and **pathod** projects, as well as -their shared networking library, **netlib**. +This repository contains the **mitmproxy** and **pathod** projects. ``mitmproxy`` is an interactive, SSL-capable intercepting proxy with a console interface. @@ -64,7 +63,7 @@ virtualenv_ installed (you can find installation instructions for virtualenv The *dev* script will create a virtualenv environment in a directory called "venv", and install all mandatory and optional dependencies into it. The -primary mitmproxy components - mitmproxy, netlib and pathod - are installed as +primary mitmproxy components - mitmproxy and pathod - are installed as "editable", so any changes to the source in the repository will be reflected live in the virtualenv. @@ -144,7 +143,7 @@ PR checks will fail and block merging. We are using this command to check for st .. code-block:: text - flake8 --jobs 8 --count mitmproxy netlib pathod examples test + flake8 --jobs 8 --count mitmproxy pathod examples test .. |mitmproxy_site| image:: https://shields.mitmproxy.org/api/https%3A%2F%2F-mitmproxy.org-blue.svg diff --git a/docs/conf.py b/docs/conf.py index ce942aa9..d7793b79 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -231,10 +231,7 @@ def linkcode_resolve(domain, info): _, line = inspect.getsourcelines(obj) except (TypeError, IOError): return None - if spath.rfind("netlib") > -1: - off = spath.rfind("netlib") - mpath = spath[off:] - elif spath.rfind("mitmproxy") > -1: + if spath.rfind("mitmproxy") > -1: off = spath.rfind("mitmproxy") mpath = spath[off:] else: diff --git a/docs/dev/testing.rst b/docs/dev/testing.rst index 315b9bc9..b86efce1 100644 --- a/docs/dev/testing.rst +++ b/docs/dev/testing.rst @@ -10,7 +10,7 @@ suitable extension to the test suite. Our tests are written for the `py.test`_ or nose_ test frameworks. At the point where you send your pull request, a command like this: ->>> py.test --cov mitmproxy --cov netlib +>>> py.test --cov mitmproxy Should give output something like this: diff --git a/docs/modd.conf b/docs/modd.conf index bc91238c..0e8f0dce 100644 --- a/docs/modd.conf +++ b/docs/modd.conf @@ -1,6 +1,6 @@ @build = ./_build -** !_build/** ../netlib/**/*.py ../mitmproxy/**/*.py { +** !_build/** ../mitmproxy/**/*.py { prep: sphinx-build -W -d @build/doctrees -b html . 
@build/html daemon: devd -m @build/html } diff --git a/examples/har_dump.py b/examples/har_dump.py index d01e6cdd..560b9adc 100644 --- a/examples/har_dump.py +++ b/examples/har_dump.py @@ -16,7 +16,7 @@ import mitmproxy from mitmproxy import version from mitmproxy.utils import strutils -from netlib.http import cookies +from mitmproxy.net.http import cookies HAR = {} diff --git a/mitmproxy/addons/stickycookie.py b/mitmproxy/addons/stickycookie.py index d89bd92d..27d78646 100644 --- a/mitmproxy/addons/stickycookie.py +++ b/mitmproxy/addons/stickycookie.py @@ -1,7 +1,7 @@ import collections from http import cookiejar -from netlib.http import cookies +from mitmproxy.net.http import cookies from mitmproxy import exceptions from mitmproxy import flowfilter diff --git a/mitmproxy/addons/streambodies.py b/mitmproxy/addons/streambodies.py index bd8958b0..88cb74f3 100644 --- a/mitmproxy/addons/streambodies.py +++ b/mitmproxy/addons/streambodies.py @@ -1,4 +1,4 @@ -from netlib.http import http1 +from mitmproxy.net.http import http1 from mitmproxy import exceptions from mitmproxy import ctx diff --git a/mitmproxy/addons/wsgiapp.py b/mitmproxy/addons/wsgiapp.py index 0879a87b..85d09a0a 100644 --- a/mitmproxy/addons/wsgiapp.py +++ b/mitmproxy/addons/wsgiapp.py @@ -1,7 +1,7 @@ from mitmproxy import ctx from mitmproxy import exceptions -from netlib import wsgi +from mitmproxy.net import wsgi from mitmproxy import version diff --git a/mitmproxy/certs.py b/mitmproxy/certs.py index 9cb8a40e..83db4e6e 100644 --- a/mitmproxy/certs.py +++ b/mitmproxy/certs.py @@ -193,7 +193,7 @@ class CertStore: @staticmethod def load_dhparam(path): - # netlib<=0.10 doesn't generate a dhparam file. + # mitmproxy<=0.10 doesn't generate a dhparam file. # Create it now if neccessary. if not os.path.exists(path): with open(path, "wb") as f: diff --git a/mitmproxy/connections.py b/mitmproxy/connections.py index 6b39ac20..015d0689 100644 --- a/mitmproxy/connections.py +++ b/mitmproxy/connections.py @@ -5,7 +5,7 @@ import os from mitmproxy import stateobject from mitmproxy import certs -from netlib import tcp +from mitmproxy.net import tcp class ClientConnection(tcp.BaseHandler, stateobject.StateObject): diff --git a/mitmproxy/contentviews.py b/mitmproxy/contentviews.py index a171f36b..b7f15d8d 100644 --- a/mitmproxy/contentviews.py +++ b/mitmproxy/contentviews.py @@ -33,10 +33,10 @@ from PIL import ExifTags from PIL import Image from mitmproxy import exceptions from mitmproxy.contrib.wbxml import ASCommandResponse -from netlib import http +from mitmproxy.net import http from mitmproxy.types import multidict from mitmproxy.utils import strutils -from netlib.http import url +from mitmproxy.net.http import url try: import pyamf diff --git a/mitmproxy/exceptions.py b/mitmproxy/exceptions.py index a7ecf17f..309b8189 100644 --- a/mitmproxy/exceptions.py +++ b/mitmproxy/exceptions.py @@ -112,7 +112,7 @@ class AddonHalt(MitmproxyException): class NetlibException(MitmproxyException): """ - Base class for all exceptions thrown by netlib. + Base class for all exceptions thrown by mitmproxy.net. 
""" def __init__(self, message=None): super().__init__(message) diff --git a/mitmproxy/export.py b/mitmproxy/export.py index e5f4d34a..d9a88849 100644 --- a/mitmproxy/export.py +++ b/mitmproxy/export.py @@ -3,7 +3,7 @@ import re import textwrap import urllib -import netlib.http +import mitmproxy.net.http def _native(s): @@ -89,9 +89,9 @@ def python_code(flow): return code -def is_json(headers: netlib.http.Headers, content: bytes) -> bool: +def is_json(headers: mitmproxy.net.http.Headers, content: bytes) -> bool: if headers: - ct = netlib.http.parse_content_type(headers.get("content-type", "")) + ct = mitmproxy.net.http.parse_content_type(headers.get("content-type", "")) if ct and "%s/%s" % (ct[0], ct[1]) == "application/json": try: return json.loads(content.decode("utf8", "surrogateescape")) diff --git a/mitmproxy/http.py b/mitmproxy/http.py index 4474ca3b..99e126fe 100644 --- a/mitmproxy/http.py +++ b/mitmproxy/http.py @@ -1,9 +1,9 @@ import cgi from mitmproxy import flow -from netlib import http +from mitmproxy.net import http from mitmproxy import version -from netlib import tcp +from mitmproxy.net import tcp class HTTPRequest(http.Request): @@ -12,7 +12,7 @@ class HTTPRequest(http.Request): A mitmproxy HTTP request. """ - # This is a very thin wrapper on top of :py:class:`netlib.http.Request` and + # This is a very thin wrapper on top of :py:class:`mitmproxy.net.http.Request` and # may be removed in the future. def __init__( @@ -73,7 +73,7 @@ class HTTPRequest(http.Request): @classmethod def wrap(self, request): """ - Wraps an existing :py:class:`netlib.http.Request`. + Wraps an existing :py:class:`mitmproxy.net.http.Request`. """ req = HTTPRequest( first_line_format=request.data.first_line_format, @@ -99,7 +99,7 @@ class HTTPResponse(http.Response): """ A mitmproxy HTTP response. """ - # This is a very thin wrapper on top of :py:class:`netlib.http.Response` and + # This is a very thin wrapper on top of :py:class:`mitmproxy.net.http.Response` and # may be removed in the future. def __init__( @@ -131,7 +131,7 @@ class HTTPResponse(http.Response): @classmethod def wrap(self, response): """ - Wraps an existing :py:class:`netlib.http.Response`. + Wraps an existing :py:class:`mitmproxy.net.http.Response`. """ resp = HTTPResponse( http_version=response.data.http_version, diff --git a/mitmproxy/master.py b/mitmproxy/master.py index 2e57e57d..31ce17a3 100644 --- a/mitmproxy/master.py +++ b/mitmproxy/master.py @@ -15,7 +15,7 @@ from mitmproxy import log from mitmproxy import io from mitmproxy.proxy.protocol import http_replay from mitmproxy.types import basethread -import netlib.http +import mitmproxy.net.http from . import ctx as mitmproxy_ctx @@ -122,7 +122,7 @@ class Master: s = connections.ServerConnection.make_dummy((host, port)) f = http.HTTPFlow(c, s) - headers = netlib.http.Headers() + headers = mitmproxy.net.http.Headers() req = http.HTTPRequest( "absolute", diff --git a/mitmproxy/net/__init__.py b/mitmproxy/net/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mitmproxy/net/check.py b/mitmproxy/net/check.py new file mode 100644 index 00000000..7b007cb5 --- /dev/null +++ b/mitmproxy/net/check.py @@ -0,0 +1,22 @@ +import re + +_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(? bool: + """ + Checks if a hostname is valid. 
+ """ + try: + host.decode("idna") + except ValueError: + return False + if len(host) > 255: + return False + if host and host[-1:] == b".": + host = host[:-1] + return all(_label_valid.match(x) for x in host.split(b".")) + + +def is_valid_port(port): + return 0 <= port <= 65535 diff --git a/mitmproxy/net/http/__init__.py b/mitmproxy/net/http/__init__.py new file mode 100644 index 00000000..7e290bdc --- /dev/null +++ b/mitmproxy/net/http/__init__.py @@ -0,0 +1,15 @@ +from mitmproxy.net.http.request import Request +from mitmproxy.net.http.response import Response +from mitmproxy.net.http.message import Message +from mitmproxy.net.http.headers import Headers, parse_content_type +from mitmproxy.net.http.message import decoded +from mitmproxy.net.http import http1, http2, status_codes, multipart + +__all__ = [ + "Request", + "Response", + "Message", + "Headers", "parse_content_type", + "decoded", + "http1", "http2", "status_codes", "multipart", +] diff --git a/mitmproxy/net/http/authentication.py b/mitmproxy/net/http/authentication.py new file mode 100644 index 00000000..a65279e4 --- /dev/null +++ b/mitmproxy/net/http/authentication.py @@ -0,0 +1,176 @@ +import argparse +import binascii + + +def parse_http_basic_auth(s): + words = s.split() + if len(words) != 2: + return None + scheme = words[0] + try: + user = binascii.a2b_base64(words[1]).decode("utf8", "replace") + except binascii.Error: + return None + parts = user.split(':') + if len(parts) != 2: + return None + return scheme, parts[0], parts[1] + + +def assemble_http_basic_auth(scheme, username, password): + v = binascii.b2a_base64((username + ":" + password).encode("utf8")).decode("ascii") + return scheme + " " + v + + +class NullProxyAuth: + + """ + No proxy auth at all (returns empty challange headers) + """ + + def __init__(self, password_manager): + self.password_manager = password_manager + + def clean(self, headers_): + """ + Clean up authentication headers, so they're not passed upstream. + """ + + def authenticate(self, headers_): + """ + Tests that the user is allowed to use the proxy + """ + return True + + def auth_challenge_headers(self): + """ + Returns a dictionary containing the headers require to challenge the user + """ + return {} + + +class BasicAuth(NullProxyAuth): + CHALLENGE_HEADER = None + AUTH_HEADER = None + + def __init__(self, password_manager, realm): + NullProxyAuth.__init__(self, password_manager) + self.realm = realm + + def clean(self, headers): + del headers[self.AUTH_HEADER] + + def authenticate(self, headers): + auth_value = headers.get(self.AUTH_HEADER) + if not auth_value: + return False + parts = parse_http_basic_auth(auth_value) + if not parts: + return False + scheme, username, password = parts + if scheme.lower() != 'basic': + return False + if not self.password_manager.test(username, password): + return False + self.username = username + return True + + def auth_challenge_headers(self): + return {self.CHALLENGE_HEADER: 'Basic realm="%s"' % self.realm} + + +class BasicWebsiteAuth(BasicAuth): + CHALLENGE_HEADER = 'WWW-Authenticate' + AUTH_HEADER = 'Authorization' + + +class BasicProxyAuth(BasicAuth): + CHALLENGE_HEADER = 'Proxy-Authenticate' + AUTH_HEADER = 'Proxy-Authorization' + + +class PassMan: + + def test(self, username_, password_token_): + return False + + +class PassManNonAnon(PassMan): + + """ + Ensure the user specifies a username, accept any password. 
+ """ + + def test(self, username, password_token_): + if username: + return True + return False + + +class PassManHtpasswd(PassMan): + + """ + Read usernames and passwords from an htpasswd file + """ + + def __init__(self, path): + """ + Raises ValueError if htpasswd file is invalid. + """ + import passlib.apache + self.htpasswd = passlib.apache.HtpasswdFile(path) + + def test(self, username, password_token): + return bool(self.htpasswd.check_password(username, password_token)) + + +class PassManSingleUser(PassMan): + + def __init__(self, username, password): + self.username, self.password = username, password + + def test(self, username, password_token): + return self.username == username and self.password == password_token + + +class AuthAction(argparse.Action): + + """ + Helper class to allow seamless integration int argparse. Example usage: + parser.add_argument( + "--nonanonymous", + action=NonanonymousAuthAction, nargs=0, + help="Allow access to any user long as a credentials are specified." + ) + """ + + def __call__(self, parser, namespace, values, option_string=None): + passman = self.getPasswordManager(values) + authenticator = BasicProxyAuth(passman, "mitmproxy") + setattr(namespace, self.dest, authenticator) + + def getPasswordManager(self, s): # pragma: no cover + raise NotImplementedError() + + +class SingleuserAuthAction(AuthAction): + + def getPasswordManager(self, s): + if len(s.split(':')) != 2: + raise argparse.ArgumentTypeError( + "Invalid single-user specification. Please use the format username:password" + ) + username, password = s.split(':') + return PassManSingleUser(username, password) + + +class NonanonymousAuthAction(AuthAction): + + def getPasswordManager(self, s): + return PassManNonAnon() + + +class HtpasswdAuthAction(AuthAction): + + def getPasswordManager(self, s): + return PassManHtpasswd(s) diff --git a/mitmproxy/net/http/cookies.py b/mitmproxy/net/http/cookies.py new file mode 100644 index 00000000..9f32fa5e --- /dev/null +++ b/mitmproxy/net/http/cookies.py @@ -0,0 +1,384 @@ +import collections +import email.utils +import re +import time + +from mitmproxy.types import multidict + +""" +A flexible module for cookie parsing and manipulation. + +This module differs from usual standards-compliant cookie modules in a number +of ways. We try to be as permissive as possible, and to retain even mal-formed +information. Duplicate cookies are preserved in parsing, and can be set in +formatting. We do attempt to escape and quote values where needed, but will not +reject data that violate the specs. + +Parsing accepts the formats in RFC6265 and partially RFC2109 and RFC2965. We +also parse the comma-separated variant of Set-Cookie that allows multiple +cookies to be set in a single header. Serialization follows RFC6265. + + http://tools.ietf.org/html/rfc6265 + http://tools.ietf.org/html/rfc2109 + http://tools.ietf.org/html/rfc2965 +""" + +_cookie_params = set(( + 'expires', 'path', 'comment', 'max-age', + 'secure', 'httponly', 'version', +)) + +ESCAPE = re.compile(r"([\"\\])") + + +class CookieAttrs(multidict.ImmutableMultiDict): + @staticmethod + def _kconv(key): + return key.lower() + + @staticmethod + def _reduce_values(values): + # See the StickyCookieTest for a weird cookie that only makes sense + # if we take the last part. + return values[-1] + +SetCookie = collections.namedtuple("SetCookie", ["value", "attrs"]) + + +def _read_until(s, start, term): + """ + Read until one of the characters in term is reached. 
+ """ + if start == len(s): + return "", start + 1 + for i in range(start, len(s)): + if s[i] in term: + return s[start:i], i + return s[start:i + 1], i + 1 + + +def _read_quoted_string(s, start): + """ + start: offset to the first quote of the string to be read + + A sort of loose super-set of the various quoted string specifications. + + RFC6265 disallows backslashes or double quotes within quoted strings. + Prior RFCs use backslashes to escape. This leaves us free to apply + backslash escaping by default and be compatible with everything. + """ + escaping = False + ret = [] + # Skip the first quote + i = start # initialize in case the loop doesn't run. + for i in range(start + 1, len(s)): + if escaping: + ret.append(s[i]) + escaping = False + elif s[i] == '"': + break + elif s[i] == "\\": + escaping = True + else: + ret.append(s[i]) + return "".join(ret), i + 1 + + +def _read_key(s, start, delims=";="): + """ + Read a key - the LHS of a token/value pair in a cookie. + """ + return _read_until(s, start, delims) + + +def _read_value(s, start, delims): + """ + Reads a value - the RHS of a token/value pair in a cookie. + """ + if start >= len(s): + return "", start + elif s[start] == '"': + return _read_quoted_string(s, start) + else: + return _read_until(s, start, delims) + + +def _read_cookie_pairs(s, off=0): + """ + Read pairs of lhs=rhs values from Cookie headers. + + off: start offset + """ + pairs = [] + + while True: + lhs, off = _read_key(s, off) + lhs = lhs.lstrip() + + if lhs: + rhs = None + if off < len(s) and s[off] == "=": + rhs, off = _read_value(s, off + 1, ";") + + pairs.append([lhs, rhs]) + + off += 1 + + if not off < len(s): + break + + return pairs, off + + +def _read_set_cookie_pairs(s, off=0): + """ + Read pairs of lhs=rhs values from SetCookie headers while handling multiple cookies. + + off: start offset + specials: attributes that are treated specially + """ + cookies = [] + pairs = [] + + while True: + lhs, off = _read_key(s, off, ";=,") + lhs = lhs.lstrip() + + if lhs: + rhs = None + if off < len(s) and s[off] == "=": + rhs, off = _read_value(s, off + 1, ";,") + + # Special handliing of attributes + if lhs.lower() == "expires": + # 'expires' values can contain commas in them so they need to + # be handled separately. + + # We actually bank on the fact that the expires value WILL + # contain a comma. Things will fail, if they don't. + + # '3' is just a heuristic we use to determine whether we've + # only read a part of the expires value and we should read more. + if len(rhs) <= 3: + trail, off = _read_value(s, off + 1, ";,") + rhs = rhs + "," + trail + + pairs.append([lhs, rhs]) + + # comma marks the beginning of a new cookie + if off < len(s) and s[off] == ",": + cookies.append(pairs) + pairs = [] + + off += 1 + + if not off < len(s): + break + + if pairs or not cookies: + cookies.append(pairs) + + return cookies, off + + +def _has_special(s): + for i in s: + if i in '",;\\': + return True + o = ord(i) + if o < 0x21 or o > 0x7e: + return True + return False + + +def _format_pairs(pairs, specials=(), sep="; "): + """ + specials: A lower-cased list of keys that will not be quoted. 
+ """ + vals = [] + for k, v in pairs: + if v is None: + vals.append(k) + else: + if k.lower() not in specials and _has_special(v): + v = ESCAPE.sub(r"\\\1", v) + v = '"%s"' % v + vals.append("%s=%s" % (k, v)) + return sep.join(vals) + + +def _format_set_cookie_pairs(lst): + return _format_pairs( + lst, + specials=("expires", "path") + ) + + +def parse_cookie_header(line): + """ + Parse a Cookie header value. + Returns a list of (lhs, rhs) tuples. + """ + pairs, off_ = _read_cookie_pairs(line) + return pairs + + +def parse_cookie_headers(cookie_headers): + cookie_list = [] + for header in cookie_headers: + cookie_list.extend(parse_cookie_header(header)) + return cookie_list + + +def format_cookie_header(lst): + """ + Formats a Cookie header value. + """ + return _format_pairs(lst) + + +def parse_set_cookie_header(line): + """ + Parse a Set-Cookie header value + + Returns a list of (name, value, attrs) tuples, where attrs is a + CookieAttrs dict of attributes. No attempt is made to parse attribute + values - they are treated purely as strings. + """ + cookie_pairs, off = _read_set_cookie_pairs(line) + cookies = [ + (pairs[0][0], pairs[0][1], CookieAttrs(tuple(x) for x in pairs[1:])) + for pairs in cookie_pairs if pairs + ] + return cookies + + +def parse_set_cookie_headers(headers): + rv = [] + for header in headers: + cookies = parse_set_cookie_header(header) + if cookies: + for name, value, attrs in cookies: + rv.append((name, SetCookie(value, attrs))) + return rv + + +def format_set_cookie_header(set_cookies): + """ + Formats a Set-Cookie header value. + """ + + rv = [] + + for set_cookie in set_cookies: + name, value, attrs = set_cookie + + pairs = [(name, value)] + pairs.extend( + attrs.fields if hasattr(attrs, "fields") else attrs + ) + + rv.append(_format_set_cookie_pairs(pairs)) + + return ", ".join(rv) + + +def refresh_set_cookie_header(c, delta): + """ + Args: + c: A Set-Cookie string + delta: Time delta in seconds + Returns: + A refreshed Set-Cookie string + """ + + name, value, attrs = parse_set_cookie_header(c)[0] + if not name or not value: + raise ValueError("Invalid Cookie") + + if "expires" in attrs: + e = email.utils.parsedate_tz(attrs["expires"]) + if e: + f = email.utils.mktime_tz(e) + delta + attrs = attrs.with_set_all("expires", [email.utils.formatdate(f)]) + else: + # This can happen when the expires tag is invalid. + # reddit.com sends a an expires tag like this: "Thu, 31 Dec + # 2037 23:59:59 GMT", which is valid RFC 1123, but not + # strictly correct according to the cookie spec. Browsers + # appear to parse this tolerantly - maybe we should too. + # For now, we just ignore this. + attrs = attrs.with_delitem("expires") + + rv = format_set_cookie_header([(name, value, attrs)]) + if not rv: + raise ValueError("Invalid Cookie") + return rv + + +def get_expiration_ts(cookie_attrs): + """ + Determines the time when the cookie will be expired. + + Considering both 'expires' and 'max-age' parameters. + + Returns: timestamp of when the cookie will expire. + None, if no expiration time is set. + """ + if 'expires' in cookie_attrs: + e = email.utils.parsedate_tz(cookie_attrs["expires"]) + if e: + return email.utils.mktime_tz(e) + + elif 'max-age' in cookie_attrs: + try: + max_age = int(cookie_attrs['Max-Age']) + except ValueError: + pass + else: + now_ts = time.time() + return now_ts + max_age + + return None + + +def is_expired(cookie_attrs): + """ + Determines whether a cookie has expired. 
+ + Returns: boolean + """ + + exp_ts = get_expiration_ts(cookie_attrs) + now_ts = time.time() + + # If no expiration information was provided with the cookie + if exp_ts is None: + return False + else: + return exp_ts <= now_ts + + +def group_cookies(pairs): + """ + Converts a list of pairs to a (name, value, attrs) for each cookie. + """ + + if not pairs: + return [] + + cookie_list = [] + + # First pair is always a new cookie + name, value = pairs[0] + attrs = [] + + for k, v in pairs[1:]: + if k.lower() in _cookie_params: + attrs.append((k, v)) + else: + cookie_list.append((name, value, CookieAttrs(attrs))) + name, value, attrs = k, v, [] + + cookie_list.append((name, value, CookieAttrs(attrs))) + return cookie_list diff --git a/mitmproxy/net/http/encoding.py b/mitmproxy/net/http/encoding.py new file mode 100644 index 00000000..e123a033 --- /dev/null +++ b/mitmproxy/net/http/encoding.py @@ -0,0 +1,175 @@ +""" +Utility functions for decoding response bodies. +""" + +import codecs +import collections +from io import BytesIO + +import gzip +import zlib +import brotli + +from typing import Union + + +# We have a shared single-element cache for encoding and decoding. +# This is quite useful in practice, e.g. +# flow.request.content = flow.request.content.replace(b"foo", b"bar") +# does not require an .encode() call if content does not contain b"foo" +CachedDecode = collections.namedtuple("CachedDecode", "encoded encoding errors decoded") +_cache = CachedDecode(None, None, None, None) + + +def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: + """ + Decode the given input object + + Returns: + The decoded value + + Raises: + ValueError, if decoding fails. + """ + if len(encoded) == 0: + return encoded + + global _cache + cached = ( + isinstance(encoded, bytes) and + _cache.encoded == encoded and + _cache.encoding == encoding and + _cache.errors == errors + ) + if cached: + return _cache.decoded + try: + try: + decoded = custom_decode[encoding](encoded) + except KeyError: + decoded = codecs.decode(encoded, encoding, errors) + if encoding in ("gzip", "deflate", "br"): + _cache = CachedDecode(encoded, encoding, errors, decoded) + return decoded + except TypeError: + raise + except Exception as e: + raise ValueError("{} when decoding {} with {}: {}".format( + type(e).__name__, + repr(encoded)[:10], + repr(encoding), + repr(e), + )) + + +def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: + """ + Encode the given input object + + Returns: + The encoded value + + Raises: + ValueError, if encoding fails. + """ + if len(decoded) == 0: + return decoded + + global _cache + cached = ( + isinstance(decoded, bytes) and + _cache.decoded == decoded and + _cache.encoding == encoding and + _cache.errors == errors + ) + if cached: + return _cache.encoded + try: + try: + value = decoded + if isinstance(value, str): + value = decoded.encode() + encoded = custom_encode[encoding](value) + except KeyError: + encoded = codecs.encode(decoded, encoding, errors) + if encoding in ("gzip", "deflate", "br"): + _cache = CachedDecode(encoded, encoding, errors, decoded) + return encoded + except TypeError: + raise + except Exception as e: + raise ValueError("{} when encoding {} with {}: {}".format( + type(e).__name__, + repr(decoded)[:10], + repr(encoding), + repr(e), + )) + + +def identity(content): + """ + Returns content unchanged. Identity is the default value of + Accept-Encoding headers. 
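A quick round trip through the content-encoding helpers defined above (a sketch, not part of the patch); the single-element cache makes decoding freshly encoded data cheap.

from mitmproxy.net.http import encoding

compressed = encoding.encode(b"hello world", "gzip")
print(compressed[:2])                       # b'\x1f\x8b' - gzip magic number
print(encoding.decode(compressed, "gzip"))  # b'hello world'

try:
    encoding.decode(b"definitely not gzip", "gzip")
except ValueError as e:
    print("invalid body:", e)  # decoding failures surface as ValueError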
+ """ + return content + + +def decode_gzip(content): + gfile = gzip.GzipFile(fileobj=BytesIO(content)) + return gfile.read() + + +def encode_gzip(content): + s = BytesIO() + gf = gzip.GzipFile(fileobj=s, mode='wb') + gf.write(content) + gf.close() + return s.getvalue() + + +def decode_brotli(content): + return brotli.decompress(content) + + +def encode_brotli(content): + return brotli.compress(content) + + +def decode_deflate(content): + """ + Returns decompressed data for DEFLATE. Some servers may respond with + compressed data without a zlib header or checksum. An undocumented + feature of zlib permits the lenient decompression of data missing both + values. + + http://bugs.python.org/issue5784 + """ + try: + return zlib.decompress(content) + except zlib.error: + return zlib.decompress(content, -15) + + +def encode_deflate(content): + """ + Returns compressed content, always including zlib header and checksum. + """ + return zlib.compress(content) + + +custom_decode = { + "none": identity, + "identity": identity, + "gzip": decode_gzip, + "deflate": decode_deflate, + "br": decode_brotli, +} +custom_encode = { + "none": identity, + "identity": identity, + "gzip": encode_gzip, + "deflate": encode_deflate, + "br": encode_brotli, +} + +__all__ = ["encode", "decode"] diff --git a/mitmproxy/net/http/headers.py b/mitmproxy/net/http/headers.py new file mode 100644 index 00000000..8fc0cd43 --- /dev/null +++ b/mitmproxy/net/http/headers.py @@ -0,0 +1,221 @@ +import re + +import collections +from mitmproxy.types import multidict +from mitmproxy.utils import strutils + +# See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/ + + +# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded. +def _native(x): + return x.decode("utf-8", "surrogateescape") + + +def _always_bytes(x): + return strutils.always_bytes(x, "utf-8", "surrogateescape") + + +class Headers(multidict.MultiDict): + """ + Header class which allows both convenient access to individual headers as well as + direct access to the underlying raw data. Provides a full dictionary interface. + + Example: + + .. code-block:: python + + # Create headers with keyword arguments + >>> h = Headers(host="example.com", content_type="application/xml") + + # Headers mostly behave like a normal dict. + >>> h["Host"] + "example.com" + + # HTTP Headers are case insensitive + >>> h["host"] + "example.com" + + # Headers can also be created from a list of raw (header_name, header_value) byte tuples + >>> h = Headers([ + (b"Host",b"example.com"), + (b"Accept",b"text/html"), + (b"accept",b"application/xml") + ]) + + # Multiple headers are folded into a single header as per RFC7230 + >>> h["Accept"] + "text/html, application/xml" + + # Setting a header removes all existing headers with the same name. + >>> h["Accept"] = "application/text" + >>> h["Accept"] + "application/text" + + # bytes(h) returns a HTTP1 header block. + >>> print(bytes(h)) + Host: example.com + Accept: application/text + + # For full control, the raw header fields can be accessed + >>> h.fields + + Caveats: + For use with the "Set-Cookie" header, see :py:meth:`get_all`. + """ + + def __init__(self, fields=(), **headers): + """ + Args: + fields: (optional) list of ``(name, value)`` header byte tuples, + e.g. ``[(b"Host", b"example.com")]``. All names and values must be bytes. + **headers: Additional headers to set. Will overwrite existing values from `fields`. 
+ For convenience, underscores in header names will be transformed to dashes - + this behaviour does not extend to other methods. + If ``**headers`` contains multiple keys that have equal ``.lower()`` s, + the behavior is undefined. + """ + super().__init__(fields) + + for key, value in self.fields: + if not isinstance(key, bytes) or not isinstance(value, bytes): + raise TypeError("Header fields must be bytes.") + + # content_type -> content-type + headers = { + _always_bytes(name).replace(b"_", b"-"): _always_bytes(value) + for name, value in headers.items() + } + self.update(headers) + + @staticmethod + def _reduce_values(values): + # Headers can be folded + return ", ".join(values) + + @staticmethod + def _kconv(key): + # Headers are case-insensitive + return key.lower() + + def __bytes__(self): + if self.fields: + return b"\r\n".join(b": ".join(field) for field in self.fields) + b"\r\n" + else: + return b"" + + def __delitem__(self, key): + key = _always_bytes(key) + super().__delitem__(key) + + def __iter__(self): + for x in super().__iter__(): + yield _native(x) + + def get_all(self, name): + """ + Like :py:meth:`get`, but does not fold multiple headers into a single one. + This is useful for Set-Cookie headers, which do not support folding. + See also: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + name = _always_bytes(name) + return [ + _native(x) for x in + super().get_all(name) + ] + + def set_all(self, name, values): + """ + Explicitly set multiple headers for the given key. + See: :py:meth:`get_all` + """ + name = _always_bytes(name) + values = [_always_bytes(x) for x in values] + return super().set_all(name, values) + + def insert(self, index, key, value): + key = _always_bytes(key) + value = _always_bytes(value) + super().insert(index, key, value) + + def items(self, multi=False): + if multi: + return ( + (_native(k), _native(v)) + for k, v in self.fields + ) + else: + return super().items() + + def replace(self, pattern, repl, flags=0, count=0): + """ + Replaces a regular expression pattern with repl in each "name: value" + header line. + + Returns: + The number of replacements made. + """ + if isinstance(pattern, str): + pattern = strutils.escaped_str_to_bytes(pattern) + if isinstance(repl, str): + repl = strutils.escaped_str_to_bytes(repl) + pattern = re.compile(pattern, flags) + replacements = 0 + flag_count = count > 0 + fields = [] + for name, value in self.fields: + line, n = pattern.subn(repl, name + b": " + value, count=count) + try: + name, value = line.split(b": ", 1) + except ValueError: + # We get a ValueError if the replacement removed the ": " + # There's not much we can do about this, so we just keep the header as-is. + pass + else: + replacements += n + if flag_count: + count -= n + if count == 0: + break + fields.append((name, value)) + self.fields = tuple(fields) + return replacements + + +def parse_content_type(c): + """ + A simple parser for content-type values. Returns a (type, subtype, + parameters) tuple, where type and subtype are strings, and parameters + is a dict. If the string could not be parsed, return None. + + E.g. 
the following string: + + text/html; charset=UTF-8 + + Returns: + + ("text", "html", {"charset": "UTF-8"}) + """ + parts = c.split(";", 1) + ts = parts[0].split("/", 1) + if len(ts) != 2: + return None + d = collections.OrderedDict() + if len(parts) == 2: + for i in parts[1].split(";"): + clause = i.split("=", 1) + if len(clause) == 2: + d[clause[0].strip()] = clause[1].strip() + return ts[0].lower(), ts[1].lower(), d + + +def assemble_content_type(type, subtype, parameters): + if not parameters: + return "{}/{}".format(type, subtype) + params = "; ".join( + "{}={}".format(k, v) + for k, v in parameters.items() + ) + return "{}/{}; {}".format( + type, subtype, params + ) diff --git a/mitmproxy/net/http/http1/__init__.py b/mitmproxy/net/http/http1/__init__.py new file mode 100644 index 00000000..e4bf01c5 --- /dev/null +++ b/mitmproxy/net/http/http1/__init__.py @@ -0,0 +1,24 @@ +from .read import ( + read_request, read_request_head, + read_response, read_response_head, + read_body, + connection_close, + expected_http_body_size, +) +from .assemble import ( + assemble_request, assemble_request_head, + assemble_response, assemble_response_head, + assemble_body, +) + + +__all__ = [ + "read_request", "read_request_head", + "read_response", "read_response_head", + "read_body", + "connection_close", + "expected_http_body_size", + "assemble_request", "assemble_request_head", + "assemble_response", "assemble_response_head", + "assemble_body", +] diff --git a/mitmproxy/net/http/http1/assemble.py b/mitmproxy/net/http/http1/assemble.py new file mode 100644 index 00000000..d718589f --- /dev/null +++ b/mitmproxy/net/http/http1/assemble.py @@ -0,0 +1,100 @@ +import mitmproxy.net.http.url +from mitmproxy import exceptions + + +def assemble_request(request): + if request.data.content is None: + raise exceptions.HttpException("Cannot assemble flow with missing content") + head = assemble_request_head(request) + body = b"".join(assemble_body(request.data.headers, [request.data.content])) + return head + body + + +def assemble_request_head(request): + first_line = _assemble_request_line(request.data) + headers = _assemble_request_headers(request.data) + return b"%s\r\n%s\r\n" % (first_line, headers) + + +def assemble_response(response): + if response.data.content is None: + raise exceptions.HttpException("Cannot assemble flow with missing content") + head = assemble_response_head(response) + body = b"".join(assemble_body(response.data.headers, [response.data.content])) + return head + body + + +def assemble_response_head(response): + first_line = _assemble_response_line(response.data) + headers = _assemble_response_headers(response.data) + return b"%s\r\n%s\r\n" % (first_line, headers) + + +def assemble_body(headers, body_chunks): + if "chunked" in headers.get("transfer-encoding", "").lower(): + for chunk in body_chunks: + if chunk: + yield b"%x\r\n%s\r\n" % (len(chunk), chunk) + yield b"0\r\n\r\n" + else: + for chunk in body_chunks: + yield chunk + + +def _assemble_request_line(request_data): + """ + Args: + request_data (mitmproxy.net.http.request.RequestData) + """ + form = request_data.first_line_format + if form == "relative": + return b"%s %s %s" % ( + request_data.method, + request_data.path, + request_data.http_version + ) + elif form == "authority": + return b"%s %s:%d %s" % ( + request_data.method, + request_data.host, + request_data.port, + request_data.http_version + ) + elif form == "absolute": + return b"%s %s://%s:%d%s %s" % ( + request_data.method, + request_data.scheme, + request_data.host, + 
request_data.port, + request_data.path, + request_data.http_version + ) + else: + raise RuntimeError("Invalid request form") + + +def _assemble_request_headers(request_data): + """ + Args: + request_data (mitmproxy.net.http.request.RequestData) + """ + headers = request_data.headers.copy() + if "host" not in headers and request_data.scheme and request_data.host and request_data.port: + headers["host"] = mitmproxy.net.http.url.hostport( + request_data.scheme, + request_data.host, + request_data.port + ) + return bytes(headers) + + +def _assemble_response_line(response_data): + return b"%s %d %s" % ( + response_data.http_version, + response_data.status_code, + response_data.reason, + ) + + +def _assemble_response_headers(response): + return bytes(response.headers) diff --git a/mitmproxy/net/http/http1/read.py b/mitmproxy/net/http/http1/read.py new file mode 100644 index 00000000..6eb30709 --- /dev/null +++ b/mitmproxy/net/http/http1/read.py @@ -0,0 +1,377 @@ +import time +import sys +import re + +from mitmproxy.net.http import request +from mitmproxy.net.http import response +from mitmproxy.net.http import headers +from mitmproxy.net.http import url +from mitmproxy.net import check +from mitmproxy import exceptions + + +def get_header_tokens(headers, key): + """ + Retrieve all tokens for a header key. A number of different headers + follow a pattern where each header line can containe comma-separated + tokens, and headers can be set multiple times. + """ + if key not in headers: + return [] + tokens = headers[key].split(",") + return [token.strip() for token in tokens] + + +def read_request(rfile, body_size_limit=None): + request = read_request_head(rfile) + expected_body_size = expected_http_body_size(request) + request.data.content = b"".join(read_body(rfile, expected_body_size, limit=body_size_limit)) + request.timestamp_end = time.time() + return request + + +def read_request_head(rfile): + """ + Parse an HTTP request head (request line + headers) from an input stream + + Args: + rfile: The input stream + + Returns: + The HTTP request object (without body) + + Raises: + exceptions.HttpReadDisconnect: No bytes can be read from rfile. + exceptions.HttpSyntaxException: The input is malformed HTTP. + exceptions.HttpException: Any other error occured. + """ + timestamp_start = time.time() + if hasattr(rfile, "reset_timestamps"): + rfile.reset_timestamps() + + form, method, scheme, host, port, path, http_version = _read_request_line(rfile) + headers = _read_headers(rfile) + + if hasattr(rfile, "first_byte_timestamp"): + # more accurate timestamp_start + timestamp_start = rfile.first_byte_timestamp + + return request.Request( + form, method, scheme, host, port, path, http_version, headers, None, timestamp_start + ) + + +def read_response(rfile, request, body_size_limit=None): + response = read_response_head(rfile) + expected_body_size = expected_http_body_size(request, response) + response.data.content = b"".join(read_body(rfile, expected_body_size, body_size_limit)) + response.timestamp_end = time.time() + return response + + +def read_response_head(rfile): + """ + Parse an HTTP response head (response line + headers) from an input stream + + Args: + rfile: The input stream + + Returns: + The HTTP request object (without body) + + Raises: + exceptions.HttpReadDisconnect: No bytes can be read from rfile. + exceptions.HttpSyntaxException: The input is malformed HTTP. + exceptions.HttpException: Any other error occured. 
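A sketch (not part of the patch) of assembling a request with the http1 helpers above. The Host header is given explicitly so the example does not depend on url.hostport(), which the patch references but which is defined outside this excerpt.

from mitmproxy.net.http import Request, http1

req = Request(
    "relative", "GET", "http", "example.com", 80, "/index.html", "HTTP/1.1",
    headers=((b"Host", b"example.com"), (b"Accept", b"text/html")),
    content=b"",
)
print(http1.assemble_request(req).decode())
# GET /index.html HTTP/1.1
# Host: example.com
# Accept: text/html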
+ """ + + timestamp_start = time.time() + if hasattr(rfile, "reset_timestamps"): + rfile.reset_timestamps() + + http_version, status_code, message = _read_response_line(rfile) + headers = _read_headers(rfile) + + if hasattr(rfile, "first_byte_timestamp"): + # more accurate timestamp_start + timestamp_start = rfile.first_byte_timestamp + + return response.Response(http_version, status_code, message, headers, None, timestamp_start) + + +def read_body(rfile, expected_size, limit=None, max_chunk_size=4096): + """ + Read an HTTP message body + + Args: + rfile: The input stream + expected_size: The expected body size (see :py:meth:`expected_body_size`) + limit: Maximum body size + max_chunk_size: Maximium chunk size that gets yielded + + Returns: + A generator that yields byte chunks of the content. + + Raises: + exceptions.HttpException, if an error occurs + + Caveats: + max_chunk_size is not considered if the transfer encoding is chunked. + """ + if not limit or limit < 0: + limit = sys.maxsize + if not max_chunk_size: + max_chunk_size = limit + + if expected_size is None: + for x in _read_chunked(rfile, limit): + yield x + elif expected_size >= 0: + if limit is not None and expected_size > limit: + raise exceptions.HttpException( + "HTTP Body too large. " + "Limit is {}, content length was advertised as {}".format(limit, expected_size) + ) + bytes_left = expected_size + while bytes_left: + chunk_size = min(bytes_left, max_chunk_size) + content = rfile.read(chunk_size) + if len(content) < chunk_size: + raise exceptions.HttpException("Unexpected EOF") + yield content + bytes_left -= chunk_size + else: + bytes_left = limit + while bytes_left: + chunk_size = min(bytes_left, max_chunk_size) + content = rfile.read(chunk_size) + if not content: + return + yield content + bytes_left -= chunk_size + not_done = rfile.read(1) + if not_done: + raise exceptions.HttpException("HTTP body too large. Limit is {}.".format(limit)) + + +def connection_close(http_version, headers): + """ + Checks the message to see if the client connection should be closed + according to RFC 2616 Section 8.1. + """ + # At first, check if we have an explicit Connection header. + if "connection" in headers: + tokens = get_header_tokens(headers, "connection") + if "close" in tokens: + return True + elif "keep-alive" in tokens: + return False + + # If we don't have a Connection header, HTTP 1.1 connections are assumed to + # be persistent + return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1" # FIXME: Remove one case. + + +def expected_http_body_size(request, response=None): + """ + Returns: + The expected body length: + - a positive integer, if the size is known in advance + - None, if the size in unknown in advance (chunked encoding) + - -1, if all data should be read until end of stream. 
+ + Raises: + exceptions.HttpSyntaxException, if the content length header is invalid + """ + # Determine response size according to + # http://tools.ietf.org/html/rfc7230#section-3.3 + if not response: + headers = request.headers + response_code = None + is_request = True + else: + headers = response.headers + response_code = response.status_code + is_request = False + + if is_request: + if headers.get("expect", "").lower() == "100-continue": + return 0 + else: + if request.method.upper() == "HEAD": + return 0 + if 100 <= response_code <= 199: + return 0 + if response_code == 200 and request.method.upper() == "CONNECT": + return 0 + if response_code in (204, 304): + return 0 + + if "chunked" in headers.get("transfer-encoding", "").lower(): + return None + if "content-length" in headers: + try: + size = int(headers["content-length"]) + if size < 0: + raise ValueError() + return size + except ValueError: + raise exceptions.HttpSyntaxException("Unparseable Content Length") + if is_request: + return 0 + return -1 + + +def _get_first_line(rfile): + try: + line = rfile.readline() + if line == b"\r\n" or line == b"\n": + # Possible leftover from previous message + line = rfile.readline() + except exceptions.TcpDisconnect: + raise exceptions.HttpReadDisconnect("Remote disconnected") + if not line: + raise exceptions.HttpReadDisconnect("Remote disconnected") + return line.strip() + + +def _read_request_line(rfile): + try: + line = _get_first_line(rfile) + except exceptions.HttpReadDisconnect: + # We want to provide a better error message. + raise exceptions.HttpReadDisconnect("Client disconnected") + + try: + method, path, http_version = line.split() + + if path == b"*" or path.startswith(b"/"): + form = "relative" + scheme, host, port = None, None, None + elif method == b"CONNECT": + form = "authority" + host, port = _parse_authority_form(path) + scheme, path = None, None + else: + form = "absolute" + scheme, host, port, path = url.parse(path) + + _check_http_version(http_version) + except ValueError: + raise exceptions.HttpSyntaxException("Bad HTTP request line: {}".format(line)) + + return form, method, scheme, host, port, path, http_version + + +def _parse_authority_form(hostport): + """ + Returns (host, port) if hostport is a valid authority-form host specification. + http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1 + + Raises: + ValueError, if the input is malformed + """ + try: + host, port = hostport.split(b":") + port = int(port) + if not check.is_valid_host(host) or not check.is_valid_port(port): + raise ValueError() + except ValueError: + raise exceptions.HttpSyntaxException("Invalid host specification: {}".format(hostport)) + + return host, port + + +def _read_response_line(rfile): + try: + line = _get_first_line(rfile) + except exceptions.HttpReadDisconnect: + # We want to provide a better error message. 
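A sketch (not part of the patch) of the keep-alive decision implemented by connection_close() above.

from mitmproxy.net.http import Headers, http1

# HTTP/1.1 defaults to persistent connections, HTTP/1.0 does not,
# and an explicit Connection header always wins.
print(http1.connection_close(b"HTTP/1.1", Headers()))                    # False
print(http1.connection_close(b"HTTP/1.0", Headers()))                    # True
print(http1.connection_close(b"HTTP/1.1", Headers(connection="close")))  # True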
+ raise exceptions.HttpReadDisconnect("Server disconnected") + + try: + parts = line.split(None, 2) + if len(parts) == 2: # handle missing message gracefully + parts.append(b"") + + http_version, status_code, message = parts + status_code = int(status_code) + _check_http_version(http_version) + + except ValueError: + raise exceptions.HttpSyntaxException("Bad HTTP response line: {}".format(line)) + + return http_version, status_code, message + + +def _check_http_version(http_version): + if not re.match(br"^HTTP/\d\.\d$", http_version): + raise exceptions.HttpSyntaxException("Unknown HTTP version: {}".format(http_version)) + + +def _read_headers(rfile): + """ + Read a set of headers. + Stop once a blank line is reached. + + Returns: + A headers object + + Raises: + exceptions.HttpSyntaxException + """ + ret = [] + while True: + line = rfile.readline() + if not line or line == b"\r\n" or line == b"\n": + break + if line[0] in b" \t": + if not ret: + raise exceptions.HttpSyntaxException("Invalid headers") + # continued header + ret[-1] = (ret[-1][0], ret[-1][1] + b'\r\n ' + line.strip()) + else: + try: + name, value = line.split(b":", 1) + value = value.strip() + if not name: + raise ValueError() + ret.append((name, value)) + except ValueError: + raise exceptions.HttpSyntaxException( + "Invalid header line: %s" % repr(line) + ) + return headers.Headers(ret) + + +def _read_chunked(rfile, limit=sys.maxsize): + """ + Read a HTTP body with chunked transfer encoding. + + Args: + rfile: the input file + limit: A positive integer + """ + total = 0 + while True: + line = rfile.readline(128) + if line == b"": + raise exceptions.HttpException("Connection closed prematurely") + if line != b"\r\n" and line != b"\n": + try: + length = int(line, 16) + except ValueError: + raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line)) + total += length + if total > limit: + raise exceptions.HttpException( + "HTTP Body too large. 
Limit is {}, " + "chunked content longer than {}".format(limit, total) + ) + chunk = rfile.read(length) + suffix = rfile.readline(5) + if suffix != b"\r\n": + raise exceptions.HttpSyntaxException("Malformed chunked body") + if length == 0: + return + yield chunk diff --git a/mitmproxy/net/http/http2/__init__.py b/mitmproxy/net/http/http2/__init__.py new file mode 100644 index 00000000..7027006b --- /dev/null +++ b/mitmproxy/net/http/http2/__init__.py @@ -0,0 +1,8 @@ +from mitmproxy.net.http.http2.framereader import read_raw_frame, parse_frame +from mitmproxy.net.http.http2.utils import parse_headers + +__all__ = [ + "read_raw_frame", + "parse_frame", + "parse_headers", +] diff --git a/mitmproxy/net/http/http2/framereader.py b/mitmproxy/net/http/http2/framereader.py new file mode 100644 index 00000000..6a164919 --- /dev/null +++ b/mitmproxy/net/http/http2/framereader.py @@ -0,0 +1,25 @@ +import codecs + +import hyperframe +from mitmproxy import exceptions + + +def read_raw_frame(rfile): + header = rfile.safe_read(9) + length = int(codecs.encode(header[:3], 'hex_codec'), 16) + + if length == 4740180: + raise exceptions.HttpException("Length field looks more like HTTP/1.1:\n{}".format(rfile.read(-1))) + + body = rfile.safe_read(length) + return [header, body] + + +def parse_frame(header, body=None): + if body is None: + body = header[9:] + header = header[:9] + + frame, length = hyperframe.frame.Frame.parse_frame_header(header) + frame.parse_body(memoryview(body)) + return frame diff --git a/mitmproxy/net/http/http2/utils.py b/mitmproxy/net/http/http2/utils.py new file mode 100644 index 00000000..62a59c72 --- /dev/null +++ b/mitmproxy/net/http/http2/utils.py @@ -0,0 +1,37 @@ +from mitmproxy.net.http import url + + +def parse_headers(headers): + authority = headers.get(':authority', '').encode() + method = headers.get(':method', 'GET').encode() + scheme = headers.get(':scheme', 'https').encode() + path = headers.get(':path', '/').encode() + + headers.pop(":method", None) + headers.pop(":scheme", None) + headers.pop(":path", None) + + host = None + port = None + + if path == b'*' or path.startswith(b"/"): + first_line_format = "relative" + elif method == b'CONNECT': # pragma: no cover + raise NotImplementedError("CONNECT over HTTP/2 is not implemented.") + else: # pragma: no cover + first_line_format = "absolute" + # FIXME: verify if path or :host contains what we need + scheme, host, port, _ = url.parse(path) + + if authority: + host, _, port = authority.partition(b':') + + if not host: + host = b'localhost' + + if not port: + port = 443 if scheme == b'https' else 80 + + port = int(port) + + return first_line_format, method, scheme, host, port, path diff --git a/mitmproxy/net/http/message.py b/mitmproxy/net/http/message.py new file mode 100644 index 00000000..af1d16be --- /dev/null +++ b/mitmproxy/net/http/message.py @@ -0,0 +1,300 @@ +import re +import warnings +from typing import Optional + +from mitmproxy.utils import strutils +from mitmproxy.net.http import encoding +from mitmproxy.types import serializable +from mitmproxy.net.http import headers + + +# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded. 
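read_body() above dispatches to _read_chunked() when the expected size is None, i.e. for Transfer-Encoding: chunked; a minimal sketch, not part of the patch.

import io
from mitmproxy.net.http import http1

rfile = io.BytesIO(b"5\r\nhello\r\n0\r\n\r\n")
chunks = list(http1.read_body(rfile, expected_size=None))  # None means chunked
print(b"".join(chunks))  # b'hello'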
+def _native(x): + return x.decode("utf-8", "surrogateescape") + + +def _always_bytes(x): + return strutils.always_bytes(x, "utf-8", "surrogateescape") + + +class MessageData(serializable.Serializable): + def __eq__(self, other): + if isinstance(other, MessageData): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def set_state(self, state): + for k, v in state.items(): + if k == "headers": + v = headers.Headers.from_state(v) + setattr(self, k, v) + + def get_state(self): + state = vars(self).copy() + state["headers"] = state["headers"].get_state() + return state + + @classmethod + def from_state(cls, state): + state["headers"] = headers.Headers.from_state(state["headers"]) + return cls(**state) + + +class Message(serializable.Serializable): + def __eq__(self, other): + if isinstance(other, Message): + return self.data == other.data + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def get_state(self): + return self.data.get_state() + + def set_state(self, state): + self.data.set_state(state) + + @classmethod + def from_state(cls, state): + state["headers"] = headers.Headers.from_state(state["headers"]) + return cls(**state) + + @property + def headers(self): + """ + Message headers object + + Returns: + mitmproxy.net.http.Headers + """ + return self.data.headers + + @headers.setter + def headers(self, h): + self.data.headers = h + + @property + def raw_content(self) -> bytes: + """ + The raw (encoded) HTTP message body + + See also: :py:attr:`content`, :py:class:`text` + """ + return self.data.content + + @raw_content.setter + def raw_content(self, content): + self.data.content = content + + def get_content(self, strict: bool=True) -> bytes: + """ + The HTTP message body decoded with the content-encoding header (e.g. gzip) + + Raises: + ValueError, when the content-encoding is invalid and strict is True. + + See also: :py:class:`raw_content`, :py:attr:`text` + """ + if self.raw_content is None: + return None + ce = self.headers.get("content-encoding") + if ce: + try: + return encoding.decode(self.raw_content, ce) + except ValueError: + if strict: + raise + return self.raw_content + else: + return self.raw_content + + def set_content(self, value): + if value is None: + self.raw_content = None + return + if not isinstance(value, bytes): + raise TypeError( + "Message content must be bytes, not {}. " + "Please use .text if you want to assign a str." + .format(type(value).__name__) + ) + ce = self.headers.get("content-encoding") + try: + self.raw_content = encoding.encode(value, ce or "identity") + except ValueError: + # So we have an invalid content-encoding? + # Let's remove it! + del self.headers["content-encoding"] + self.raw_content = value + self.headers["content-length"] = str(len(self.raw_content)) + + content = property(get_content, set_content) + + @property + def http_version(self): + """ + Version string, e.g. 
"HTTP/1.1" + """ + return _native(self.data.http_version) + + @http_version.setter + def http_version(self, http_version): + self.data.http_version = _always_bytes(http_version) + + @property + def timestamp_start(self): + """ + First byte timestamp + """ + return self.data.timestamp_start + + @timestamp_start.setter + def timestamp_start(self, timestamp_start): + self.data.timestamp_start = timestamp_start + + @property + def timestamp_end(self): + """ + Last byte timestamp + """ + return self.data.timestamp_end + + @timestamp_end.setter + def timestamp_end(self, timestamp_end): + self.data.timestamp_end = timestamp_end + + def _get_content_type_charset(self) -> Optional[str]: + ct = headers.parse_content_type(self.headers.get("content-type", "")) + if ct: + return ct[2].get("charset") + + def _guess_encoding(self) -> str: + enc = self._get_content_type_charset() + if enc: + return enc + + if "json" in self.headers.get("content-type", ""): + return "utf8" + else: + # We may also want to check for HTML meta tags here at some point. + return "latin-1" + + def get_text(self, strict: bool=True) -> str: + """ + The HTTP message body decoded with both content-encoding header (e.g. gzip) + and content-type header charset. + + Raises: + ValueError, when either content-encoding or charset is invalid and strict is True. + + See also: :py:attr:`content`, :py:class:`raw_content` + """ + if self.raw_content is None: + return None + enc = self._guess_encoding() + + content = self.get_content(strict) + try: + return encoding.decode(content, enc) + except ValueError: + if strict: + raise + return content.decode("utf8", "surrogateescape") + + def set_text(self, text): + if text is None: + self.content = None + return + enc = self._guess_encoding() + + try: + self.content = encoding.encode(text, enc) + except ValueError: + # Fall back to UTF-8 and update the content-type header. + ct = headers.parse_content_type(self.headers.get("content-type", "")) or ("text", "plain", {}) + ct[2]["charset"] = "utf-8" + self.headers["content-type"] = headers.assemble_content_type(*ct) + enc = "utf8" + self.content = text.encode(enc, "surrogateescape") + + text = property(get_text, set_text) + + def decode(self, strict=True): + """ + Decodes body based on the current Content-Encoding header, then + removes the header. If there is no Content-Encoding header, no + action is taken. + + Raises: + ValueError, when the content-encoding is invalid and strict is True. + """ + self.raw_content = self.get_content(strict) + self.headers.pop("content-encoding", None) + + def encode(self, e): + """ + Encodes body with the encoding e, where e is "gzip", "deflate", "identity", or "br". + Any existing content-encodings are overwritten, + the content is not decoded beforehand. + + Raises: + ValueError, when the specified content-encoding is invalid. + """ + self.headers["content-encoding"] = e + self.content = self.raw_content + if "content-encoding" not in self.headers: + raise ValueError("Invalid content encoding {}".format(repr(e))) + + def replace(self, pattern, repl, flags=0, count=0): + """ + Replaces a regular expression pattern with repl in both the headers + and the body of the message. Encoded body will be decoded + before replacement, and re-encoded afterwards. + + Returns: + The number of replacements made. 
+ """ + if isinstance(pattern, str): + pattern = strutils.escaped_str_to_bytes(pattern) + if isinstance(repl, str): + repl = strutils.escaped_str_to_bytes(repl) + replacements = 0 + if self.content: + self.content, replacements = re.subn( + pattern, repl, self.content, flags=flags, count=count + ) + replacements += self.headers.replace(pattern, repl, flags=flags, count=count) + return replacements + + # Legacy + + @property + def body(self): # pragma: no cover + warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning) + return self.content + + @body.setter + def body(self, body): # pragma: no cover + warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning) + self.content = body + + +class decoded: + """ + Deprecated: You can now directly use :py:attr:`content`. + :py:attr:`raw_content` has the encoded content. + """ + + def __init__(self, message): # pragma no cover + warnings.warn("decoded() is deprecated, you can now directly use .content instead. " + ".raw_content has the encoded content.", DeprecationWarning) + + def __enter__(self): # pragma no cover + pass + + def __exit__(self, type, value, tb): # pragma no cover + pass diff --git a/mitmproxy/net/http/multipart.py b/mitmproxy/net/http/multipart.py new file mode 100644 index 00000000..a854d47f --- /dev/null +++ b/mitmproxy/net/http/multipart.py @@ -0,0 +1,32 @@ +import re + +from mitmproxy.net.http import headers + + +def decode(hdrs, content): + """ + Takes a multipart boundary encoded string and returns list of (key, value) tuples. + """ + v = hdrs.get("content-type") + if v: + v = headers.parse_content_type(v) + if not v: + return [] + try: + boundary = v[2]["boundary"].encode("ascii") + except (KeyError, UnicodeError): + return [] + + rx = re.compile(br'\bname="([^"]+)"') + r = [] + + for i in content.split(b"--" + boundary): + parts = i.splitlines() + if len(parts) > 1 and parts[0][0:2] != b"--": + match = rx.search(parts[1]) + if match: + key = match.group(1) + value = b"".join(parts[3 + parts[2:].index(b""):]) + r.append((key, value)) + return r + return [] diff --git a/mitmproxy/net/http/request.py b/mitmproxy/net/http/request.py new file mode 100644 index 00000000..c3d85363 --- /dev/null +++ b/mitmproxy/net/http/request.py @@ -0,0 +1,405 @@ +import re +import urllib + +from mitmproxy.types import multidict +from mitmproxy.utils import strutils +from mitmproxy.net.http import multipart +from mitmproxy.net.http import cookies +from mitmproxy.net.http import headers as nheaders +from mitmproxy.net.http import message +import mitmproxy.net.http.url + +# This regex extracts & splits the host header into host and port. +# Handles the edge case of IPv6 addresses containing colons. 
+# https://bugzilla.mozilla.org/show_bug.cgi?id=45891 +host_header_re = re.compile(r"^(?P[^:]+|\[.+\])(?::(?P\d+))?$") + + +class RequestData(message.MessageData): + def __init__( + self, + first_line_format, + method, + scheme, + host, + port, + path, + http_version, + headers=(), + content=None, + timestamp_start=None, + timestamp_end=None + ): + if isinstance(method, str): + method = method.encode("ascii", "strict") + if isinstance(scheme, str): + scheme = scheme.encode("ascii", "strict") + if isinstance(host, str): + host = host.encode("idna", "strict") + if isinstance(path, str): + path = path.encode("ascii", "strict") + if isinstance(http_version, str): + http_version = http_version.encode("ascii", "strict") + if not isinstance(headers, nheaders.Headers): + headers = nheaders.Headers(headers) + if isinstance(content, str): + raise ValueError("Content must be bytes, not {}".format(type(content).__name__)) + + self.first_line_format = first_line_format + self.method = method + self.scheme = scheme + self.host = host + self.port = port + self.path = path + self.http_version = http_version + self.headers = headers + self.content = content + self.timestamp_start = timestamp_start + self.timestamp_end = timestamp_end + + +class Request(message.Message): + """ + An HTTP request. + """ + def __init__(self, *args, **kwargs): + super().__init__() + self.data = RequestData(*args, **kwargs) + + def __repr__(self): + if self.host and self.port: + hostport = "{}:{}".format(self.host, self.port) + else: + hostport = "" + path = self.path or "" + return "Request({} {}{})".format( + self.method, hostport, path + ) + + def replace(self, pattern, repl, flags=0, count=0): + """ + Replaces a regular expression pattern with repl in the headers, the + request path and the body of the request. Encoded content will be + decoded before replacement, and re-encoded afterwards. + + Returns: + The number of replacements made. + """ + if isinstance(pattern, str): + pattern = strutils.escaped_str_to_bytes(pattern) + if isinstance(repl, str): + repl = strutils.escaped_str_to_bytes(repl) + + c = super().replace(pattern, repl, flags, count) + self.path, pc = re.subn( + pattern, repl, self.data.path, flags=flags, count=count + ) + c += pc + return c + + @property + def first_line_format(self): + """ + HTTP request form as defined in `RFC7230 `_. + + origin-form and asterisk-form are subsumed as "relative". + """ + return self.data.first_line_format + + @first_line_format.setter + def first_line_format(self, first_line_format): + self.data.first_line_format = first_line_format + + @property + def method(self): + """ + HTTP request method, e.g. "GET". + """ + return message._native(self.data.method).upper() + + @method.setter + def method(self, method): + self.data.method = message._always_bytes(method) + + @property + def scheme(self): + """ + HTTP request scheme, which should be "http" or "https". + """ + if not self.data.scheme: + return self.data.scheme + return message._native(self.data.scheme) + + @scheme.setter + def scheme(self, scheme): + self.data.scheme = message._always_bytes(scheme) + + @property + def host(self): + """ + Target host. This may be parsed from the raw request + (e.g. from a ``GET http://example.com/ HTTP/1.1`` request line) + or inferred from the proxy mode (e.g. an IP in transparent mode). + + Setting the host attribute also updates the host header, if present. 
+ """ + if not self.data.host: + return self.data.host + try: + return self.data.host.decode("idna") + except UnicodeError: + return self.data.host.decode("utf8", "surrogateescape") + + @host.setter + def host(self, host): + if isinstance(host, str): + try: + # There's no non-strict mode for IDNA encoding. + # We don't want this operation to fail though, so we try + # utf8 as a last resort. + host = host.encode("idna", "strict") + except UnicodeError: + host = host.encode("utf8", "surrogateescape") + + self.data.host = host + + # Update host header + if "host" in self.headers: + if host: + self.headers["host"] = host + else: + self.headers.pop("host") + + @property + def port(self): + """ + Target port + """ + return self.data.port + + @port.setter + def port(self, port): + self.data.port = port + + @property + def path(self): + """ + HTTP request path, e.g. "/index.html". + Guaranteed to start with a slash, except for OPTIONS requests, which may just be "*". + """ + if self.data.path is None: + return None + else: + return message._native(self.data.path) + + @path.setter + def path(self, path): + self.data.path = message._always_bytes(path) + + @property + def url(self): + """ + The URL string, constructed from the request's URL components + """ + if self.first_line_format == "authority": + return "%s:%d" % (self.host, self.port) + return mitmproxy.net.http.url.unparse(self.scheme, self.host, self.port, self.path) + + @url.setter + def url(self, url): + self.scheme, self.host, self.port, self.path = mitmproxy.net.http.url.parse(url) + + def _parse_host_header(self): + """Extract the host and port from Host header""" + if "host" not in self.headers: + return None, None + host, port = self.headers["host"], None + m = host_header_re.match(host) + if m: + host = m.group("host").strip("[]") + if m.group("port"): + port = int(m.group("port")) + return host, port + + @property + def pretty_host(self): + """ + Similar to :py:attr:`host`, but using the Host headers as an additional preferred data source. + This is useful in transparent mode where :py:attr:`host` is only an IP address, + but may not reflect the actual destination as the Host header could be spoofed. + """ + host, port = self._parse_host_header() + if not host: + return self.host + if not port: + port = 443 if self.scheme == 'https' else 80 + # Prefer the original address if host header has an unexpected form + return host if port == self.port else self.host + + @property + def pretty_url(self): + """ + Like :py:attr:`url`, but using :py:attr:`pretty_host` instead of :py:attr:`host`. + """ + if self.first_line_format == "authority": + return "%s:%d" % (self.pretty_host, self.port) + return mitmproxy.net.http.url.unparse(self.scheme, self.pretty_host, self.port, self.path) + + @property + def query(self) -> multidict.MultiDictView: + """ + The request query string as an :py:class:`~mitmproxy.net.multidict.MultiDictView` object. + """ + return multidict.MultiDictView( + self._get_query, + self._set_query + ) + + def _get_query(self): + query = urllib.parse.urlparse(self.url).query + return tuple(mitmproxy.net.http.url.decode(query)) + + def _set_query(self, query_data): + query = mitmproxy.net.http.url.encode(query_data) + _, _, path, params, _, fragment = urllib.parse.urlparse(self.url) + self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment]) + + @query.setter + def query(self, value): + self._set_query(value) + + @property + def cookies(self) -> multidict.MultiDictView: + """ + The request cookies. 
+ + An empty :py:class:`~mitmproxy.net.multidict.MultiDictView` object if the cookie monster ate them all. + """ + return multidict.MultiDictView( + self._get_cookies, + self._set_cookies + ) + + def _get_cookies(self): + h = self.headers.get_all("Cookie") + return tuple(cookies.parse_cookie_headers(h)) + + def _set_cookies(self, value): + self.headers["cookie"] = cookies.format_cookie_header(value) + + @cookies.setter + def cookies(self, value): + self._set_cookies(value) + + @property + def path_components(self): + """ + The URL's path components as a tuple of strings. + Components are unquoted. + """ + path = urllib.parse.urlparse(self.url).path + # This needs to be a tuple so that it's immutable. + # Otherwise, this would fail silently: + # request.path_components.append("foo") + return tuple(mitmproxy.net.http.url.unquote(i) for i in path.split("/") if i) + + @path_components.setter + def path_components(self, components): + components = map(lambda x: mitmproxy.net.http.url.quote(x, safe=""), components) + path = "/" + "/".join(components) + _, _, _, params, query, fragment = urllib.parse.urlparse(self.url) + self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment]) + + def anticache(self): + """ + Modifies this request to remove headers that might produce a cached + response. That is, we remove ETags and If-Modified-Since headers. + """ + delheaders = [ + "if-modified-since", + "if-none-match", + ] + for i in delheaders: + self.headers.pop(i, None) + + def anticomp(self): + """ + Modifies this request to remove headers that will compress the + resource's data. + """ + self.headers["accept-encoding"] = "identity" + + def constrain_encoding(self): + """ + Limits the permissible Accept-Encoding values, based on what we can + decode appropriately. + """ + accept_encoding = self.headers.get("accept-encoding") + if accept_encoding: + self.headers["accept-encoding"] = ( + ', '.join( + e + for e in {"gzip", "identity", "deflate", "br"} + if e in accept_encoding + ) + ) + + @property + def urlencoded_form(self): + """ + The URL-encoded form data as an :py:class:`~mitmproxy.net.multidict.MultiDictView` object. + An empty multidict.MultiDictView if the content-type indicates non-form data + or the content could not be parsed. + """ + return multidict.MultiDictView( + self._get_urlencoded_form, + self._set_urlencoded_form + ) + + def _get_urlencoded_form(self): + is_valid_content_type = "application/x-www-form-urlencoded" in self.headers.get("content-type", "").lower() + if is_valid_content_type: + try: + return tuple(mitmproxy.net.http.url.decode(self.content)) + except ValueError: + pass + return () + + def _set_urlencoded_form(self, form_data): + """ + Sets the body to the URL-encoded form data, and adds the appropriate content-type header. + This will overwrite the existing content if there is one. + """ + self.headers["content-type"] = "application/x-www-form-urlencoded" + self.content = mitmproxy.net.http.url.encode(form_data).encode() + + @urlencoded_form.setter + def urlencoded_form(self, value): + self._set_urlencoded_form(value) + + @property + def multipart_form(self): + """ + The multipart form data as an :py:class:`~mitmproxy.net.multidict.MultiDictView` object. + None if the content-type indicates non-form data. 
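A sketch (not part of the patch) of the urlencoded_form setter above, which rewrites both the body and the content-type header.

from mitmproxy.net.http import Request

req = Request(
    "relative", "POST", "http", "example.com", 80, "/login", "HTTP/1.1",
    headers=((b"Host", b"example.com"),),
    content=b"",
)
req.urlencoded_form = [("user", "alice"), ("pass", "s3cret")]
print(req.headers["content-type"])  # application/x-www-form-urlencoded
print(req.content)                  # b'user=alice&pass=s3cret'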
+ """ + return multidict.MultiDictView( + self._get_multipart_form, + self._set_multipart_form + ) + + def _get_multipart_form(self): + is_valid_content_type = "multipart/form-data" in self.headers.get("content-type", "").lower() + if is_valid_content_type: + try: + return multipart.decode(self.headers, self.content) + except ValueError: + pass + return () + + def _set_multipart_form(self, value): + raise NotImplementedError() + + @multipart_form.setter + def multipart_form(self, value): + self._set_multipart_form(value) diff --git a/mitmproxy/net/http/response.py b/mitmproxy/net/http/response.py new file mode 100644 index 00000000..b458a3d8 --- /dev/null +++ b/mitmproxy/net/http/response.py @@ -0,0 +1,192 @@ +import time +from email.utils import parsedate_tz, formatdate, mktime_tz +from mitmproxy.utils import human +from mitmproxy.types import multidict +from mitmproxy.net.http import cookies +from mitmproxy.net.http import headers as nheaders +from mitmproxy.net.http import message +from mitmproxy.net.http import status_codes +from typing import AnyStr +from typing import Dict +from typing import Iterable +from typing import Tuple +from typing import Union + + +class ResponseData(message.MessageData): + def __init__( + self, + http_version, + status_code, + reason=None, + headers=(), + content=None, + timestamp_start=None, + timestamp_end=None + ): + if isinstance(http_version, str): + http_version = http_version.encode("ascii", "strict") + if isinstance(reason, str): + reason = reason.encode("ascii", "strict") + if not isinstance(headers, nheaders.Headers): + headers = nheaders.Headers(headers) + if isinstance(content, str): + raise ValueError("Content must be bytes, not {}".format(type(content).__name__)) + + self.http_version = http_version + self.status_code = status_code + self.reason = reason + self.headers = headers + self.content = content + self.timestamp_start = timestamp_start + self.timestamp_end = timestamp_end + + +class Response(message.Message): + """ + An HTTP response. + """ + def __init__(self, *args, **kwargs): + super().__init__() + self.data = ResponseData(*args, **kwargs) + + def __repr__(self): + if self.raw_content: + details = "{}, {}".format( + self.headers.get("content-type", "unknown content type"), + human.pretty_size(len(self.raw_content)) + ) + else: + details = "no content" + return "Response({status_code} {reason}, {details})".format( + status_code=self.status_code, + reason=self.reason, + details=details + ) + + @classmethod + def make( + cls, + status_code: int=200, + content: AnyStr=b"", + headers: Union[Dict[AnyStr, AnyStr], Iterable[Tuple[bytes, bytes]]]=() + ): + """ + Simplified API for creating response objects. + """ + resp = cls( + b"HTTP/1.1", + status_code, + status_codes.RESPONSES.get(status_code, "").encode(), + (), + None + ) + + # Headers can be list or dict, we differentiate here. + if isinstance(headers, dict): + resp.headers = nheaders.Headers(**headers) + elif isinstance(headers, Iterable): + resp.headers = nheaders.Headers(headers) + else: + raise TypeError("Expected headers to be an iterable or dict, but is {}.".format( + type(headers).__name__ + )) + + # Assign this manually to update the content-length header. + if isinstance(content, bytes): + resp.content = content + elif isinstance(content, str): + resp.text = content + else: + raise TypeError("Expected content to be str or bytes, but is {}.".format( + type(content).__name__ + )) + + return resp + + @property + def status_code(self): + """ + HTTP Status Code, e.g. ``200``. 
+ """ + return self.data.status_code + + @status_code.setter + def status_code(self, status_code): + self.data.status_code = status_code + + @property + def reason(self): + """ + HTTP Reason Phrase, e.g. "Not Found". + This is always :py:obj:`None` for HTTP2 requests, because HTTP2 responses do not contain a reason phrase. + """ + return message._native(self.data.reason) + + @reason.setter + def reason(self, reason): + self.data.reason = message._always_bytes(reason) + + @property + def cookies(self) -> multidict.MultiDictView: + """ + The response cookies. A possibly empty + :py:class:`~mitmproxy.net.multidict.MultiDictView`, where the keys are cookie + name strings, and values are (value, attr) tuples. Value is a string, + and attr is an MultiDictView containing cookie attributes. Within + attrs, unary attributes (e.g. HTTPOnly) are indicated by a Null value. + + Caveats: + Updating the attr + """ + return multidict.MultiDictView( + self._get_cookies, + self._set_cookies + ) + + def _get_cookies(self): + h = self.headers.get_all("set-cookie") + return tuple(cookies.parse_set_cookie_headers(h)) + + def _set_cookies(self, value): + cookie_headers = [] + for k, v in value: + header = cookies.format_set_cookie_header([(k, v[0], v[1])]) + cookie_headers.append(header) + self.headers.set_all("set-cookie", cookie_headers) + + @cookies.setter + def cookies(self, value): + self._set_cookies(value) + + def refresh(self, now=None): + """ + This fairly complex and heuristic function refreshes a server + response for replay. + + - It adjusts date, expires and last-modified headers. + - It adjusts cookie expiration. + """ + if not now: + now = time.time() + delta = now - self.timestamp_start + refresh_headers = [ + "date", + "expires", + "last-modified", + ] + for i in refresh_headers: + if i in self.headers: + d = parsedate_tz(self.headers[i]) + if d: + new = mktime_tz(d) + delta + self.headers[i] = formatdate(new) + c = [] + for set_cookie_header in self.headers.get_all("set-cookie"): + try: + refreshed = cookies.refresh_set_cookie_header(set_cookie_header, delta) + except ValueError: + refreshed = set_cookie_header + c.append(refreshed) + if c: + self.headers.set_all("set-cookie", c) diff --git a/mitmproxy/net/http/status_codes.py b/mitmproxy/net/http/status_codes.py new file mode 100644 index 00000000..5a83cd73 --- /dev/null +++ b/mitmproxy/net/http/status_codes.py @@ -0,0 +1,104 @@ +CONTINUE = 100 +SWITCHING = 101 +OK = 200 +CREATED = 201 +ACCEPTED = 202 +NON_AUTHORITATIVE_INFORMATION = 203 +NO_CONTENT = 204 +RESET_CONTENT = 205 +PARTIAL_CONTENT = 206 +MULTI_STATUS = 207 + +MULTIPLE_CHOICE = 300 +MOVED_PERMANENTLY = 301 +FOUND = 302 +SEE_OTHER = 303 +NOT_MODIFIED = 304 +USE_PROXY = 305 +TEMPORARY_REDIRECT = 307 + +BAD_REQUEST = 400 +UNAUTHORIZED = 401 +PAYMENT_REQUIRED = 402 +FORBIDDEN = 403 +NOT_FOUND = 404 +NOT_ALLOWED = 405 +NOT_ACCEPTABLE = 406 +PROXY_AUTH_REQUIRED = 407 +REQUEST_TIMEOUT = 408 +CONFLICT = 409 +GONE = 410 +LENGTH_REQUIRED = 411 +PRECONDITION_FAILED = 412 +REQUEST_ENTITY_TOO_LARGE = 413 +REQUEST_URI_TOO_LONG = 414 +UNSUPPORTED_MEDIA_TYPE = 415 +REQUESTED_RANGE_NOT_SATISFIABLE = 416 +EXPECTATION_FAILED = 417 +IM_A_TEAPOT = 418 + +INTERNAL_SERVER_ERROR = 500 +NOT_IMPLEMENTED = 501 +BAD_GATEWAY = 502 +SERVICE_UNAVAILABLE = 503 +GATEWAY_TIMEOUT = 504 +HTTP_VERSION_NOT_SUPPORTED = 505 +INSUFFICIENT_STORAGE_SPACE = 507 +NOT_EXTENDED = 510 + +RESPONSES = { + # 100 + CONTINUE: "Continue", + SWITCHING: "Switching Protocols", + + # 200 + OK: "OK", + CREATED: "Created", + ACCEPTED: 
"Accepted", + NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information", + NO_CONTENT: "No Content", + RESET_CONTENT: "Reset Content.", + PARTIAL_CONTENT: "Partial Content", + MULTI_STATUS: "Multi-Status", + + # 300 + MULTIPLE_CHOICE: "Multiple Choices", + MOVED_PERMANENTLY: "Moved Permanently", + FOUND: "Found", + SEE_OTHER: "See Other", + NOT_MODIFIED: "Not Modified", + USE_PROXY: "Use Proxy", + # 306 not defined?? + TEMPORARY_REDIRECT: "Temporary Redirect", + + # 400 + BAD_REQUEST: "Bad Request", + UNAUTHORIZED: "Unauthorized", + PAYMENT_REQUIRED: "Payment Required", + FORBIDDEN: "Forbidden", + NOT_FOUND: "Not Found", + NOT_ALLOWED: "Method Not Allowed", + NOT_ACCEPTABLE: "Not Acceptable", + PROXY_AUTH_REQUIRED: "Proxy Authentication Required", + REQUEST_TIMEOUT: "Request Time-out", + CONFLICT: "Conflict", + GONE: "Gone", + LENGTH_REQUIRED: "Length Required", + PRECONDITION_FAILED: "Precondition Failed", + REQUEST_ENTITY_TOO_LARGE: "Request Entity Too Large", + REQUEST_URI_TOO_LONG: "Request-URI Too Long", + UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type", + REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable", + EXPECTATION_FAILED: "Expectation Failed", + IM_A_TEAPOT: "I'm a teapot", + + # 500 + INTERNAL_SERVER_ERROR: "Internal Server Error", + NOT_IMPLEMENTED: "Not Implemented", + BAD_GATEWAY: "Bad Gateway", + SERVICE_UNAVAILABLE: "Service Unavailable", + GATEWAY_TIMEOUT: "Gateway Time-out", + HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported", + INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space", + NOT_EXTENDED: "Not Extended" +} diff --git a/mitmproxy/net/http/url.py b/mitmproxy/net/http/url.py new file mode 100644 index 00000000..ff3d5264 --- /dev/null +++ b/mitmproxy/net/http/url.py @@ -0,0 +1,127 @@ +import urllib +from typing import Sequence +from typing import Tuple + +from mitmproxy.net import check + + +# PY2 workaround +def decode_parse_result(result, enc): + if hasattr(result, "decode"): + return result.decode(enc) + else: + return urllib.parse.ParseResult(*[x.decode(enc) for x in result]) + + +# PY2 workaround +def encode_parse_result(result, enc): + if hasattr(result, "encode"): + return result.encode(enc) + else: + return urllib.parse.ParseResult(*[x.encode(enc) for x in result]) + + +def parse(url): + """ + URL-parsing function that checks that + - port is an integer 0-65535 + - host is a valid IDNA-encoded hostname with no null-bytes + - path is valid ASCII + + Args: + A URL (as bytes or as unicode) + + Returns: + A (scheme, host, port, path) tuple + + Raises: + ValueError, if the URL is not properly formatted. + """ + parsed = urllib.parse.urlparse(url) + + if not parsed.hostname: + raise ValueError("No hostname given") + + if isinstance(url, bytes): + host = parsed.hostname + + # this should not raise a ValueError, + # but we try to be very forgiving here and accept just everything. 
+ # decode_parse_result(parsed, "ascii") + else: + host = parsed.hostname.encode("idna") + parsed = encode_parse_result(parsed, "ascii") + + port = parsed.port + if not port: + port = 443 if parsed.scheme == b"https" else 80 + + full_path = urllib.parse.urlunparse( + (b"", b"", parsed.path, parsed.params, parsed.query, parsed.fragment) + ) + if not full_path.startswith(b"/"): + full_path = b"/" + full_path + + if not check.is_valid_host(host): + raise ValueError("Invalid Host") + if not check.is_valid_port(port): + raise ValueError("Invalid Port") + + return parsed.scheme, host, port, full_path + + +def unparse(scheme, host, port, path=""): + """ + Returns a URL string, constructed from the specified components. + + Args: + All args must be str. + """ + if path == "*": + path = "" + return "%s://%s%s" % (scheme, hostport(scheme, host, port), path) + + +def encode(s: Sequence[Tuple[str, str]]) -> str: + """ + Takes a list of (key, value) tuples and returns a urlencoded string. + """ + return urllib.parse.urlencode(s, False, errors="surrogateescape") + + +def decode(s): + """ + Takes a urlencoded string and returns a list of surrogate-escaped (key, value) tuples. + """ + return urllib.parse.parse_qsl(s, keep_blank_values=True, errors='surrogateescape') + + +def quote(b: str, safe: str="/") -> str: + """ + Returns: + An ascii-encodable str. + """ + return urllib.parse.quote(b, safe=safe, errors="surrogateescape") + + +def unquote(s: str) -> str: + """ + Args: + s: A surrogate-escaped str + Returns: + A surrogate-escaped str + """ + return urllib.parse.unquote(s, errors="surrogateescape") + + +def hostport(scheme, host, port): + """ + Returns the host component, with a port specifcation if needed. + """ + if (port, scheme) in [(80, "http"), (443, "https"), (80, b"http"), (443, b"https")]: + return host + else: + if isinstance(host, bytes): + return b"%s:%d" % (host, port) + else: + return "%s:%d" % (host, port) diff --git a/mitmproxy/net/http/user_agents.py b/mitmproxy/net/http/user_agents.py new file mode 100644 index 00000000..d0ca2f21 --- /dev/null +++ b/mitmproxy/net/http/user_agents.py @@ -0,0 +1,50 @@ +""" + A small collection of useful user-agent header strings. These should be + kept reasonably current to reflect common usage. +""" + +# pylint: line-too-long + +# A collection of (name, shortcut, string) tuples. 
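Roughly how the url helpers above fit together (illustrative only; parse/unparse mirror each other, and bytes input yields bytes output):

    from mitmproxy.net.http import url

    scheme, host, port, path = url.parse(b"https://example.com/search?q=1")
    # -> (b'https', b'example.com', 443, b'/search?q=1')

    url.unparse("https", "example.com", 443, "/search?q=1")
    # -> 'https://example.com/search?q=1' (default ports are omitted by hostport())

    url.encode([("q", "1")])   # -> 'q=1'
    url.decode("q=1")          # -> [('q', '1')]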
+ +UASTRINGS = [ + ("android", + "a", + "Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Nexus 7 Build/JRO03D) AFL/01.04.02"), # noqa + ("blackberry", + "l", + "Mozilla/5.0 (BlackBerry; U; BlackBerry 9900; en) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.1.0.346 Mobile Safari/534.11+"), # noqa + ("bingbot", + "b", + "Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)"), # noqa + ("chrome", + "c", + "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1"), # noqa + ("firefox", + "f", + "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:14.0) Gecko/20120405 Firefox/14.0a1"), # noqa + ("googlebot", + "g", + "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"), # noqa + ("ie9", + "i", + "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US)"), # noqa + ("ipad", + "p", + "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B176 Safari/7534.48.3"), # noqa + ("iphone", + "h", + "Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5"), # noqa + ("safari", + "s", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10"), # noqa +] + + +def get_by_shortcut(s): + """ + Retrieve a user agent entry by shortcut. + """ + for i in UASTRINGS: + if s == i[1]: + return i diff --git a/mitmproxy/net/socks.py b/mitmproxy/net/socks.py new file mode 100644 index 00000000..115949f5 --- /dev/null +++ b/mitmproxy/net/socks.py @@ -0,0 +1,234 @@ +import struct +import array +import ipaddress + +from mitmproxy.net import tcp +from mitmproxy.net import check +from mitmproxy.types import bidi + + +class SocksError(Exception): + def __init__(self, code, message): + super().__init__(message) + self.code = code + +VERSION = bidi.BiDi( + SOCKS4=0x04, + SOCKS5=0x05 +) + +CMD = bidi.BiDi( + CONNECT=0x01, + BIND=0x02, + UDP_ASSOCIATE=0x03 +) + +ATYP = bidi.BiDi( + IPV4_ADDRESS=0x01, + DOMAINNAME=0x03, + IPV6_ADDRESS=0x04 +) + +REP = bidi.BiDi( + SUCCEEDED=0x00, + GENERAL_SOCKS_SERVER_FAILURE=0x01, + CONNECTION_NOT_ALLOWED_BY_RULESET=0x02, + NETWORK_UNREACHABLE=0x03, + HOST_UNREACHABLE=0x04, + CONNECTION_REFUSED=0x05, + TTL_EXPIRED=0x06, + COMMAND_NOT_SUPPORTED=0x07, + ADDRESS_TYPE_NOT_SUPPORTED=0x08, +) + +METHOD = bidi.BiDi( + NO_AUTHENTICATION_REQUIRED=0x00, + GSSAPI=0x01, + USERNAME_PASSWORD=0x02, + NO_ACCEPTABLE_METHODS=0xFF +) + +USERNAME_PASSWORD_VERSION = bidi.BiDi( + DEFAULT=0x01 +) + + +class ClientGreeting: + __slots__ = ("ver", "methods") + + def __init__(self, ver, methods): + self.ver = ver + self.methods = array.array("B") + self.methods.extend(methods) + + def assert_socks5(self): + if self.ver != VERSION.SOCKS5: + if self.ver == ord("G") and len(self.methods) == ord("E"): + guess = "Probably not a SOCKS request but a regular HTTP request. " + else: + guess = "" + + raise SocksError( + REP.GENERAL_SOCKS_SERVER_FAILURE, + guess + "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver + ) + + @classmethod + def from_file(cls, f, fail_early=False): + """ + :param fail_early: If true, a SocksError will be raised if the first byte does not indicate socks5. 
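For reference, the user-agent table above is meant to be consumed via get_by_shortcut; a minimal sketch:

    from mitmproxy.net.http import user_agents

    entry = user_agents.get_by_shortcut("c")
    if entry:
        name, shortcut, ua_string = entry   # ("chrome", "c", "Mozilla/5.0 ...")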
+ """ + ver, nmethods = struct.unpack("!BB", f.safe_read(2)) + client_greeting = cls(ver, []) + if fail_early: + client_greeting.assert_socks5() + client_greeting.methods.fromstring(f.safe_read(nmethods)) + return client_greeting + + def to_file(self, f): + f.write(struct.pack("!BB", self.ver, len(self.methods))) + f.write(self.methods.tostring()) + + +class ServerGreeting: + __slots__ = ("ver", "method") + + def __init__(self, ver, method): + self.ver = ver + self.method = method + + def assert_socks5(self): + if self.ver != VERSION.SOCKS5: + if self.ver == ord("H") and self.method == ord("T"): + guess = "Probably not a SOCKS request but a regular HTTP response. " + else: + guess = "" + + raise SocksError( + REP.GENERAL_SOCKS_SERVER_FAILURE, + guess + "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver + ) + + @classmethod + def from_file(cls, f): + ver, method = struct.unpack("!BB", f.safe_read(2)) + return cls(ver, method) + + def to_file(self, f): + f.write(struct.pack("!BB", self.ver, self.method)) + + +class UsernamePasswordAuth: + __slots__ = ("ver", "username", "password") + + def __init__(self, ver, username, password): + self.ver = ver + self.username = username + self.password = password + + def assert_authver1(self): + if self.ver != USERNAME_PASSWORD_VERSION.DEFAULT: + raise SocksError( + 0, + "Invalid auth version. Expected 0x01, got 0x%x" % self.ver + ) + + @classmethod + def from_file(cls, f): + ver, ulen = struct.unpack("!BB", f.safe_read(2)) + username = f.safe_read(ulen) + plen, = struct.unpack("!B", f.safe_read(1)) + password = f.safe_read(plen) + return cls(ver, username.decode(), password.decode()) + + def to_file(self, f): + f.write(struct.pack("!BB", self.ver, len(self.username))) + f.write(self.username.encode()) + f.write(struct.pack("!B", len(self.password))) + f.write(self.password.encode()) + + +class UsernamePasswordAuthResponse: + __slots__ = ("ver", "status") + + def __init__(self, ver, status): + self.ver = ver + self.status = status + + def assert_authver1(self): + if self.ver != USERNAME_PASSWORD_VERSION.DEFAULT: + raise SocksError( + 0, + "Invalid auth version. Expected 0x01, got 0x%x" % self.ver + ) + + @classmethod + def from_file(cls, f): + ver, status = struct.unpack("!BB", f.safe_read(2)) + return cls(ver, status) + + def to_file(self, f): + f.write(struct.pack("!BB", self.ver, self.status)) + + +class Message: + __slots__ = ("ver", "msg", "atyp", "addr") + + def __init__(self, ver, msg, atyp, addr): + self.ver = ver + self.msg = msg + self.atyp = atyp + self.addr = tcp.Address.wrap(addr) + + def assert_socks5(self): + if self.ver != VERSION.SOCKS5: + raise SocksError( + REP.GENERAL_SOCKS_SERVER_FAILURE, + "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver + ) + + @classmethod + def from_file(cls, f): + ver, msg, rsv, atyp = struct.unpack("!BBBB", f.safe_read(4)) + if rsv != 0x00: + raise SocksError( + REP.GENERAL_SOCKS_SERVER_FAILURE, + "Socks Request: Invalid reserved byte: %s" % rsv + ) + if atyp == ATYP.IPV4_ADDRESS: + # We use tnoa here as ntop is not commonly available on Windows. 
+ host = ipaddress.IPv4Address(f.safe_read(4)).compressed + use_ipv6 = False + elif atyp == ATYP.IPV6_ADDRESS: + host = ipaddress.IPv6Address(f.safe_read(16)).compressed + use_ipv6 = True + elif atyp == ATYP.DOMAINNAME: + length, = struct.unpack("!B", f.safe_read(1)) + host = f.safe_read(length) + if not check.is_valid_host(host): + raise SocksError(REP.GENERAL_SOCKS_SERVER_FAILURE, "Invalid hostname: %s" % host) + host = host.decode("idna") + use_ipv6 = False + else: + raise SocksError(REP.ADDRESS_TYPE_NOT_SUPPORTED, + "Socks Request: Unknown ATYP: %s" % atyp) + + port, = struct.unpack("!H", f.safe_read(2)) + addr = tcp.Address((host, port), use_ipv6=use_ipv6) + return cls(ver, msg, atyp, addr) + + def to_file(self, f): + f.write(struct.pack("!BBBB", self.ver, self.msg, 0x00, self.atyp)) + if self.atyp == ATYP.IPV4_ADDRESS: + f.write(ipaddress.IPv4Address(self.addr.host).packed) + elif self.atyp == ATYP.IPV6_ADDRESS: + f.write(ipaddress.IPv6Address(self.addr.host).packed) + elif self.atyp == ATYP.DOMAINNAME: + f.write(struct.pack("!B", len(self.addr.host))) + f.write(self.addr.host.encode("idna")) + else: + raise SocksError( + REP.ADDRESS_TYPE_NOT_SUPPORTED, + "Unknown ATYP: %s" % self.atyp + ) + f.write(struct.pack("!H", self.addr.port)) diff --git a/mitmproxy/net/tcp.py b/mitmproxy/net/tcp.py new file mode 100644 index 00000000..ac368a9c --- /dev/null +++ b/mitmproxy/net/tcp.py @@ -0,0 +1,989 @@ +import os +import select +import socket +import sys +import threading +import time +import traceback + +import binascii + +from typing import Optional # noqa + +from mitmproxy.utils import strutils + +import certifi +from backports import ssl_match_hostname +import OpenSSL +from OpenSSL import SSL + +from mitmproxy import certs +from mitmproxy.utils import version_check +from mitmproxy.types import serializable +from mitmproxy import exceptions +from mitmproxy.types import basethread + +# This is a rather hackish way to make sure that +# the latest version of pyOpenSSL is actually installed. +version_check.check_pyopenssl_version() + +socket_fileobject = socket.SocketIO + +EINTR = 4 +if os.environ.get("NO_ALPN"): + HAS_ALPN = False +else: + HAS_ALPN = SSL._lib.Cryptography_HAS_ALPN + +# To enable all SSL methods use: SSLv23 +# then add options to disable certain methods +# https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3 +SSL_BASIC_OPTIONS = ( + SSL.OP_CIPHER_SERVER_PREFERENCE +) +if hasattr(SSL, "OP_NO_COMPRESSION"): + SSL_BASIC_OPTIONS |= SSL.OP_NO_COMPRESSION + +SSL_DEFAULT_METHOD = SSL.SSLv23_METHOD +SSL_DEFAULT_OPTIONS = ( + SSL.OP_NO_SSLv2 | + SSL.OP_NO_SSLv3 | + SSL_BASIC_OPTIONS +) +if hasattr(SSL, "OP_NO_COMPRESSION"): + SSL_DEFAULT_OPTIONS |= SSL.OP_NO_COMPRESSION + +""" +Map a reasonable SSL version specification into the format OpenSSL expects. +Don't ask... 
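A rough sketch of driving the SOCKS parsers above with an in-memory stream (hypothetical bytes; a real server would read from the client connection's Reader):

    import io
    from mitmproxy.net import socks, tcp

    raw = b"\x05\x01\x00"   # SOCKS5 greeting: version 5, one method, "no auth"
    greeting = socks.ClientGreeting.from_file(tcp.Reader(io.BytesIO(raw)))
    greeting.assert_socks5()
    assert list(greeting.methods) == [socks.METHOD.NO_AUTHENTICATION_REQUIRED]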
+https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3 +""" +sslversion_choices = { + "all": (SSL.SSLv23_METHOD, SSL_BASIC_OPTIONS), + # SSLv23_METHOD + NO_SSLv2 + NO_SSLv3 == TLS 1.0+ + # TLSv1_METHOD would be TLS 1.0 only + "secure": (SSL.SSLv23_METHOD, (SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL_BASIC_OPTIONS)), + "SSLv2": (SSL.SSLv2_METHOD, SSL_BASIC_OPTIONS), + "SSLv3": (SSL.SSLv3_METHOD, SSL_BASIC_OPTIONS), + "TLSv1": (SSL.TLSv1_METHOD, SSL_BASIC_OPTIONS), + "TLSv1_1": (SSL.TLSv1_1_METHOD, SSL_BASIC_OPTIONS), + "TLSv1_2": (SSL.TLSv1_2_METHOD, SSL_BASIC_OPTIONS), +} + + +class SSLKeyLogger: + + def __init__(self, filename): + self.filename = filename + self.f = None + self.lock = threading.Lock() + + # required for functools.wraps, which pyOpenSSL uses. + __name__ = "SSLKeyLogger" + + def __call__(self, connection, where, ret): + if where == SSL.SSL_CB_HANDSHAKE_DONE and ret == 1: + with self.lock: + if not self.f: + d = os.path.dirname(self.filename) + if not os.path.isdir(d): + os.makedirs(d) + self.f = open(self.filename, "ab") + self.f.write(b"\r\n") + client_random = binascii.hexlify(connection.client_random()) + masterkey = binascii.hexlify(connection.master_key()) + self.f.write(b"CLIENT_RANDOM %s %s\r\n" % (client_random, masterkey)) + self.f.flush() + + def close(self): + with self.lock: + if self.f: + self.f.close() + + @staticmethod + def create_logfun(filename): + if filename: + return SSLKeyLogger(filename) + return False + +log_ssl_key = SSLKeyLogger.create_logfun( + os.getenv("MITMPROXY_SSLKEYLOGFILE") or os.getenv("SSLKEYLOGFILE")) + + +class _FileLike: + BLOCKSIZE = 1024 * 32 + + def __init__(self, o): + self.o = o + self._log = None + self.first_byte_timestamp = None + + def set_descriptor(self, o): + self.o = o + + def __getattr__(self, attr): + return getattr(self.o, attr) + + def start_log(self): + """ + Starts or resets the log. + + This will store all bytes read or written. + """ + self._log = [] + + def stop_log(self): + """ + Stops the log. + """ + self._log = None + + def is_logging(self): + return self._log is not None + + def get_log(self): + """ + Returns the log as a string. + """ + if not self.is_logging(): + raise ValueError("Not logging!") + return b"".join(self._log) + + def add_log(self, v): + if self.is_logging(): + self._log.append(v) + + def reset_timestamps(self): + self.first_byte_timestamp = None + + +class Writer(_FileLike): + + def flush(self): + """ + May raise exceptions.TcpDisconnect + """ + if hasattr(self.o, "flush"): + try: + self.o.flush() + except (socket.error, IOError) as v: + raise exceptions.TcpDisconnect(str(v)) + + def write(self, v): + """ + May raise exceptions.TcpDisconnect + """ + if v: + self.first_byte_timestamp = self.first_byte_timestamp or time.time() + try: + if hasattr(self.o, "sendall"): + self.add_log(v) + return self.o.sendall(v) + else: + r = self.o.write(v) + self.add_log(v[:r]) + return r + except (SSL.Error, socket.error) as e: + raise exceptions.TcpDisconnect(str(e)) + + +class Reader(_FileLike): + + def read(self, length): + """ + If length is -1, we read until connection closes. 
+ """ + result = b'' + start = time.time() + while length == -1 or length > 0: + if length == -1 or length > self.BLOCKSIZE: + rlen = self.BLOCKSIZE + else: + rlen = length + try: + data = self.o.read(rlen) + except SSL.ZeroReturnError: + # TLS connection was shut down cleanly + break + except (SSL.WantWriteError, SSL.WantReadError): + # From the OpenSSL docs: + # If the underlying BIO is non-blocking, SSL_read() will also return when the + # underlying BIO could not satisfy the needs of SSL_read() to continue the + # operation. In this case a call to SSL_get_error with the return value of + # SSL_read() will yield SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE. + if (time.time() - start) < self.o.gettimeout(): + time.sleep(0.1) + continue + else: + raise exceptions.TcpTimeout() + except socket.timeout: + raise exceptions.TcpTimeout() + except socket.error as e: + raise exceptions.TcpDisconnect(str(e)) + except SSL.SysCallError as e: + if e.args == (-1, 'Unexpected EOF'): + break + raise exceptions.TlsException(str(e)) + except SSL.Error as e: + raise exceptions.TlsException(str(e)) + self.first_byte_timestamp = self.first_byte_timestamp or time.time() + if not data: + break + result += data + if length != -1: + length -= len(data) + self.add_log(result) + return result + + def readline(self, size=None): + result = b'' + bytes_read = 0 + while True: + if size is not None and bytes_read >= size: + break + ch = self.read(1) + bytes_read += 1 + if not ch: + break + else: + result += ch + if ch == b'\n': + break + return result + + def safe_read(self, length): + """ + Like .read, but is guaranteed to either return length bytes, or + raise an exception. + """ + result = self.read(length) + if length != -1 and len(result) != length: + if not result: + raise exceptions.TcpDisconnect() + else: + raise exceptions.TcpReadIncomplete( + "Expected %s bytes, got %s" % (length, len(result)) + ) + return result + + def peek(self, length): + """ + Tries to peek into the underlying file object. + + Returns: + Up to the next N bytes if peeking is successful. + + Raises: + exceptions.TcpException if there was an error with the socket + TlsException if there was an error with pyOpenSSL. + NotImplementedError if the underlying file object is not a [pyOpenSSL] socket + """ + if isinstance(self.o, socket_fileobject): + try: + return self.o._sock.recv(length, socket.MSG_PEEK) + except socket.error as e: + raise exceptions.TcpException(repr(e)) + elif isinstance(self.o, SSL.Connection): + try: + return self.o.recv(length, socket.MSG_PEEK) + except SSL.Error as e: + raise exceptions.TlsException(str(e)) + else: + raise NotImplementedError("Can only peek into (pyOpenSSL) sockets") + + +class Address(serializable.Serializable): + + """ + This class wraps an IPv4/IPv6 tuple to provide named attributes and + ipv6 information. 
+ """ + + def __init__(self, address, use_ipv6=False): + self.address = tuple(address) + self.use_ipv6 = use_ipv6 + + def get_state(self): + return { + "address": self.address, + "use_ipv6": self.use_ipv6 + } + + def set_state(self, state): + self.address = state["address"] + self.use_ipv6 = state["use_ipv6"] + + @classmethod + def from_state(cls, state): + return Address(**state) + + @classmethod + def wrap(cls, t): + if isinstance(t, cls): + return t + else: + return cls(t) + + def __call__(self): + return self.address + + @property + def host(self): + return self.address[0] + + @property + def port(self): + return self.address[1] + + @property + def use_ipv6(self): + return self.family == socket.AF_INET6 + + @use_ipv6.setter + def use_ipv6(self, b): + self.family = socket.AF_INET6 if b else socket.AF_INET + + def __repr__(self): + return "{}:{}".format(self.host, self.port) + + def __eq__(self, other): + if not other: + return False + other = Address.wrap(other) + return (self.address, self.family) == (other.address, other.family) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.address) ^ 42 # different hash than the tuple alone. + + +def ssl_read_select(rlist, timeout): + """ + This is a wrapper around select.select() which also works for SSL.Connections + by taking ssl_connection.pending() into account. + + Caveats: + If .pending() > 0 for any of the connections in rlist, we avoid the select syscall + and **will not include any other connections which may or may not be ready**. + + Args: + rlist: wait until ready for reading + + Returns: + subset of rlist which is ready for reading. + """ + return [ + conn for conn in rlist + if isinstance(conn, SSL.Connection) and conn.pending() > 0 + ] or select.select(rlist, (), (), timeout)[0] + + +def close_socket(sock): + """ + Does a hard close of a socket, without emitting a RST. + """ + try: + # We already indicate that we close our end. + # may raise "Transport endpoint is not connected" on Linux + sock.shutdown(socket.SHUT_WR) + + # Section 4.2.2.13 of RFC 1122 tells us that a close() with any pending + # readable data could lead to an immediate RST being sent (which is the + # case on Windows). + # http://ia600609.us.archive.org/22/items/TheUltimateSo_lingerPageOrWhyIsMyTcpNotReliable/the-ultimate-so_linger-page-or-why-is-my-tcp-not-reliable.html + # + # This in turn results in the following issue: If we send an error page + # to the client and then close the socket, the RST may be received by + # the client before the error page and the users sees a connection + # error rather than the error page. Thus, we try to empty the read + # buffer on Windows first. (see + # https://github.com/mitmproxy/mitmproxy/issues/527#issuecomment-93782988) + # + + if os.name == "nt": # pragma: no cover + # We cannot rely on the shutdown()-followed-by-read()-eof technique + # proposed by the page above: Some remote machines just don't send + # a TCP FIN, which would leave us in the unfortunate situation that + # recv() would block infinitely. As a workaround, we set a timeout + # here even if we are in blocking mode. + sock.settimeout(sock.gettimeout() or 20) + + # limit at a megabyte so that we don't read infinitely + for _ in range(1024 ** 3 // 4096): + # may raise a timeout/disconnect exception. + if not sock.recv(4096): + break + + # Now we can close the other half as well. 
+ sock.shutdown(socket.SHUT_RD) + + except socket.error: + pass + + sock.close() + + +class _Connection: + + rbufsize = -1 + wbufsize = -1 + + def _makefile(self): + """ + Set up .rfile and .wfile attributes from .connection + """ + # Ideally, we would use the Buffered IO in Python 3 by default. + # Unfortunately, the implementation of .peek() is broken for n>1 bytes, + # as it may just return what's left in the buffer and not all the bytes we want. + # As a workaround, we just use unbuffered sockets directly. + # https://mail.python.org/pipermail/python-dev/2009-June/089986.html + self.rfile = Reader(socket.SocketIO(self.connection, "rb")) + self.wfile = Writer(socket.SocketIO(self.connection, "wb")) + + def __init__(self, connection): + if connection: + self.connection = connection + self.ip_address = Address(connection.getpeername()) + self._makefile() + else: + self.connection = None + self.ip_address = None + self.rfile = None + self.wfile = None + + self.ssl_established = False + self.finished = False + + def get_current_cipher(self): + if not self.ssl_established: + return None + + name = self.connection.get_cipher_name() + bits = self.connection.get_cipher_bits() + version = self.connection.get_cipher_version() + return name, bits, version + + def finish(self): + self.finished = True + # If we have an SSL connection, wfile.close == connection.close + # (We call _FileLike.set_descriptor(conn)) + # Closing the socket is not our task, therefore we don't call close + # then. + if not isinstance(self.connection, SSL.Connection): + if not getattr(self.wfile, "closed", False): + try: + self.wfile.flush() + self.wfile.close() + except exceptions.TcpDisconnect: + pass + + self.rfile.close() + else: + try: + self.connection.shutdown() + except SSL.Error: + pass + + def _create_ssl_context(self, + method=SSL_DEFAULT_METHOD, + options=SSL_DEFAULT_OPTIONS, + verify_options=SSL.VERIFY_NONE, + ca_path=None, + ca_pemfile=None, + cipher_list=None, + alpn_protos=None, + alpn_select=None, + alpn_select_callback=None, + sni=None, + ): + """ + Creates an SSL Context. 
+ + :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, TLSv1_1_METHOD, or TLSv1_2_METHOD + :param options: A bit field consisting of OpenSSL.SSL.OP_* values + :param verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values + :param ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool + :param ca_pemfile: Path to a PEM formatted trusted CA certificate + :param cipher_list: A textual OpenSSL cipher list, see https://www.openssl.org/docs/apps/ciphers.html + :rtype : SSL.Context + """ + context = SSL.Context(method) + # Options (NO_SSLv2/3) + if options is not None: + context.set_options(options) + + # Verify Options (NONE/PEER and trusted CAs) + if verify_options is not None: + def verify_cert(conn, x509, errno, err_depth, is_cert_verified): + if not is_cert_verified: + self.ssl_verification_error = exceptions.InvalidCertificateException( + "Certificate Verification Error for {}: {} (errno: {}, depth: {})".format( + sni, + strutils.native(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"), + errno, + err_depth + ) + ) + return is_cert_verified + + context.set_verify(verify_options, verify_cert) + if ca_path is None and ca_pemfile is None: + ca_pemfile = certifi.where() + context.load_verify_locations(ca_pemfile, ca_path) + + # Workaround for + # https://github.com/pyca/pyopenssl/issues/190 + # https://github.com/mitmproxy/mitmproxy/issues/472 + # Options already set before are not cleared. + context.set_mode(SSL._lib.SSL_MODE_AUTO_RETRY) + + # Cipher List + if cipher_list: + try: + context.set_cipher_list(cipher_list) + + # TODO: maybe change this to with newer pyOpenSSL APIs + context.set_tmp_ecdh(OpenSSL.crypto.get_elliptic_curve('prime256v1')) + except SSL.Error as v: + raise exceptions.TlsException("SSL cipher specification error: %s" % str(v)) + + # SSLKEYLOGFILE + if log_ssl_key: + context.set_info_callback(log_ssl_key) + + if HAS_ALPN: + if alpn_protos is not None: + # advertise application layer protocols + context.set_alpn_protos(alpn_protos) + elif alpn_select is not None and alpn_select_callback is None: + # select application layer protocol + def alpn_select_callback(conn_, options): + if alpn_select in options: + return bytes(alpn_select) + else: # pragma no cover + return options[0] + context.set_alpn_select_callback(alpn_select_callback) + elif alpn_select_callback is not None and alpn_select is None: + context.set_alpn_select_callback(alpn_select_callback) + elif alpn_select_callback is not None and alpn_select is not None: + raise exceptions.TlsException("ALPN error: only define alpn_select (string) OR alpn_select_callback (method).") + + return context + + +class ConnectionCloser: + def __init__(self, conn): + self.conn = conn + self._canceled = False + + def pop(self): + """ + Cancel the current closer, and return a fresh one. 
+ """ + self._canceled = True + return ConnectionCloser(self.conn) + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self._canceled: + self.conn.close() + + +class TCPClient(_Connection): + + def __init__(self, address, source_address=None, spoof_source_address=None): + super().__init__(None) + self.address = address + self.source_address = source_address + self.cert = None + self.server_certs = [] + self.ssl_verification_error = None # type: Optional[exceptions.InvalidCertificateException] + self.sni = None + self.spoof_source_address = spoof_source_address + + @property + def address(self): + return self.__address + + @address.setter + def address(self, address): + if address: + self.__address = Address.wrap(address) + else: + self.__address = None + + @property + def source_address(self): + return self.__source_address + + @source_address.setter + def source_address(self, source_address): + if source_address: + self.__source_address = Address.wrap(source_address) + else: + self.__source_address = None + + def close(self): + # Make sure to close the real socket, not the SSL proxy. + # OpenSSL is really good at screwing up, i.e. when trying to recv from a failed connection, + # it tries to renegotiate... + if isinstance(self.connection, SSL.Connection): + close_socket(self.connection._socket) + else: + close_socket(self.connection) + + def create_ssl_context(self, cert=None, alpn_protos=None, **sslctx_kwargs): + context = self._create_ssl_context( + alpn_protos=alpn_protos, + **sslctx_kwargs) + # Client Certs + if cert: + try: + context.use_privatekey_file(cert) + context.use_certificate_file(cert) + except SSL.Error as v: + raise exceptions.TlsException("SSL client certificate error: %s" % str(v)) + return context + + def convert_to_ssl(self, sni=None, alpn_protos=None, **sslctx_kwargs): + """ + cert: Path to a file containing both client cert and private key. 
+ + options: A bit field consisting of OpenSSL.SSL.OP_* values + verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values + ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool + ca_pemfile: Path to a PEM formatted trusted CA certificate + """ + verification_mode = sslctx_kwargs.get('verify_options', None) + if verification_mode == SSL.VERIFY_PEER and not sni: + raise exceptions.TlsException("Cannot validate certificate hostname without SNI") + + context = self.create_ssl_context( + alpn_protos=alpn_protos, + sni=sni, + **sslctx_kwargs + ) + self.connection = SSL.Connection(context, self.connection) + if sni: + self.sni = sni + self.connection.set_tlsext_host_name(sni.encode("idna")) + self.connection.set_connect_state() + try: + self.connection.do_handshake() + except SSL.Error as v: + if self.ssl_verification_error: + raise self.ssl_verification_error + else: + raise exceptions.TlsException("SSL handshake error: %s" % repr(v)) + else: + # Fix for pre v1.0 OpenSSL, which doesn't throw an exception on + # certificate validation failure + if verification_mode == SSL.VERIFY_PEER and self.ssl_verification_error: + raise self.ssl_verification_error + + self.cert = certs.SSLCert(self.connection.get_peer_certificate()) + + # Keep all server certificates in a list + for i in self.connection.get_peer_cert_chain(): + self.server_certs.append(certs.SSLCert(i)) + + # Validate TLS Hostname + try: + crt = dict( + subjectAltName=[("DNS", x.decode("ascii", "strict")) for x in self.cert.altnames] + ) + if self.cert.cn: + crt["subject"] = [[["commonName", self.cert.cn.decode("ascii", "strict")]]] + if sni: + hostname = sni + else: + hostname = "no-hostname" + ssl_match_hostname.match_hostname(crt, hostname) + except (ValueError, ssl_match_hostname.CertificateError) as e: + self.ssl_verification_error = exceptions.InvalidCertificateException( + "Certificate Verification Error for {}: {}".format( + sni or repr(self.address), + str(e) + ) + ) + if verification_mode == SSL.VERIFY_PEER: + raise self.ssl_verification_error + + self.ssl_established = True + self.rfile.set_descriptor(self.connection) + self.wfile.set_descriptor(self.connection) + + def makesocket(self): + # some parties (cuckoo sandbox) need to hook this + return socket.socket(self.address.family, socket.SOCK_STREAM) + + def connect(self): + try: + connection = self.makesocket() + + if self.spoof_source_address: + try: + # 19 is `IP_TRANSPARENT`, which is only available on Python 3.3+ on some OSes + if not connection.getsockopt(socket.SOL_IP, 19): + connection.setsockopt(socket.SOL_IP, 19, 1) + except socket.error as e: + raise exceptions.TcpException( + "Failed to spoof the source address: " + e.strerror + ) + if self.source_address: + connection.bind(self.source_address()) + connection.connect(self.address()) + self.source_address = Address(connection.getsockname()) + except (socket.error, IOError) as err: + raise exceptions.TcpException( + 'Error connecting to "%s": %s' % + (self.address.host, err) + ) + self.connection = connection + self.ip_address = Address(connection.getpeername()) + self._makefile() + return ConnectionCloser(self) + + def settimeout(self, n): + self.connection.settimeout(n) + + def gettimeout(self): + return self.connection.gettimeout() + + def get_alpn_proto_negotiated(self): + if HAS_ALPN and self.ssl_established: + return self.connection.get_alpn_proto_negotiated() + else: + return b"" + + +class BaseHandler(_Connection): + + """ + The instantiator is expected to call the 
handle() and finish() methods. + """ + + def __init__(self, connection, address, server): + super().__init__(connection) + self.address = Address.wrap(address) + self.server = server + self.clientcert = None + + def create_ssl_context(self, + cert, key, + handle_sni=None, + request_client_cert=None, + chain_file=None, + dhparams=None, + extra_chain_certs=None, + **sslctx_kwargs): + """ + cert: A certs.SSLCert object or the path to a certificate + chain file. + + handle_sni: SNI handler, should take a connection object. Server + name can be retrieved like this: + + connection.get_servername() + + And you can specify the connection keys as follows: + + new_context = Context(TLSv1_METHOD) + new_context.use_privatekey(key) + new_context.use_certificate(cert) + connection.set_context(new_context) + + The request_client_cert argument requires some explanation. We're + supposed to be able to do this with no negative effects - if the + client has no cert to present, we're notified and proceed as usual. + Unfortunately, Android seems to have a bug (tested on 4.2.2) - when + an Android client is asked to present a certificate it does not + have, it hangs up, which is frankly bogus. Some time down the track + we may be able to make the proper behaviour the default again, but + until then we're conservative. + """ + + context = self._create_ssl_context(ca_pemfile=chain_file, **sslctx_kwargs) + + context.use_privatekey(key) + if isinstance(cert, certs.SSLCert): + context.use_certificate(cert.x509) + else: + context.use_certificate_chain_file(cert) + + if extra_chain_certs: + for i in extra_chain_certs: + context.add_extra_chain_cert(i.x509) + + if handle_sni: + # SNI callback happens during do_handshake() + context.set_tlsext_servername_callback(handle_sni) + + if request_client_cert: + def save_cert(conn_, cert, errno_, depth_, preverify_ok_): + self.clientcert = certs.SSLCert(cert) + # Return true to prevent cert verification error + return True + context.set_verify(SSL.VERIFY_PEER, save_cert) + + if dhparams: + SSL._lib.SSL_CTX_set_tmp_dh(context._context, dhparams) + + return context + + def convert_to_ssl(self, cert, key, **sslctx_kwargs): + """ + Convert connection to SSL. + For a list of parameters, see BaseHandler._create_ssl_context(...) 
+ """ + + context = self.create_ssl_context( + cert, + key, + **sslctx_kwargs) + self.connection = SSL.Connection(context, self.connection) + self.connection.set_accept_state() + try: + self.connection.do_handshake() + except SSL.Error as v: + raise exceptions.TlsException("SSL handshake error: %s" % repr(v)) + self.ssl_established = True + self.rfile.set_descriptor(self.connection) + self.wfile.set_descriptor(self.connection) + + def handle(self): # pragma: no cover + raise NotImplementedError + + def settimeout(self, n): + self.connection.settimeout(n) + + def get_alpn_proto_negotiated(self): + if HAS_ALPN and self.ssl_established: + return self.connection.get_alpn_proto_negotiated() + else: + return b"" + + +class Counter: + def __init__(self): + self._count = 0 + self._lock = threading.Lock() + + @property + def count(self): + with self._lock: + return self._count + + def __enter__(self): + with self._lock: + self._count += 1 + + def __exit__(self, *args): + with self._lock: + self._count -= 1 + + +class TCPServer: + request_queue_size = 20 + + def __init__(self, address): + self.address = Address.wrap(address) + self.__is_shut_down = threading.Event() + self.__shutdown_request = False + self.socket = socket.socket(self.address.family, socket.SOCK_STREAM) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.address()) + self.address = Address.wrap(self.socket.getsockname()) + self.socket.listen(self.request_queue_size) + self.handler_counter = Counter() + + def connection_thread(self, connection, client_address): + with self.handler_counter: + client_address = Address(client_address) + try: + self.handle_client_connection(connection, client_address) + except: + self.handle_error(connection, client_address) + finally: + close_socket(connection) + + def serve_forever(self, poll_interval=0.1): + self.__is_shut_down.clear() + try: + while not self.__shutdown_request: + try: + r, w_, e_ = select.select( + [self.socket], [], [], poll_interval) + except select.error as ex: # pragma: no cover + if ex[0] == EINTR: + continue + else: + raise + if self.socket in r: + connection, client_address = self.socket.accept() + t = basethread.BaseThread( + "TCPConnectionHandler (%s: %s:%s -> %s:%s)" % ( + self.__class__.__name__, + client_address[0], + client_address[1], + self.address.host, + self.address.port + ), + target=self.connection_thread, + args=(connection, client_address), + ) + t.setDaemon(1) + try: + t.start() + except threading.ThreadError: + self.handle_error(connection, Address(client_address)) + connection.close() + finally: + self.__shutdown_request = False + self.__is_shut_down.set() + + def shutdown(self): + self.__shutdown_request = True + self.__is_shut_down.wait() + self.socket.close() + self.handle_shutdown() + + def handle_error(self, connection_, client_address, fp=sys.stderr): + """ + Called when handle_client_connection raises an exception. + """ + # If a thread has persisted after interpreter exit, the module might be + # none. + if traceback: + exc = str(traceback.format_exc()) + print(u'-' * 40, file=fp) + print( + u"Error in processing of request from %s" % repr(client_address), file=fp) + print(exc, file=fp) + print(u'-' * 40, file=fp) + + def handle_client_connection(self, conn, client_address): # pragma: no cover + """ + Called after client connection. + """ + raise NotImplementedError + + def handle_shutdown(self): + """ + Called after server shutdown. 
+ """ + + def wait_for_silence(self, timeout=5): + start = time.time() + while 1: + if time.time() - start >= timeout: + raise exceptions.Timeout( + "%s service threads still alive" % + self.handler_counter.count + ) + if self.handler_counter.count == 0: + return diff --git a/mitmproxy/net/websockets/__init__.py b/mitmproxy/net/websockets/__init__.py new file mode 100644 index 00000000..2d6f0a0c --- /dev/null +++ b/mitmproxy/net/websockets/__init__.py @@ -0,0 +1,35 @@ +from .frame import FrameHeader +from .frame import Frame +from .frame import OPCODE +from .frame import CLOSE_REASON +from .masker import Masker +from .utils import MAGIC +from .utils import VERSION +from .utils import client_handshake_headers +from .utils import server_handshake_headers +from .utils import check_handshake +from .utils import check_client_version +from .utils import create_server_nonce +from .utils import get_extensions +from .utils import get_protocol +from .utils import get_client_key +from .utils import get_server_accept + +__all__ = [ + "FrameHeader", + "Frame", + "OPCODE", + "CLOSE_REASON", + "Masker", + "MAGIC", + "VERSION", + "client_handshake_headers", + "server_handshake_headers", + "check_handshake", + "check_client_version", + "create_server_nonce", + "get_extensions", + "get_protocol", + "get_client_key", + "get_server_accept", +] diff --git a/mitmproxy/net/websockets/frame.py b/mitmproxy/net/websockets/frame.py new file mode 100644 index 00000000..bd5f67dd --- /dev/null +++ b/mitmproxy/net/websockets/frame.py @@ -0,0 +1,274 @@ +import os +import struct +import io + +from mitmproxy.net import tcp +from mitmproxy.utils import strutils +from mitmproxy.utils import bits +from mitmproxy.utils import human +from mitmproxy.types import bidi +from .masker import Masker + + +MAX_16_BIT_INT = (1 << 16) +MAX_64_BIT_INT = (1 << 64) + +DEFAULT = object() + +# RFC 6455, Section 5.2 - Base Framing Protocol +OPCODE = bidi.BiDi( + CONTINUE=0x00, + TEXT=0x01, + BINARY=0x02, + CLOSE=0x08, + PING=0x09, + PONG=0x0a +) + +# RFC 6455, Section 7.4.1 - Defined Status Codes +CLOSE_REASON = bidi.BiDi( + NORMAL_CLOSURE=1000, + GOING_AWAY=1001, + PROTOCOL_ERROR=1002, + UNSUPPORTED_DATA=1003, + RESERVED=1004, + RESERVED_NO_STATUS=1005, + RESERVED_ABNORMAL_CLOSURE=1006, + INVALID_PAYLOAD_DATA=1007, + POLICY_VIOLATION=1008, + MESSAGE_TOO_BIG=1009, + MANDATORY_EXTENSION=1010, + INTERNAL_ERROR=1011, + RESERVED_TLS_HANDHSAKE_FAILED=1015, +) + + +class FrameHeader: + + def __init__( + self, + opcode=OPCODE.TEXT, + payload_length=0, + fin=False, + rsv1=False, + rsv2=False, + rsv3=False, + masking_key=DEFAULT, + mask=DEFAULT, + length_code=DEFAULT + ): + if not 0 <= opcode < 2 ** 4: + raise ValueError("opcode must be 0-16") + self.opcode = opcode + self.payload_length = payload_length + self.fin = fin + self.rsv1 = rsv1 + self.rsv2 = rsv2 + self.rsv3 = rsv3 + + if length_code is DEFAULT: + self.length_code = self._make_length_code(self.payload_length) + else: + self.length_code = length_code + + if mask is DEFAULT and masking_key is DEFAULT: + self.mask = False + self.masking_key = b"" + elif mask is DEFAULT: + self.mask = 1 + self.masking_key = masking_key + elif masking_key is DEFAULT: + self.mask = mask + self.masking_key = os.urandom(4) + else: + self.mask = mask + self.masking_key = masking_key + + if self.masking_key and len(self.masking_key) != 4: + raise ValueError("Masking key must be 4 bytes.") + + @classmethod + def _make_length_code(self, length): + """ + A websockets frame contains an initial length_code, and an 
optional + extended length code to represent the actual length if length code is + larger than 125 + """ + if length <= 125: + return length + elif length >= 126 and length <= 65535: + return 126 + else: + return 127 + + def __repr__(self): + vals = [ + "ws frame:", + OPCODE.get_name(self.opcode, hex(self.opcode)).lower() + ] + flags = [] + for i in ["fin", "rsv1", "rsv2", "rsv3", "mask"]: + if getattr(self, i): + flags.append(i) + if flags: + vals.extend([":", "|".join(flags)]) + if self.masking_key: + vals.append(":key=%s" % repr(self.masking_key)) + if self.payload_length: + vals.append(" %s" % human.pretty_size(self.payload_length)) + return "".join(vals) + + def __bytes__(self): + first_byte = bits.setbit(0, 7, self.fin) + first_byte = bits.setbit(first_byte, 6, self.rsv1) + first_byte = bits.setbit(first_byte, 5, self.rsv2) + first_byte = bits.setbit(first_byte, 4, self.rsv3) + first_byte = first_byte | self.opcode + + second_byte = bits.setbit(self.length_code, 7, self.mask) + + b = bytes([first_byte, second_byte]) + + if self.payload_length < 126: + pass + elif self.payload_length < MAX_16_BIT_INT: + # '!H' pack as 16 bit unsigned short + # add 2 byte extended payload length + b += struct.pack('!H', self.payload_length) + elif self.payload_length < MAX_64_BIT_INT: + # '!Q' = pack as 64 bit unsigned long long + # add 8 bytes extended payload length + b += struct.pack('!Q', self.payload_length) + else: + raise ValueError("Payload length exceeds 64bit integer") + + if self.masking_key: + b += self.masking_key + return b + + @classmethod + def from_file(cls, fp): + """ + read a websockets frame header + """ + first_byte, second_byte = fp.safe_read(2) + fin = bits.getbit(first_byte, 7) + rsv1 = bits.getbit(first_byte, 6) + rsv2 = bits.getbit(first_byte, 5) + rsv3 = bits.getbit(first_byte, 4) + opcode = first_byte & 0xF + mask_bit = bits.getbit(second_byte, 7) + length_code = second_byte & 0x7F + + # payload_length > 125 indicates you need to read more bytes + # to get the actual payload length + if length_code <= 125: + payload_length = length_code + elif length_code == 126: + payload_length, = struct.unpack("!H", fp.safe_read(2)) + else: # length_code == 127: + payload_length, = struct.unpack("!Q", fp.safe_read(8)) + + # masking key only present if mask bit set + if mask_bit == 1: + masking_key = fp.safe_read(4) + else: + masking_key = None + + return cls( + fin=fin, + rsv1=rsv1, + rsv2=rsv2, + rsv3=rsv3, + opcode=opcode, + mask=mask_bit, + length_code=length_code, + payload_length=payload_length, + masking_key=masking_key, + ) + + def __eq__(self, other): + if isinstance(other, FrameHeader): + return bytes(self) == bytes(other) + return False + + +class Frame: + """ + Represents a single WebSockets frame. + Constructor takes human readable forms of the frame components. + from_bytes() reads from a file-like object to create a new Frame. 
+ + WebSockets Frame as defined in RFC6455 + + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-------+-+-------------+-------------------------------+ + |F|R|R|R| opcode|M| Payload len | Extended payload length | + |I|S|S|S| (4) |A| (7) | (16/64) | + |N|V|V|V| |S| | (if payload len==126/127) | + | |1|2|3| |K| | | + +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - + + | Extended payload length continued, if payload len == 127 | + + - - - - - - - - - - - - - - - +-------------------------------+ + | |Masking-key, if MASK set to 1 | + +-------------------------------+-------------------------------+ + | Masking-key (continued) | Payload Data | + +-------------------------------- - - - - - - - - - - - - - - - + + : Payload Data continued ... : + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + | Payload Data continued ... | + +---------------------------------------------------------------+ + """ + + def __init__(self, payload=b"", **kwargs): + self.payload = payload + kwargs["payload_length"] = kwargs.get("payload_length", len(payload)) + self.header = FrameHeader(**kwargs) + + @classmethod + def from_bytes(cls, bytestring): + """ + Construct a websocket frame from an in-memory bytestring + to construct a frame from a stream of bytes, use from_file() directly + """ + return cls.from_file(tcp.Reader(io.BytesIO(bytestring))) + + def __repr__(self): + ret = repr(self.header) + if self.payload: + ret = ret + "\nPayload:\n" + strutils.bytes_to_escaped_str(self.payload) + return ret + + def __bytes__(self): + """ + Serialize the frame to wire format. Returns a string. + """ + b = bytes(self.header) + if self.header.masking_key: + b += Masker(self.header.masking_key)(self.payload) + else: + b += self.payload + return b + + @classmethod + def from_file(cls, fp): + """ + read a websockets frame sent by a server or client + + fp is a "file like" object that could be backed by a network + stream or a disk or an in memory stream reader + """ + header = FrameHeader.from_file(fp) + payload = fp.safe_read(header.payload_length) + + if header.mask == 1 and header.masking_key: + payload = Masker(header.masking_key)(payload) + + frame = cls(payload) + frame.header = header + return frame + + def __eq__(self, other): + if isinstance(other, Frame): + return bytes(self) == bytes(other) + return False diff --git a/mitmproxy/net/websockets/masker.py b/mitmproxy/net/websockets/masker.py new file mode 100644 index 00000000..47b1a688 --- /dev/null +++ b/mitmproxy/net/websockets/masker.py @@ -0,0 +1,25 @@ +class Masker: + """ + Data sent from the server must be masked to prevent malicious clients + from sending data over the wire in predictable patterns. + + Servers do not have to mask data they send to the client. 
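A small round-trip sketch for the frame classes above (masking is applied on serialization and removed again when parsing):

    from mitmproxy.net import websockets

    frame = websockets.Frame(
        payload=b"hello",
        fin=1,
        opcode=websockets.OPCODE.TEXT,
        mask=1,              # a random 4-byte masking key is generated
    )
    raw = bytes(frame)
    assert websockets.Frame.from_bytes(raw) == frame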
+ https://tools.ietf.org/html/rfc6455#section-5.3 + """ + + def __init__(self, key): + self.key = key + self.offset = 0 + + def mask(self, offset, data): + result = bytearray(data) + for i in range(len(data)): + result[i] ^= self.key[offset % 4] + offset += 1 + result = bytes(result) + return result + + def __call__(self, data): + ret = self.mask(self.offset, data) + self.offset += len(ret) + return ret diff --git a/mitmproxy/net/websockets/utils.py b/mitmproxy/net/websockets/utils.py new file mode 100644 index 00000000..d0b168ce --- /dev/null +++ b/mitmproxy/net/websockets/utils.py @@ -0,0 +1,90 @@ +""" +Collection of WebSockets Protocol utility functions (RFC6455) +Spec: https://tools.ietf.org/html/rfc6455 +""" + + +import base64 +import hashlib +import os + +from mitmproxy.net import http +from mitmproxy.utils import strutils + +MAGIC = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' +VERSION = "13" + + +def client_handshake_headers(version=None, key=None, protocol=None, extensions=None): + """ + Create the headers for a valid HTTP upgrade request. If Key is not + specified, it is generated, and can be found in sec-websocket-key in + the returned header set. + + Returns an instance of http.Headers + """ + if version is None: + version = VERSION + if key is None: + key = base64.b64encode(os.urandom(16)).decode('ascii') + h = http.Headers( + connection="upgrade", + upgrade="websocket", + sec_websocket_version=version, + sec_websocket_key=key, + ) + if protocol is not None: + h['sec-websocket-protocol'] = protocol + if extensions is not None: + h['sec-websocket-extensions'] = extensions + return h + + +def server_handshake_headers(client_key, protocol=None, extensions=None): + """ + The server response is a valid HTTP 101 response. + + Returns an instance of http.Headers + """ + h = http.Headers( + connection="upgrade", + upgrade="websocket", + sec_websocket_accept=create_server_nonce(client_key), + ) + if protocol is not None: + h['sec-websocket-protocol'] = protocol + if extensions is not None: + h['sec-websocket-extensions'] = extensions + return h + + +def check_handshake(headers): + return ( + "upgrade" in headers.get("connection", "").lower() and + headers.get("upgrade", "").lower() == "websocket" and + (headers.get("sec-websocket-key") is not None or headers.get("sec-websocket-accept") is not None) + ) + + +def create_server_nonce(client_nonce): + return base64.b64encode(hashlib.sha1(strutils.always_bytes(client_nonce) + MAGIC).digest()) + + +def check_client_version(headers): + return headers.get("sec-websocket-version", "") == VERSION + + +def get_extensions(headers): + return headers.get("sec-websocket-extensions", None) + + +def get_protocol(headers): + return headers.get("sec-websocket-protocol", None) + + +def get_client_key(headers): + return headers.get("sec-websocket-key", None) + + +def get_server_accept(headers): + return headers.get("sec-websocket-accept", None) diff --git a/mitmproxy/net/wsgi.py b/mitmproxy/net/wsgi.py new file mode 100644 index 00000000..b2705ea1 --- /dev/null +++ b/mitmproxy/net/wsgi.py @@ -0,0 +1,166 @@ +import time +import traceback +import urllib +import io + +from mitmproxy.net import http +from mitmproxy.net import tcp +from mitmproxy.utils import strutils + + +class ClientConn: + + def __init__(self, address): + self.address = tcp.Address.wrap(address) + + +class Flow: + + def __init__(self, address, request): + self.client_conn = ClientConn(address) + self.request = request + + +class Request: + + def __init__(self, scheme, method, path, http_version, 
headers, content): + self.scheme, self.method, self.path = scheme, method, path + self.headers, self.content = headers, content + self.http_version = http_version + + +def date_time_string(): + """Return the current date and time formatted for a message header.""" + WEEKS = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + MONTHS = [ + None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] + now = time.time() + year, month, day, hh, mm, ss, wd, y_, z_ = time.gmtime(now) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + WEEKS[wd], + day, MONTHS[month], year, + hh, mm, ss + ) + return s + + +class WSGIAdaptor: + + def __init__(self, app, domain, port, sversion): + self.app, self.domain, self.port, self.sversion = app, domain, port, sversion + + def make_environ(self, flow, errsoc, **extra): + """ + Raises: + ValueError, if the content-encoding is invalid. + """ + path = strutils.native(flow.request.path, "latin-1") + if '?' in path: + path_info, query = strutils.native(path, "latin-1").split('?', 1) + else: + path_info = path + query = '' + environ = { + 'wsgi.version': (1, 0), + 'wsgi.url_scheme': strutils.native(flow.request.scheme, "latin-1"), + 'wsgi.input': io.BytesIO(flow.request.content or b""), + 'wsgi.errors': errsoc, + 'wsgi.multithread': True, + 'wsgi.multiprocess': False, + 'wsgi.run_once': False, + 'SERVER_SOFTWARE': self.sversion, + 'REQUEST_METHOD': strutils.native(flow.request.method, "latin-1"), + 'SCRIPT_NAME': '', + 'PATH_INFO': urllib.parse.unquote(path_info), + 'QUERY_STRING': query, + 'CONTENT_TYPE': strutils.native(flow.request.headers.get('Content-Type', ''), "latin-1"), + 'CONTENT_LENGTH': strutils.native(flow.request.headers.get('Content-Length', ''), "latin-1"), + 'SERVER_NAME': self.domain, + 'SERVER_PORT': str(self.port), + 'SERVER_PROTOCOL': strutils.native(flow.request.http_version, "latin-1"), + } + environ.update(extra) + if flow.client_conn.address: + environ["REMOTE_ADDR"] = strutils.native(flow.client_conn.address.host, "latin-1") + environ["REMOTE_PORT"] = flow.client_conn.address.port + + for key, value in flow.request.headers.items(): + key = 'HTTP_' + strutils.native(key, "latin-1").upper().replace('-', '_') + if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): + environ[key] = value + return environ + + def error_page(self, soc, headers_sent, s): + """ + Make a best-effort attempt to write an error page. If headers are + already sent, we just bung the error into the page. + """ + c = """ + +

<html> + <h1> + Internal Server Error + </h1> + <pre>{err}"</pre> + </html>
+ + """.format(err=s).strip().encode() + + if not headers_sent: + soc.write(b"HTTP/1.1 500 Internal Server Error\r\n") + soc.write(b"Content-Type: text/html\r\n") + soc.write("Content-Length: {length}\r\n".format(length=len(c)).encode()) + soc.write(b"\r\n") + soc.write(c) + + def serve(self, request, soc, **env): + state = dict( + response_started=False, + headers_sent=False, + status=None, + headers=None + ) + + def write(data): + if not state["headers_sent"]: + soc.write("HTTP/1.1 {status}\r\n".format(status=state["status"]).encode()) + headers = state["headers"] + if 'server' not in headers: + headers["Server"] = self.sversion + if 'date' not in headers: + headers["Date"] = date_time_string() + soc.write(bytes(headers)) + soc.write(b"\r\n") + state["headers_sent"] = True + if data: + soc.write(data) + soc.flush() + + def start_response(status, headers, exc_info=None): + if exc_info: + if state["headers_sent"]: + raise exc_info[1] + elif state["status"]: + raise AssertionError('Response already started') + state["status"] = status + state["headers"] = http.Headers([[strutils.always_bytes(k), strutils.always_bytes(v)] for k, v in headers]) + if exc_info: + self.error_page(soc, state["headers_sent"], traceback.format_tb(exc_info[2])) + state["headers_sent"] = True + + errs = io.BytesIO() + try: + dataiter = self.app( + self.make_environ(request, errs, **env), start_response + ) + for i in dataiter: + write(i) + if not state["headers_sent"]: + write(b"") + except Exception: + try: + s = traceback.format_exc() + errs.write(s.encode("utf-8", "replace")) + self.error_page(soc, state["headers_sent"], s) + except Exception: # pragma: no cover + pass + return errs.getvalue() diff --git a/mitmproxy/proxy/config.py b/mitmproxy/proxy/config.py index 86b68ee5..7df9e692 100644 --- a/mitmproxy/proxy/config.py +++ b/mitmproxy/proxy/config.py @@ -11,9 +11,9 @@ from OpenSSL import SSL, crypto from mitmproxy import exceptions from mitmproxy import options as moptions from mitmproxy import certs -from netlib import tcp -from netlib.http import authentication -from netlib.http import url +from mitmproxy.net import tcp +from mitmproxy.net.http import authentication +from mitmproxy.net.http import url CONF_BASENAME = "mitmproxy" diff --git a/mitmproxy/proxy/modes/socks_proxy.py b/mitmproxy/proxy/modes/socks_proxy.py index 175453b0..adcd8fc1 100644 --- a/mitmproxy/proxy/modes/socks_proxy.py +++ b/mitmproxy/proxy/modes/socks_proxy.py @@ -1,6 +1,6 @@ from mitmproxy import exceptions from mitmproxy.proxy import protocol -from netlib import socks +from mitmproxy.net import socks class Socks5Proxy(protocol.Layer, protocol.ServerConnectionMixin): diff --git a/mitmproxy/proxy/protocol/http.py b/mitmproxy/proxy/protocol/http.py index 6bc06502..a47fb455 100644 --- a/mitmproxy/proxy/protocol/http.py +++ b/mitmproxy/proxy/protocol/http.py @@ -6,9 +6,9 @@ from mitmproxy import http from mitmproxy import flow from mitmproxy.proxy.protocol import base from mitmproxy.proxy.protocol import websockets as pwebsockets -import netlib.http -from netlib import tcp -from netlib import websockets +import mitmproxy.net.http +from mitmproxy.net import tcp +from mitmproxy.net import websockets class _HttpTransmissionLayer(base.Layer): @@ -422,13 +422,13 @@ class HttpLayer(base.Layer): self.send_response(http.make_error_response( 401, "Authentication Required", - netlib.http.Headers(**self.config.authenticator.auth_challenge_headers()) + mitmproxy.net.http.Headers(**self.config.authenticator.auth_challenge_headers()) )) else: 
self.send_response(http.make_error_response( 407, "Proxy Authentication Required", - netlib.http.Headers(**self.config.authenticator.auth_challenge_headers()) + mitmproxy.net.http.Headers(**self.config.authenticator.auth_challenge_headers()) )) return False return True diff --git a/mitmproxy/proxy/protocol/http1.py b/mitmproxy/proxy/protocol/http1.py index 968d3249..713c48a7 100644 --- a/mitmproxy/proxy/protocol/http1.py +++ b/mitmproxy/proxy/protocol/http1.py @@ -1,6 +1,6 @@ from mitmproxy import http from mitmproxy.proxy.protocol import http as httpbase -from netlib.http import http1 +from mitmproxy.net.http import http1 class Http1Layer(httpbase._HttpTransmissionLayer): diff --git a/mitmproxy/proxy/protocol/http2.py b/mitmproxy/proxy/protocol/http2.py index d0b0720d..f635e710 100644 --- a/mitmproxy/proxy/protocol/http2.py +++ b/mitmproxy/proxy/protocol/http2.py @@ -12,10 +12,10 @@ from mitmproxy import exceptions from mitmproxy import http from mitmproxy.proxy.protocol import base from mitmproxy.proxy.protocol import http as httpbase -import netlib.http -from netlib import tcp +import mitmproxy.net.http +from mitmproxy.net import tcp from mitmproxy.types import basethread -from netlib.http import http2 +from mitmproxy.net.http import http2 class SafeH2Connection(connection.H2Connection): @@ -148,7 +148,7 @@ class Http2Layer(base.Layer): return True def _handle_request_received(self, eid, event, h2_connection): - headers = netlib.http.Headers([[k, v] for k, v in event.headers]) + headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers]) self.streams[eid] = Http2SingleStreamLayer(self, h2_connection, eid, headers) self.streams[eid].timestamp_start = time.time() self.streams[eid].no_body = (event.stream_ended is not None) @@ -162,7 +162,7 @@ class Http2Layer(base.Layer): return True def _handle_response_received(self, eid, event): - headers = netlib.http.Headers([[k, v] for k, v in event.headers]) + headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers]) self.streams[eid].queued_data_length = 0 self.streams[eid].timestamp_start = time.time() self.streams[eid].response_headers = headers @@ -239,7 +239,7 @@ class Http2Layer(base.Layer): self.client_conn.h2.push_stream(parent_eid, event.pushed_stream_id, event.headers) self.client_conn.send(self.client_conn.h2.data_to_send()) - headers = netlib.http.Headers([[k, v] for k, v in event.headers]) + headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers]) self.streams[event.pushed_stream_id] = Http2SingleStreamLayer(self, h2_connection, event.pushed_stream_id, headers) self.streams[event.pushed_stream_id].timestamp_start = time.time() self.streams[event.pushed_stream_id].pushed = True diff --git a/mitmproxy/proxy/protocol/http_replay.py b/mitmproxy/proxy/protocol/http_replay.py index 7efb0782..c37badd3 100644 --- a/mitmproxy/proxy/protocol/http_replay.py +++ b/mitmproxy/proxy/protocol/http_replay.py @@ -6,7 +6,7 @@ from mitmproxy import exceptions from mitmproxy import http from mitmproxy import flow from mitmproxy import connections -from netlib.http import http1 +from mitmproxy.net.http import http1 from mitmproxy.types import basethread diff --git a/mitmproxy/proxy/protocol/rawtcp.py b/mitmproxy/proxy/protocol/rawtcp.py index 8230c50b..0ec50594 100644 --- a/mitmproxy/proxy/protocol/rawtcp.py +++ b/mitmproxy/proxy/protocol/rawtcp.py @@ -2,7 +2,7 @@ import socket from OpenSSL import SSL -import netlib.tcp +import mitmproxy.net.tcp from mitmproxy import tcp from mitmproxy import flow from mitmproxy 
import exceptions @@ -31,7 +31,7 @@ class RawTCPLayer(base.Layer): try: while not self.channel.should_exit.is_set(): - r = netlib.tcp.ssl_read_select(conns, 10) + r = mitmproxy.net.tcp.ssl_read_select(conns, 10) for conn in r: dst = server if conn == client else client diff --git a/mitmproxy/proxy/protocol/tls.py b/mitmproxy/proxy/protocol/tls.py index b106aa0c..8a344faf 100644 --- a/mitmproxy/proxy/protocol/tls.py +++ b/mitmproxy/proxy/protocol/tls.py @@ -6,7 +6,7 @@ import construct from mitmproxy import exceptions from mitmproxy.contrib.tls import _constructs from mitmproxy.proxy.protocol import base -from netlib import check +from mitmproxy.net import check # taken from https://testssl.sh/openssl-rfc.mappping.html diff --git a/mitmproxy/proxy/protocol/websockets.py b/mitmproxy/proxy/protocol/websockets.py index f84a1dc5..ca1d05cb 100644 --- a/mitmproxy/proxy/protocol/websockets.py +++ b/mitmproxy/proxy/protocol/websockets.py @@ -4,8 +4,8 @@ from OpenSSL import SSL from mitmproxy import exceptions from mitmproxy.proxy.protocol import base from mitmproxy.utils import strutils -from netlib import tcp -from netlib import websockets +from mitmproxy.net import tcp +from mitmproxy.net import websockets class WebSocketsLayer(base.Layer): diff --git a/mitmproxy/proxy/server.py b/mitmproxy/proxy/server.py index 8472660a..232d7038 100644 --- a/mitmproxy/proxy/server.py +++ b/mitmproxy/proxy/server.py @@ -8,8 +8,8 @@ from mitmproxy import http from mitmproxy import log from mitmproxy.proxy import modes from mitmproxy.proxy import root_context -from netlib import tcp -from netlib.http import http1 +from mitmproxy.net import tcp +from mitmproxy.net.http import http1 class DummyServer: diff --git a/mitmproxy/test/tutils.py b/mitmproxy/test/tutils.py index fc7c0eb9..e925b282 100644 --- a/mitmproxy/test/tutils.py +++ b/mitmproxy/test/tutils.py @@ -7,8 +7,8 @@ from contextlib import contextmanager import sys from mitmproxy.utils import data -from netlib import tcp -from netlib import http +from mitmproxy.net import tcp +from mitmproxy.net import http def treader(bytes): @@ -89,13 +89,13 @@ class RaisesContext: return True -test_data = data.Data(__name__).push("../../test/netlib") +test_data = data.Data(__name__).push("../../test/mitmproxy/net") def treq(**kwargs): """ Returns: - netlib.http.Request + mitmproxy.net.http.Request """ default = dict( first_line_format="relative", @@ -115,7 +115,7 @@ def treq(**kwargs): def tresp(**kwargs): """ Returns: - netlib.http.Response + mitmproxy.net.http.Response """ default = dict( http_version=b"HTTP/1.1", diff --git a/mitmproxy/tools/cmdline.py b/mitmproxy/tools/cmdline.py index b83694ef..2f9ea15c 100644 --- a/mitmproxy/tools/cmdline.py +++ b/mitmproxy/tools/cmdline.py @@ -6,7 +6,7 @@ from mitmproxy import flowfilter from mitmproxy import options from mitmproxy import platform from mitmproxy.utils import human -from netlib import tcp +from mitmproxy.net import tcp from mitmproxy import version diff --git a/mitmproxy/tools/console/common.py b/mitmproxy/tools/console/common.py index 53411d17..90dce5ae 100644 --- a/mitmproxy/tools/console/common.py +++ b/mitmproxy/tools/console/common.py @@ -6,7 +6,7 @@ import os import urwid import urwid.util -import netlib +import mitmproxy.net from mitmproxy.utils import lrucache from mitmproxy.tools.console import signals from mitmproxy import export @@ -226,7 +226,7 @@ def format_flow_data(key, scope, flow): if request.content is None: return None, "Request content is missing" if key == "h": - data += 
netlib.http.http1.assemble_request(request) + data += mitmproxy.net.http.http1.assemble_request(request) elif key == "c": data += request.get_content(strict=False) else: @@ -240,7 +240,7 @@ def format_flow_data(key, scope, flow): if response.content is None: return None, "Response content is missing" if key == "h": - data += netlib.http.http1.assemble_response(response) + data += mitmproxy.net.http.http1.assemble_response(response) elif key == "c": data += response.get_content(strict=False) else: diff --git a/mitmproxy/tools/console/flowlist.py b/mitmproxy/tools/console/flowlist.py index 31624229..d33348a0 100644 --- a/mitmproxy/tools/console/flowlist.py +++ b/mitmproxy/tools/console/flowlist.py @@ -1,6 +1,6 @@ import urwid -import netlib.http.url +import mitmproxy.net.http.url from mitmproxy import exceptions from mitmproxy.tools.console import common from mitmproxy.tools.console import signals @@ -325,7 +325,7 @@ class FlowListBox(urwid.ListBox): ) def new_request(self, url, method): - parts = netlib.http.url.parse(str(url)) + parts = mitmproxy.net.http.url.parse(str(url)) if not parts: signals.status_message.send(message="Invalid Url") return diff --git a/mitmproxy/tools/console/flowview.py b/mitmproxy/tools/console/flowview.py index afebf44e..4e886ee4 100644 --- a/mitmproxy/tools/console/flowview.py +++ b/mitmproxy/tools/console/flowview.py @@ -16,8 +16,8 @@ from mitmproxy.tools.console import searchable from mitmproxy.tools.console import signals from mitmproxy.tools.console import tabs from mitmproxy import export -from netlib.http import Headers -from netlib.http import status_codes +from mitmproxy.net.http import Headers +from mitmproxy.net.http import status_codes class SearchError(Exception): diff --git a/mitmproxy/tools/console/grideditor/col_subgrid.py b/mitmproxy/tools/console/grideditor/col_subgrid.py index 3147e63d..4db37462 100644 --- a/mitmproxy/tools/console/grideditor/col_subgrid.py +++ b/mitmproxy/tools/console/grideditor/col_subgrid.py @@ -1,7 +1,7 @@ import urwid from mitmproxy.tools.console.grideditor import base from mitmproxy.tools.console import signals -from netlib.http import cookies +from mitmproxy.net.http import cookies class Column(base.Column): diff --git a/mitmproxy/tools/console/grideditor/editors.py b/mitmproxy/tools/console/grideditor/editors.py index 64361af7..5e3f3d42 100644 --- a/mitmproxy/tools/console/grideditor/editors.py +++ b/mitmproxy/tools/console/grideditor/editors.py @@ -9,7 +9,7 @@ from mitmproxy.tools.console.grideditor import col_bytes from mitmproxy.tools.console.grideditor import col_text from mitmproxy.tools.console.grideditor import col_subgrid from mitmproxy.tools.console import signals -from netlib.http import user_agents +from mitmproxy.net.http import user_agents class QueryEditor(base.GridEditor): diff --git a/mitmproxy/tools/console/master.py b/mitmproxy/tools/console/master.py index 7ff0026e..af43c53b 100644 --- a/mitmproxy/tools/console/master.py +++ b/mitmproxy/tools/console/master.py @@ -37,7 +37,7 @@ from mitmproxy.tools.console import window from mitmproxy.flowfilter import FMarked from mitmproxy.utils import strutils -from netlib import tcp +from mitmproxy.net import tcp EVENTLOG_SIZE = 500 diff --git a/mitmproxy/tools/console/statusbar.py b/mitmproxy/tools/console/statusbar.py index ac41b213..74382677 100644 --- a/mitmproxy/tools/console/statusbar.py +++ b/mitmproxy/tools/console/statusbar.py @@ -2,7 +2,7 @@ import os.path import urwid -import netlib.http.url +import mitmproxy.net.http.url from mitmproxy.tools.console 
import common from mitmproxy.tools.console import pathedit from mitmproxy.tools.console import signals @@ -208,7 +208,7 @@ class StatusBar(urwid.WidgetWrap): if self.master.options.mode in ["reverse", "upstream"]: dst = self.master.server.config.upstream_server - r.append("[dest:%s]" % netlib.http.url.unparse( + r.append("[dest:%s]" % mitmproxy.net.http.url.unparse( dst.scheme, dst.address.host, dst.address.port diff --git a/mitmproxy/tools/dump.py b/mitmproxy/tools/dump.py index 47f69303..e92482f3 100644 --- a/mitmproxy/tools/dump.py +++ b/mitmproxy/tools/dump.py @@ -8,7 +8,7 @@ from mitmproxy import io from mitmproxy import options from mitmproxy import master from mitmproxy.addons import dumper, termlog -from netlib import tcp +from mitmproxy.net import tcp class DumpError(Exception): diff --git a/mitmproxy/tools/web/master.py b/mitmproxy/tools/web/master.py index 2bca4555..619582f3 100644 --- a/mitmproxy/tools/web/master.py +++ b/mitmproxy/tools/web/master.py @@ -13,7 +13,7 @@ from mitmproxy.addons import state from mitmproxy import options from mitmproxy import master from mitmproxy.tools.web import app -from netlib.http import authentication +from mitmproxy.net.http import authentication class Stop(Exception): diff --git a/mitmproxy/utils/debug.py b/mitmproxy/utils/debug.py index 147fe4b1..ac8fedd7 100644 --- a/mitmproxy/utils/debug.py +++ b/mitmproxy/utils/debug.py @@ -84,7 +84,7 @@ def dump_info(signal=None, frame=None, file=sys.stdout, testing=False): # pragm d = {} for i in gc.get_objects(): t = str(type(i)) - if "mitmproxy" in t or "netlib" in t: + if "mitmproxy" in t: d[t] = d.setdefault(t, 0) + 1 itms = list(d.items()) itms.sort(key=lambda x: x[1]) diff --git a/mitmproxy/utils/version_check.py b/mitmproxy/utils/version_check.py index 547c031c..4cf2b9e6 100644 --- a/mitmproxy/utils/version_check.py +++ b/mitmproxy/utils/version_check.py @@ -1,7 +1,6 @@ """ -Having installed a wrong version of pyOpenSSL or netlib is unfortunately a -very common source of error. Check before every start that both versions -are somewhat okay. +Having installed a wrong version of pyOpenSSL is unfortunately a very common +source of error. Check before every start that both versions are somewhat okay. """ import sys import inspect diff --git a/netlib/__init__.py b/netlib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/netlib/check.py b/netlib/check.py deleted file mode 100644 index 7b007cb5..00000000 --- a/netlib/check.py +++ /dev/null @@ -1,22 +0,0 @@ -import re - -_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE) - - -def is_valid_host(host: bytes) -> bool: - """ - Checks if a hostname is valid.
- """ - try: - host.decode("idna") - except ValueError: - return False - if len(host) > 255: - return False - if host and host[-1:] == b".": - host = host[:-1] - return all(_label_valid.match(x) for x in host.split(b".")) - - -def is_valid_port(port): - return 0 <= port <= 65535 diff --git a/netlib/http/__init__.py b/netlib/http/__init__.py deleted file mode 100644 index 315f61ac..00000000 --- a/netlib/http/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from netlib.http.request import Request -from netlib.http.response import Response -from netlib.http.message import Message -from netlib.http.headers import Headers, parse_content_type -from netlib.http.message import decoded -from netlib.http import http1, http2, status_codes, multipart - -__all__ = [ - "Request", - "Response", - "Message", - "Headers", "parse_content_type", - "decoded", - "http1", "http2", "status_codes", "multipart", -] diff --git a/netlib/http/authentication.py b/netlib/http/authentication.py deleted file mode 100644 index a65279e4..00000000 --- a/netlib/http/authentication.py +++ /dev/null @@ -1,176 +0,0 @@ -import argparse -import binascii - - -def parse_http_basic_auth(s): - words = s.split() - if len(words) != 2: - return None - scheme = words[0] - try: - user = binascii.a2b_base64(words[1]).decode("utf8", "replace") - except binascii.Error: - return None - parts = user.split(':') - if len(parts) != 2: - return None - return scheme, parts[0], parts[1] - - -def assemble_http_basic_auth(scheme, username, password): - v = binascii.b2a_base64((username + ":" + password).encode("utf8")).decode("ascii") - return scheme + " " + v - - -class NullProxyAuth: - - """ - No proxy auth at all (returns empty challange headers) - """ - - def __init__(self, password_manager): - self.password_manager = password_manager - - def clean(self, headers_): - """ - Clean up authentication headers, so they're not passed upstream. - """ - - def authenticate(self, headers_): - """ - Tests that the user is allowed to use the proxy - """ - return True - - def auth_challenge_headers(self): - """ - Returns a dictionary containing the headers require to challenge the user - """ - return {} - - -class BasicAuth(NullProxyAuth): - CHALLENGE_HEADER = None - AUTH_HEADER = None - - def __init__(self, password_manager, realm): - NullProxyAuth.__init__(self, password_manager) - self.realm = realm - - def clean(self, headers): - del headers[self.AUTH_HEADER] - - def authenticate(self, headers): - auth_value = headers.get(self.AUTH_HEADER) - if not auth_value: - return False - parts = parse_http_basic_auth(auth_value) - if not parts: - return False - scheme, username, password = parts - if scheme.lower() != 'basic': - return False - if not self.password_manager.test(username, password): - return False - self.username = username - return True - - def auth_challenge_headers(self): - return {self.CHALLENGE_HEADER: 'Basic realm="%s"' % self.realm} - - -class BasicWebsiteAuth(BasicAuth): - CHALLENGE_HEADER = 'WWW-Authenticate' - AUTH_HEADER = 'Authorization' - - -class BasicProxyAuth(BasicAuth): - CHALLENGE_HEADER = 'Proxy-Authenticate' - AUTH_HEADER = 'Proxy-Authorization' - - -class PassMan: - - def test(self, username_, password_token_): - return False - - -class PassManNonAnon(PassMan): - - """ - Ensure the user specifies a username, accept any password. 
- """ - - def test(self, username, password_token_): - if username: - return True - return False - - -class PassManHtpasswd(PassMan): - - """ - Read usernames and passwords from an htpasswd file - """ - - def __init__(self, path): - """ - Raises ValueError if htpasswd file is invalid. - """ - import passlib.apache - self.htpasswd = passlib.apache.HtpasswdFile(path) - - def test(self, username, password_token): - return bool(self.htpasswd.check_password(username, password_token)) - - -class PassManSingleUser(PassMan): - - def __init__(self, username, password): - self.username, self.password = username, password - - def test(self, username, password_token): - return self.username == username and self.password == password_token - - -class AuthAction(argparse.Action): - - """ - Helper class to allow seamless integration int argparse. Example usage: - parser.add_argument( - "--nonanonymous", - action=NonanonymousAuthAction, nargs=0, - help="Allow access to any user long as a credentials are specified." - ) - """ - - def __call__(self, parser, namespace, values, option_string=None): - passman = self.getPasswordManager(values) - authenticator = BasicProxyAuth(passman, "mitmproxy") - setattr(namespace, self.dest, authenticator) - - def getPasswordManager(self, s): # pragma: no cover - raise NotImplementedError() - - -class SingleuserAuthAction(AuthAction): - - def getPasswordManager(self, s): - if len(s.split(':')) != 2: - raise argparse.ArgumentTypeError( - "Invalid single-user specification. Please use the format username:password" - ) - username, password = s.split(':') - return PassManSingleUser(username, password) - - -class NonanonymousAuthAction(AuthAction): - - def getPasswordManager(self, s): - return PassManNonAnon() - - -class HtpasswdAuthAction(AuthAction): - - def getPasswordManager(self, s): - return PassManHtpasswd(s) diff --git a/netlib/http/cookies.py b/netlib/http/cookies.py deleted file mode 100644 index 9f32fa5e..00000000 --- a/netlib/http/cookies.py +++ /dev/null @@ -1,384 +0,0 @@ -import collections -import email.utils -import re -import time - -from mitmproxy.types import multidict - -""" -A flexible module for cookie parsing and manipulation. - -This module differs from usual standards-compliant cookie modules in a number -of ways. We try to be as permissive as possible, and to retain even mal-formed -information. Duplicate cookies are preserved in parsing, and can be set in -formatting. We do attempt to escape and quote values where needed, but will not -reject data that violate the specs. - -Parsing accepts the formats in RFC6265 and partially RFC2109 and RFC2965. We -also parse the comma-separated variant of Set-Cookie that allows multiple -cookies to be set in a single header. Serialization follows RFC6265. - - http://tools.ietf.org/html/rfc6265 - http://tools.ietf.org/html/rfc2109 - http://tools.ietf.org/html/rfc2965 -""" - -_cookie_params = set(( - 'expires', 'path', 'comment', 'max-age', - 'secure', 'httponly', 'version', -)) - -ESCAPE = re.compile(r"([\"\\])") - - -class CookieAttrs(multidict.ImmutableMultiDict): - @staticmethod - def _kconv(key): - return key.lower() - - @staticmethod - def _reduce_values(values): - # See the StickyCookieTest for a weird cookie that only makes sense - # if we take the last part. - return values[-1] - -SetCookie = collections.namedtuple("SetCookie", ["value", "attrs"]) - - -def _read_until(s, start, term): - """ - Read until one of the characters in term is reached. 
- """ - if start == len(s): - return "", start + 1 - for i in range(start, len(s)): - if s[i] in term: - return s[start:i], i - return s[start:i + 1], i + 1 - - -def _read_quoted_string(s, start): - """ - start: offset to the first quote of the string to be read - - A sort of loose super-set of the various quoted string specifications. - - RFC6265 disallows backslashes or double quotes within quoted strings. - Prior RFCs use backslashes to escape. This leaves us free to apply - backslash escaping by default and be compatible with everything. - """ - escaping = False - ret = [] - # Skip the first quote - i = start # initialize in case the loop doesn't run. - for i in range(start + 1, len(s)): - if escaping: - ret.append(s[i]) - escaping = False - elif s[i] == '"': - break - elif s[i] == "\\": - escaping = True - else: - ret.append(s[i]) - return "".join(ret), i + 1 - - -def _read_key(s, start, delims=";="): - """ - Read a key - the LHS of a token/value pair in a cookie. - """ - return _read_until(s, start, delims) - - -def _read_value(s, start, delims): - """ - Reads a value - the RHS of a token/value pair in a cookie. - """ - if start >= len(s): - return "", start - elif s[start] == '"': - return _read_quoted_string(s, start) - else: - return _read_until(s, start, delims) - - -def _read_cookie_pairs(s, off=0): - """ - Read pairs of lhs=rhs values from Cookie headers. - - off: start offset - """ - pairs = [] - - while True: - lhs, off = _read_key(s, off) - lhs = lhs.lstrip() - - if lhs: - rhs = None - if off < len(s) and s[off] == "=": - rhs, off = _read_value(s, off + 1, ";") - - pairs.append([lhs, rhs]) - - off += 1 - - if not off < len(s): - break - - return pairs, off - - -def _read_set_cookie_pairs(s, off=0): - """ - Read pairs of lhs=rhs values from SetCookie headers while handling multiple cookies. - - off: start offset - specials: attributes that are treated specially - """ - cookies = [] - pairs = [] - - while True: - lhs, off = _read_key(s, off, ";=,") - lhs = lhs.lstrip() - - if lhs: - rhs = None - if off < len(s) and s[off] == "=": - rhs, off = _read_value(s, off + 1, ";,") - - # Special handliing of attributes - if lhs.lower() == "expires": - # 'expires' values can contain commas in them so they need to - # be handled separately. - - # We actually bank on the fact that the expires value WILL - # contain a comma. Things will fail, if they don't. - - # '3' is just a heuristic we use to determine whether we've - # only read a part of the expires value and we should read more. - if len(rhs) <= 3: - trail, off = _read_value(s, off + 1, ";,") - rhs = rhs + "," + trail - - pairs.append([lhs, rhs]) - - # comma marks the beginning of a new cookie - if off < len(s) and s[off] == ",": - cookies.append(pairs) - pairs = [] - - off += 1 - - if not off < len(s): - break - - if pairs or not cookies: - cookies.append(pairs) - - return cookies, off - - -def _has_special(s): - for i in s: - if i in '",;\\': - return True - o = ord(i) - if o < 0x21 or o > 0x7e: - return True - return False - - -def _format_pairs(pairs, specials=(), sep="; "): - """ - specials: A lower-cased list of keys that will not be quoted. 
- """ - vals = [] - for k, v in pairs: - if v is None: - vals.append(k) - else: - if k.lower() not in specials and _has_special(v): - v = ESCAPE.sub(r"\\\1", v) - v = '"%s"' % v - vals.append("%s=%s" % (k, v)) - return sep.join(vals) - - -def _format_set_cookie_pairs(lst): - return _format_pairs( - lst, - specials=("expires", "path") - ) - - -def parse_cookie_header(line): - """ - Parse a Cookie header value. - Returns a list of (lhs, rhs) tuples. - """ - pairs, off_ = _read_cookie_pairs(line) - return pairs - - -def parse_cookie_headers(cookie_headers): - cookie_list = [] - for header in cookie_headers: - cookie_list.extend(parse_cookie_header(header)) - return cookie_list - - -def format_cookie_header(lst): - """ - Formats a Cookie header value. - """ - return _format_pairs(lst) - - -def parse_set_cookie_header(line): - """ - Parse a Set-Cookie header value - - Returns a list of (name, value, attrs) tuples, where attrs is a - CookieAttrs dict of attributes. No attempt is made to parse attribute - values - they are treated purely as strings. - """ - cookie_pairs, off = _read_set_cookie_pairs(line) - cookies = [ - (pairs[0][0], pairs[0][1], CookieAttrs(tuple(x) for x in pairs[1:])) - for pairs in cookie_pairs if pairs - ] - return cookies - - -def parse_set_cookie_headers(headers): - rv = [] - for header in headers: - cookies = parse_set_cookie_header(header) - if cookies: - for name, value, attrs in cookies: - rv.append((name, SetCookie(value, attrs))) - return rv - - -def format_set_cookie_header(set_cookies): - """ - Formats a Set-Cookie header value. - """ - - rv = [] - - for set_cookie in set_cookies: - name, value, attrs = set_cookie - - pairs = [(name, value)] - pairs.extend( - attrs.fields if hasattr(attrs, "fields") else attrs - ) - - rv.append(_format_set_cookie_pairs(pairs)) - - return ", ".join(rv) - - -def refresh_set_cookie_header(c, delta): - """ - Args: - c: A Set-Cookie string - delta: Time delta in seconds - Returns: - A refreshed Set-Cookie string - """ - - name, value, attrs = parse_set_cookie_header(c)[0] - if not name or not value: - raise ValueError("Invalid Cookie") - - if "expires" in attrs: - e = email.utils.parsedate_tz(attrs["expires"]) - if e: - f = email.utils.mktime_tz(e) + delta - attrs = attrs.with_set_all("expires", [email.utils.formatdate(f)]) - else: - # This can happen when the expires tag is invalid. - # reddit.com sends a an expires tag like this: "Thu, 31 Dec - # 2037 23:59:59 GMT", which is valid RFC 1123, but not - # strictly correct according to the cookie spec. Browsers - # appear to parse this tolerantly - maybe we should too. - # For now, we just ignore this. - attrs = attrs.with_delitem("expires") - - rv = format_set_cookie_header([(name, value, attrs)]) - if not rv: - raise ValueError("Invalid Cookie") - return rv - - -def get_expiration_ts(cookie_attrs): - """ - Determines the time when the cookie will be expired. - - Considering both 'expires' and 'max-age' parameters. - - Returns: timestamp of when the cookie will expire. - None, if no expiration time is set. - """ - if 'expires' in cookie_attrs: - e = email.utils.parsedate_tz(cookie_attrs["expires"]) - if e: - return email.utils.mktime_tz(e) - - elif 'max-age' in cookie_attrs: - try: - max_age = int(cookie_attrs['Max-Age']) - except ValueError: - pass - else: - now_ts = time.time() - return now_ts + max_age - - return None - - -def is_expired(cookie_attrs): - """ - Determines whether a cookie has expired. 
- - Returns: boolean - """ - - exp_ts = get_expiration_ts(cookie_attrs) - now_ts = time.time() - - # If no expiration information was provided with the cookie - if exp_ts is None: - return False - else: - return exp_ts <= now_ts - - -def group_cookies(pairs): - """ - Converts a list of pairs to a (name, value, attrs) for each cookie. - """ - - if not pairs: - return [] - - cookie_list = [] - - # First pair is always a new cookie - name, value = pairs[0] - attrs = [] - - for k, v in pairs[1:]: - if k.lower() in _cookie_params: - attrs.append((k, v)) - else: - cookie_list.append((name, value, CookieAttrs(attrs))) - name, value, attrs = k, v, [] - - cookie_list.append((name, value, CookieAttrs(attrs))) - return cookie_list diff --git a/netlib/http/encoding.py b/netlib/http/encoding.py deleted file mode 100644 index e123a033..00000000 --- a/netlib/http/encoding.py +++ /dev/null @@ -1,175 +0,0 @@ -""" -Utility functions for decoding response bodies. -""" - -import codecs -import collections -from io import BytesIO - -import gzip -import zlib -import brotli - -from typing import Union - - -# We have a shared single-element cache for encoding and decoding. -# This is quite useful in practice, e.g. -# flow.request.content = flow.request.content.replace(b"foo", b"bar") -# does not require an .encode() call if content does not contain b"foo" -CachedDecode = collections.namedtuple("CachedDecode", "encoded encoding errors decoded") -_cache = CachedDecode(None, None, None, None) - - -def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: - """ - Decode the given input object - - Returns: - The decoded value - - Raises: - ValueError, if decoding fails. - """ - if len(encoded) == 0: - return encoded - - global _cache - cached = ( - isinstance(encoded, bytes) and - _cache.encoded == encoded and - _cache.encoding == encoding and - _cache.errors == errors - ) - if cached: - return _cache.decoded - try: - try: - decoded = custom_decode[encoding](encoded) - except KeyError: - decoded = codecs.decode(encoded, encoding, errors) - if encoding in ("gzip", "deflate", "br"): - _cache = CachedDecode(encoded, encoding, errors, decoded) - return decoded - except TypeError: - raise - except Exception as e: - raise ValueError("{} when decoding {} with {}: {}".format( - type(e).__name__, - repr(encoded)[:10], - repr(encoding), - repr(e), - )) - - -def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> Union[str, bytes]: - """ - Encode the given input object - - Returns: - The encoded value - - Raises: - ValueError, if encoding fails. - """ - if len(decoded) == 0: - return decoded - - global _cache - cached = ( - isinstance(decoded, bytes) and - _cache.decoded == decoded and - _cache.encoding == encoding and - _cache.errors == errors - ) - if cached: - return _cache.encoded - try: - try: - value = decoded - if isinstance(value, str): - value = decoded.encode() - encoded = custom_encode[encoding](value) - except KeyError: - encoded = codecs.encode(decoded, encoding, errors) - if encoding in ("gzip", "deflate", "br"): - _cache = CachedDecode(encoded, encoding, errors, decoded) - return encoded - except TypeError: - raise - except Exception as e: - raise ValueError("{} when encoding {} with {}: {}".format( - type(e).__name__, - repr(decoded)[:10], - repr(encoding), - repr(e), - )) - - -def identity(content): - """ - Returns content unchanged. Identity is the default value of - Accept-Encoding headers. 
- """ - return content - - -def decode_gzip(content): - gfile = gzip.GzipFile(fileobj=BytesIO(content)) - return gfile.read() - - -def encode_gzip(content): - s = BytesIO() - gf = gzip.GzipFile(fileobj=s, mode='wb') - gf.write(content) - gf.close() - return s.getvalue() - - -def decode_brotli(content): - return brotli.decompress(content) - - -def encode_brotli(content): - return brotli.compress(content) - - -def decode_deflate(content): - """ - Returns decompressed data for DEFLATE. Some servers may respond with - compressed data without a zlib header or checksum. An undocumented - feature of zlib permits the lenient decompression of data missing both - values. - - http://bugs.python.org/issue5784 - """ - try: - return zlib.decompress(content) - except zlib.error: - return zlib.decompress(content, -15) - - -def encode_deflate(content): - """ - Returns compressed content, always including zlib header and checksum. - """ - return zlib.compress(content) - - -custom_decode = { - "none": identity, - "identity": identity, - "gzip": decode_gzip, - "deflate": decode_deflate, - "br": decode_brotli, -} -custom_encode = { - "none": identity, - "identity": identity, - "gzip": encode_gzip, - "deflate": encode_deflate, - "br": encode_brotli, -} - -__all__ = ["encode", "decode"] diff --git a/netlib/http/headers.py b/netlib/http/headers.py deleted file mode 100644 index 8fc0cd43..00000000 --- a/netlib/http/headers.py +++ /dev/null @@ -1,221 +0,0 @@ -import re - -import collections -from mitmproxy.types import multidict -from mitmproxy.utils import strutils - -# See also: http://lucumr.pocoo.org/2013/7/2/the-updated-guide-to-unicode/ - - -# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded. -def _native(x): - return x.decode("utf-8", "surrogateescape") - - -def _always_bytes(x): - return strutils.always_bytes(x, "utf-8", "surrogateescape") - - -class Headers(multidict.MultiDict): - """ - Header class which allows both convenient access to individual headers as well as - direct access to the underlying raw data. Provides a full dictionary interface. - - Example: - - .. code-block:: python - - # Create headers with keyword arguments - >>> h = Headers(host="example.com", content_type="application/xml") - - # Headers mostly behave like a normal dict. - >>> h["Host"] - "example.com" - - # HTTP Headers are case insensitive - >>> h["host"] - "example.com" - - # Headers can also be created from a list of raw (header_name, header_value) byte tuples - >>> h = Headers([ - (b"Host",b"example.com"), - (b"Accept",b"text/html"), - (b"accept",b"application/xml") - ]) - - # Multiple headers are folded into a single header as per RFC7230 - >>> h["Accept"] - "text/html, application/xml" - - # Setting a header removes all existing headers with the same name. - >>> h["Accept"] = "application/text" - >>> h["Accept"] - "application/text" - - # bytes(h) returns a HTTP1 header block. - >>> print(bytes(h)) - Host: example.com - Accept: application/text - - # For full control, the raw header fields can be accessed - >>> h.fields - - Caveats: - For use with the "Set-Cookie" header, see :py:meth:`get_all`. - """ - - def __init__(self, fields=(), **headers): - """ - Args: - fields: (optional) list of ``(name, value)`` header byte tuples, - e.g. ``[(b"Host", b"example.com")]``. All names and values must be bytes. - **headers: Additional headers to set. Will overwrite existing values from `fields`. 
- For convenience, underscores in header names will be transformed to dashes - - this behaviour does not extend to other methods. - If ``**headers`` contains multiple keys that have equal ``.lower()`` s, - the behavior is undefined. - """ - super().__init__(fields) - - for key, value in self.fields: - if not isinstance(key, bytes) or not isinstance(value, bytes): - raise TypeError("Header fields must be bytes.") - - # content_type -> content-type - headers = { - _always_bytes(name).replace(b"_", b"-"): _always_bytes(value) - for name, value in headers.items() - } - self.update(headers) - - @staticmethod - def _reduce_values(values): - # Headers can be folded - return ", ".join(values) - - @staticmethod - def _kconv(key): - # Headers are case-insensitive - return key.lower() - - def __bytes__(self): - if self.fields: - return b"\r\n".join(b": ".join(field) for field in self.fields) + b"\r\n" - else: - return b"" - - def __delitem__(self, key): - key = _always_bytes(key) - super().__delitem__(key) - - def __iter__(self): - for x in super().__iter__(): - yield _native(x) - - def get_all(self, name): - """ - Like :py:meth:`get`, but does not fold multiple headers into a single one. - This is useful for Set-Cookie headers, which do not support folding. - See also: https://tools.ietf.org/html/rfc7230#section-3.2.2 - """ - name = _always_bytes(name) - return [ - _native(x) for x in - super().get_all(name) - ] - - def set_all(self, name, values): - """ - Explicitly set multiple headers for the given key. - See: :py:meth:`get_all` - """ - name = _always_bytes(name) - values = [_always_bytes(x) for x in values] - return super().set_all(name, values) - - def insert(self, index, key, value): - key = _always_bytes(key) - value = _always_bytes(value) - super().insert(index, key, value) - - def items(self, multi=False): - if multi: - return ( - (_native(k), _native(v)) - for k, v in self.fields - ) - else: - return super().items() - - def replace(self, pattern, repl, flags=0, count=0): - """ - Replaces a regular expression pattern with repl in each "name: value" - header line. - - Returns: - The number of replacements made. - """ - if isinstance(pattern, str): - pattern = strutils.escaped_str_to_bytes(pattern) - if isinstance(repl, str): - repl = strutils.escaped_str_to_bytes(repl) - pattern = re.compile(pattern, flags) - replacements = 0 - flag_count = count > 0 - fields = [] - for name, value in self.fields: - line, n = pattern.subn(repl, name + b": " + value, count=count) - try: - name, value = line.split(b": ", 1) - except ValueError: - # We get a ValueError if the replacement removed the ": " - # There's not much we can do about this, so we just keep the header as-is. - pass - else: - replacements += n - if flag_count: - count -= n - if count == 0: - break - fields.append((name, value)) - self.fields = tuple(fields) - return replacements - - -def parse_content_type(c): - """ - A simple parser for content-type values. Returns a (type, subtype, - parameters) tuple, where type and subtype are strings, and parameters - is a dict. If the string could not be parsed, return None. - - E.g. 
the following string: - - text/html; charset=UTF-8 - - Returns: - - ("text", "html", {"charset": "UTF-8"}) - """ - parts = c.split(";", 1) - ts = parts[0].split("/", 1) - if len(ts) != 2: - return None - d = collections.OrderedDict() - if len(parts) == 2: - for i in parts[1].split(";"): - clause = i.split("=", 1) - if len(clause) == 2: - d[clause[0].strip()] = clause[1].strip() - return ts[0].lower(), ts[1].lower(), d - - -def assemble_content_type(type, subtype, parameters): - if not parameters: - return "{}/{}".format(type, subtype) - params = "; ".join( - "{}={}".format(k, v) - for k, v in parameters.items() - ) - return "{}/{}; {}".format( - type, subtype, params - ) diff --git a/netlib/http/http1/__init__.py b/netlib/http/http1/__init__.py deleted file mode 100644 index e4bf01c5..00000000 --- a/netlib/http/http1/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -from .read import ( - read_request, read_request_head, - read_response, read_response_head, - read_body, - connection_close, - expected_http_body_size, -) -from .assemble import ( - assemble_request, assemble_request_head, - assemble_response, assemble_response_head, - assemble_body, -) - - -__all__ = [ - "read_request", "read_request_head", - "read_response", "read_response_head", - "read_body", - "connection_close", - "expected_http_body_size", - "assemble_request", "assemble_request_head", - "assemble_response", "assemble_response_head", - "assemble_body", -] diff --git a/netlib/http/http1/assemble.py b/netlib/http/http1/assemble.py deleted file mode 100644 index e0a91ad8..00000000 --- a/netlib/http/http1/assemble.py +++ /dev/null @@ -1,100 +0,0 @@ -import netlib.http.url -from mitmproxy import exceptions - - -def assemble_request(request): - if request.data.content is None: - raise exceptions.HttpException("Cannot assemble flow with missing content") - head = assemble_request_head(request) - body = b"".join(assemble_body(request.data.headers, [request.data.content])) - return head + body - - -def assemble_request_head(request): - first_line = _assemble_request_line(request.data) - headers = _assemble_request_headers(request.data) - return b"%s\r\n%s\r\n" % (first_line, headers) - - -def assemble_response(response): - if response.data.content is None: - raise exceptions.HttpException("Cannot assemble flow with missing content") - head = assemble_response_head(response) - body = b"".join(assemble_body(response.data.headers, [response.data.content])) - return head + body - - -def assemble_response_head(response): - first_line = _assemble_response_line(response.data) - headers = _assemble_response_headers(response.data) - return b"%s\r\n%s\r\n" % (first_line, headers) - - -def assemble_body(headers, body_chunks): - if "chunked" in headers.get("transfer-encoding", "").lower(): - for chunk in body_chunks: - if chunk: - yield b"%x\r\n%s\r\n" % (len(chunk), chunk) - yield b"0\r\n\r\n" - else: - for chunk in body_chunks: - yield chunk - - -def _assemble_request_line(request_data): - """ - Args: - request_data (netlib.http.request.RequestData) - """ - form = request_data.first_line_format - if form == "relative": - return b"%s %s %s" % ( - request_data.method, - request_data.path, - request_data.http_version - ) - elif form == "authority": - return b"%s %s:%d %s" % ( - request_data.method, - request_data.host, - request_data.port, - request_data.http_version - ) - elif form == "absolute": - return b"%s %s://%s:%d%s %s" % ( - request_data.method, - request_data.scheme, - request_data.host, - request_data.port, - request_data.path, - 
request_data.http_version - ) - else: - raise RuntimeError("Invalid request form") - - -def _assemble_request_headers(request_data): - """ - Args: - request_data (netlib.http.request.RequestData) - """ - headers = request_data.headers.copy() - if "host" not in headers and request_data.scheme and request_data.host and request_data.port: - headers["host"] = netlib.http.url.hostport( - request_data.scheme, - request_data.host, - request_data.port - ) - return bytes(headers) - - -def _assemble_response_line(response_data): - return b"%s %d %s" % ( - response_data.http_version, - response_data.status_code, - response_data.reason, - ) - - -def _assemble_response_headers(response): - return bytes(response.headers) diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py deleted file mode 100644 index e6b22863..00000000 --- a/netlib/http/http1/read.py +++ /dev/null @@ -1,377 +0,0 @@ -import time -import sys -import re - -from netlib.http import request -from netlib.http import response -from netlib.http import headers -from netlib.http import url -from netlib import check -from mitmproxy import exceptions - - -def get_header_tokens(headers, key): - """ - Retrieve all tokens for a header key. A number of different headers - follow a pattern where each header line can containe comma-separated - tokens, and headers can be set multiple times. - """ - if key not in headers: - return [] - tokens = headers[key].split(",") - return [token.strip() for token in tokens] - - -def read_request(rfile, body_size_limit=None): - request = read_request_head(rfile) - expected_body_size = expected_http_body_size(request) - request.data.content = b"".join(read_body(rfile, expected_body_size, limit=body_size_limit)) - request.timestamp_end = time.time() - return request - - -def read_request_head(rfile): - """ - Parse an HTTP request head (request line + headers) from an input stream - - Args: - rfile: The input stream - - Returns: - The HTTP request object (without body) - - Raises: - exceptions.HttpReadDisconnect: No bytes can be read from rfile. - exceptions.HttpSyntaxException: The input is malformed HTTP. - exceptions.HttpException: Any other error occured. - """ - timestamp_start = time.time() - if hasattr(rfile, "reset_timestamps"): - rfile.reset_timestamps() - - form, method, scheme, host, port, path, http_version = _read_request_line(rfile) - headers = _read_headers(rfile) - - if hasattr(rfile, "first_byte_timestamp"): - # more accurate timestamp_start - timestamp_start = rfile.first_byte_timestamp - - return request.Request( - form, method, scheme, host, port, path, http_version, headers, None, timestamp_start - ) - - -def read_response(rfile, request, body_size_limit=None): - response = read_response_head(rfile) - expected_body_size = expected_http_body_size(request, response) - response.data.content = b"".join(read_body(rfile, expected_body_size, body_size_limit)) - response.timestamp_end = time.time() - return response - - -def read_response_head(rfile): - """ - Parse an HTTP response head (response line + headers) from an input stream - - Args: - rfile: The input stream - - Returns: - The HTTP request object (without body) - - Raises: - exceptions.HttpReadDisconnect: No bytes can be read from rfile. - exceptions.HttpSyntaxException: The input is malformed HTTP. - exceptions.HttpException: Any other error occured. 
- """ - - timestamp_start = time.time() - if hasattr(rfile, "reset_timestamps"): - rfile.reset_timestamps() - - http_version, status_code, message = _read_response_line(rfile) - headers = _read_headers(rfile) - - if hasattr(rfile, "first_byte_timestamp"): - # more accurate timestamp_start - timestamp_start = rfile.first_byte_timestamp - - return response.Response(http_version, status_code, message, headers, None, timestamp_start) - - -def read_body(rfile, expected_size, limit=None, max_chunk_size=4096): - """ - Read an HTTP message body - - Args: - rfile: The input stream - expected_size: The expected body size (see :py:meth:`expected_body_size`) - limit: Maximum body size - max_chunk_size: Maximium chunk size that gets yielded - - Returns: - A generator that yields byte chunks of the content. - - Raises: - exceptions.HttpException, if an error occurs - - Caveats: - max_chunk_size is not considered if the transfer encoding is chunked. - """ - if not limit or limit < 0: - limit = sys.maxsize - if not max_chunk_size: - max_chunk_size = limit - - if expected_size is None: - for x in _read_chunked(rfile, limit): - yield x - elif expected_size >= 0: - if limit is not None and expected_size > limit: - raise exceptions.HttpException( - "HTTP Body too large. " - "Limit is {}, content length was advertised as {}".format(limit, expected_size) - ) - bytes_left = expected_size - while bytes_left: - chunk_size = min(bytes_left, max_chunk_size) - content = rfile.read(chunk_size) - if len(content) < chunk_size: - raise exceptions.HttpException("Unexpected EOF") - yield content - bytes_left -= chunk_size - else: - bytes_left = limit - while bytes_left: - chunk_size = min(bytes_left, max_chunk_size) - content = rfile.read(chunk_size) - if not content: - return - yield content - bytes_left -= chunk_size - not_done = rfile.read(1) - if not_done: - raise exceptions.HttpException("HTTP body too large. Limit is {}.".format(limit)) - - -def connection_close(http_version, headers): - """ - Checks the message to see if the client connection should be closed - according to RFC 2616 Section 8.1. - """ - # At first, check if we have an explicit Connection header. - if "connection" in headers: - tokens = get_header_tokens(headers, "connection") - if "close" in tokens: - return True - elif "keep-alive" in tokens: - return False - - # If we don't have a Connection header, HTTP 1.1 connections are assumed to - # be persistent - return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1" # FIXME: Remove one case. - - -def expected_http_body_size(request, response=None): - """ - Returns: - The expected body length: - - a positive integer, if the size is known in advance - - None, if the size in unknown in advance (chunked encoding) - - -1, if all data should be read until end of stream. 
- - Raises: - exceptions.HttpSyntaxException, if the content length header is invalid - """ - # Determine response size according to - # http://tools.ietf.org/html/rfc7230#section-3.3 - if not response: - headers = request.headers - response_code = None - is_request = True - else: - headers = response.headers - response_code = response.status_code - is_request = False - - if is_request: - if headers.get("expect", "").lower() == "100-continue": - return 0 - else: - if request.method.upper() == "HEAD": - return 0 - if 100 <= response_code <= 199: - return 0 - if response_code == 200 and request.method.upper() == "CONNECT": - return 0 - if response_code in (204, 304): - return 0 - - if "chunked" in headers.get("transfer-encoding", "").lower(): - return None - if "content-length" in headers: - try: - size = int(headers["content-length"]) - if size < 0: - raise ValueError() - return size - except ValueError: - raise exceptions.HttpSyntaxException("Unparseable Content Length") - if is_request: - return 0 - return -1 - - -def _get_first_line(rfile): - try: - line = rfile.readline() - if line == b"\r\n" or line == b"\n": - # Possible leftover from previous message - line = rfile.readline() - except exceptions.TcpDisconnect: - raise exceptions.HttpReadDisconnect("Remote disconnected") - if not line: - raise exceptions.HttpReadDisconnect("Remote disconnected") - return line.strip() - - -def _read_request_line(rfile): - try: - line = _get_first_line(rfile) - except exceptions.HttpReadDisconnect: - # We want to provide a better error message. - raise exceptions.HttpReadDisconnect("Client disconnected") - - try: - method, path, http_version = line.split() - - if path == b"*" or path.startswith(b"/"): - form = "relative" - scheme, host, port = None, None, None - elif method == b"CONNECT": - form = "authority" - host, port = _parse_authority_form(path) - scheme, path = None, None - else: - form = "absolute" - scheme, host, port, path = url.parse(path) - - _check_http_version(http_version) - except ValueError: - raise exceptions.HttpSyntaxException("Bad HTTP request line: {}".format(line)) - - return form, method, scheme, host, port, path, http_version - - -def _parse_authority_form(hostport): - """ - Returns (host, port) if hostport is a valid authority-form host specification. - http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1 - - Raises: - ValueError, if the input is malformed - """ - try: - host, port = hostport.split(b":") - port = int(port) - if not check.is_valid_host(host) or not check.is_valid_port(port): - raise ValueError() - except ValueError: - raise exceptions.HttpSyntaxException("Invalid host specification: {}".format(hostport)) - - return host, port - - -def _read_response_line(rfile): - try: - line = _get_first_line(rfile) - except exceptions.HttpReadDisconnect: - # We want to provide a better error message. 
- raise exceptions.HttpReadDisconnect("Server disconnected") - - try: - parts = line.split(None, 2) - if len(parts) == 2: # handle missing message gracefully - parts.append(b"") - - http_version, status_code, message = parts - status_code = int(status_code) - _check_http_version(http_version) - - except ValueError: - raise exceptions.HttpSyntaxException("Bad HTTP response line: {}".format(line)) - - return http_version, status_code, message - - -def _check_http_version(http_version): - if not re.match(br"^HTTP/\d\.\d$", http_version): - raise exceptions.HttpSyntaxException("Unknown HTTP version: {}".format(http_version)) - - -def _read_headers(rfile): - """ - Read a set of headers. - Stop once a blank line is reached. - - Returns: - A headers object - - Raises: - exceptions.HttpSyntaxException - """ - ret = [] - while True: - line = rfile.readline() - if not line or line == b"\r\n" or line == b"\n": - break - if line[0] in b" \t": - if not ret: - raise exceptions.HttpSyntaxException("Invalid headers") - # continued header - ret[-1] = (ret[-1][0], ret[-1][1] + b'\r\n ' + line.strip()) - else: - try: - name, value = line.split(b":", 1) - value = value.strip() - if not name: - raise ValueError() - ret.append((name, value)) - except ValueError: - raise exceptions.HttpSyntaxException( - "Invalid header line: %s" % repr(line) - ) - return headers.Headers(ret) - - -def _read_chunked(rfile, limit=sys.maxsize): - """ - Read a HTTP body with chunked transfer encoding. - - Args: - rfile: the input file - limit: A positive integer - """ - total = 0 - while True: - line = rfile.readline(128) - if line == b"": - raise exceptions.HttpException("Connection closed prematurely") - if line != b"\r\n" and line != b"\n": - try: - length = int(line, 16) - except ValueError: - raise exceptions.HttpSyntaxException("Invalid chunked encoding length: {}".format(line)) - total += length - if total > limit: - raise exceptions.HttpException( - "HTTP Body too large. 
Limit is {}, " - "chunked content longer than {}".format(limit, total) - ) - chunk = rfile.read(length) - suffix = rfile.readline(5) - if suffix != b"\r\n": - raise exceptions.HttpSyntaxException("Malformed chunked body") - if length == 0: - return - yield chunk diff --git a/netlib/http/http2/__init__.py b/netlib/http/http2/__init__.py deleted file mode 100644 index 20cc63a0..00000000 --- a/netlib/http/http2/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from netlib.http.http2.framereader import read_raw_frame, parse_frame -from netlib.http.http2.utils import parse_headers - -__all__ = [ - "read_raw_frame", - "parse_frame", - "parse_headers", -] diff --git a/netlib/http/http2/framereader.py b/netlib/http/http2/framereader.py deleted file mode 100644 index 6a164919..00000000 --- a/netlib/http/http2/framereader.py +++ /dev/null @@ -1,25 +0,0 @@ -import codecs - -import hyperframe -from mitmproxy import exceptions - - -def read_raw_frame(rfile): - header = rfile.safe_read(9) - length = int(codecs.encode(header[:3], 'hex_codec'), 16) - - if length == 4740180: - raise exceptions.HttpException("Length field looks more like HTTP/1.1:\n{}".format(rfile.read(-1))) - - body = rfile.safe_read(length) - return [header, body] - - -def parse_frame(header, body=None): - if body is None: - body = header[9:] - header = header[:9] - - frame, length = hyperframe.frame.Frame.parse_frame_header(header) - frame.parse_body(memoryview(body)) - return frame diff --git a/netlib/http/http2/utils.py b/netlib/http/http2/utils.py deleted file mode 100644 index 164bacc8..00000000 --- a/netlib/http/http2/utils.py +++ /dev/null @@ -1,37 +0,0 @@ -from netlib.http import url - - -def parse_headers(headers): - authority = headers.get(':authority', '').encode() - method = headers.get(':method', 'GET').encode() - scheme = headers.get(':scheme', 'https').encode() - path = headers.get(':path', '/').encode() - - headers.pop(":method", None) - headers.pop(":scheme", None) - headers.pop(":path", None) - - host = None - port = None - - if path == b'*' or path.startswith(b"/"): - first_line_format = "relative" - elif method == b'CONNECT': # pragma: no cover - raise NotImplementedError("CONNECT over HTTP/2 is not implemented.") - else: # pragma: no cover - first_line_format = "absolute" - # FIXME: verify if path or :host contains what we need - scheme, host, port, _ = url.parse(path) - - if authority: - host, _, port = authority.partition(b':') - - if not host: - host = b'localhost' - - if not port: - port = 443 if scheme == b'https' else 80 - - port = int(port) - - return first_line_format, method, scheme, host, port, path diff --git a/netlib/http/message.py b/netlib/http/message.py deleted file mode 100644 index 772a124e..00000000 --- a/netlib/http/message.py +++ /dev/null @@ -1,300 +0,0 @@ -import re -import warnings -from typing import Optional - -from mitmproxy.utils import strutils -from netlib.http import encoding -from mitmproxy.types import serializable -from netlib.http import headers - - -# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded. 
-def _native(x): - return x.decode("utf-8", "surrogateescape") - - -def _always_bytes(x): - return strutils.always_bytes(x, "utf-8", "surrogateescape") - - -class MessageData(serializable.Serializable): - def __eq__(self, other): - if isinstance(other, MessageData): - return self.__dict__ == other.__dict__ - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def set_state(self, state): - for k, v in state.items(): - if k == "headers": - v = headers.Headers.from_state(v) - setattr(self, k, v) - - def get_state(self): - state = vars(self).copy() - state["headers"] = state["headers"].get_state() - return state - - @classmethod - def from_state(cls, state): - state["headers"] = headers.Headers.from_state(state["headers"]) - return cls(**state) - - -class Message(serializable.Serializable): - def __eq__(self, other): - if isinstance(other, Message): - return self.data == other.data - return False - - def __ne__(self, other): - return not self.__eq__(other) - - def get_state(self): - return self.data.get_state() - - def set_state(self, state): - self.data.set_state(state) - - @classmethod - def from_state(cls, state): - state["headers"] = headers.Headers.from_state(state["headers"]) - return cls(**state) - - @property - def headers(self): - """ - Message headers object - - Returns: - netlib.http.Headers - """ - return self.data.headers - - @headers.setter - def headers(self, h): - self.data.headers = h - - @property - def raw_content(self) -> bytes: - """ - The raw (encoded) HTTP message body - - See also: :py:attr:`content`, :py:class:`text` - """ - return self.data.content - - @raw_content.setter - def raw_content(self, content): - self.data.content = content - - def get_content(self, strict: bool=True) -> bytes: - """ - The HTTP message body decoded with the content-encoding header (e.g. gzip) - - Raises: - ValueError, when the content-encoding is invalid and strict is True. - - See also: :py:class:`raw_content`, :py:attr:`text` - """ - if self.raw_content is None: - return None - ce = self.headers.get("content-encoding") - if ce: - try: - return encoding.decode(self.raw_content, ce) - except ValueError: - if strict: - raise - return self.raw_content - else: - return self.raw_content - - def set_content(self, value): - if value is None: - self.raw_content = None - return - if not isinstance(value, bytes): - raise TypeError( - "Message content must be bytes, not {}. " - "Please use .text if you want to assign a str." - .format(type(value).__name__) - ) - ce = self.headers.get("content-encoding") - try: - self.raw_content = encoding.encode(value, ce or "identity") - except ValueError: - # So we have an invalid content-encoding? - # Let's remove it! - del self.headers["content-encoding"] - self.raw_content = value - self.headers["content-length"] = str(len(self.raw_content)) - - content = property(get_content, set_content) - - @property - def http_version(self): - """ - Version string, e.g. 
"HTTP/1.1" - """ - return _native(self.data.http_version) - - @http_version.setter - def http_version(self, http_version): - self.data.http_version = _always_bytes(http_version) - - @property - def timestamp_start(self): - """ - First byte timestamp - """ - return self.data.timestamp_start - - @timestamp_start.setter - def timestamp_start(self, timestamp_start): - self.data.timestamp_start = timestamp_start - - @property - def timestamp_end(self): - """ - Last byte timestamp - """ - return self.data.timestamp_end - - @timestamp_end.setter - def timestamp_end(self, timestamp_end): - self.data.timestamp_end = timestamp_end - - def _get_content_type_charset(self) -> Optional[str]: - ct = headers.parse_content_type(self.headers.get("content-type", "")) - if ct: - return ct[2].get("charset") - - def _guess_encoding(self) -> str: - enc = self._get_content_type_charset() - if enc: - return enc - - if "json" in self.headers.get("content-type", ""): - return "utf8" - else: - # We may also want to check for HTML meta tags here at some point. - return "latin-1" - - def get_text(self, strict: bool=True) -> str: - """ - The HTTP message body decoded with both content-encoding header (e.g. gzip) - and content-type header charset. - - Raises: - ValueError, when either content-encoding or charset is invalid and strict is True. - - See also: :py:attr:`content`, :py:class:`raw_content` - """ - if self.raw_content is None: - return None - enc = self._guess_encoding() - - content = self.get_content(strict) - try: - return encoding.decode(content, enc) - except ValueError: - if strict: - raise - return content.decode("utf8", "surrogateescape") - - def set_text(self, text): - if text is None: - self.content = None - return - enc = self._guess_encoding() - - try: - self.content = encoding.encode(text, enc) - except ValueError: - # Fall back to UTF-8 and update the content-type header. - ct = headers.parse_content_type(self.headers.get("content-type", "")) or ("text", "plain", {}) - ct[2]["charset"] = "utf-8" - self.headers["content-type"] = headers.assemble_content_type(*ct) - enc = "utf8" - self.content = text.encode(enc, "surrogateescape") - - text = property(get_text, set_text) - - def decode(self, strict=True): - """ - Decodes body based on the current Content-Encoding header, then - removes the header. If there is no Content-Encoding header, no - action is taken. - - Raises: - ValueError, when the content-encoding is invalid and strict is True. - """ - self.raw_content = self.get_content(strict) - self.headers.pop("content-encoding", None) - - def encode(self, e): - """ - Encodes body with the encoding e, where e is "gzip", "deflate", "identity", or "br". - Any existing content-encodings are overwritten, - the content is not decoded beforehand. - - Raises: - ValueError, when the specified content-encoding is invalid. - """ - self.headers["content-encoding"] = e - self.content = self.raw_content - if "content-encoding" not in self.headers: - raise ValueError("Invalid content encoding {}".format(repr(e))) - - def replace(self, pattern, repl, flags=0, count=0): - """ - Replaces a regular expression pattern with repl in both the headers - and the body of the message. Encoded body will be decoded - before replacement, and re-encoded afterwards. - - Returns: - The number of replacements made. 
- """ - if isinstance(pattern, str): - pattern = strutils.escaped_str_to_bytes(pattern) - if isinstance(repl, str): - repl = strutils.escaped_str_to_bytes(repl) - replacements = 0 - if self.content: - self.content, replacements = re.subn( - pattern, repl, self.content, flags=flags, count=count - ) - replacements += self.headers.replace(pattern, repl, flags=flags, count=count) - return replacements - - # Legacy - - @property - def body(self): # pragma: no cover - warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning) - return self.content - - @body.setter - def body(self, body): # pragma: no cover - warnings.warn(".body is deprecated, use .content instead.", DeprecationWarning) - self.content = body - - -class decoded: - """ - Deprecated: You can now directly use :py:attr:`content`. - :py:attr:`raw_content` has the encoded content. - """ - - def __init__(self, message): # pragma no cover - warnings.warn("decoded() is deprecated, you can now directly use .content instead. " - ".raw_content has the encoded content.", DeprecationWarning) - - def __enter__(self): # pragma no cover - pass - - def __exit__(self, type, value, tb): # pragma no cover - pass diff --git a/netlib/http/multipart.py b/netlib/http/multipart.py deleted file mode 100644 index 536b2809..00000000 --- a/netlib/http/multipart.py +++ /dev/null @@ -1,32 +0,0 @@ -import re - -from netlib.http import headers - - -def decode(hdrs, content): - """ - Takes a multipart boundary encoded string and returns list of (key, value) tuples. - """ - v = hdrs.get("content-type") - if v: - v = headers.parse_content_type(v) - if not v: - return [] - try: - boundary = v[2]["boundary"].encode("ascii") - except (KeyError, UnicodeError): - return [] - - rx = re.compile(br'\bname="([^"]+)"') - r = [] - - for i in content.split(b"--" + boundary): - parts = i.splitlines() - if len(parts) > 1 and parts[0][0:2] != b"--": - match = rx.search(parts[1]) - if match: - key = match.group(1) - value = b"".join(parts[3 + parts[2:].index(b""):]) - r.append((key, value)) - return r - return [] diff --git a/netlib/http/request.py b/netlib/http/request.py deleted file mode 100644 index 16b0c986..00000000 --- a/netlib/http/request.py +++ /dev/null @@ -1,405 +0,0 @@ -import re -import urllib - -from mitmproxy.types import multidict -from mitmproxy.utils import strutils -from netlib.http import multipart -from netlib.http import cookies -from netlib.http import headers as nheaders -from netlib.http import message -import netlib.http.url - -# This regex extracts & splits the host header into host and port. -# Handles the edge case of IPv6 addresses containing colons. 
-# https://bugzilla.mozilla.org/show_bug.cgi?id=45891 -host_header_re = re.compile(r"^(?P[^:]+|\[.+\])(?::(?P\d+))?$") - - -class RequestData(message.MessageData): - def __init__( - self, - first_line_format, - method, - scheme, - host, - port, - path, - http_version, - headers=(), - content=None, - timestamp_start=None, - timestamp_end=None - ): - if isinstance(method, str): - method = method.encode("ascii", "strict") - if isinstance(scheme, str): - scheme = scheme.encode("ascii", "strict") - if isinstance(host, str): - host = host.encode("idna", "strict") - if isinstance(path, str): - path = path.encode("ascii", "strict") - if isinstance(http_version, str): - http_version = http_version.encode("ascii", "strict") - if not isinstance(headers, nheaders.Headers): - headers = nheaders.Headers(headers) - if isinstance(content, str): - raise ValueError("Content must be bytes, not {}".format(type(content).__name__)) - - self.first_line_format = first_line_format - self.method = method - self.scheme = scheme - self.host = host - self.port = port - self.path = path - self.http_version = http_version - self.headers = headers - self.content = content - self.timestamp_start = timestamp_start - self.timestamp_end = timestamp_end - - -class Request(message.Message): - """ - An HTTP request. - """ - def __init__(self, *args, **kwargs): - super().__init__() - self.data = RequestData(*args, **kwargs) - - def __repr__(self): - if self.host and self.port: - hostport = "{}:{}".format(self.host, self.port) - else: - hostport = "" - path = self.path or "" - return "Request({} {}{})".format( - self.method, hostport, path - ) - - def replace(self, pattern, repl, flags=0, count=0): - """ - Replaces a regular expression pattern with repl in the headers, the - request path and the body of the request. Encoded content will be - decoded before replacement, and re-encoded afterwards. - - Returns: - The number of replacements made. - """ - if isinstance(pattern, str): - pattern = strutils.escaped_str_to_bytes(pattern) - if isinstance(repl, str): - repl = strutils.escaped_str_to_bytes(repl) - - c = super().replace(pattern, repl, flags, count) - self.path, pc = re.subn( - pattern, repl, self.data.path, flags=flags, count=count - ) - c += pc - return c - - @property - def first_line_format(self): - """ - HTTP request form as defined in `RFC7230 `_. - - origin-form and asterisk-form are subsumed as "relative". - """ - return self.data.first_line_format - - @first_line_format.setter - def first_line_format(self, first_line_format): - self.data.first_line_format = first_line_format - - @property - def method(self): - """ - HTTP request method, e.g. "GET". - """ - return message._native(self.data.method).upper() - - @method.setter - def method(self, method): - self.data.method = message._always_bytes(method) - - @property - def scheme(self): - """ - HTTP request scheme, which should be "http" or "https". - """ - if not self.data.scheme: - return self.data.scheme - return message._native(self.data.scheme) - - @scheme.setter - def scheme(self, scheme): - self.data.scheme = message._always_bytes(scheme) - - @property - def host(self): - """ - Target host. This may be parsed from the raw request - (e.g. from a ``GET http://example.com/ HTTP/1.1`` request line) - or inferred from the proxy mode (e.g. an IP in transparent mode). - - Setting the host attribute also updates the host header, if present. 
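# A standalone sketch of the encode-with-fallback behaviour the host setter
# describes: prefer strict IDNA, fall back to UTF-8 with surrogateescape so
# the assignment never fails. encode_host is a hypothetical helper name.
def encode_host(host: str) -> bytes:
    try:
        return host.encode("idna", "strict")
    except UnicodeError:
        return host.encode("utf8", "surrogateescape")

print(encode_host("example.com"))   # b'example.com'
print(encode_host("münchen.de"))    # b'xn--mnchen-3ya.de'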
- """ - if not self.data.host: - return self.data.host - try: - return self.data.host.decode("idna") - except UnicodeError: - return self.data.host.decode("utf8", "surrogateescape") - - @host.setter - def host(self, host): - if isinstance(host, str): - try: - # There's no non-strict mode for IDNA encoding. - # We don't want this operation to fail though, so we try - # utf8 as a last resort. - host = host.encode("idna", "strict") - except UnicodeError: - host = host.encode("utf8", "surrogateescape") - - self.data.host = host - - # Update host header - if "host" in self.headers: - if host: - self.headers["host"] = host - else: - self.headers.pop("host") - - @property - def port(self): - """ - Target port - """ - return self.data.port - - @port.setter - def port(self, port): - self.data.port = port - - @property - def path(self): - """ - HTTP request path, e.g. "/index.html". - Guaranteed to start with a slash, except for OPTIONS requests, which may just be "*". - """ - if self.data.path is None: - return None - else: - return message._native(self.data.path) - - @path.setter - def path(self, path): - self.data.path = message._always_bytes(path) - - @property - def url(self): - """ - The URL string, constructed from the request's URL components - """ - if self.first_line_format == "authority": - return "%s:%d" % (self.host, self.port) - return netlib.http.url.unparse(self.scheme, self.host, self.port, self.path) - - @url.setter - def url(self, url): - self.scheme, self.host, self.port, self.path = netlib.http.url.parse(url) - - def _parse_host_header(self): - """Extract the host and port from Host header""" - if "host" not in self.headers: - return None, None - host, port = self.headers["host"], None - m = host_header_re.match(host) - if m: - host = m.group("host").strip("[]") - if m.group("port"): - port = int(m.group("port")) - return host, port - - @property - def pretty_host(self): - """ - Similar to :py:attr:`host`, but using the Host headers as an additional preferred data source. - This is useful in transparent mode where :py:attr:`host` is only an IP address, - but may not reflect the actual destination as the Host header could be spoofed. - """ - host, port = self._parse_host_header() - if not host: - return self.host - if not port: - port = 443 if self.scheme == 'https' else 80 - # Prefer the original address if host header has an unexpected form - return host if port == self.port else self.host - - @property - def pretty_url(self): - """ - Like :py:attr:`url`, but using :py:attr:`pretty_host` instead of :py:attr:`host`. - """ - if self.first_line_format == "authority": - return "%s:%d" % (self.pretty_host, self.port) - return netlib.http.url.unparse(self.scheme, self.pretty_host, self.port, self.path) - - @property - def query(self) -> multidict.MultiDictView: - """ - The request query string as an :py:class:`~netlib.multidict.MultiDictView` object. - """ - return multidict.MultiDictView( - self._get_query, - self._set_query - ) - - def _get_query(self): - query = urllib.parse.urlparse(self.url).query - return tuple(netlib.http.url.decode(query)) - - def _set_query(self, query_data): - query = netlib.http.url.encode(query_data) - _, _, path, params, _, fragment = urllib.parse.urlparse(self.url) - self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment]) - - @query.setter - def query(self, value): - self._set_query(value) - - @property - def cookies(self) -> multidict.MultiDictView: - """ - The request cookies. 
- - An empty :py:class:`~netlib.multidict.MultiDictView` object if the cookie monster ate them all. - """ - return multidict.MultiDictView( - self._get_cookies, - self._set_cookies - ) - - def _get_cookies(self): - h = self.headers.get_all("Cookie") - return tuple(cookies.parse_cookie_headers(h)) - - def _set_cookies(self, value): - self.headers["cookie"] = cookies.format_cookie_header(value) - - @cookies.setter - def cookies(self, value): - self._set_cookies(value) - - @property - def path_components(self): - """ - The URL's path components as a tuple of strings. - Components are unquoted. - """ - path = urllib.parse.urlparse(self.url).path - # This needs to be a tuple so that it's immutable. - # Otherwise, this would fail silently: - # request.path_components.append("foo") - return tuple(netlib.http.url.unquote(i) for i in path.split("/") if i) - - @path_components.setter - def path_components(self, components): - components = map(lambda x: netlib.http.url.quote(x, safe=""), components) - path = "/" + "/".join(components) - _, _, _, params, query, fragment = urllib.parse.urlparse(self.url) - self.path = urllib.parse.urlunparse(["", "", path, params, query, fragment]) - - def anticache(self): - """ - Modifies this request to remove headers that might produce a cached - response. That is, we remove ETags and If-Modified-Since headers. - """ - delheaders = [ - "if-modified-since", - "if-none-match", - ] - for i in delheaders: - self.headers.pop(i, None) - - def anticomp(self): - """ - Modifies this request to remove headers that will compress the - resource's data. - """ - self.headers["accept-encoding"] = "identity" - - def constrain_encoding(self): - """ - Limits the permissible Accept-Encoding values, based on what we can - decode appropriately. - """ - accept_encoding = self.headers.get("accept-encoding") - if accept_encoding: - self.headers["accept-encoding"] = ( - ', '.join( - e - for e in {"gzip", "identity", "deflate", "br"} - if e in accept_encoding - ) - ) - - @property - def urlencoded_form(self): - """ - The URL-encoded form data as an :py:class:`~netlib.multidict.MultiDictView` object. - An empty multidict.MultiDictView if the content-type indicates non-form data - or the content could not be parsed. - """ - return multidict.MultiDictView( - self._get_urlencoded_form, - self._set_urlencoded_form - ) - - def _get_urlencoded_form(self): - is_valid_content_type = "application/x-www-form-urlencoded" in self.headers.get("content-type", "").lower() - if is_valid_content_type: - try: - return tuple(netlib.http.url.decode(self.content)) - except ValueError: - pass - return () - - def _set_urlencoded_form(self, form_data): - """ - Sets the body to the URL-encoded form data, and adds the appropriate content-type header. - This will overwrite the existing content if there is one. - """ - self.headers["content-type"] = "application/x-www-form-urlencoded" - self.content = netlib.http.url.encode(form_data).encode() - - @urlencoded_form.setter - def urlencoded_form(self, value): - self._set_urlencoded_form(value) - - @property - def multipart_form(self): - """ - The multipart form data as an :py:class:`~netlib.multidict.MultiDictView` object. - None if the content-type indicates non-form data. 
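# Standalone round-trip of the application/x-www-form-urlencoded body that
# the urlencoded_form property above exposes as (key, value) pairs, using
# only the standard library; the sample pairs are made up for illustration.
import urllib.parse

pairs = [("q", "mitmproxy"), ("lang", "en")]
body = urllib.parse.urlencode(pairs)                       # 'q=mitmproxy&lang=en'
assert urllib.parse.parse_qsl(body, keep_blank_values=True) == pairs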
- """ - return multidict.MultiDictView( - self._get_multipart_form, - self._set_multipart_form - ) - - def _get_multipart_form(self): - is_valid_content_type = "multipart/form-data" in self.headers.get("content-type", "").lower() - if is_valid_content_type: - try: - return multipart.decode(self.headers, self.content) - except ValueError: - pass - return () - - def _set_multipart_form(self, value): - raise NotImplementedError() - - @multipart_form.setter - def multipart_form(self, value): - self._set_multipart_form(value) diff --git a/netlib/http/response.py b/netlib/http/response.py deleted file mode 100644 index 4d1d5d24..00000000 --- a/netlib/http/response.py +++ /dev/null @@ -1,192 +0,0 @@ -import time -from email.utils import parsedate_tz, formatdate, mktime_tz -from mitmproxy.utils import human -from mitmproxy.types import multidict -from netlib.http import cookies -from netlib.http import headers as nheaders -from netlib.http import message -from netlib.http import status_codes -from typing import AnyStr -from typing import Dict -from typing import Iterable -from typing import Tuple -from typing import Union - - -class ResponseData(message.MessageData): - def __init__( - self, - http_version, - status_code, - reason=None, - headers=(), - content=None, - timestamp_start=None, - timestamp_end=None - ): - if isinstance(http_version, str): - http_version = http_version.encode("ascii", "strict") - if isinstance(reason, str): - reason = reason.encode("ascii", "strict") - if not isinstance(headers, nheaders.Headers): - headers = nheaders.Headers(headers) - if isinstance(content, str): - raise ValueError("Content must be bytes, not {}".format(type(content).__name__)) - - self.http_version = http_version - self.status_code = status_code - self.reason = reason - self.headers = headers - self.content = content - self.timestamp_start = timestamp_start - self.timestamp_end = timestamp_end - - -class Response(message.Message): - """ - An HTTP response. - """ - def __init__(self, *args, **kwargs): - super().__init__() - self.data = ResponseData(*args, **kwargs) - - def __repr__(self): - if self.raw_content: - details = "{}, {}".format( - self.headers.get("content-type", "unknown content type"), - human.pretty_size(len(self.raw_content)) - ) - else: - details = "no content" - return "Response({status_code} {reason}, {details})".format( - status_code=self.status_code, - reason=self.reason, - details=details - ) - - @classmethod - def make( - cls, - status_code: int=200, - content: AnyStr=b"", - headers: Union[Dict[AnyStr, AnyStr], Iterable[Tuple[bytes, bytes]]]=() - ): - """ - Simplified API for creating response objects. - """ - resp = cls( - b"HTTP/1.1", - status_code, - status_codes.RESPONSES.get(status_code, "").encode(), - (), - None - ) - - # Headers can be list or dict, we differentiate here. - if isinstance(headers, dict): - resp.headers = nheaders.Headers(**headers) - elif isinstance(headers, Iterable): - resp.headers = nheaders.Headers(headers) - else: - raise TypeError("Expected headers to be an iterable or dict, but is {}.".format( - type(headers).__name__ - )) - - # Assign this manually to update the content-length header. - if isinstance(content, bytes): - resp.content = content - elif isinstance(content, str): - resp.text = content - else: - raise TypeError("Expected content to be str or bytes, but is {}.".format( - type(content).__name__ - )) - - return resp - - @property - def status_code(self): - """ - HTTP Status Code, e.g. ``200``. 
- """ - return self.data.status_code - - @status_code.setter - def status_code(self, status_code): - self.data.status_code = status_code - - @property - def reason(self): - """ - HTTP Reason Phrase, e.g. "Not Found". - This is always :py:obj:`None` for HTTP2 requests, because HTTP2 responses do not contain a reason phrase. - """ - return message._native(self.data.reason) - - @reason.setter - def reason(self, reason): - self.data.reason = message._always_bytes(reason) - - @property - def cookies(self) -> multidict.MultiDictView: - """ - The response cookies. A possibly empty - :py:class:`~netlib.multidict.MultiDictView`, where the keys are cookie - name strings, and values are (value, attr) tuples. Value is a string, - and attr is an MultiDictView containing cookie attributes. Within - attrs, unary attributes (e.g. HTTPOnly) are indicated by a Null value. - - Caveats: - Updating the attr - """ - return multidict.MultiDictView( - self._get_cookies, - self._set_cookies - ) - - def _get_cookies(self): - h = self.headers.get_all("set-cookie") - return tuple(cookies.parse_set_cookie_headers(h)) - - def _set_cookies(self, value): - cookie_headers = [] - for k, v in value: - header = cookies.format_set_cookie_header([(k, v[0], v[1])]) - cookie_headers.append(header) - self.headers.set_all("set-cookie", cookie_headers) - - @cookies.setter - def cookies(self, value): - self._set_cookies(value) - - def refresh(self, now=None): - """ - This fairly complex and heuristic function refreshes a server - response for replay. - - - It adjusts date, expires and last-modified headers. - - It adjusts cookie expiration. - """ - if not now: - now = time.time() - delta = now - self.timestamp_start - refresh_headers = [ - "date", - "expires", - "last-modified", - ] - for i in refresh_headers: - if i in self.headers: - d = parsedate_tz(self.headers[i]) - if d: - new = mktime_tz(d) + delta - self.headers[i] = formatdate(new) - c = [] - for set_cookie_header in self.headers.get_all("set-cookie"): - try: - refreshed = cookies.refresh_set_cookie_header(set_cookie_header, delta) - except ValueError: - refreshed = set_cookie_header - c.append(refreshed) - if c: - self.headers.set_all("set-cookie", c) diff --git a/netlib/http/status_codes.py b/netlib/http/status_codes.py deleted file mode 100644 index 5a83cd73..00000000 --- a/netlib/http/status_codes.py +++ /dev/null @@ -1,104 +0,0 @@ -CONTINUE = 100 -SWITCHING = 101 -OK = 200 -CREATED = 201 -ACCEPTED = 202 -NON_AUTHORITATIVE_INFORMATION = 203 -NO_CONTENT = 204 -RESET_CONTENT = 205 -PARTIAL_CONTENT = 206 -MULTI_STATUS = 207 - -MULTIPLE_CHOICE = 300 -MOVED_PERMANENTLY = 301 -FOUND = 302 -SEE_OTHER = 303 -NOT_MODIFIED = 304 -USE_PROXY = 305 -TEMPORARY_REDIRECT = 307 - -BAD_REQUEST = 400 -UNAUTHORIZED = 401 -PAYMENT_REQUIRED = 402 -FORBIDDEN = 403 -NOT_FOUND = 404 -NOT_ALLOWED = 405 -NOT_ACCEPTABLE = 406 -PROXY_AUTH_REQUIRED = 407 -REQUEST_TIMEOUT = 408 -CONFLICT = 409 -GONE = 410 -LENGTH_REQUIRED = 411 -PRECONDITION_FAILED = 412 -REQUEST_ENTITY_TOO_LARGE = 413 -REQUEST_URI_TOO_LONG = 414 -UNSUPPORTED_MEDIA_TYPE = 415 -REQUESTED_RANGE_NOT_SATISFIABLE = 416 -EXPECTATION_FAILED = 417 -IM_A_TEAPOT = 418 - -INTERNAL_SERVER_ERROR = 500 -NOT_IMPLEMENTED = 501 -BAD_GATEWAY = 502 -SERVICE_UNAVAILABLE = 503 -GATEWAY_TIMEOUT = 504 -HTTP_VERSION_NOT_SUPPORTED = 505 -INSUFFICIENT_STORAGE_SPACE = 507 -NOT_EXTENDED = 510 - -RESPONSES = { - # 100 - CONTINUE: "Continue", - SWITCHING: "Switching Protocols", - - # 200 - OK: "OK", - CREATED: "Created", - ACCEPTED: "Accepted", - 
NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information", - NO_CONTENT: "No Content", - RESET_CONTENT: "Reset Content.", - PARTIAL_CONTENT: "Partial Content", - MULTI_STATUS: "Multi-Status", - - # 300 - MULTIPLE_CHOICE: "Multiple Choices", - MOVED_PERMANENTLY: "Moved Permanently", - FOUND: "Found", - SEE_OTHER: "See Other", - NOT_MODIFIED: "Not Modified", - USE_PROXY: "Use Proxy", - # 306 not defined?? - TEMPORARY_REDIRECT: "Temporary Redirect", - - # 400 - BAD_REQUEST: "Bad Request", - UNAUTHORIZED: "Unauthorized", - PAYMENT_REQUIRED: "Payment Required", - FORBIDDEN: "Forbidden", - NOT_FOUND: "Not Found", - NOT_ALLOWED: "Method Not Allowed", - NOT_ACCEPTABLE: "Not Acceptable", - PROXY_AUTH_REQUIRED: "Proxy Authentication Required", - REQUEST_TIMEOUT: "Request Time-out", - CONFLICT: "Conflict", - GONE: "Gone", - LENGTH_REQUIRED: "Length Required", - PRECONDITION_FAILED: "Precondition Failed", - REQUEST_ENTITY_TOO_LARGE: "Request Entity Too Large", - REQUEST_URI_TOO_LONG: "Request-URI Too Long", - UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type", - REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable", - EXPECTATION_FAILED: "Expectation Failed", - IM_A_TEAPOT: "I'm a teapot", - - # 500 - INTERNAL_SERVER_ERROR: "Internal Server Error", - NOT_IMPLEMENTED: "Not Implemented", - BAD_GATEWAY: "Bad Gateway", - SERVICE_UNAVAILABLE: "Service Unavailable", - GATEWAY_TIMEOUT: "Gateway Time-out", - HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported", - INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space", - NOT_EXTENDED: "Not Extended" -} diff --git a/netlib/http/url.py b/netlib/http/url.py deleted file mode 100644 index 3ca58120..00000000 --- a/netlib/http/url.py +++ /dev/null @@ -1,127 +0,0 @@ -import urllib -from typing import Sequence -from typing import Tuple - -from netlib import check - - -# PY2 workaround -def decode_parse_result(result, enc): - if hasattr(result, "decode"): - return result.decode(enc) - else: - return urllib.parse.ParseResult(*[x.decode(enc) for x in result]) - - -# PY2 workaround -def encode_parse_result(result, enc): - if hasattr(result, "encode"): - return result.encode(enc) - else: - return urllib.parse.ParseResult(*[x.encode(enc) for x in result]) - - -def parse(url): - """ - URL-parsing function that checks that - - port is an integer 0-65535 - - host is a valid IDNA-encoded hostname with no null-bytes - - path is valid ASCII - - Args: - A URL (as bytes or as unicode) - - Returns: - A (scheme, host, port, path) tuple - - Raises: - ValueError, if the URL is not properly formatted. - """ - parsed = urllib.parse.urlparse(url) - - if not parsed.hostname: - raise ValueError("No hostname given") - - if isinstance(url, bytes): - host = parsed.hostname - - # this should not raise a ValueError, - # but we try to be very forgiving here and accept just everything. 
- # decode_parse_result(parsed, "ascii") - else: - host = parsed.hostname.encode("idna") - parsed = encode_parse_result(parsed, "ascii") - - port = parsed.port - if not port: - port = 443 if parsed.scheme == b"https" else 80 - - full_path = urllib.parse.urlunparse( - (b"", b"", parsed.path, parsed.params, parsed.query, parsed.fragment) - ) - if not full_path.startswith(b"/"): - full_path = b"/" + full_path - - if not check.is_valid_host(host): - raise ValueError("Invalid Host") - if not check.is_valid_port(port): - raise ValueError("Invalid Port") - - return parsed.scheme, host, port, full_path - - -def unparse(scheme, host, port, path=""): - """ - Returns a URL string, constructed from the specified components. - - Args: - All args must be str. - """ - if path == "*": - path = "" - return "%s://%s%s" % (scheme, hostport(scheme, host, port), path) - - -def encode(s: Sequence[Tuple[str, str]]) -> str: - """ - Takes a list of (key, value) tuples and returns a urlencoded string. - """ - return urllib.parse.urlencode(s, False, errors="surrogateescape") - - -def decode(s): - """ - Takes a urlencoded string and returns a list of surrogate-escaped (key, value) tuples. - """ - return urllib.parse.parse_qsl(s, keep_blank_values=True, errors='surrogateescape') - - -def quote(b: str, safe: str="/") -> str: - """ - Returns: - An ascii-encodable str. - """ - return urllib.parse.quote(b, safe=safe, errors="surrogateescape") - - -def unquote(s: str) -> str: - """ - Args: - s: A surrogate-escaped str - Returns: - A surrogate-escaped str - """ - return urllib.parse.unquote(s, errors="surrogateescape") - - -def hostport(scheme, host, port): - """ - Returns the host component, with a port specifcation if needed. - """ - if (port, scheme) in [(80, "http"), (443, "https"), (80, b"http"), (443, b"https")]: - return host - else: - if isinstance(host, bytes): - return b"%s:%d" % (host, port) - else: - return "%s:%d" % (host, port) diff --git a/netlib/http/user_agents.py b/netlib/http/user_agents.py deleted file mode 100644 index d0ca2f21..00000000 --- a/netlib/http/user_agents.py +++ /dev/null @@ -1,50 +0,0 @@ -""" - A small collection of useful user-agent header strings. These should be - kept reasonably current to reflect common usage. -""" - -# pylint: line-too-long - -# A collection of (name, shortcut, string) tuples. 
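# Standalone illustration of the scheme/host/port/path split performed by the
# deleted url helpers, using only urllib; the sample URL is made up.
import urllib.parse

parsed = urllib.parse.urlparse("https://example.com:8443/search?q=1")
port = parsed.port or (443 if parsed.scheme == "https" else 80)
full_path = urllib.parse.urlunparse(
    ("", "", parsed.path, parsed.params, parsed.query, parsed.fragment)
)
print(parsed.scheme, parsed.hostname, port, full_path)
# https example.com 8443 /search?q=1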
- -UASTRINGS = [ - ("android", - "a", - "Mozilla/5.0 (Linux; U; Android 4.1.1; en-gb; Nexus 7 Build/JRO03D) AFL/01.04.02"), # noqa - ("blackberry", - "l", - "Mozilla/5.0 (BlackBerry; U; BlackBerry 9900; en) AppleWebKit/534.11+ (KHTML, like Gecko) Version/7.1.0.346 Mobile Safari/534.11+"), # noqa - ("bingbot", - "b", - "Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)"), # noqa - ("chrome", - "c", - "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1"), # noqa - ("firefox", - "f", - "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:14.0) Gecko/20120405 Firefox/14.0a1"), # noqa - ("googlebot", - "g", - "Googlebot/2.1 (+http://www.googlebot.com/bot.html)"), # noqa - ("ie9", - "i", - "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US)"), # noqa - ("ipad", - "p", - "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B176 Safari/7534.48.3"), # noqa - ("iphone", - "h", - "Mozilla/5.0 (iPhone; CPU iPhone OS 4_2_1 like Mac OS X) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148a Safari/6533.18.5"), # noqa - ("safari", - "s", - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10"), # noqa -] - - -def get_by_shortcut(s): - """ - Retrieve a user agent entry by shortcut. - """ - for i in UASTRINGS: - if s == i[1]: - return i diff --git a/netlib/socks.py b/netlib/socks.py deleted file mode 100644 index 377308a8..00000000 --- a/netlib/socks.py +++ /dev/null @@ -1,234 +0,0 @@ -import struct -import array -import ipaddress - -from netlib import tcp -from netlib import check -from mitmproxy.types import bidi - - -class SocksError(Exception): - def __init__(self, code, message): - super().__init__(message) - self.code = code - -VERSION = bidi.BiDi( - SOCKS4=0x04, - SOCKS5=0x05 -) - -CMD = bidi.BiDi( - CONNECT=0x01, - BIND=0x02, - UDP_ASSOCIATE=0x03 -) - -ATYP = bidi.BiDi( - IPV4_ADDRESS=0x01, - DOMAINNAME=0x03, - IPV6_ADDRESS=0x04 -) - -REP = bidi.BiDi( - SUCCEEDED=0x00, - GENERAL_SOCKS_SERVER_FAILURE=0x01, - CONNECTION_NOT_ALLOWED_BY_RULESET=0x02, - NETWORK_UNREACHABLE=0x03, - HOST_UNREACHABLE=0x04, - CONNECTION_REFUSED=0x05, - TTL_EXPIRED=0x06, - COMMAND_NOT_SUPPORTED=0x07, - ADDRESS_TYPE_NOT_SUPPORTED=0x08, -) - -METHOD = bidi.BiDi( - NO_AUTHENTICATION_REQUIRED=0x00, - GSSAPI=0x01, - USERNAME_PASSWORD=0x02, - NO_ACCEPTABLE_METHODS=0xFF -) - -USERNAME_PASSWORD_VERSION = bidi.BiDi( - DEFAULT=0x01 -) - - -class ClientGreeting: - __slots__ = ("ver", "methods") - - def __init__(self, ver, methods): - self.ver = ver - self.methods = array.array("B") - self.methods.extend(methods) - - def assert_socks5(self): - if self.ver != VERSION.SOCKS5: - if self.ver == ord("G") and len(self.methods) == ord("E"): - guess = "Probably not a SOCKS request but a regular HTTP request. " - else: - guess = "" - - raise SocksError( - REP.GENERAL_SOCKS_SERVER_FAILURE, - guess + "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver - ) - - @classmethod - def from_file(cls, f, fail_early=False): - """ - :param fail_early: If true, a SocksError will be raised if the first byte does not indicate socks5. 
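# Wire format of the SOCKS5 client greeting that ClientGreeting.from_file /
# to_file handle above: version byte, method count, then one byte per offered
# method. A standalone sketch with the "no authentication" method:
import struct

VER_SOCKS5 = 0x05
METHOD_NO_AUTH = 0x00

greeting = struct.pack("!BB", VER_SOCKS5, 1) + bytes([METHOD_NO_AUTH])
assert greeting == b"\x05\x01\x00"

ver, nmethods = struct.unpack("!BB", greeting[:2])
methods = list(greeting[2:2 + nmethods])
print(ver, methods)   # 5 [0]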
- """ - ver, nmethods = struct.unpack("!BB", f.safe_read(2)) - client_greeting = cls(ver, []) - if fail_early: - client_greeting.assert_socks5() - client_greeting.methods.fromstring(f.safe_read(nmethods)) - return client_greeting - - def to_file(self, f): - f.write(struct.pack("!BB", self.ver, len(self.methods))) - f.write(self.methods.tostring()) - - -class ServerGreeting: - __slots__ = ("ver", "method") - - def __init__(self, ver, method): - self.ver = ver - self.method = method - - def assert_socks5(self): - if self.ver != VERSION.SOCKS5: - if self.ver == ord("H") and self.method == ord("T"): - guess = "Probably not a SOCKS request but a regular HTTP response. " - else: - guess = "" - - raise SocksError( - REP.GENERAL_SOCKS_SERVER_FAILURE, - guess + "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver - ) - - @classmethod - def from_file(cls, f): - ver, method = struct.unpack("!BB", f.safe_read(2)) - return cls(ver, method) - - def to_file(self, f): - f.write(struct.pack("!BB", self.ver, self.method)) - - -class UsernamePasswordAuth: - __slots__ = ("ver", "username", "password") - - def __init__(self, ver, username, password): - self.ver = ver - self.username = username - self.password = password - - def assert_authver1(self): - if self.ver != USERNAME_PASSWORD_VERSION.DEFAULT: - raise SocksError( - 0, - "Invalid auth version. Expected 0x01, got 0x%x" % self.ver - ) - - @classmethod - def from_file(cls, f): - ver, ulen = struct.unpack("!BB", f.safe_read(2)) - username = f.safe_read(ulen) - plen, = struct.unpack("!B", f.safe_read(1)) - password = f.safe_read(plen) - return cls(ver, username.decode(), password.decode()) - - def to_file(self, f): - f.write(struct.pack("!BB", self.ver, len(self.username))) - f.write(self.username.encode()) - f.write(struct.pack("!B", len(self.password))) - f.write(self.password.encode()) - - -class UsernamePasswordAuthResponse: - __slots__ = ("ver", "status") - - def __init__(self, ver, status): - self.ver = ver - self.status = status - - def assert_authver1(self): - if self.ver != USERNAME_PASSWORD_VERSION.DEFAULT: - raise SocksError( - 0, - "Invalid auth version. Expected 0x01, got 0x%x" % self.ver - ) - - @classmethod - def from_file(cls, f): - ver, status = struct.unpack("!BB", f.safe_read(2)) - return cls(ver, status) - - def to_file(self, f): - f.write(struct.pack("!BB", self.ver, self.status)) - - -class Message: - __slots__ = ("ver", "msg", "atyp", "addr") - - def __init__(self, ver, msg, atyp, addr): - self.ver = ver - self.msg = msg - self.atyp = atyp - self.addr = tcp.Address.wrap(addr) - - def assert_socks5(self): - if self.ver != VERSION.SOCKS5: - raise SocksError( - REP.GENERAL_SOCKS_SERVER_FAILURE, - "Invalid SOCKS version. Expected 0x05, got 0x%x" % self.ver - ) - - @classmethod - def from_file(cls, f): - ver, msg, rsv, atyp = struct.unpack("!BBBB", f.safe_read(4)) - if rsv != 0x00: - raise SocksError( - REP.GENERAL_SOCKS_SERVER_FAILURE, - "Socks Request: Invalid reserved byte: %s" % rsv - ) - if atyp == ATYP.IPV4_ADDRESS: - # We use tnoa here as ntop is not commonly available on Windows. 
- host = ipaddress.IPv4Address(f.safe_read(4)).compressed - use_ipv6 = False - elif atyp == ATYP.IPV6_ADDRESS: - host = ipaddress.IPv6Address(f.safe_read(16)).compressed - use_ipv6 = True - elif atyp == ATYP.DOMAINNAME: - length, = struct.unpack("!B", f.safe_read(1)) - host = f.safe_read(length) - if not check.is_valid_host(host): - raise SocksError(REP.GENERAL_SOCKS_SERVER_FAILURE, "Invalid hostname: %s" % host) - host = host.decode("idna") - use_ipv6 = False - else: - raise SocksError(REP.ADDRESS_TYPE_NOT_SUPPORTED, - "Socks Request: Unknown ATYP: %s" % atyp) - - port, = struct.unpack("!H", f.safe_read(2)) - addr = tcp.Address((host, port), use_ipv6=use_ipv6) - return cls(ver, msg, atyp, addr) - - def to_file(self, f): - f.write(struct.pack("!BBBB", self.ver, self.msg, 0x00, self.atyp)) - if self.atyp == ATYP.IPV4_ADDRESS: - f.write(ipaddress.IPv4Address(self.addr.host).packed) - elif self.atyp == ATYP.IPV6_ADDRESS: - f.write(ipaddress.IPv6Address(self.addr.host).packed) - elif self.atyp == ATYP.DOMAINNAME: - f.write(struct.pack("!B", len(self.addr.host))) - f.write(self.addr.host.encode("idna")) - else: - raise SocksError( - REP.ADDRESS_TYPE_NOT_SUPPORTED, - "Unknown ATYP: %s" % self.atyp - ) - f.write(struct.pack("!H", self.addr.port)) diff --git a/netlib/tcp.py b/netlib/tcp.py deleted file mode 100644 index ac368a9c..00000000 --- a/netlib/tcp.py +++ /dev/null @@ -1,989 +0,0 @@ -import os -import select -import socket -import sys -import threading -import time -import traceback - -import binascii - -from typing import Optional # noqa - -from mitmproxy.utils import strutils - -import certifi -from backports import ssl_match_hostname -import OpenSSL -from OpenSSL import SSL - -from mitmproxy import certs -from mitmproxy.utils import version_check -from mitmproxy.types import serializable -from mitmproxy import exceptions -from mitmproxy.types import basethread - -# This is a rather hackish way to make sure that -# the latest version of pyOpenSSL is actually installed. -version_check.check_pyopenssl_version() - -socket_fileobject = socket.SocketIO - -EINTR = 4 -if os.environ.get("NO_ALPN"): - HAS_ALPN = False -else: - HAS_ALPN = SSL._lib.Cryptography_HAS_ALPN - -# To enable all SSL methods use: SSLv23 -# then add options to disable certain methods -# https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3 -SSL_BASIC_OPTIONS = ( - SSL.OP_CIPHER_SERVER_PREFERENCE -) -if hasattr(SSL, "OP_NO_COMPRESSION"): - SSL_BASIC_OPTIONS |= SSL.OP_NO_COMPRESSION - -SSL_DEFAULT_METHOD = SSL.SSLv23_METHOD -SSL_DEFAULT_OPTIONS = ( - SSL.OP_NO_SSLv2 | - SSL.OP_NO_SSLv3 | - SSL_BASIC_OPTIONS -) -if hasattr(SSL, "OP_NO_COMPRESSION"): - SSL_DEFAULT_OPTIONS |= SSL.OP_NO_COMPRESSION - -""" -Map a reasonable SSL version specification into the format OpenSSL expects. -Don't ask... 
-https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3 -""" -sslversion_choices = { - "all": (SSL.SSLv23_METHOD, SSL_BASIC_OPTIONS), - # SSLv23_METHOD + NO_SSLv2 + NO_SSLv3 == TLS 1.0+ - # TLSv1_METHOD would be TLS 1.0 only - "secure": (SSL.SSLv23_METHOD, (SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL_BASIC_OPTIONS)), - "SSLv2": (SSL.SSLv2_METHOD, SSL_BASIC_OPTIONS), - "SSLv3": (SSL.SSLv3_METHOD, SSL_BASIC_OPTIONS), - "TLSv1": (SSL.TLSv1_METHOD, SSL_BASIC_OPTIONS), - "TLSv1_1": (SSL.TLSv1_1_METHOD, SSL_BASIC_OPTIONS), - "TLSv1_2": (SSL.TLSv1_2_METHOD, SSL_BASIC_OPTIONS), -} - - -class SSLKeyLogger: - - def __init__(self, filename): - self.filename = filename - self.f = None - self.lock = threading.Lock() - - # required for functools.wraps, which pyOpenSSL uses. - __name__ = "SSLKeyLogger" - - def __call__(self, connection, where, ret): - if where == SSL.SSL_CB_HANDSHAKE_DONE and ret == 1: - with self.lock: - if not self.f: - d = os.path.dirname(self.filename) - if not os.path.isdir(d): - os.makedirs(d) - self.f = open(self.filename, "ab") - self.f.write(b"\r\n") - client_random = binascii.hexlify(connection.client_random()) - masterkey = binascii.hexlify(connection.master_key()) - self.f.write(b"CLIENT_RANDOM %s %s\r\n" % (client_random, masterkey)) - self.f.flush() - - def close(self): - with self.lock: - if self.f: - self.f.close() - - @staticmethod - def create_logfun(filename): - if filename: - return SSLKeyLogger(filename) - return False - -log_ssl_key = SSLKeyLogger.create_logfun( - os.getenv("MITMPROXY_SSLKEYLOGFILE") or os.getenv("SSLKEYLOGFILE")) - - -class _FileLike: - BLOCKSIZE = 1024 * 32 - - def __init__(self, o): - self.o = o - self._log = None - self.first_byte_timestamp = None - - def set_descriptor(self, o): - self.o = o - - def __getattr__(self, attr): - return getattr(self.o, attr) - - def start_log(self): - """ - Starts or resets the log. - - This will store all bytes read or written. - """ - self._log = [] - - def stop_log(self): - """ - Stops the log. - """ - self._log = None - - def is_logging(self): - return self._log is not None - - def get_log(self): - """ - Returns the log as a string. - """ - if not self.is_logging(): - raise ValueError("Not logging!") - return b"".join(self._log) - - def add_log(self, v): - if self.is_logging(): - self._log.append(v) - - def reset_timestamps(self): - self.first_byte_timestamp = None - - -class Writer(_FileLike): - - def flush(self): - """ - May raise exceptions.TcpDisconnect - """ - if hasattr(self.o, "flush"): - try: - self.o.flush() - except (socket.error, IOError) as v: - raise exceptions.TcpDisconnect(str(v)) - - def write(self, v): - """ - May raise exceptions.TcpDisconnect - """ - if v: - self.first_byte_timestamp = self.first_byte_timestamp or time.time() - try: - if hasattr(self.o, "sendall"): - self.add_log(v) - return self.o.sendall(v) - else: - r = self.o.write(v) - self.add_log(v[:r]) - return r - except (SSL.Error, socket.error) as e: - raise exceptions.TcpDisconnect(str(e)) - - -class Reader(_FileLike): - - def read(self, length): - """ - If length is -1, we read until connection closes. 
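# Shape of the NSS key log line that SSLKeyLogger above appends for each
# completed handshake; the 32-byte values below are dummies standing in for
# the real client random and master secret.
import binascii

client_random = binascii.hexlify(b"\x00" * 32)
master_secret = binascii.hexlify(b"\x11" * 32)
line = b"CLIENT_RANDOM %s %s\r\n" % (client_random, master_secret)
print(line)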
- """ - result = b'' - start = time.time() - while length == -1 or length > 0: - if length == -1 or length > self.BLOCKSIZE: - rlen = self.BLOCKSIZE - else: - rlen = length - try: - data = self.o.read(rlen) - except SSL.ZeroReturnError: - # TLS connection was shut down cleanly - break - except (SSL.WantWriteError, SSL.WantReadError): - # From the OpenSSL docs: - # If the underlying BIO is non-blocking, SSL_read() will also return when the - # underlying BIO could not satisfy the needs of SSL_read() to continue the - # operation. In this case a call to SSL_get_error with the return value of - # SSL_read() will yield SSL_ERROR_WANT_READ or SSL_ERROR_WANT_WRITE. - if (time.time() - start) < self.o.gettimeout(): - time.sleep(0.1) - continue - else: - raise exceptions.TcpTimeout() - except socket.timeout: - raise exceptions.TcpTimeout() - except socket.error as e: - raise exceptions.TcpDisconnect(str(e)) - except SSL.SysCallError as e: - if e.args == (-1, 'Unexpected EOF'): - break - raise exceptions.TlsException(str(e)) - except SSL.Error as e: - raise exceptions.TlsException(str(e)) - self.first_byte_timestamp = self.first_byte_timestamp or time.time() - if not data: - break - result += data - if length != -1: - length -= len(data) - self.add_log(result) - return result - - def readline(self, size=None): - result = b'' - bytes_read = 0 - while True: - if size is not None and bytes_read >= size: - break - ch = self.read(1) - bytes_read += 1 - if not ch: - break - else: - result += ch - if ch == b'\n': - break - return result - - def safe_read(self, length): - """ - Like .read, but is guaranteed to either return length bytes, or - raise an exception. - """ - result = self.read(length) - if length != -1 and len(result) != length: - if not result: - raise exceptions.TcpDisconnect() - else: - raise exceptions.TcpReadIncomplete( - "Expected %s bytes, got %s" % (length, len(result)) - ) - return result - - def peek(self, length): - """ - Tries to peek into the underlying file object. - - Returns: - Up to the next N bytes if peeking is successful. - - Raises: - exceptions.TcpException if there was an error with the socket - TlsException if there was an error with pyOpenSSL. - NotImplementedError if the underlying file object is not a [pyOpenSSL] socket - """ - if isinstance(self.o, socket_fileobject): - try: - return self.o._sock.recv(length, socket.MSG_PEEK) - except socket.error as e: - raise exceptions.TcpException(repr(e)) - elif isinstance(self.o, SSL.Connection): - try: - return self.o.recv(length, socket.MSG_PEEK) - except SSL.Error as e: - raise exceptions.TlsException(str(e)) - else: - raise NotImplementedError("Can only peek into (pyOpenSSL) sockets") - - -class Address(serializable.Serializable): - - """ - This class wraps an IPv4/IPv6 tuple to provide named attributes and - ipv6 information. 
- """ - - def __init__(self, address, use_ipv6=False): - self.address = tuple(address) - self.use_ipv6 = use_ipv6 - - def get_state(self): - return { - "address": self.address, - "use_ipv6": self.use_ipv6 - } - - def set_state(self, state): - self.address = state["address"] - self.use_ipv6 = state["use_ipv6"] - - @classmethod - def from_state(cls, state): - return Address(**state) - - @classmethod - def wrap(cls, t): - if isinstance(t, cls): - return t - else: - return cls(t) - - def __call__(self): - return self.address - - @property - def host(self): - return self.address[0] - - @property - def port(self): - return self.address[1] - - @property - def use_ipv6(self): - return self.family == socket.AF_INET6 - - @use_ipv6.setter - def use_ipv6(self, b): - self.family = socket.AF_INET6 if b else socket.AF_INET - - def __repr__(self): - return "{}:{}".format(self.host, self.port) - - def __eq__(self, other): - if not other: - return False - other = Address.wrap(other) - return (self.address, self.family) == (other.address, other.family) - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.address) ^ 42 # different hash than the tuple alone. - - -def ssl_read_select(rlist, timeout): - """ - This is a wrapper around select.select() which also works for SSL.Connections - by taking ssl_connection.pending() into account. - - Caveats: - If .pending() > 0 for any of the connections in rlist, we avoid the select syscall - and **will not include any other connections which may or may not be ready**. - - Args: - rlist: wait until ready for reading - - Returns: - subset of rlist which is ready for reading. - """ - return [ - conn for conn in rlist - if isinstance(conn, SSL.Connection) and conn.pending() > 0 - ] or select.select(rlist, (), (), timeout)[0] - - -def close_socket(sock): - """ - Does a hard close of a socket, without emitting a RST. - """ - try: - # We already indicate that we close our end. - # may raise "Transport endpoint is not connected" on Linux - sock.shutdown(socket.SHUT_WR) - - # Section 4.2.2.13 of RFC 1122 tells us that a close() with any pending - # readable data could lead to an immediate RST being sent (which is the - # case on Windows). - # http://ia600609.us.archive.org/22/items/TheUltimateSo_lingerPageOrWhyIsMyTcpNotReliable/the-ultimate-so_linger-page-or-why-is-my-tcp-not-reliable.html - # - # This in turn results in the following issue: If we send an error page - # to the client and then close the socket, the RST may be received by - # the client before the error page and the users sees a connection - # error rather than the error page. Thus, we try to empty the read - # buffer on Windows first. (see - # https://github.com/mitmproxy/mitmproxy/issues/527#issuecomment-93782988) - # - - if os.name == "nt": # pragma: no cover - # We cannot rely on the shutdown()-followed-by-read()-eof technique - # proposed by the page above: Some remote machines just don't send - # a TCP FIN, which would leave us in the unfortunate situation that - # recv() would block infinitely. As a workaround, we set a timeout - # here even if we are in blocking mode. - sock.settimeout(sock.gettimeout() or 20) - - # limit at a megabyte so that we don't read infinitely - for _ in range(1024 ** 3 // 4096): - # may raise a timeout/disconnect exception. - if not sock.recv(4096): - break - - # Now we can close the other half as well. 
- sock.shutdown(socket.SHUT_RD) - - except socket.error: - pass - - sock.close() - - -class _Connection: - - rbufsize = -1 - wbufsize = -1 - - def _makefile(self): - """ - Set up .rfile and .wfile attributes from .connection - """ - # Ideally, we would use the Buffered IO in Python 3 by default. - # Unfortunately, the implementation of .peek() is broken for n>1 bytes, - # as it may just return what's left in the buffer and not all the bytes we want. - # As a workaround, we just use unbuffered sockets directly. - # https://mail.python.org/pipermail/python-dev/2009-June/089986.html - self.rfile = Reader(socket.SocketIO(self.connection, "rb")) - self.wfile = Writer(socket.SocketIO(self.connection, "wb")) - - def __init__(self, connection): - if connection: - self.connection = connection - self.ip_address = Address(connection.getpeername()) - self._makefile() - else: - self.connection = None - self.ip_address = None - self.rfile = None - self.wfile = None - - self.ssl_established = False - self.finished = False - - def get_current_cipher(self): - if not self.ssl_established: - return None - - name = self.connection.get_cipher_name() - bits = self.connection.get_cipher_bits() - version = self.connection.get_cipher_version() - return name, bits, version - - def finish(self): - self.finished = True - # If we have an SSL connection, wfile.close == connection.close - # (We call _FileLike.set_descriptor(conn)) - # Closing the socket is not our task, therefore we don't call close - # then. - if not isinstance(self.connection, SSL.Connection): - if not getattr(self.wfile, "closed", False): - try: - self.wfile.flush() - self.wfile.close() - except exceptions.TcpDisconnect: - pass - - self.rfile.close() - else: - try: - self.connection.shutdown() - except SSL.Error: - pass - - def _create_ssl_context(self, - method=SSL_DEFAULT_METHOD, - options=SSL_DEFAULT_OPTIONS, - verify_options=SSL.VERIFY_NONE, - ca_path=None, - ca_pemfile=None, - cipher_list=None, - alpn_protos=None, - alpn_select=None, - alpn_select_callback=None, - sni=None, - ): - """ - Creates an SSL Context. 
- - :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, TLSv1_1_METHOD, or TLSv1_2_METHOD - :param options: A bit field consisting of OpenSSL.SSL.OP_* values - :param verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values - :param ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool - :param ca_pemfile: Path to a PEM formatted trusted CA certificate - :param cipher_list: A textual OpenSSL cipher list, see https://www.openssl.org/docs/apps/ciphers.html - :rtype : SSL.Context - """ - context = SSL.Context(method) - # Options (NO_SSLv2/3) - if options is not None: - context.set_options(options) - - # Verify Options (NONE/PEER and trusted CAs) - if verify_options is not None: - def verify_cert(conn, x509, errno, err_depth, is_cert_verified): - if not is_cert_verified: - self.ssl_verification_error = exceptions.InvalidCertificateException( - "Certificate Verification Error for {}: {} (errno: {}, depth: {})".format( - sni, - strutils.native(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"), - errno, - err_depth - ) - ) - return is_cert_verified - - context.set_verify(verify_options, verify_cert) - if ca_path is None and ca_pemfile is None: - ca_pemfile = certifi.where() - context.load_verify_locations(ca_pemfile, ca_path) - - # Workaround for - # https://github.com/pyca/pyopenssl/issues/190 - # https://github.com/mitmproxy/mitmproxy/issues/472 - # Options already set before are not cleared. - context.set_mode(SSL._lib.SSL_MODE_AUTO_RETRY) - - # Cipher List - if cipher_list: - try: - context.set_cipher_list(cipher_list) - - # TODO: maybe change this to with newer pyOpenSSL APIs - context.set_tmp_ecdh(OpenSSL.crypto.get_elliptic_curve('prime256v1')) - except SSL.Error as v: - raise exceptions.TlsException("SSL cipher specification error: %s" % str(v)) - - # SSLKEYLOGFILE - if log_ssl_key: - context.set_info_callback(log_ssl_key) - - if HAS_ALPN: - if alpn_protos is not None: - # advertise application layer protocols - context.set_alpn_protos(alpn_protos) - elif alpn_select is not None and alpn_select_callback is None: - # select application layer protocol - def alpn_select_callback(conn_, options): - if alpn_select in options: - return bytes(alpn_select) - else: # pragma no cover - return options[0] - context.set_alpn_select_callback(alpn_select_callback) - elif alpn_select_callback is not None and alpn_select is None: - context.set_alpn_select_callback(alpn_select_callback) - elif alpn_select_callback is not None and alpn_select is not None: - raise exceptions.TlsException("ALPN error: only define alpn_select (string) OR alpn_select_callback (method).") - - return context - - -class ConnectionCloser: - def __init__(self, conn): - self.conn = conn - self._canceled = False - - def pop(self): - """ - Cancel the current closer, and return a fresh one. 
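# Minimal standalone sketch of the ConnectionCloser pattern used below:
# close the resource on exit unless pop() handed responsibility to a new
# closer. An io.BytesIO stands in for a real connection.
import io

class Closer:
    def __init__(self, resource):
        self.resource = resource
        self._canceled = False

    def pop(self):
        self._canceled = True
        return Closer(self.resource)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        if not self._canceled:
            self.resource.close()

buf = io.BytesIO(b"data")
with Closer(buf) as closer:
    keep_alive = closer.pop()   # ownership moves on; buf is not closed on exit
print(buf.closed)               # False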
- """ - self._canceled = True - return ConnectionCloser(self.conn) - - def __enter__(self): - return self - - def __exit__(self, *args): - if not self._canceled: - self.conn.close() - - -class TCPClient(_Connection): - - def __init__(self, address, source_address=None, spoof_source_address=None): - super().__init__(None) - self.address = address - self.source_address = source_address - self.cert = None - self.server_certs = [] - self.ssl_verification_error = None # type: Optional[exceptions.InvalidCertificateException] - self.sni = None - self.spoof_source_address = spoof_source_address - - @property - def address(self): - return self.__address - - @address.setter - def address(self, address): - if address: - self.__address = Address.wrap(address) - else: - self.__address = None - - @property - def source_address(self): - return self.__source_address - - @source_address.setter - def source_address(self, source_address): - if source_address: - self.__source_address = Address.wrap(source_address) - else: - self.__source_address = None - - def close(self): - # Make sure to close the real socket, not the SSL proxy. - # OpenSSL is really good at screwing up, i.e. when trying to recv from a failed connection, - # it tries to renegotiate... - if isinstance(self.connection, SSL.Connection): - close_socket(self.connection._socket) - else: - close_socket(self.connection) - - def create_ssl_context(self, cert=None, alpn_protos=None, **sslctx_kwargs): - context = self._create_ssl_context( - alpn_protos=alpn_protos, - **sslctx_kwargs) - # Client Certs - if cert: - try: - context.use_privatekey_file(cert) - context.use_certificate_file(cert) - except SSL.Error as v: - raise exceptions.TlsException("SSL client certificate error: %s" % str(v)) - return context - - def convert_to_ssl(self, sni=None, alpn_protos=None, **sslctx_kwargs): - """ - cert: Path to a file containing both client cert and private key. 
- - options: A bit field consisting of OpenSSL.SSL.OP_* values - verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values - ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool - ca_pemfile: Path to a PEM formatted trusted CA certificate - """ - verification_mode = sslctx_kwargs.get('verify_options', None) - if verification_mode == SSL.VERIFY_PEER and not sni: - raise exceptions.TlsException("Cannot validate certificate hostname without SNI") - - context = self.create_ssl_context( - alpn_protos=alpn_protos, - sni=sni, - **sslctx_kwargs - ) - self.connection = SSL.Connection(context, self.connection) - if sni: - self.sni = sni - self.connection.set_tlsext_host_name(sni.encode("idna")) - self.connection.set_connect_state() - try: - self.connection.do_handshake() - except SSL.Error as v: - if self.ssl_verification_error: - raise self.ssl_verification_error - else: - raise exceptions.TlsException("SSL handshake error: %s" % repr(v)) - else: - # Fix for pre v1.0 OpenSSL, which doesn't throw an exception on - # certificate validation failure - if verification_mode == SSL.VERIFY_PEER and self.ssl_verification_error: - raise self.ssl_verification_error - - self.cert = certs.SSLCert(self.connection.get_peer_certificate()) - - # Keep all server certificates in a list - for i in self.connection.get_peer_cert_chain(): - self.server_certs.append(certs.SSLCert(i)) - - # Validate TLS Hostname - try: - crt = dict( - subjectAltName=[("DNS", x.decode("ascii", "strict")) for x in self.cert.altnames] - ) - if self.cert.cn: - crt["subject"] = [[["commonName", self.cert.cn.decode("ascii", "strict")]]] - if sni: - hostname = sni - else: - hostname = "no-hostname" - ssl_match_hostname.match_hostname(crt, hostname) - except (ValueError, ssl_match_hostname.CertificateError) as e: - self.ssl_verification_error = exceptions.InvalidCertificateException( - "Certificate Verification Error for {}: {}".format( - sni or repr(self.address), - str(e) - ) - ) - if verification_mode == SSL.VERIFY_PEER: - raise self.ssl_verification_error - - self.ssl_established = True - self.rfile.set_descriptor(self.connection) - self.wfile.set_descriptor(self.connection) - - def makesocket(self): - # some parties (cuckoo sandbox) need to hook this - return socket.socket(self.address.family, socket.SOCK_STREAM) - - def connect(self): - try: - connection = self.makesocket() - - if self.spoof_source_address: - try: - # 19 is `IP_TRANSPARENT`, which is only available on Python 3.3+ on some OSes - if not connection.getsockopt(socket.SOL_IP, 19): - connection.setsockopt(socket.SOL_IP, 19, 1) - except socket.error as e: - raise exceptions.TcpException( - "Failed to spoof the source address: " + e.strerror - ) - if self.source_address: - connection.bind(self.source_address()) - connection.connect(self.address()) - self.source_address = Address(connection.getsockname()) - except (socket.error, IOError) as err: - raise exceptions.TcpException( - 'Error connecting to "%s": %s' % - (self.address.host, err) - ) - self.connection = connection - self.ip_address = Address(connection.getpeername()) - self._makefile() - return ConnectionCloser(self) - - def settimeout(self, n): - self.connection.settimeout(n) - - def gettimeout(self): - return self.connection.gettimeout() - - def get_alpn_proto_negotiated(self): - if HAS_ALPN and self.ssl_established: - return self.connection.get_alpn_proto_negotiated() - else: - return b"" - - -class BaseHandler(_Connection): - - """ - The instantiator is expected to call the 
handle() and finish() methods. - """ - - def __init__(self, connection, address, server): - super().__init__(connection) - self.address = Address.wrap(address) - self.server = server - self.clientcert = None - - def create_ssl_context(self, - cert, key, - handle_sni=None, - request_client_cert=None, - chain_file=None, - dhparams=None, - extra_chain_certs=None, - **sslctx_kwargs): - """ - cert: A certs.SSLCert object or the path to a certificate - chain file. - - handle_sni: SNI handler, should take a connection object. Server - name can be retrieved like this: - - connection.get_servername() - - And you can specify the connection keys as follows: - - new_context = Context(TLSv1_METHOD) - new_context.use_privatekey(key) - new_context.use_certificate(cert) - connection.set_context(new_context) - - The request_client_cert argument requires some explanation. We're - supposed to be able to do this with no negative effects - if the - client has no cert to present, we're notified and proceed as usual. - Unfortunately, Android seems to have a bug (tested on 4.2.2) - when - an Android client is asked to present a certificate it does not - have, it hangs up, which is frankly bogus. Some time down the track - we may be able to make the proper behaviour the default again, but - until then we're conservative. - """ - - context = self._create_ssl_context(ca_pemfile=chain_file, **sslctx_kwargs) - - context.use_privatekey(key) - if isinstance(cert, certs.SSLCert): - context.use_certificate(cert.x509) - else: - context.use_certificate_chain_file(cert) - - if extra_chain_certs: - for i in extra_chain_certs: - context.add_extra_chain_cert(i.x509) - - if handle_sni: - # SNI callback happens during do_handshake() - context.set_tlsext_servername_callback(handle_sni) - - if request_client_cert: - def save_cert(conn_, cert, errno_, depth_, preverify_ok_): - self.clientcert = certs.SSLCert(cert) - # Return true to prevent cert verification error - return True - context.set_verify(SSL.VERIFY_PEER, save_cert) - - if dhparams: - SSL._lib.SSL_CTX_set_tmp_dh(context._context, dhparams) - - return context - - def convert_to_ssl(self, cert, key, **sslctx_kwargs): - """ - Convert connection to SSL. - For a list of parameters, see BaseHandler._create_ssl_context(...) 
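To make the handle_sni contract described in create_ssl_context() above concrete, here is a minimal sketch of such a callback; the method constant and file names are illustrative assumptions, not part of this change:

    from OpenSSL import SSL

    def handle_sni(connection):
        # Invoked by OpenSSL during do_handshake(); swap in a per-host context.
        sni = connection.get_servername()  # bytes such as b"example.com", or None
        if not sni:
            return
        new_context = SSL.Context(SSL.TLSv1_2_METHOD)
        new_context.use_privatekey_file("example.key")          # hypothetical paths
        new_context.use_certificate_chain_file("example.crt")
        connection.set_context(new_context)

The callback is registered via set_tlsext_servername_callback(), as create_ssl_context() does above when handle_sni is passed in.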
- """ - - context = self.create_ssl_context( - cert, - key, - **sslctx_kwargs) - self.connection = SSL.Connection(context, self.connection) - self.connection.set_accept_state() - try: - self.connection.do_handshake() - except SSL.Error as v: - raise exceptions.TlsException("SSL handshake error: %s" % repr(v)) - self.ssl_established = True - self.rfile.set_descriptor(self.connection) - self.wfile.set_descriptor(self.connection) - - def handle(self): # pragma: no cover - raise NotImplementedError - - def settimeout(self, n): - self.connection.settimeout(n) - - def get_alpn_proto_negotiated(self): - if HAS_ALPN and self.ssl_established: - return self.connection.get_alpn_proto_negotiated() - else: - return b"" - - -class Counter: - def __init__(self): - self._count = 0 - self._lock = threading.Lock() - - @property - def count(self): - with self._lock: - return self._count - - def __enter__(self): - with self._lock: - self._count += 1 - - def __exit__(self, *args): - with self._lock: - self._count -= 1 - - -class TCPServer: - request_queue_size = 20 - - def __init__(self, address): - self.address = Address.wrap(address) - self.__is_shut_down = threading.Event() - self.__shutdown_request = False - self.socket = socket.socket(self.address.family, socket.SOCK_STREAM) - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self.socket.bind(self.address()) - self.address = Address.wrap(self.socket.getsockname()) - self.socket.listen(self.request_queue_size) - self.handler_counter = Counter() - - def connection_thread(self, connection, client_address): - with self.handler_counter: - client_address = Address(client_address) - try: - self.handle_client_connection(connection, client_address) - except: - self.handle_error(connection, client_address) - finally: - close_socket(connection) - - def serve_forever(self, poll_interval=0.1): - self.__is_shut_down.clear() - try: - while not self.__shutdown_request: - try: - r, w_, e_ = select.select( - [self.socket], [], [], poll_interval) - except select.error as ex: # pragma: no cover - if ex[0] == EINTR: - continue - else: - raise - if self.socket in r: - connection, client_address = self.socket.accept() - t = basethread.BaseThread( - "TCPConnectionHandler (%s: %s:%s -> %s:%s)" % ( - self.__class__.__name__, - client_address[0], - client_address[1], - self.address.host, - self.address.port - ), - target=self.connection_thread, - args=(connection, client_address), - ) - t.setDaemon(1) - try: - t.start() - except threading.ThreadError: - self.handle_error(connection, Address(client_address)) - connection.close() - finally: - self.__shutdown_request = False - self.__is_shut_down.set() - - def shutdown(self): - self.__shutdown_request = True - self.__is_shut_down.wait() - self.socket.close() - self.handle_shutdown() - - def handle_error(self, connection_, client_address, fp=sys.stderr): - """ - Called when handle_client_connection raises an exception. - """ - # If a thread has persisted after interpreter exit, the module might be - # none. - if traceback: - exc = str(traceback.format_exc()) - print(u'-' * 40, file=fp) - print( - u"Error in processing of request from %s" % repr(client_address), file=fp) - print(exc, file=fp) - print(u'-' * 40, file=fp) - - def handle_client_connection(self, conn, client_address): # pragma: no cover - """ - Called after client connection. - """ - raise NotImplementedError - - def handle_shutdown(self): - """ - Called after server shutdown. 
- """ - - def wait_for_silence(self, timeout=5): - start = time.time() - while 1: - if time.time() - start >= timeout: - raise exceptions.Timeout( - "%s service threads still alive" % - self.handler_counter.count - ) - if self.handler_counter.count == 0: - return diff --git a/netlib/websockets/__init__.py b/netlib/websockets/__init__.py deleted file mode 100644 index 2d6f0a0c..00000000 --- a/netlib/websockets/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -from .frame import FrameHeader -from .frame import Frame -from .frame import OPCODE -from .frame import CLOSE_REASON -from .masker import Masker -from .utils import MAGIC -from .utils import VERSION -from .utils import client_handshake_headers -from .utils import server_handshake_headers -from .utils import check_handshake -from .utils import check_client_version -from .utils import create_server_nonce -from .utils import get_extensions -from .utils import get_protocol -from .utils import get_client_key -from .utils import get_server_accept - -__all__ = [ - "FrameHeader", - "Frame", - "OPCODE", - "CLOSE_REASON", - "Masker", - "MAGIC", - "VERSION", - "client_handshake_headers", - "server_handshake_headers", - "check_handshake", - "check_client_version", - "create_server_nonce", - "get_extensions", - "get_protocol", - "get_client_key", - "get_server_accept", -] diff --git a/netlib/websockets/frame.py b/netlib/websockets/frame.py deleted file mode 100644 index bc4ae43a..00000000 --- a/netlib/websockets/frame.py +++ /dev/null @@ -1,274 +0,0 @@ -import os -import struct -import io - -from netlib import tcp -from mitmproxy.utils import strutils -from mitmproxy.utils import bits -from mitmproxy.utils import human -from mitmproxy.types import bidi -from .masker import Masker - - -MAX_16_BIT_INT = (1 << 16) -MAX_64_BIT_INT = (1 << 64) - -DEFAULT = object() - -# RFC 6455, Section 5.2 - Base Framing Protocol -OPCODE = bidi.BiDi( - CONTINUE=0x00, - TEXT=0x01, - BINARY=0x02, - CLOSE=0x08, - PING=0x09, - PONG=0x0a -) - -# RFC 6455, Section 7.4.1 - Defined Status Codes -CLOSE_REASON = bidi.BiDi( - NORMAL_CLOSURE=1000, - GOING_AWAY=1001, - PROTOCOL_ERROR=1002, - UNSUPPORTED_DATA=1003, - RESERVED=1004, - RESERVED_NO_STATUS=1005, - RESERVED_ABNORMAL_CLOSURE=1006, - INVALID_PAYLOAD_DATA=1007, - POLICY_VIOLATION=1008, - MESSAGE_TOO_BIG=1009, - MANDATORY_EXTENSION=1010, - INTERNAL_ERROR=1011, - RESERVED_TLS_HANDHSAKE_FAILED=1015, -) - - -class FrameHeader: - - def __init__( - self, - opcode=OPCODE.TEXT, - payload_length=0, - fin=False, - rsv1=False, - rsv2=False, - rsv3=False, - masking_key=DEFAULT, - mask=DEFAULT, - length_code=DEFAULT - ): - if not 0 <= opcode < 2 ** 4: - raise ValueError("opcode must be 0-16") - self.opcode = opcode - self.payload_length = payload_length - self.fin = fin - self.rsv1 = rsv1 - self.rsv2 = rsv2 - self.rsv3 = rsv3 - - if length_code is DEFAULT: - self.length_code = self._make_length_code(self.payload_length) - else: - self.length_code = length_code - - if mask is DEFAULT and masking_key is DEFAULT: - self.mask = False - self.masking_key = b"" - elif mask is DEFAULT: - self.mask = 1 - self.masking_key = masking_key - elif masking_key is DEFAULT: - self.mask = mask - self.masking_key = os.urandom(4) - else: - self.mask = mask - self.masking_key = masking_key - - if self.masking_key and len(self.masking_key) != 4: - raise ValueError("Masking key must be 4 bytes.") - - @classmethod - def _make_length_code(self, length): - """ - A websockets frame contains an initial length_code, and an optional - extended length code to represent the 
actual length if length code is - larger than 125 - """ - if length <= 125: - return length - elif length >= 126 and length <= 65535: - return 126 - else: - return 127 - - def __repr__(self): - vals = [ - "ws frame:", - OPCODE.get_name(self.opcode, hex(self.opcode)).lower() - ] - flags = [] - for i in ["fin", "rsv1", "rsv2", "rsv3", "mask"]: - if getattr(self, i): - flags.append(i) - if flags: - vals.extend([":", "|".join(flags)]) - if self.masking_key: - vals.append(":key=%s" % repr(self.masking_key)) - if self.payload_length: - vals.append(" %s" % human.pretty_size(self.payload_length)) - return "".join(vals) - - def __bytes__(self): - first_byte = bits.setbit(0, 7, self.fin) - first_byte = bits.setbit(first_byte, 6, self.rsv1) - first_byte = bits.setbit(first_byte, 5, self.rsv2) - first_byte = bits.setbit(first_byte, 4, self.rsv3) - first_byte = first_byte | self.opcode - - second_byte = bits.setbit(self.length_code, 7, self.mask) - - b = bytes([first_byte, second_byte]) - - if self.payload_length < 126: - pass - elif self.payload_length < MAX_16_BIT_INT: - # '!H' pack as 16 bit unsigned short - # add 2 byte extended payload length - b += struct.pack('!H', self.payload_length) - elif self.payload_length < MAX_64_BIT_INT: - # '!Q' = pack as 64 bit unsigned long long - # add 8 bytes extended payload length - b += struct.pack('!Q', self.payload_length) - else: - raise ValueError("Payload length exceeds 64bit integer") - - if self.masking_key: - b += self.masking_key - return b - - @classmethod - def from_file(cls, fp): - """ - read a websockets frame header - """ - first_byte, second_byte = fp.safe_read(2) - fin = bits.getbit(first_byte, 7) - rsv1 = bits.getbit(first_byte, 6) - rsv2 = bits.getbit(first_byte, 5) - rsv3 = bits.getbit(first_byte, 4) - opcode = first_byte & 0xF - mask_bit = bits.getbit(second_byte, 7) - length_code = second_byte & 0x7F - - # payload_length > 125 indicates you need to read more bytes - # to get the actual payload length - if length_code <= 125: - payload_length = length_code - elif length_code == 126: - payload_length, = struct.unpack("!H", fp.safe_read(2)) - else: # length_code == 127: - payload_length, = struct.unpack("!Q", fp.safe_read(8)) - - # masking key only present if mask bit set - if mask_bit == 1: - masking_key = fp.safe_read(4) - else: - masking_key = None - - return cls( - fin=fin, - rsv1=rsv1, - rsv2=rsv2, - rsv3=rsv3, - opcode=opcode, - mask=mask_bit, - length_code=length_code, - payload_length=payload_length, - masking_key=masking_key, - ) - - def __eq__(self, other): - if isinstance(other, FrameHeader): - return bytes(self) == bytes(other) - return False - - -class Frame: - """ - Represents a single WebSockets frame. - Constructor takes human readable forms of the frame components. - from_bytes() reads from a file-like object to create a new Frame. 
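A few values make the 7/16/64-bit length scheme of _make_length_code above easier to read; the payload sizes are chosen only for illustration and use the FrameHeader class defined here:

    FrameHeader(payload_length=100).length_code     # 100: fits in the 7-bit field
    FrameHeader(payload_length=1000).length_code    # 126: a 16-bit extended length follows
    FrameHeader(payload_length=100000).length_code  # 127: a 64-bit extended length follows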
- - WebSockets Frame as defined in RFC6455 - - 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 - +-+-+-+-+-------+-+-------------+-------------------------------+ - |F|R|R|R| opcode|M| Payload len | Extended payload length | - |I|S|S|S| (4) |A| (7) | (16/64) | - |N|V|V|V| |S| | (if payload len==126/127) | - | |1|2|3| |K| | | - +-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - + - | Extended payload length continued, if payload len == 127 | - + - - - - - - - - - - - - - - - +-------------------------------+ - | |Masking-key, if MASK set to 1 | - +-------------------------------+-------------------------------+ - | Masking-key (continued) | Payload Data | - +-------------------------------- - - - - - - - - - - - - - - - + - : Payload Data continued ... : - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - | Payload Data continued ... | - +---------------------------------------------------------------+ - """ - - def __init__(self, payload=b"", **kwargs): - self.payload = payload - kwargs["payload_length"] = kwargs.get("payload_length", len(payload)) - self.header = FrameHeader(**kwargs) - - @classmethod - def from_bytes(cls, bytestring): - """ - Construct a websocket frame from an in-memory bytestring - to construct a frame from a stream of bytes, use from_file() directly - """ - return cls.from_file(tcp.Reader(io.BytesIO(bytestring))) - - def __repr__(self): - ret = repr(self.header) - if self.payload: - ret = ret + "\nPayload:\n" + strutils.bytes_to_escaped_str(self.payload) - return ret - - def __bytes__(self): - """ - Serialize the frame to wire format. Returns a string. - """ - b = bytes(self.header) - if self.header.masking_key: - b += Masker(self.header.masking_key)(self.payload) - else: - b += self.payload - return b - - @classmethod - def from_file(cls, fp): - """ - read a websockets frame sent by a server or client - - fp is a "file like" object that could be backed by a network - stream or a disk or an in memory stream reader - """ - header = FrameHeader.from_file(fp) - payload = fp.safe_read(header.payload_length) - - if header.mask == 1 and header.masking_key: - payload = Masker(header.masking_key)(payload) - - frame = cls(payload) - frame.header = header - return frame - - def __eq__(self, other): - if isinstance(other, Frame): - return bytes(self) == bytes(other) - return False diff --git a/netlib/websockets/masker.py b/netlib/websockets/masker.py deleted file mode 100644 index 47b1a688..00000000 --- a/netlib/websockets/masker.py +++ /dev/null @@ -1,25 +0,0 @@ -class Masker: - """ - Data sent from the server must be masked to prevent malicious clients - from sending data over the wire in predictable patterns. - - Servers do not have to mask data they send to the client. 
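Masking is a byte-wise XOR of the payload with the 4-byte key (RFC 6455, section 5.3, which also specifies that it is the client that masks the frames it sends), so applying the same key twice restores the original bytes. A small round-trip sketch with an arbitrary payload, using the Masker class defined here:

    import os

    key = os.urandom(4)                       # 4-byte masking key, as used by FrameHeader
    masked = Masker(key)(b"hello websocket")
    assert Masker(key)(masked) == b"hello websocket"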
- https://tools.ietf.org/html/rfc6455#section-5.3 - """ - - def __init__(self, key): - self.key = key - self.offset = 0 - - def mask(self, offset, data): - result = bytearray(data) - for i in range(len(data)): - result[i] ^= self.key[offset % 4] - offset += 1 - result = bytes(result) - return result - - def __call__(self, data): - ret = self.mask(self.offset, data) - self.offset += len(ret) - return ret diff --git a/netlib/websockets/utils.py b/netlib/websockets/utils.py deleted file mode 100644 index 98043662..00000000 --- a/netlib/websockets/utils.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -Collection of WebSockets Protocol utility functions (RFC6455) -Spec: https://tools.ietf.org/html/rfc6455 -""" - - -import base64 -import hashlib -import os - -from netlib import http -from mitmproxy.utils import strutils - -MAGIC = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' -VERSION = "13" - - -def client_handshake_headers(version=None, key=None, protocol=None, extensions=None): - """ - Create the headers for a valid HTTP upgrade request. If Key is not - specified, it is generated, and can be found in sec-websocket-key in - the returned header set. - - Returns an instance of http.Headers - """ - if version is None: - version = VERSION - if key is None: - key = base64.b64encode(os.urandom(16)).decode('ascii') - h = http.Headers( - connection="upgrade", - upgrade="websocket", - sec_websocket_version=version, - sec_websocket_key=key, - ) - if protocol is not None: - h['sec-websocket-protocol'] = protocol - if extensions is not None: - h['sec-websocket-extensions'] = extensions - return h - - -def server_handshake_headers(client_key, protocol=None, extensions=None): - """ - The server response is a valid HTTP 101 response. - - Returns an instance of http.Headers - """ - h = http.Headers( - connection="upgrade", - upgrade="websocket", - sec_websocket_accept=create_server_nonce(client_key), - ) - if protocol is not None: - h['sec-websocket-protocol'] = protocol - if extensions is not None: - h['sec-websocket-extensions'] = extensions - return h - - -def check_handshake(headers): - return ( - "upgrade" in headers.get("connection", "").lower() and - headers.get("upgrade", "").lower() == "websocket" and - (headers.get("sec-websocket-key") is not None or headers.get("sec-websocket-accept") is not None) - ) - - -def create_server_nonce(client_nonce): - return base64.b64encode(hashlib.sha1(strutils.always_bytes(client_nonce) + MAGIC).digest()) - - -def check_client_version(headers): - return headers.get("sec-websocket-version", "") == VERSION - - -def get_extensions(headers): - return headers.get("sec-websocket-extensions", None) - - -def get_protocol(headers): - return headers.get("sec-websocket-protocol", None) - - -def get_client_key(headers): - return headers.get("sec-websocket-key", None) - - -def get_server_accept(headers): - return headers.get("sec-websocket-accept", None) diff --git a/netlib/wsgi.py b/netlib/wsgi.py deleted file mode 100644 index 5a54cd70..00000000 --- a/netlib/wsgi.py +++ /dev/null @@ -1,166 +0,0 @@ -import time -import traceback -import urllib -import io - -from netlib import http -from netlib import tcp -from mitmproxy.utils import strutils - - -class ClientConn: - - def __init__(self, address): - self.address = tcp.Address.wrap(address) - - -class Flow: - - def __init__(self, address, request): - self.client_conn = ClientConn(address) - self.request = request - - -class Request: - - def __init__(self, scheme, method, path, http_version, headers, content): - self.scheme, self.method, 
self.path = scheme, method, path - self.headers, self.content = headers, content - self.http_version = http_version - - -def date_time_string(): - """Return the current date and time formatted for a message header.""" - WEEKS = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - MONTHS = [ - None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' - ] - now = time.time() - year, month, day, hh, mm, ss, wd, y_, z_ = time.gmtime(now) - s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - WEEKS[wd], - day, MONTHS[month], year, - hh, mm, ss - ) - return s - - -class WSGIAdaptor: - - def __init__(self, app, domain, port, sversion): - self.app, self.domain, self.port, self.sversion = app, domain, port, sversion - - def make_environ(self, flow, errsoc, **extra): - """ - Raises: - ValueError, if the content-encoding is invalid. - """ - path = strutils.native(flow.request.path, "latin-1") - if '?' in path: - path_info, query = strutils.native(path, "latin-1").split('?', 1) - else: - path_info = path - query = '' - environ = { - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': strutils.native(flow.request.scheme, "latin-1"), - 'wsgi.input': io.BytesIO(flow.request.content or b""), - 'wsgi.errors': errsoc, - 'wsgi.multithread': True, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': self.sversion, - 'REQUEST_METHOD': strutils.native(flow.request.method, "latin-1"), - 'SCRIPT_NAME': '', - 'PATH_INFO': urllib.parse.unquote(path_info), - 'QUERY_STRING': query, - 'CONTENT_TYPE': strutils.native(flow.request.headers.get('Content-Type', ''), "latin-1"), - 'CONTENT_LENGTH': strutils.native(flow.request.headers.get('Content-Length', ''), "latin-1"), - 'SERVER_NAME': self.domain, - 'SERVER_PORT': str(self.port), - 'SERVER_PROTOCOL': strutils.native(flow.request.http_version, "latin-1"), - } - environ.update(extra) - if flow.client_conn.address: - environ["REMOTE_ADDR"] = strutils.native(flow.client_conn.address.host, "latin-1") - environ["REMOTE_PORT"] = flow.client_conn.address.port - - for key, value in flow.request.headers.items(): - key = 'HTTP_' + strutils.native(key, "latin-1").upper().replace('-', '_') - if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): - environ[key] = value - return environ - - def error_page(self, soc, headers_sent, s): - """ - Make a best-effort attempt to write an error page. If headers are - already sent, we just bung the error into the page. - """ - c = """ - -

- <html>
- <h1>
- Internal Server Error
- </h1>
- <pre>{err}"</pre>
- </html>
- - """.format(err=s).strip().encode() - - if not headers_sent: - soc.write(b"HTTP/1.1 500 Internal Server Error\r\n") - soc.write(b"Content-Type: text/html\r\n") - soc.write("Content-Length: {length}\r\n".format(length=len(c)).encode()) - soc.write(b"\r\n") - soc.write(c) - - def serve(self, request, soc, **env): - state = dict( - response_started=False, - headers_sent=False, - status=None, - headers=None - ) - - def write(data): - if not state["headers_sent"]: - soc.write("HTTP/1.1 {status}\r\n".format(status=state["status"]).encode()) - headers = state["headers"] - if 'server' not in headers: - headers["Server"] = self.sversion - if 'date' not in headers: - headers["Date"] = date_time_string() - soc.write(bytes(headers)) - soc.write(b"\r\n") - state["headers_sent"] = True - if data: - soc.write(data) - soc.flush() - - def start_response(status, headers, exc_info=None): - if exc_info: - if state["headers_sent"]: - raise exc_info[1] - elif state["status"]: - raise AssertionError('Response already started') - state["status"] = status - state["headers"] = http.Headers([[strutils.always_bytes(k), strutils.always_bytes(v)] for k, v in headers]) - if exc_info: - self.error_page(soc, state["headers_sent"], traceback.format_tb(exc_info[2])) - state["headers_sent"] = True - - errs = io.BytesIO() - try: - dataiter = self.app( - self.make_environ(request, errs, **env), start_response - ) - for i in dataiter: - write(i) - if not state["headers_sent"]: - write(b"") - except Exception: - try: - s = traceback.format_exc() - errs.write(s.encode("utf-8", "replace")) - self.error_page(soc, state["headers_sent"], s) - except Exception: # pragma: no cover - pass - return errs.getvalue() diff --git a/pathod/language/http.py b/pathod/language/http.py index 32f990bb..8fcf9edc 100644 --- a/pathod/language/http.py +++ b/pathod/language/http.py @@ -2,12 +2,12 @@ import abc import pyparsing as pp -from netlib.http import url -import netlib.websockets -from netlib.http import status_codes, user_agents +from mitmproxy.net.http import url +import mitmproxy.net.websockets +from mitmproxy.net.http import status_codes, user_agents from . import base, exceptions, actions, message -# TODO: use netlib.semantics.protocol assemble method, +# TODO: use mitmproxy.net.semantics.protocol assemble method, # instead of duplicating the HTTP on-the-wire representation here. # see http2 language for an example @@ -198,7 +198,7 @@ class Response(_HTTPMessage): 1, StatusCode(101) ) - headers = netlib.websockets.server_handshake_headers( + headers = mitmproxy.net.websockets.server_handshake_headers( settings.websocket_key ) for i in headers.fields: @@ -310,7 +310,7 @@ class Request(_HTTPMessage): 1, Method("get") ) - for i in netlib.websockets.client_handshake_headers().fields: + for i in mitmproxy.net.websockets.client_handshake_headers().fields: if not get_header(i[0], self.headers): tokens.append( Header( diff --git a/pathod/language/http2.py b/pathod/language/http2.py index 35fc5ba8..08c5f6d7 100644 --- a/pathod/language/http2.py +++ b/pathod/language/http2.py @@ -1,7 +1,7 @@ import pyparsing as pp -from netlib import http -from netlib.http import user_agents, Headers +from mitmproxy.net import http +from mitmproxy.net.http import user_agents, Headers from . 
import base, message """ diff --git a/pathod/language/websockets.py b/pathod/language/websockets.py index d2291f82..a237381c 100644 --- a/pathod/language/websockets.py +++ b/pathod/language/websockets.py @@ -1,6 +1,6 @@ import random import string -import netlib.websockets +import mitmproxy.net.websockets from mitmproxy.utils import strutils import pyparsing as pp from . import base, generators, actions, message @@ -14,12 +14,12 @@ class WF(base.CaselessLiteral): class OpCode(base.IntField): names = { - "continue": netlib.websockets.OPCODE.CONTINUE, - "text": netlib.websockets.OPCODE.TEXT, - "binary": netlib.websockets.OPCODE.BINARY, - "close": netlib.websockets.OPCODE.CLOSE, - "ping": netlib.websockets.OPCODE.PING, - "pong": netlib.websockets.OPCODE.PONG, + "continue": mitmproxy.net.websockets.OPCODE.CONTINUE, + "text": mitmproxy.net.websockets.OPCODE.TEXT, + "binary": mitmproxy.net.websockets.OPCODE.BINARY, + "close": mitmproxy.net.websockets.OPCODE.CLOSE, + "ping": mitmproxy.net.websockets.OPCODE.PING, + "pong": mitmproxy.net.websockets.OPCODE.PONG, } max = 15 preamble = "c" @@ -215,11 +215,11 @@ class WebsocketFrame(message.Message): v = getattr(self, i, None) if v is not None: frameparts[i] = v.value - frame = netlib.websockets.FrameHeader(**frameparts) + frame = mitmproxy.net.websockets.FrameHeader(**frameparts) vals = [bytes(frame)] if bodygen: if frame.masking_key and not self.rawbody: - masker = netlib.websockets.Masker(frame.masking_key) + masker = mitmproxy.net.websockets.Masker(frame.masking_key) vals.append( generators.TransformGenerator( bodygen, diff --git a/pathod/pathoc.py b/pathod/pathoc.py index 0978277a..066c330c 100644 --- a/pathod/pathoc.py +++ b/pathod/pathoc.py @@ -13,12 +13,12 @@ import logging from mitmproxy.test.tutils import treq from mitmproxy.utils import strutils -from netlib import tcp +from mitmproxy.net import tcp from mitmproxy import certs -from netlib import websockets -from netlib import socks +from mitmproxy.net import websockets +from mitmproxy.net import socks from mitmproxy import exceptions -from netlib.http import http1 +from mitmproxy.net.http import http1 from mitmproxy.types import basethread from pathod import log diff --git a/pathod/pathoc_cmdline.py b/pathod/pathoc_cmdline.py index 2b4b9f9c..3b738d47 100644 --- a/pathod/pathoc_cmdline.py +++ b/pathod/pathoc_cmdline.py @@ -3,9 +3,9 @@ import argparse import os import os.path -from netlib import tcp +from mitmproxy.net import tcp from mitmproxy import version -from netlib.http import user_agents +from mitmproxy.net.http import user_agents from . import pathoc, language diff --git a/pathod/pathod.py b/pathod/pathod.py index 746998c5..a8658361 100644 --- a/pathod/pathod.py +++ b/pathod/pathod.py @@ -4,9 +4,9 @@ import os import sys import threading -from netlib import tcp +from mitmproxy.net import tcp from mitmproxy import certs as mcerts -from netlib import websockets +from mitmproxy.net import websockets from mitmproxy import version import urllib diff --git a/pathod/pathod_cmdline.py b/pathod/pathod_cmdline.py index 8d6ee2b6..ef1e983f 100644 --- a/pathod/pathod_cmdline.py +++ b/pathod/pathod_cmdline.py @@ -4,7 +4,7 @@ import os import os.path import re -from netlib import tcp +from mitmproxy.net import tcp from mitmproxy.utils import human from mitmproxy import version from . 
import pathod diff --git a/pathod/protocols/http.py b/pathod/protocols/http.py index a20a58a1..4387b4fb 100644 --- a/pathod/protocols/http.py +++ b/pathod/protocols/http.py @@ -1,6 +1,6 @@ from mitmproxy import version from mitmproxy import exceptions -from netlib.http import http1 +from mitmproxy.net.http import http1 from .. import language diff --git a/pathod/protocols/http2.py b/pathod/protocols/http2.py index 838469d6..118163d2 100644 --- a/pathod/protocols/http2.py +++ b/pathod/protocols/http2.py @@ -4,10 +4,10 @@ import time import hyperframe.frame from hpack.hpack import Encoder, Decoder -from netlib.http import http2 -import netlib.http.headers -import netlib.http.response -import netlib.http.request +from mitmproxy.net.http import http2 +import mitmproxy.net.http.headers +import mitmproxy.net.http.response +import mitmproxy.net.http.request from mitmproxy.types import bidi from .. import language @@ -100,7 +100,7 @@ class HTTP2StateProtocol: first_line_format, method, scheme, host, port, path = http2.parse_headers(headers) - request = netlib.http.request.Request( + request = mitmproxy.net.http.request.Request( first_line_format, method, scheme, @@ -148,7 +148,7 @@ class HTTP2StateProtocol: else: timestamp_end = None - response = netlib.http.response.Response( + response = mitmproxy.net.http.response.Response( b"HTTP/2.0", int(headers.get(':status', 502)), b'', @@ -162,15 +162,15 @@ class HTTP2StateProtocol: return response def assemble(self, message): - if isinstance(message, netlib.http.request.Request): + if isinstance(message, mitmproxy.net.http.request.Request): return self.assemble_request(message) - elif isinstance(message, netlib.http.response.Response): + elif isinstance(message, mitmproxy.net.http.response.Response): return self.assemble_response(message) else: raise ValueError("HTTP message not supported.") def assemble_request(self, request): - assert isinstance(request, netlib.http.request.Request) + assert isinstance(request, mitmproxy.net.http.request.Request) authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host if self.tcp_handler.address.port != 443: @@ -194,7 +194,7 @@ class HTTP2StateProtocol: self._create_body(request.body, stream_id))) def assemble_response(self, response): - assert isinstance(response, netlib.http.response.Response) + assert isinstance(response, mitmproxy.net.http.response.Response) headers = response.headers.copy() @@ -394,7 +394,7 @@ class HTTP2StateProtocol: else: self._handle_unexpected_frame(frm) - headers = netlib.http.headers.Headers( + headers = mitmproxy.net.http.headers.Headers( [[k, v] for k, v in self.decoder.decode(header_blocks, raw=True)] ) diff --git a/pathod/protocols/websockets.py b/pathod/protocols/websockets.py index 585a48e3..00ae5aa8 100644 --- a/pathod/protocols/websockets.py +++ b/pathod/protocols/websockets.py @@ -1,6 +1,6 @@ import time -from netlib import websockets +from mitmproxy.net import websockets from pathod import language from mitmproxy import exceptions diff --git a/release/README.mkd b/release/README.mkd index db2ef970..c5505431 100644 --- a/release/README.mkd +++ b/release/README.mkd @@ -7,7 +7,7 @@ - Only if an emergency bugfix is needed, we push a new `0.x.y` bugfix release for a single project. 
This matches with what we do in `setup.py`: - "netlib>=%s, <%s" % (version.MINORVERSION, version.NEXT_MINORVERSION) + "mitmproxy>=%s, <%s" % (version.MINORVERSION, version.NEXT_MINORVERSION) # Release Checklist diff --git a/release/rtool.py b/release/rtool.py index 3ed9157e..fbca40e2 100755 --- a/release/rtool.py +++ b/release/rtool.py @@ -48,7 +48,7 @@ VENV_PIP = join(VENV_DIR, VENV_BIN, "pip") VENV_PYINSTALLER = join(VENV_DIR, VENV_BIN, "pyinstaller") # Project Configuration -VERSION_FILE = join(ROOT_DIR, "netlib", "version.py") +VERSION_FILE = join(ROOT_DIR, "mitmproxy", "version.py") PROJECT_NAME = "mitmproxy" PYTHON_VERSION = "py2.py3" BDISTS = { diff --git a/setup.py b/setup.py index 9981ea6c..1351ba73 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,6 @@ setup( packages=find_packages(include=[ "mitmproxy", "mitmproxy.*", "pathod", "pathod.*", - "netlib", "netlib.*" ]), include_package_data=True, entry_points={ diff --git a/test/mitmproxy/net/__init__.py b/test/mitmproxy/net/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/mitmproxy/net/data/clientcert/.gitignore b/test/mitmproxy/net/data/clientcert/.gitignore new file mode 100644 index 00000000..07bc53d2 --- /dev/null +++ b/test/mitmproxy/net/data/clientcert/.gitignore @@ -0,0 +1,3 @@ +client.crt +client.key +client.req diff --git a/test/mitmproxy/net/data/clientcert/client.cnf b/test/mitmproxy/net/data/clientcert/client.cnf new file mode 100644 index 00000000..5046a944 --- /dev/null +++ b/test/mitmproxy/net/data/clientcert/client.cnf @@ -0,0 +1,5 @@ +[ ssl_client ] +basicConstraints = CA:FALSE +nsCertType = client +keyUsage = digitalSignature, keyEncipherment +extendedKeyUsage = clientAuth diff --git a/test/mitmproxy/net/data/clientcert/client.pem b/test/mitmproxy/net/data/clientcert/client.pem new file mode 100644 index 00000000..4927bca2 --- /dev/null +++ b/test/mitmproxy/net/data/clientcert/client.pem @@ -0,0 +1,42 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAzCpoRjSTfIN24kkNap/GYmP9zVWj0Gk8R5BB/PvvN0OB1Zk0 +EEYPsWCcuhEdK0ehiDZX030doF0DOncKKa6mop/d0x2o+ts42peDhZM6JNUrm6d+ +ZWQVtio33mpp77UMhR093vaA+ExDnmE26kBTVijJ1+fRAVDXG/cmQINEri91Kk/G +3YJ5e45UrohGI5seBZ4vV0xbHtmczFRhYFlGOvYsoIe4Lvz/eFS2pIrTIpYQ2VM/ +SQQl+JFy+NlQRsWG2NrxtKOzMnnDE7YN4I3z5D5eZFo1EtwZ48LNCeSwrEOdfuzP +G5q5qbs5KpE/x85H9umuRwSCIArbMwBYV8a8JwIDAQABAoIBAFE3FV/IDltbmHEP +iky93hbJm+6QgKepFReKpRVTyqb7LaygUvueQyPWQMIriKTsy675nxo8DQr7tQsO +y3YlSZgra/xNMikIB6e82c7K8DgyrDQw/rCqjZB3Xt4VCqsWJDLXnQMSn98lx0g7 +d7Lbf8soUpKWXqfdVpSDTi4fibSX6kshXyfSTpcz4AdoncEpViUfU1xkEEmZrjT8 +1GcCsDC41xdNmzCpqRuZX7DKSFRoB+0hUzsC1oiqM7FD5kixonRd4F5PbRXImIzt +6YCsT2okxTA04jX7yByis7LlOLTlkmLtKQYuc3erOFvwx89s4vW+AeFei+GGNitn +tHfSwbECgYEA7SzV+nN62hAERHlg8cEQT4TxnsWvbronYWcc/ev44eHSPDWL5tPi +GHfSbW6YAq5Wa0I9jMWfXyhOYEC3MZTC5EEeLOB71qVrTwcy/sY66rOrcgjFI76Q +5JFHQ4wy3SWU50KxE0oWJO9LIowprG+pW1vzqC3VF0T7q0FqESrY4LUCgYEA3F7Z +80ndnCUlooJAb+Hfotv7peFf1o6+m1PTRcz1lLnVt5R5lXj86kn+tXEpYZo1RiGR +2rE2N0seeznWCooakHcsBN7/qmFIhhooJNF7yW+JP2I4P2UV5+tJ+8bcs/voUkQD +1x+rGOuMn8nvHBd2+Vharft8eGL2mgooPVI2XusCgYEAlMZpO3+w8pTVeHaDP2MR +7i/AuQ3cbCLNjSX3Y7jgGCFllWspZRRIYXzYPNkA9b2SbBnTLjjRLgnEkFBIGgvs +7O2EFjaCuDRvydUEQhjq4ErwIsopj7B8h0QyZcbOKTbn3uFQ3n68wVJx2Sv/ADHT +FIHrp/WIE96r19Niy34LKXkCgYB2W59VsuOKnMz01l5DeR5C+0HSWxS9SReIl2IO +yEFSKullWyJeLIgyUaGy0990430feKI8whcrZXYumuah7IDN/KOwzhCk8vEfzWao +N7bzfqtJVrh9HA7C7DVlO+6H4JFrtcoWPZUIomJ549w/yz6EN3ckoMC+a/Ck1TW9 +ka1QFwKBgQCywG6TrZz0UmOjyLQZ+8Q4uvZklSW5NAKBkNnyuQ2kd5rzyYgMPE8C 
+Er8T88fdVIKvkhDyHhwcI7n58xE5Gr7wkwsrk/Hbd9/ZB2GgAPY3cATskK1v1McU +YeX38CU0fUS4aoy26hWQXkViB47IGQ3jWo3ZCtzIJl8DI9/RsBWTnw== +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICYDCCAckCAQEwDQYJKoZIhvcNAQEFBQAwKDESMBAGA1UEAxMJbWl0bXByb3h5 +MRIwEAYDVQQKEwltaXRtcHJveHkwHhcNMTMwMTIwMDEwODEzWhcNMTUxMDE3MDEw +ODEzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UE +ChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAzCpoRjSTfIN24kkNap/GYmP9zVWj0Gk8R5BB/PvvN0OB1Zk0 +EEYPsWCcuhEdK0ehiDZX030doF0DOncKKa6mop/d0x2o+ts42peDhZM6JNUrm6d+ +ZWQVtio33mpp77UMhR093vaA+ExDnmE26kBTVijJ1+fRAVDXG/cmQINEri91Kk/G +3YJ5e45UrohGI5seBZ4vV0xbHtmczFRhYFlGOvYsoIe4Lvz/eFS2pIrTIpYQ2VM/ +SQQl+JFy+NlQRsWG2NrxtKOzMnnDE7YN4I3z5D5eZFo1EtwZ48LNCeSwrEOdfuzP +G5q5qbs5KpE/x85H9umuRwSCIArbMwBYV8a8JwIDAQABMA0GCSqGSIb3DQEBBQUA +A4GBAFvI+cd47B85PQ970n2dU/PlA2/Hb1ldrrXh2guR4hX6vYx/uuk5yRI/n0Rd +KOXJ3czO0bd2Fpe3ZoNpkW0pOSDej/Q+58ScuJd0gWCT/Sh1eRk6ZdC0kusOuWoY +bPOPMkG45LPgUMFOnZEsfJP6P5mZIxlbCvSMFC25nPHWlct7 +-----END CERTIFICATE----- diff --git a/test/mitmproxy/net/data/clientcert/make b/test/mitmproxy/net/data/clientcert/make new file mode 100644 index 00000000..d1caea81 --- /dev/null +++ b/test/mitmproxy/net/data/clientcert/make @@ -0,0 +1,8 @@ +#!/bin/sh + +openssl genrsa -out client.key 2048 +openssl req -key client.key -new -out client.req +openssl x509 -req -days 365 -in client.req -signkey client.key -out client.crt -extfile client.cnf -extensions ssl_client +openssl x509 -req -days 1000 -in client.req -CA ~/.mitmproxy/mitmproxy-ca.pem -CAkey ~/.mitmproxy/mitmproxy-ca.pem -set_serial 00001 -out client.crt -extensions ssl_client +cat client.key client.crt > client.pem +openssl x509 -text -noout -in client.pem diff --git a/test/mitmproxy/net/data/dercert b/test/mitmproxy/net/data/dercert new file mode 100644 index 00000000..370252af Binary files /dev/null and b/test/mitmproxy/net/data/dercert differ diff --git a/test/mitmproxy/net/data/dhparam.pem b/test/mitmproxy/net/data/dhparam.pem new file mode 100644 index 00000000..afb41672 --- /dev/null +++ b/test/mitmproxy/net/data/dhparam.pem @@ -0,0 +1,13 @@ +-----BEGIN DH PARAMETERS----- +MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 +O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv +j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ +Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB +chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC +ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq +o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX +IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv +A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 +6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I +rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= +-----END DH PARAMETERS----- diff --git a/test/mitmproxy/net/data/htpasswd b/test/mitmproxy/net/data/htpasswd new file mode 100644 index 00000000..54c95b8c --- /dev/null +++ b/test/mitmproxy/net/data/htpasswd @@ -0,0 +1 @@ +test:$apr1$/LkYxy3x$WI4.YbiJlu537jLGEW2eu1 diff --git a/test/mitmproxy/net/data/server.crt b/test/mitmproxy/net/data/server.crt new file mode 100644 index 00000000..68f61bac --- /dev/null +++ b/test/mitmproxy/net/data/server.crt @@ -0,0 +1,14 @@ +-----BEGIN CERTIFICATE----- +MIICOzCCAaQCCQDC7f5GsEpo9jANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQGEwJO +WjEOMAwGA1UECBMFT3RhZ28xEDAOBgNVBAcTB0R1bmVkaW4xDzANBgNVBAoTBm5l 
+dGxpYjEPMA0GA1UECxMGbmV0bGliMQ8wDQYDVQQDEwZuZXRsaWIwHhcNMTIwNjI0 +MjI0MTU0WhcNMjIwNjIyMjI0MTU0WjBiMQswCQYDVQQGEwJOWjEOMAwGA1UECBMF +T3RhZ28xEDAOBgNVBAcTB0R1bmVkaW4xDzANBgNVBAoTBm5ldGxpYjEPMA0GA1UE +CxMGbmV0bGliMQ8wDQYDVQQDEwZuZXRsaWIwgZ8wDQYJKoZIhvcNAQEBBQADgY0A +MIGJAoGBALJSVEl9y3QUSYuXTH0UjBOPQgS0nHmNWej9hjqnA0KWvEnGY+c6yQeP +/rmwswlKw1iVV5o8kRK9Wej88YWQl/hl/xruyeJgGic0+yqY/FcueZxRudwBcWu2 +7+46aEftwLLRF0GwHZxX/HwWME+TcCXGpXGSG2qs921M4iVeBn5hAgMBAAEwDQYJ +KoZIhvcNAQEFBQADgYEAODZCihEv2yr8zmmQZDrfqg2ChxAoOXWF5+W2F/0LAUBf +2bHP+K4XE6BJWmadX1xKngj7SWrhmmTDp1gBAvXURoDaScOkB1iOCOHoIyalscTR +0FvSHKqFF8fgSlfqS6eYaSbXU3zQolvwP+URzIVnGDqgQCWPtjMqLD3Kd5tuwos= +-----END CERTIFICATE----- diff --git a/test/mitmproxy/net/data/server.key b/test/mitmproxy/net/data/server.key new file mode 100644 index 00000000..b1b658ab --- /dev/null +++ b/test/mitmproxy/net/data/server.key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQCyUlRJfct0FEmLl0x9FIwTj0IEtJx5jVno/YY6pwNClrxJxmPn +OskHj/65sLMJSsNYlVeaPJESvVno/PGFkJf4Zf8a7sniYBonNPsqmPxXLnmcUbnc +AXFrtu/uOmhH7cCy0RdBsB2cV/x8FjBPk3AlxqVxkhtqrPdtTOIlXgZ+YQIDAQAB +AoGAQEpGcSiVTYhy64zk2sOprPOdTa0ALSK1I7cjycmk90D5KXAJXLho+f0ETVZT +dioqO6m8J7NmamcyHznyqcDzyNRqD2hEBDGVRJWmpOjIER/JwWLNNbpeVjsMHV8I +40P5rZMOhBPYlwECSC5NtMwaN472fyGNNze8u37IZKiER/ECQQDe1iY5AG3CgkP3 +tEZB3Vtzcn4PoOr3Utyn1YER34lPqAmeAsWUhmAVEfR3N1HDe1VFD9s2BidhBn1a +/Bgqxz4DAkEAzNw0m+uO0WkD7aEYRBW7SbXCX+3xsbVToIWC1jXFG+XDzSWn++c1 +DMXEElzEJxPDA+FzQUvRTml4P92bTAbGywJAS9H7wWtm7Ubbj33UZfbGdhqfz/uF +109naufXedhgZS0c0JnK1oV+Tc0FLEczV9swIUaK5O/lGDtYDcw3AN84NwJBAIw5 +/1jrOOtm8uVp6+5O4dBmthJsEZEPCZtLSG/Qhoe+EvUN3Zq0fL+tb7USAsKs6ERz +wizj9PWzhDhTPMYhrVkCQGIponZHx6VqiFyLgYUH9+gDTjBhYyI+6yMTYzcRweyL +9Suc2NkS3X2Lp+wCjvVZdwGtStp6Vo8z02b3giIsAIY= +-----END RSA PRIVATE KEY----- diff --git a/test/mitmproxy/net/data/text_cert b/test/mitmproxy/net/data/text_cert new file mode 100644 index 00000000..36ca33b9 --- /dev/null +++ b/test/mitmproxy/net/data/text_cert @@ -0,0 +1,145 @@ +-----BEGIN CERTIFICATE----- +MIIadTCCGd6gAwIBAgIGR09PUAFtMA0GCSqGSIb3DQEBBQUAMEYxCzAJBgNVBAYT +AlVTMRMwEQYDVQQKEwpHb29nbGUgSW5jMSIwIAYDVQQDExlHb29nbGUgSW50ZXJu +ZXQgQXV0aG9yaXR5MB4XDTEyMDExNzEyNTUwNFoXDTEzMDExNzEyNTUwNFowTDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExEzARBgNVBAoTCkdvb2ds +ZSBJbmMxEzARBgNVBAMTCmdvb2dsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0A +MIGJAoGBALofcxR2fud5cyFIeld9pj2vGB5GH0y9tmAYa5t33xbJguKKX/el3tXA +KMNiT1SZzu8ELJ1Ey0GcBAgHA9jVPQd0LGdbEtNIxjblAsWAD/FZlSt8X87h7C5w +2JSefOani0qgQqU6sTdsaCUGZ+Eu7D0lBfT5/Vnl2vV+zI3YmDlpAgMBAAGjghhm +MIIYYjAdBgNVHQ4EFgQUL3+JeC/oL9jZhTp3F550LautzV8wHwYDVR0jBBgwFoAU +v8Aw6/VDET5nup6R+/xq2uNrEiQwWwYDVR0fBFQwUjBQoE6gTIZKaHR0cDovL3d3 +dy5nc3RhdGljLmNvbS9Hb29nbGVJbnRlcm5ldEF1dGhvcml0eS9Hb29nbGVJbnRl +cm5ldEF1dGhvcml0eS5jcmwwZgYIKwYBBQUHAQEEWjBYMFYGCCsGAQUFBzAChkpo +dHRwOi8vd3d3LmdzdGF0aWMuY29tL0dvb2dsZUludGVybmV0QXV0aG9yaXR5L0dv +b2dsZUludGVybmV0QXV0aG9yaXR5LmNydDCCF1kGA1UdEQSCF1AwghdMggpnb29n +bGUuY29tggwqLmdvb2dsZS5jb22CCyouZ29vZ2xlLmFjggsqLmdvb2dsZS5hZIIL +Ki5nb29nbGUuYWWCCyouZ29vZ2xlLmFmggsqLmdvb2dsZS5hZ4ILKi5nb29nbGUu +YW2CCyouZ29vZ2xlLmFzggsqLmdvb2dsZS5hdIILKi5nb29nbGUuYXqCCyouZ29v +Z2xlLmJhggsqLmdvb2dsZS5iZYILKi5nb29nbGUuYmaCCyouZ29vZ2xlLmJnggsq +Lmdvb2dsZS5iaYILKi5nb29nbGUuYmqCCyouZ29vZ2xlLmJzggsqLmdvb2dsZS5i +eYILKi5nb29nbGUuY2GCDCouZ29vZ2xlLmNhdIILKi5nb29nbGUuY2OCCyouZ29v +Z2xlLmNkggsqLmdvb2dsZS5jZoILKi5nb29nbGUuY2eCCyouZ29vZ2xlLmNoggsq +Lmdvb2dsZS5jaYILKi5nb29nbGUuY2yCCyouZ29vZ2xlLmNtggsqLmdvb2dsZS5j +boIOKi5nb29nbGUuY28uYW+CDiouZ29vZ2xlLmNvLmJ3gg4qLmdvb2dsZS5jby5j 
+a4IOKi5nb29nbGUuY28uY3KCDiouZ29vZ2xlLmNvLmh1gg4qLmdvb2dsZS5jby5p +ZIIOKi5nb29nbGUuY28uaWyCDiouZ29vZ2xlLmNvLmltgg4qLmdvb2dsZS5jby5p +boIOKi5nb29nbGUuY28uamWCDiouZ29vZ2xlLmNvLmpwgg4qLmdvb2dsZS5jby5r +ZYIOKi5nb29nbGUuY28ua3KCDiouZ29vZ2xlLmNvLmxzgg4qLmdvb2dsZS5jby5t +YYIOKi5nb29nbGUuY28ubXqCDiouZ29vZ2xlLmNvLm56gg4qLmdvb2dsZS5jby50 +aIIOKi5nb29nbGUuY28udHqCDiouZ29vZ2xlLmNvLnVngg4qLmdvb2dsZS5jby51 +a4IOKi5nb29nbGUuY28udXqCDiouZ29vZ2xlLmNvLnZlgg4qLmdvb2dsZS5jby52 +aYIOKi5nb29nbGUuY28uemGCDiouZ29vZ2xlLmNvLnptgg4qLmdvb2dsZS5jby56 +d4IPKi5nb29nbGUuY29tLmFmgg8qLmdvb2dsZS5jb20uYWeCDyouZ29vZ2xlLmNv +bS5haYIPKi5nb29nbGUuY29tLmFygg8qLmdvb2dsZS5jb20uYXWCDyouZ29vZ2xl +LmNvbS5iZIIPKi5nb29nbGUuY29tLmJogg8qLmdvb2dsZS5jb20uYm6CDyouZ29v +Z2xlLmNvbS5ib4IPKi5nb29nbGUuY29tLmJygg8qLmdvb2dsZS5jb20uYnmCDyou +Z29vZ2xlLmNvbS5ieoIPKi5nb29nbGUuY29tLmNugg8qLmdvb2dsZS5jb20uY2+C +DyouZ29vZ2xlLmNvbS5jdYIPKi5nb29nbGUuY29tLmN5gg8qLmdvb2dsZS5jb20u +ZG+CDyouZ29vZ2xlLmNvbS5lY4IPKi5nb29nbGUuY29tLmVngg8qLmdvb2dsZS5j +b20uZXSCDyouZ29vZ2xlLmNvbS5maoIPKi5nb29nbGUuY29tLmdlgg8qLmdvb2ds +ZS5jb20uZ2iCDyouZ29vZ2xlLmNvbS5naYIPKi5nb29nbGUuY29tLmdygg8qLmdv +b2dsZS5jb20uZ3SCDyouZ29vZ2xlLmNvbS5oa4IPKi5nb29nbGUuY29tLmlxgg8q +Lmdvb2dsZS5jb20uam2CDyouZ29vZ2xlLmNvbS5qb4IPKi5nb29nbGUuY29tLmto +gg8qLmdvb2dsZS5jb20ua3eCDyouZ29vZ2xlLmNvbS5sYoIPKi5nb29nbGUuY29t +Lmx5gg8qLmdvb2dsZS5jb20ubXSCDyouZ29vZ2xlLmNvbS5teIIPKi5nb29nbGUu +Y29tLm15gg8qLmdvb2dsZS5jb20ubmGCDyouZ29vZ2xlLmNvbS5uZoIPKi5nb29n +bGUuY29tLm5ngg8qLmdvb2dsZS5jb20ubmmCDyouZ29vZ2xlLmNvbS5ucIIPKi5n +b29nbGUuY29tLm5ygg8qLmdvb2dsZS5jb20ub22CDyouZ29vZ2xlLmNvbS5wYYIP +Ki5nb29nbGUuY29tLnBlgg8qLmdvb2dsZS5jb20ucGiCDyouZ29vZ2xlLmNvbS5w +a4IPKi5nb29nbGUuY29tLnBsgg8qLmdvb2dsZS5jb20ucHKCDyouZ29vZ2xlLmNv +bS5weYIPKi5nb29nbGUuY29tLnFhgg8qLmdvb2dsZS5jb20ucnWCDyouZ29vZ2xl +LmNvbS5zYYIPKi5nb29nbGUuY29tLnNigg8qLmdvb2dsZS5jb20uc2eCDyouZ29v +Z2xlLmNvbS5zbIIPKi5nb29nbGUuY29tLnN2gg8qLmdvb2dsZS5jb20udGqCDyou +Z29vZ2xlLmNvbS50boIPKi5nb29nbGUuY29tLnRygg8qLmdvb2dsZS5jb20udHeC +DyouZ29vZ2xlLmNvbS51YYIPKi5nb29nbGUuY29tLnV5gg8qLmdvb2dsZS5jb20u +dmOCDyouZ29vZ2xlLmNvbS52ZYIPKi5nb29nbGUuY29tLnZuggsqLmdvb2dsZS5j +doILKi5nb29nbGUuY3qCCyouZ29vZ2xlLmRlggsqLmdvb2dsZS5kaoILKi5nb29n +bGUuZGuCCyouZ29vZ2xlLmRtggsqLmdvb2dsZS5keoILKi5nb29nbGUuZWWCCyou +Z29vZ2xlLmVzggsqLmdvb2dsZS5maYILKi5nb29nbGUuZm2CCyouZ29vZ2xlLmZy +ggsqLmdvb2dsZS5nYYILKi5nb29nbGUuZ2WCCyouZ29vZ2xlLmdnggsqLmdvb2ds +ZS5nbIILKi5nb29nbGUuZ22CCyouZ29vZ2xlLmdwggsqLmdvb2dsZS5ncoILKi5n +b29nbGUuZ3mCCyouZ29vZ2xlLmhrggsqLmdvb2dsZS5oboILKi5nb29nbGUuaHKC +CyouZ29vZ2xlLmh0ggsqLmdvb2dsZS5odYILKi5nb29nbGUuaWWCCyouZ29vZ2xl +Lmltgg0qLmdvb2dsZS5pbmZvggsqLmdvb2dsZS5pcYILKi5nb29nbGUuaXOCCyou +Z29vZ2xlLml0gg4qLmdvb2dsZS5pdC5hb4ILKi5nb29nbGUuamWCCyouZ29vZ2xl +Lmpvgg0qLmdvb2dsZS5qb2JzggsqLmdvb2dsZS5qcIILKi5nb29nbGUua2eCCyou +Z29vZ2xlLmtpggsqLmdvb2dsZS5reoILKi5nb29nbGUubGGCCyouZ29vZ2xlLmxp +ggsqLmdvb2dsZS5sa4ILKi5nb29nbGUubHSCCyouZ29vZ2xlLmx1ggsqLmdvb2ds +ZS5sdoILKi5nb29nbGUubWSCCyouZ29vZ2xlLm1lggsqLmdvb2dsZS5tZ4ILKi5n +b29nbGUubWuCCyouZ29vZ2xlLm1sggsqLmdvb2dsZS5tboILKi5nb29nbGUubXOC +CyouZ29vZ2xlLm11ggsqLmdvb2dsZS5tdoILKi5nb29nbGUubXeCCyouZ29vZ2xl +Lm5lgg4qLmdvb2dsZS5uZS5qcIIMKi5nb29nbGUubmV0ggsqLmdvb2dsZS5ubIIL +Ki5nb29nbGUubm+CCyouZ29vZ2xlLm5yggsqLmdvb2dsZS5udYIPKi5nb29nbGUu +b2ZmLmFpggsqLmdvb2dsZS5wa4ILKi5nb29nbGUucGyCCyouZ29vZ2xlLnBuggsq +Lmdvb2dsZS5wc4ILKi5nb29nbGUucHSCCyouZ29vZ2xlLnJvggsqLmdvb2dsZS5y +c4ILKi5nb29nbGUucnWCCyouZ29vZ2xlLnJ3ggsqLmdvb2dsZS5zY4ILKi5nb29n +bGUuc2WCCyouZ29vZ2xlLnNoggsqLmdvb2dsZS5zaYILKi5nb29nbGUuc2uCCyou 
+Z29vZ2xlLnNtggsqLmdvb2dsZS5zboILKi5nb29nbGUuc2+CCyouZ29vZ2xlLnN0 +ggsqLmdvb2dsZS50ZIILKi5nb29nbGUudGeCCyouZ29vZ2xlLnRrggsqLmdvb2ds +ZS50bIILKi5nb29nbGUudG2CCyouZ29vZ2xlLnRuggsqLmdvb2dsZS50b4ILKi5n +b29nbGUudHCCCyouZ29vZ2xlLnR0ggsqLmdvb2dsZS51c4ILKi5nb29nbGUudXqC +CyouZ29vZ2xlLnZnggsqLmdvb2dsZS52dYILKi5nb29nbGUud3OCCWdvb2dsZS5h +Y4IJZ29vZ2xlLmFkgglnb29nbGUuYWWCCWdvb2dsZS5hZoIJZ29vZ2xlLmFnggln +b29nbGUuYW2CCWdvb2dsZS5hc4IJZ29vZ2xlLmF0gglnb29nbGUuYXqCCWdvb2ds +ZS5iYYIJZ29vZ2xlLmJlgglnb29nbGUuYmaCCWdvb2dsZS5iZ4IJZ29vZ2xlLmJp +gglnb29nbGUuYmqCCWdvb2dsZS5ic4IJZ29vZ2xlLmJ5gglnb29nbGUuY2GCCmdv +b2dsZS5jYXSCCWdvb2dsZS5jY4IJZ29vZ2xlLmNkgglnb29nbGUuY2aCCWdvb2ds +ZS5jZ4IJZ29vZ2xlLmNogglnb29nbGUuY2mCCWdvb2dsZS5jbIIJZ29vZ2xlLmNt +gglnb29nbGUuY26CDGdvb2dsZS5jby5hb4IMZ29vZ2xlLmNvLmJ3ggxnb29nbGUu +Y28uY2uCDGdvb2dsZS5jby5jcoIMZ29vZ2xlLmNvLmh1ggxnb29nbGUuY28uaWSC +DGdvb2dsZS5jby5pbIIMZ29vZ2xlLmNvLmltggxnb29nbGUuY28uaW6CDGdvb2ds +ZS5jby5qZYIMZ29vZ2xlLmNvLmpwggxnb29nbGUuY28ua2WCDGdvb2dsZS5jby5r +coIMZ29vZ2xlLmNvLmxzggxnb29nbGUuY28ubWGCDGdvb2dsZS5jby5teoIMZ29v +Z2xlLmNvLm56ggxnb29nbGUuY28udGiCDGdvb2dsZS5jby50eoIMZ29vZ2xlLmNv +LnVnggxnb29nbGUuY28udWuCDGdvb2dsZS5jby51eoIMZ29vZ2xlLmNvLnZlggxn +b29nbGUuY28udmmCDGdvb2dsZS5jby56YYIMZ29vZ2xlLmNvLnptggxnb29nbGUu +Y28ueneCDWdvb2dsZS5jb20uYWaCDWdvb2dsZS5jb20uYWeCDWdvb2dsZS5jb20u +YWmCDWdvb2dsZS5jb20uYXKCDWdvb2dsZS5jb20uYXWCDWdvb2dsZS5jb20uYmSC +DWdvb2dsZS5jb20uYmiCDWdvb2dsZS5jb20uYm6CDWdvb2dsZS5jb20uYm+CDWdv +b2dsZS5jb20uYnKCDWdvb2dsZS5jb20uYnmCDWdvb2dsZS5jb20uYnqCDWdvb2ds +ZS5jb20uY26CDWdvb2dsZS5jb20uY2+CDWdvb2dsZS5jb20uY3WCDWdvb2dsZS5j +b20uY3mCDWdvb2dsZS5jb20uZG+CDWdvb2dsZS5jb20uZWOCDWdvb2dsZS5jb20u +ZWeCDWdvb2dsZS5jb20uZXSCDWdvb2dsZS5jb20uZmqCDWdvb2dsZS5jb20uZ2WC +DWdvb2dsZS5jb20uZ2iCDWdvb2dsZS5jb20uZ2mCDWdvb2dsZS5jb20uZ3KCDWdv +b2dsZS5jb20uZ3SCDWdvb2dsZS5jb20uaGuCDWdvb2dsZS5jb20uaXGCDWdvb2ds +ZS5jb20uam2CDWdvb2dsZS5jb20uam+CDWdvb2dsZS5jb20ua2iCDWdvb2dsZS5j +b20ua3eCDWdvb2dsZS5jb20ubGKCDWdvb2dsZS5jb20ubHmCDWdvb2dsZS5jb20u +bXSCDWdvb2dsZS5jb20ubXiCDWdvb2dsZS5jb20ubXmCDWdvb2dsZS5jb20ubmGC +DWdvb2dsZS5jb20ubmaCDWdvb2dsZS5jb20ubmeCDWdvb2dsZS5jb20ubmmCDWdv +b2dsZS5jb20ubnCCDWdvb2dsZS5jb20ubnKCDWdvb2dsZS5jb20ub22CDWdvb2ds +ZS5jb20ucGGCDWdvb2dsZS5jb20ucGWCDWdvb2dsZS5jb20ucGiCDWdvb2dsZS5j +b20ucGuCDWdvb2dsZS5jb20ucGyCDWdvb2dsZS5jb20ucHKCDWdvb2dsZS5jb20u +cHmCDWdvb2dsZS5jb20ucWGCDWdvb2dsZS5jb20ucnWCDWdvb2dsZS5jb20uc2GC +DWdvb2dsZS5jb20uc2KCDWdvb2dsZS5jb20uc2eCDWdvb2dsZS5jb20uc2yCDWdv +b2dsZS5jb20uc3aCDWdvb2dsZS5jb20udGqCDWdvb2dsZS5jb20udG6CDWdvb2ds +ZS5jb20udHKCDWdvb2dsZS5jb20udHeCDWdvb2dsZS5jb20udWGCDWdvb2dsZS5j +b20udXmCDWdvb2dsZS5jb20udmOCDWdvb2dsZS5jb20udmWCDWdvb2dsZS5jb20u +dm6CCWdvb2dsZS5jdoIJZ29vZ2xlLmN6gglnb29nbGUuZGWCCWdvb2dsZS5kaoIJ +Z29vZ2xlLmRrgglnb29nbGUuZG2CCWdvb2dsZS5keoIJZ29vZ2xlLmVlgglnb29n +bGUuZXOCCWdvb2dsZS5maYIJZ29vZ2xlLmZtgglnb29nbGUuZnKCCWdvb2dsZS5n +YYIJZ29vZ2xlLmdlgglnb29nbGUuZ2eCCWdvb2dsZS5nbIIJZ29vZ2xlLmdtggln +b29nbGUuZ3CCCWdvb2dsZS5ncoIJZ29vZ2xlLmd5gglnb29nbGUuaGuCCWdvb2ds +ZS5oboIJZ29vZ2xlLmhygglnb29nbGUuaHSCCWdvb2dsZS5odYIJZ29vZ2xlLmll +gglnb29nbGUuaW2CC2dvb2dsZS5pbmZvgglnb29nbGUuaXGCCWdvb2dsZS5pc4IJ +Z29vZ2xlLml0ggxnb29nbGUuaXQuYW+CCWdvb2dsZS5qZYIJZ29vZ2xlLmpvggtn +b29nbGUuam9ic4IJZ29vZ2xlLmpwgglnb29nbGUua2eCCWdvb2dsZS5raYIJZ29v +Z2xlLmt6gglnb29nbGUubGGCCWdvb2dsZS5saYIJZ29vZ2xlLmxrgglnb29nbGUu +bHSCCWdvb2dsZS5sdYIJZ29vZ2xlLmx2gglnb29nbGUubWSCCWdvb2dsZS5tZYIJ +Z29vZ2xlLm1ngglnb29nbGUubWuCCWdvb2dsZS5tbIIJZ29vZ2xlLm1ugglnb29n +bGUubXOCCWdvb2dsZS5tdYIJZ29vZ2xlLm12gglnb29nbGUubXeCCWdvb2dsZS5u 
+ZYIMZ29vZ2xlLm5lLmpwggpnb29nbGUubmV0gglnb29nbGUubmyCCWdvb2dsZS5u +b4IJZ29vZ2xlLm5ygglnb29nbGUubnWCDWdvb2dsZS5vZmYuYWmCCWdvb2dsZS5w +a4IJZ29vZ2xlLnBsgglnb29nbGUucG6CCWdvb2dsZS5wc4IJZ29vZ2xlLnB0ggln +b29nbGUucm+CCWdvb2dsZS5yc4IJZ29vZ2xlLnJ1gglnb29nbGUucneCCWdvb2ds +ZS5zY4IJZ29vZ2xlLnNlgglnb29nbGUuc2iCCWdvb2dsZS5zaYIJZ29vZ2xlLnNr +gglnb29nbGUuc22CCWdvb2dsZS5zboIJZ29vZ2xlLnNvgglnb29nbGUuc3SCCWdv +b2dsZS50ZIIJZ29vZ2xlLnRngglnb29nbGUudGuCCWdvb2dsZS50bIIJZ29vZ2xl +LnRtgglnb29nbGUudG6CCWdvb2dsZS50b4IJZ29vZ2xlLnRwgglnb29nbGUudHSC +CWdvb2dsZS51c4IJZ29vZ2xlLnV6gglnb29nbGUudmeCCWdvb2dsZS52dYIJZ29v +Z2xlLndzMA0GCSqGSIb3DQEBBQUAA4GBAJmZ9RyqpUzrP0UcJnHXoLu/AjIEsIvZ +Y9hq/9bLry8InfmvERYHr4hNetkOYlW0FeDZtCpWxdPUgJjmWgKAK6j0goOFavTV +GptkL8gha4p1QUsdLkd36/cvBXeBYSle787veo46N1k4V6Uv2gaDVkre786CNsHv +Q6MYZ5ClQ+kS +-----END CERTIFICATE----- + diff --git a/test/mitmproxy/net/data/text_cert_2 b/test/mitmproxy/net/data/text_cert_2 new file mode 100644 index 00000000..ffe8faae --- /dev/null +++ b/test/mitmproxy/net/data/text_cert_2 @@ -0,0 +1,39 @@ +-----BEGIN CERTIFICATE----- +MIIGujCCBaKgAwIBAgIDAQlEMA0GCSqGSIb3DQEBBQUAMIGMMQswCQYDVQQGEwJJ +TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0 +YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg +MSBQcmltYXJ5IEludGVybWVkaWF0ZSBTZXJ2ZXIgQ0EwHhcNMTAwMTExMTkyNzM2 +WhcNMTEwMTEyMDkxNDU1WjCBtDEgMB4GA1UEDRMXMTI2ODMyLU1DeExzWTZUbjFn +bTdvOTAxCzAJBgNVBAYTAk5aMR4wHAYDVQQKExVQZXJzb25hIE5vdCBWYWxpZGF0 +ZWQxKTAnBgNVBAsTIFN0YXJ0Q29tIEZyZWUgQ2VydGlmaWNhdGUgTWVtYmVyMRgw +FgYDVQQDEw93d3cuaW5vZGUuY28ubnoxHjAcBgkqhkiG9w0BCQEWD2ppbUBpbm9k +ZS5jby5uejCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL6ghWlGhqg+ +V0P58R3SvLRiO9OrdekDxzmQbKwQcc05frnF5Z9vT6ga7YOuXVeXxhYCAo0nr6KI ++y/Lx+QHvP5W0nKbs+svzUQErq2ZZFwhh1e1LbVccrNwkHUzKOq0TTaVdU4k8kDQ +zzYF9tTZb+G5Hv1BJjpwYwe8P4cAiPJPrFFOKTySzHqiYsXlx+vR1l1e3zKavhd+ +LVSoLWWXb13yKODq6vnuiHjUJXl8CfVlBhoGotXU4JR5cbuGoW/8+rkwEdX+YoCv +VCqgdx9IkRFB6uWfN6ocUiFvhA0eknO+ewuVfRLiIaSDB8pNyUWVqu4ngFWtWO1O +YZg0I/32BkcCAwEAAaOCAvkwggL1MAkGA1UdEwQCMAAwCwYDVR0PBAQDAgOoMBMG +A1UdJQQMMAoGCCsGAQUFBwMBMB0GA1UdDgQWBBQfaL2Rj6r8iRlBTgppgE7ZZ5WT +UzAfBgNVHSMEGDAWgBTrQjTQmLCrn/Qbawj3zGQu7w4sRTAnBgNVHREEIDAegg93 +d3cuaW5vZGUuY28ubnqCC2lub2RlLmNvLm56MIIBQgYDVR0gBIIBOTCCATUwggEx +BgsrBgEEAYG1NwECATCCASAwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRz +c2wuY29tL3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRz +c2wuY29tL2ludGVybWVkaWF0ZS5wZGYwgbcGCCsGAQUFBwICMIGqMBQWDVN0YXJ0 +Q29tIEx0ZC4wAwIBARqBkUxpbWl0ZWQgTGlhYmlsaXR5LCBzZWUgc2VjdGlvbiAq +TGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0aWZpY2F0aW9u +IEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93d3cuc3RhcnRz +c2wuY29tL3BvbGljeS5wZGYwYQYDVR0fBFowWDAqoCigJoYkaHR0cDovL3d3dy5z +dGFydHNzbC5jb20vY3J0MS1jcmwuY3JsMCqgKKAmhiRodHRwOi8vY3JsLnN0YXJ0 +c3NsLmNvbS9jcnQxLWNybC5jcmwwgY4GCCsGAQUFBwEBBIGBMH8wOQYIKwYBBQUH +MAGGLWh0dHA6Ly9vY3NwLnN0YXJ0c3NsLmNvbS9zdWIvY2xhc3MxL3NlcnZlci9j +YTBCBggrBgEFBQcwAoY2aHR0cDovL3d3dy5zdGFydHNzbC5jb20vY2VydHMvc3Vi +LmNsYXNzMS5zZXJ2ZXIuY2EuY3J0MCMGA1UdEgQcMBqGGGh0dHA6Ly93d3cuc3Rh +cnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOCAQEAivWID0KT8q1EzWzy+BecsFry +hQhuLFfAsPkHqpNd9OfkRStGBuJlLX+9DQ9TzjqutdY2buNBuDn71buZK+Y5fmjr +28rAT6+WMd+KnCl5WLT5IOS6Z9s3cec5TFQbmOGlepSS9Q6Ts9KsXOHHQvDkQeDq +OV2UqdgXIAyFm5efSL9JXPXntRausNu2s8F2B2rRJe4jPfnUy2LvY8OW1YvjUA++ +vpdWRdfUbJQp55mRfaYMPRnyUm30lAI27QaxgQPFOqDeZUm5llb5eFG/B3f87uhg ++Y1oEykbEvZrIFN4hithioQ0tb+57FKkkG2sW3uemNiQw2qrEo/GAMb1cI50Rg== +-----END CERTIFICATE----- + diff --git a/test/mitmproxy/net/data/text_cert_weird1 
b/test/mitmproxy/net/data/text_cert_weird1 new file mode 100644 index 00000000..72b09dcb --- /dev/null +++ b/test/mitmproxy/net/data/text_cert_weird1 @@ -0,0 +1,31 @@ +-----BEGIN CERTIFICATE----- +MIIFNDCCBBygAwIBAgIEDFJFNzANBgkqhkiG9w0BAQUFADCBjDELMAkGA1UEBhMC +REUxHjAcBgNVBAoTFVVuaXZlcnNpdGFldCBNdWVuc3RlcjE6MDgGA1UEAxMxWmVy +dGlmaXppZXJ1bmdzc3RlbGxlIFVuaXZlcnNpdGFldCBNdWVuc3RlciAtIEcwMjEh +MB8GCSqGSIb3DQEJARYSY2FAdW5pLW11ZW5zdGVyLmRlMB4XDTA4MDUyMDEyNDQy +NFoXDTEzMDUxOTEyNDQyNFowezELMAkGA1UEBhMCREUxHjAcBgNVBAoTFVVuaXZl +cnNpdGFldCBNdWVuc3RlcjEuMCwGA1UECxMlWmVudHJ1bSBmdWVyIEluZm9ybWF0 +aW9uc3ZlcmFyYmVpdHVuZzEcMBoGA1UEAxMTd3d3LnVuaS1tdWVuc3Rlci5kZTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMM0WlCj0ew+tyZ1GurBOqFn +AlChKk4S1F9oDzvp3FwOON4H8YFET7p9ZnoWtkfXSlGNMjekqy67dFlLt1sLusSo +tjNdaOrDLYmnGEgnYAT0RFBvErzIybJoD/Vu3NXyhes+L94R9mEMCwYXmSvG51H9 +c5CvguXBofMchDLCM/U6AYpwu3sST5orV3S1Rsa9sndj8sKJAcw195PYwl6EiEBb +M36ltDBlTYEUAg3Z+VSzB09J3U4vSvguVkDCz+szZh5RG3xlN9mlNfzhf4lHrNgV +0BRbKypa5Uuf81wbMcMMqTxKq+A9ysObpn9J3pNUym+Tn2oqHzGgvwZYB4tzXqUC +AwEAAaOCAawwggGoMAkGA1UdEwQCMAAwCwYDVR0PBAQDAgTwMBMGA1UdJQQMMAoG +CCsGAQUFBwMBMB0GA1UdDgQWBBQ3RFo8awewUTq5TpOFf3jOCEKihzAfBgNVHSME +GDAWgBS+nlGiyZJ8u2CL5rBoZHdaUhmhADAjBgNVHREEHDAagRh3d3dhZG1pbkB1 +bmktbXVlbnN0ZXIuZGUwewYDVR0fBHQwcjA3oDWgM4YxaHR0cDovL2NkcDEucGNh +LmRmbi5kZS93d3UtY2EvcHViL2NybC9nX2NhY3JsLmNybDA3oDWgM4YxaHR0cDov +L2NkcDIucGNhLmRmbi5kZS93d3UtY2EvcHViL2NybC9nX2NhY3JsLmNybDCBlgYI +KwYBBQUHAQEEgYkwgYYwQQYIKwYBBQUHMAKGNWh0dHA6Ly9jZHAxLnBjYS5kZm4u +ZGUvd3d1LWNhL3B1Yi9jYWNlcnQvZ19jYWNlcnQuY3J0MEEGCCsGAQUFBzAChjVo +dHRwOi8vY2RwMi5wY2EuZGZuLmRlL3d3dS1jYS9wdWIvY2FjZXJ0L2dfY2FjZXJ0 +LmNydDANBgkqhkiG9w0BAQUFAAOCAQEAFfNpagtcKUSDKss7TcqjYn99FQ4FtWjE +pGmzYL2zX2wsdCGoVQlGkieL9slbQVEUAnBuqM1LPzUNNe9kZpOPV3Rdhq4y8vyS +xkx3G1v5aGxfPUe8KM8yKIOHRqYefNronHJM0fw7KyjQ73xgbIEgkW+kNXaMLcrb +EPC36O2Zna8GP9FQxJRLgcfQCcYdRKGVn0EtRSkz2ym5Rbh/hrmJBbbC2yJGGMI0 +Vu5A9piK0EZPekZIUmhMQynD9QcMfWhTEFr7YZfx9ktxKDW4spnu7YrgICfZNcCm +tfxmnEAFt6a47u9P0w9lpY8+Sx9MNFfTePym+HP4TYha9bIBes+XnA== +-----END CERTIFICATE----- + diff --git a/test/mitmproxy/net/data/verificationcerts/9da13359.0 b/test/mitmproxy/net/data/verificationcerts/9da13359.0 new file mode 100644 index 00000000..b22e4d20 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/9da13359.0 @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDXTCCAkWgAwIBAgIJAPAfPQGCV/Z4MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTAxMTY0ODAxWhcNMTgwODIxMTY0ODAxWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0djFBN+F7c6 +HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7yNwhNacNJ +Arq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1eRo0mPLNS +8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrltwb5iFEI +1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7L1Bm7D1/ +3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABo1AwTjAdBgNVHQ4EFgQUgOcrtxBX +LxbpnOT65d+vpfyWUkgwHwYDVR0jBBgwFoAUgOcrtxBXLxbpnOT65d+vpfyWUkgw +DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAEE9bFmUCA+6cvESKPoi2 +TGSpV652d0xd2U66LpEXeiWRJFLz8YGgoJCx3QFGBscJDXxrLxrBBBV/tCpEqypo +pYIqsawH7M66jpOr83Us3M8JC2eFBZJocMpXxdytWqHik5VKZNx6VQFT8bS7+yVC +VoUKePhlgcg+pmo41qjqieBNKRMh/1tXS77DI1lgO5wZLVrLXcdqWuDpmaQOKJeq +G/nxytCW/YJA7bFn/8Gjy8DYypJSeeaKu7o3P3+ONJHdIMHb+MdcheDBS9AOFSeo +xI0D5EbO9F873O77l7nbD7B0X34HFN0nGczC4poexIpbDFG3hAPekwZ5KC6VwJLc +1Q== +-----END 
CERTIFICATE----- diff --git a/test/mitmproxy/net/data/verificationcerts/generate.py b/test/mitmproxy/net/data/verificationcerts/generate.py new file mode 100644 index 00000000..6d4d8550 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/generate.py @@ -0,0 +1,66 @@ +""" +Generate SSL test certificates. +""" +import subprocess +import shlex +import os +import shutil + + +ROOT_CA = "trusted-root" +SUBJECT = "/CN=example.mitmproxy.org/" + + +def do(args): + print("> %s" % args) + args = shlex.split(args) + output = subprocess.check_output(args) + return output + + +def genrsa(cert): + do("openssl genrsa -out {cert}.key 2048".format(cert=cert)) + + +def sign(cert): + do("openssl x509 -req -in {cert}.csr " + "-CA {root_ca}.crt " + "-CAkey {root_ca}.key " + "-CAcreateserial " + "-days 1024 " + "-out {cert}.crt".format(root_ca=ROOT_CA, cert=cert) + ) + + +def mkcert(cert, args): + genrsa(cert) + do("openssl req -new -nodes -batch " + "-key {cert}.key " + "{args} " + "-out {cert}.csr".format(cert=cert, args=args) + ) + sign(cert) + os.remove("{cert}.csr".format(cert=cert)) + + +# create trusted root CA +genrsa("trusted-root") +do("openssl req -x509 -new -nodes -batch " + "-key trusted-root.key " + "-days 1024 " + "-out trusted-root.crt" + ) +h = do("openssl x509 -hash -noout -in trusted-root.crt").decode("ascii").strip() +shutil.copyfile("trusted-root.crt", "{}.0".format(h)) + +# create trusted leaf cert. +mkcert("trusted-leaf", "-subj {}".format(SUBJECT)) + +# create self-signed cert +genrsa("self-signed") +do("openssl req -x509 -new -nodes -batch " + "-key self-signed.key " + "-subj {} " + "-days 1024 " + "-out self-signed.crt".format(SUBJECT) + ) diff --git a/test/mitmproxy/net/data/verificationcerts/self-signed.crt b/test/mitmproxy/net/data/verificationcerts/self-signed.crt new file mode 100644 index 00000000..dce2a7e0 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/self-signed.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDEzCCAfugAwIBAgIJAJ945xt1FRsfMA0GCSqGSIb3DQEBCwUAMCAxHjAcBgNV +BAMMFWV4YW1wbGUubWl0bXByb3h5Lm9yZzAeFw0xNTExMDExNjQ4MDJaFw0xODA4 +MjExNjQ4MDJaMCAxHjAcBgNVBAMMFWV4YW1wbGUubWl0bXByb3h5Lm9yZzCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALFxyzPfjgIghOMMnJlW80yB84xC +nJtko3tuyOdozgTCyha2W+NdIKPNZJtWrzN4P0B5PlozCDwfcSYffLs0WZs8LRWv +BfZX8+oX+14qQjKFsiqgO65cTLP3qlPySYPJQQ37vOP1Y5Yf8nQq2mwQdC18hLtT +QOANG6OFoSplpBLsYF+QeoMgqCTa6hrl/5GLmQoDRTjXkv3Sj379AUDMybuBqccm +q5EIqCrE4+xJ8JywJclAVn2YP14baiFrrYCsYYg4sS1Od6xFj+xtpLe7My3AYjB9 +/aeHd8vDiob0cqOW1TFwhqgJKuErfFyg8lZ2hJmStJKyfofWuY/gl/vnvX0CAwEA +AaNQME4wHQYDVR0OBBYEFB8d32zK8eqZIoKw4jXzYzhw4amPMB8GA1UdIwQYMBaA +FB8d32zK8eqZIoKw4jXzYzhw4amPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAJmo2oKv1OEjZ0Q4yELO6BAnHAkmBKpW+zmLyQa8idxtLVkI9uXk3iqY +GWugkmcUZCTVFRWv/QXQQSex+00IY3x2rdHbtuZwcyKiz2u8WEmfW1rOIwBaFJ1i +v7+SA2aZs6vepN2sE56X54c/YbwQooaKZtOb+djWXYMJrc/Ezj0J7oQIJTptYV8v +/3216yCHRp/KCL7yTLtiw25xKuXNu/gkcd8wZOY9rS2qMUD897MJF0MvgJoauRBd +d4XEYCNKkrIRmfqrkiRQfAZpvpoutH6NCk7KuQYcI0BlOHlsnHHcs/w72EEqHwFq +x6476tW/t8GJDZVD74+pNBcLifXxArE= +-----END CERTIFICATE----- diff --git a/test/mitmproxy/net/data/verificationcerts/self-signed.key b/test/mitmproxy/net/data/verificationcerts/self-signed.key new file mode 100644 index 00000000..71a6ad6a --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/self-signed.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAsXHLM9+OAiCE4wycmVbzTIHzjEKcm2Sje27I52jOBMLKFrZb +410go81km1avM3g/QHk+WjMIPB9xJh98uzRZmzwtFa8F9lfz6hf7XipCMoWyKqA7 
+rlxMs/eqU/JJg8lBDfu84/Vjlh/ydCrabBB0LXyEu1NA4A0bo4WhKmWkEuxgX5B6 +gyCoJNrqGuX/kYuZCgNFONeS/dKPfv0BQMzJu4GpxyarkQioKsTj7EnwnLAlyUBW +fZg/XhtqIWutgKxhiDixLU53rEWP7G2kt7szLcBiMH39p4d3y8OKhvRyo5bVMXCG +qAkq4St8XKDyVnaEmZK0krJ+h9a5j+CX++e9fQIDAQABAoIBAQCT+FvGbych2PJX +0D2KlXqgE0IAdc/YuYymstSwPLKIP9N8KyfnKtK8Jdw+uYOyfRTp8/EuEJ5OXL3j +V6CRD++lRwIlseVb7y5EySjh9oVrUhgn+aSrGucPsHkGNeZeEmbAfWugARLBrvRl +MRMhyHrJL6wT9jIEZInmy9mA3G99IuFW3rS8UR1Yu7zyvhtjvop1xg/wfEUu24Ty +PvMfnwaDcZHCz2tmu2KJvaxSBAG3FKmAqeMvk1Gt5m2keKgw03M+EX0LrM8ybWqn +VwB8tnSyMBLVFLIXMpIiSfpji10+p9fdKFMRF++D6qVwyoxPiIq+yEJapxXiqLea +mkhtJW91AoGBAOvIb7bZvH4wYvi6txs2pygF3ZMjqg/fycnplrmYMrjeeDeeN4v1 +h/5tkN9TeTkHRaN3L7v49NEUDhDyuopLTNfWpYdv63U/BVzvgMm/guacTYkx9whB +OvQ2YekR/WKg7kuyrTZidTDz+mjU+1b8JaWGjiDc6vFwxZA7uWicaGGHAoGBAMCo +y/2AwFGwCR+5bET1nTTyxok6iKo4k6R/7DJe4Bq8VLifoyX3zDlGG/33KN3xVqBU +xnT9gkii1lfX2U+4iM+GOSPl0nG0hOEqEH+vFHszpHybDeNez3FEyIbgOzg6u7sV +NOy+P94L5EMQVEmWp5g6Vm3k9kr92Bd9UacKQPnbAoGAMN8KyMu41i8RVJze9zUM +0K7mjmkGBuRL3x4br7xsRwVVxbF1sfzig0oSjTewGLH5LTi3HC8uD2gowjqNj7yr +4NEM3lXEaDj305uRBkA70bD0IUvJ+FwM7DGZecXQz3Cr8+TFIlCmGc94R+Jddlot +M3IAY69mw0SsroiylYxV1mECgYAcSGtx8rXJCDO+sYTgdsI2ZLGasbogax/ZlWIC +XwU9R4qUc/MKft8/RTiUxvT76BMUhH2B7Tl0GlunF6vyVR/Yf1biGzoSsTKUr40u +gXBbSdCK7mRSjbecZEGf80keTxkCNPHJE4DiwxImej41c2V1JpNLnMI/bhaMFDyp +bgrt4wKBgHFzZgAgM1v07F038tAkIBGrYLukY1ZFBaZoGZ9xHfy/EmLJM3HCHLO5 +8wszMGhMTe2+39EeChwgj0kFaq1YnDiucU74BC57KR1tD59y7l6UnsQXTm4/32j8 +Or6i8GekBibCb97DzzOU0ZK//fNhHTXpDDXsYt5lJUWSmgW+S9Qp +-----END RSA PRIVATE KEY----- diff --git a/test/mitmproxy/net/data/verificationcerts/trusted-leaf.crt b/test/mitmproxy/net/data/verificationcerts/trusted-leaf.crt new file mode 100644 index 00000000..6a92de92 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/trusted-leaf.crt @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC4TCCAckCCQCj6D9oVylb8jANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB +VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 +cyBQdHkgTHRkMB4XDTE1MTEwMTE2NDgwMloXDTE4MDgyMTE2NDgwMlowIDEeMBwG +A1UEAwwVZXhhbXBsZS5taXRtcHJveHkub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAy/L5JYHS7QFhSIsjmd6bJTgs2rdqEn6tsmPBVZKZ7SqCAVjW +hPpEu7Q23akmU6Zm9Fp/vENc3jzxQLlEKhrv7eWmFYSOrCYtbJOz3RQorlwjjfdY +LlNQh1wYUXQX3PN3r3dyYtt5vTtXKc8+aP4M4vX7qlbW+4j4LrQfmPjS0XOdYpu3 +wh+i1ZMIhZye3hpCjwnpjTf7/ff45ZFxtkoi1uzEC/+swr1RSvamY8Foe12Re17Z +5ij8ZB0NIdoSk1tDkY3sJ8iNi35+qartl0UYeG9IUXRwDRrPsEKpF4RxY1+X2bdZ +r6PKb/E4CA5JlMvS5SVmrvxjCVqTQBmTjXfxqwIDAQABMA0GCSqGSIb3DQEBCwUA +A4IBAQBmpSZJrTDvzSlo6P7P7x1LoETzHyVjwgPeqGYw6ndGXeJMN9rhhsFvRsiB +I/aHh58MIlSjti7paikDAoFHB3dBvFHR+JUa/ailWEbcZReWRSE3lV6wFiN3G3lU +OyofR7MKnPW7bv8hSqOLqP1mbupXuQFB5M6vPLRwg5VgiCHI/XBiTvzMamzvNAR3 +UHHZtsJkRqzogYm6K9YJaga7jteSx2nNo+ujLwrxeXsLChTyFMJGnVkp5IyKeNfc +qwlzNncb3y+4KnUdNkPEtuydgAxAfuyXufiFBYRcUWbQ5/9ycgF7131ySaj9f/Y2 +kMsv2jg+soKvwwVYCABsk1KSHtfz +-----END CERTIFICATE----- diff --git a/test/mitmproxy/net/data/verificationcerts/trusted-leaf.key b/test/mitmproxy/net/data/verificationcerts/trusted-leaf.key new file mode 100644 index 00000000..783ebf1c --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/trusted-leaf.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAy/L5JYHS7QFhSIsjmd6bJTgs2rdqEn6tsmPBVZKZ7SqCAVjW +hPpEu7Q23akmU6Zm9Fp/vENc3jzxQLlEKhrv7eWmFYSOrCYtbJOz3RQorlwjjfdY +LlNQh1wYUXQX3PN3r3dyYtt5vTtXKc8+aP4M4vX7qlbW+4j4LrQfmPjS0XOdYpu3 +wh+i1ZMIhZye3hpCjwnpjTf7/ff45ZFxtkoi1uzEC/+swr1RSvamY8Foe12Re17Z +5ij8ZB0NIdoSk1tDkY3sJ8iNi35+qartl0UYeG9IUXRwDRrPsEKpF4RxY1+X2bdZ 
+r6PKb/E4CA5JlMvS5SVmrvxjCVqTQBmTjXfxqwIDAQABAoIBAQC956DWq+wbhA1x +3x1nSUBth8E8Z0z9q7dRRFHhvIBXth0X5ADcEa2umj/8ZmSpv2heX2ZRhugSh+yc +t+YgzrRacFwV7ThsU6A4WdBBK2Q19tWke4xAlpOFdtut/Mu7kXkAidiY9ISHD5o5 +9B/I48ZcD3AnTHUiAogV9OL3LbogDD4HasLt4mWkbq8U2thdjxMIvxdg36olJEuo +iAZrAUCPZEXuU89BtvPLUYioe9n90nzkyneGNS0SHxotlEc9ZYK9VTsivtXJb4wB +ptDMCp+TH3tjo8BTGnbnoZEybgyyOEd0UTzxK4DlxnvRVWexFY6NXwPFhIxKlB0Y +Bg8NkAkBAoGBAOiRnmbC5QkqrKrTkLx3fghIHPqgEXPPYgHLSuY3UjTlMb3APXpq +vzQnlCn3QuSse/1fWnQj+9vLVbx1XNgKjzk7dQhn5IUY+mGN4lLmoSnTebxvSQ43 +VAgTYjST9JFmJ3wK4KkWDsEsVao8LAx0h5JEQXUTT5xZpFA2MLztYbgfAoGBAOB/ +MvhLMAwlx8+m/zXMEPLk/KOd2dVZ4q5se8bAT/GiGsi8JUcPnCk140ZZabJqryAp +JFzUHIjfVsS9ejAfocDk1JeIm7Uus4um6fQEKIPMBxI/M/UAwYCXAG9ULXqilbO3 +pTdeeuraVKrTu1Z4ea6x4du1JWKcyDfYfsHepcT1AoGBAM2fskV5G7e3G2MOG3IG +1E/OMpEE5WlXenfLnjVdxDkwS4JRbgnGR7d9JurTyzkTp6ylmfwFtLDoXq15ttTs +wSUBBMCh2tIy+201XV2eu++XIpMQca84C/v352RFTH8hqtdpZqkY74KsCDGzcd6x +SQxxfM5efIzoVPb2crEX0MZRAoGAQ2EqFSfL9flo7UQ8GRN0itJ7mUgJV2WxCZT5 +2X9i/y0eSN1feuKOhjfsTPMNLEWk5kwy48GuBs6xpj8Qa10zGUgVHp4bzdeEgAfK +9DhDSLt1694YZBKkAUpRERj8xXAC6nvWFLZAwjhhbRw7gAqMywgMt/q4i85usYRD +F0ESE/kCgYBbc083PcLmlHbkn/d1i4IcLI6wFk+tZYIEVYDid7xDOgZOBcOTTyYB +BrDzNqbKNexKRt7QHVlwR+VOGMdN5P0hf7oH3SMW23OxBKoQe8pUSGF9a4DjCS1v +vCXMekifb9kIhhUWaG71L8+MaOzNBVAmk1+3NzPZgV/YxHjAWWhGHQ== +-----END RSA PRIVATE KEY----- diff --git a/test/mitmproxy/net/data/verificationcerts/trusted-root.crt b/test/mitmproxy/net/data/verificationcerts/trusted-root.crt new file mode 100644 index 00000000..b22e4d20 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/trusted-root.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDXTCCAkWgAwIBAgIJAPAfPQGCV/Z4MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTAxMTY0ODAxWhcNMTgwODIxMTY0ODAxWjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0djFBN+F7c6 +HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7yNwhNacNJ +Arq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1eRo0mPLNS +8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrltwb5iFEI +1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7L1Bm7D1/ +3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABo1AwTjAdBgNVHQ4EFgQUgOcrtxBX +LxbpnOT65d+vpfyWUkgwHwYDVR0jBBgwFoAUgOcrtxBXLxbpnOT65d+vpfyWUkgw +DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAEE9bFmUCA+6cvESKPoi2 +TGSpV652d0xd2U66LpEXeiWRJFLz8YGgoJCx3QFGBscJDXxrLxrBBBV/tCpEqypo +pYIqsawH7M66jpOr83Us3M8JC2eFBZJocMpXxdytWqHik5VKZNx6VQFT8bS7+yVC +VoUKePhlgcg+pmo41qjqieBNKRMh/1tXS77DI1lgO5wZLVrLXcdqWuDpmaQOKJeq +G/nxytCW/YJA7bFn/8Gjy8DYypJSeeaKu7o3P3+ONJHdIMHb+MdcheDBS9AOFSeo +xI0D5EbO9F873O77l7nbD7B0X34HFN0nGczC4poexIpbDFG3hAPekwZ5KC6VwJLc +1Q== +-----END CERTIFICATE----- diff --git a/test/mitmproxy/net/data/verificationcerts/trusted-root.key b/test/mitmproxy/net/data/verificationcerts/trusted-root.key new file mode 100644 index 00000000..05483f77 --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/trusted-root.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0dj +FBN+F7c6HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7y +NwhNacNJArq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1e +Ro0mPLNS8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrl +twb5iFEI1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7 
+L1Bm7D1/3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABAoIBAFgMzjDzpqz/sbhs +fS0JPp4gDtqRbx3/bSMbJvNuXPxjvzNxLZ5z7cLbmyu1l7Jlz6QXzkrI1vTiPdzR +OcUY+RYANF252iHYJTKEIzS5YX/X7dL3LT9eqlpIJEqCC8Dygw3VW5fY3Xwl+sB7 +blNhMuro4HQRwi8UBUrQlcPa7Ui5BBi323Q6en+VjYctkqpJHzNKPSqPTbsdLaK+ +B0XuXxFatM09rmeRKZCL71Lk1T8N/l0hqEzej7zxgVD7vG/x1kMFN4T3yCmXCbPa +izGHYr1EBHglm4qMNWveXCZiVJ+wmwCjdjqvggyHiZFXE2N0OCrWPhxQPdqFf5y7 +bUO9U2ECgYEA6GM1UzRnbVpjb20ezFy7dU7rlWM0nHBfG27M3bcXh4HnPpnvKp0/ +8a1WFi4kkRywrNXx8hFEd43vTbdObLpVXScXRKiY3MHmFk4k4hbWuTpmumCubQZO +AWlX6TE0HRKn1wQahgpQcxcWaDN2xJJmRQ1zVmlnNkT48/4kFgRxyykCgYEAwF08 +ngrF35oYoU/x+KKq2NXGeNUzoZMj568dE1oWW0ZFpqCi+DGT+hAbG3yUOBSaPqy9 +zn1obGo0YRlrayvtebz118kG7a/rzY02VcAPlT/GpEhvkZlXTwEK17zRJc1nJrfP +39QAZWZsaOru9NRIg/8HcdG3JPR2MhRD/De9GbsCgYAaiZnBUq6s8jGAu/lUZRKT +JtwIRzfu1XZG77Q9bXcmZlM99t41A5gVxTGbftF2MMyMMDJc7lPfQzocqd4u1GiD +Jr+le4tZSls4GNxlZS5IIL8ycW/5y0qFJr5/RrsoxsSb7UAKJothWTWZ2Karc/xx +zkNpjsfWjrHPSypbyU4lYQKBgFh1R5/BgnatjO/5LGNSok/uFkOQfxqo6BTtYOh6 +P9efO/5A1lBdtBeE+oIsSphzWO7DTtE6uB9Kw2V3Y/83hw+5RjABoG8Cu+OdMURD +eqb+WeFH8g45Pn31E8Bbcq34g5u5YR0jhz8Z13ZzuojZabNRPmIntxmGVSf4S78a +/plrAoGBANMHNng2lyr03nqnHrOM6NXD+60af0YR/YJ+2d/H40RnXxGJ4DXn7F00 +a4vJFPa97uq+xpd0HE+TE+NIrOdVDXPePD2qzBzMTsctGtj30vLzojMOT+Yf/nvO +WxTL5Q8GruJz2Dn0awSZO2z/3A8S1rmpuVZ/jT5NtRrvOSY6hmxF +-----END RSA PRIVATE KEY----- diff --git a/test/mitmproxy/net/data/verificationcerts/trusted-root.srl b/test/mitmproxy/net/data/verificationcerts/trusted-root.srl new file mode 100644 index 00000000..4ad962ba --- /dev/null +++ b/test/mitmproxy/net/data/verificationcerts/trusted-root.srl @@ -0,0 +1 @@ +A3E83F6857295BF2 diff --git a/test/mitmproxy/net/http/__init__.py b/test/mitmproxy/net/http/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/mitmproxy/net/http/http1/__init__.py b/test/mitmproxy/net/http/http1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/mitmproxy/net/http/http1/test_assemble.py b/test/mitmproxy/net/http/http1/test_assemble.py new file mode 100644 index 00000000..e80376e8 --- /dev/null +++ b/test/mitmproxy/net/http/http1/test_assemble.py @@ -0,0 +1,101 @@ +from mitmproxy import exceptions +from mitmproxy.net.http import Headers +from mitmproxy.net.http.http1.assemble import ( + assemble_request, assemble_request_head, assemble_response, + assemble_response_head, _assemble_request_line, _assemble_request_headers, + _assemble_response_headers, + assemble_body) +from mitmproxy.test.tutils import treq, raises, tresp + + +def test_assemble_request(): + assert assemble_request(treq()) == ( + b"GET /path HTTP/1.1\r\n" + b"header: qvalue\r\n" + b"content-length: 7\r\n" + b"host: address:22\r\n" + b"\r\n" + b"content" + ) + + with raises(exceptions.HttpException): + assemble_request(treq(content=None)) + + +def test_assemble_request_head(): + c = assemble_request_head(treq(content=b"foo")) + assert b"GET" in c + assert b"qvalue" in c + assert b"content-length" in c + assert b"foo" not in c + + +def test_assemble_response(): + assert assemble_response(tresp()) == ( + b"HTTP/1.1 200 OK\r\n" + b"header-response: svalue\r\n" + b"content-length: 7\r\n" + b"\r\n" + b"message" + ) + + with raises(exceptions.HttpException): + assemble_response(tresp(content=None)) + + +def test_assemble_response_head(): + c = assemble_response_head(tresp()) + assert b"200" in c + assert b"svalue" in c + assert b"message" not in c + + +def test_assemble_body(): + c = list(assemble_body(Headers(), [b"body"])) + assert c == [b"body"] + + c = 
list(assemble_body(Headers(transfer_encoding="chunked"), [b"123456789a", b""])) + assert c == [b"a\r\n123456789a\r\n", b"0\r\n\r\n"] + + c = list(assemble_body(Headers(transfer_encoding="chunked"), [b"123456789a"])) + assert c == [b"a\r\n123456789a\r\n", b"0\r\n\r\n"] + + +def test_assemble_request_line(): + assert _assemble_request_line(treq().data) == b"GET /path HTTP/1.1" + + authority_request = treq(method=b"CONNECT", first_line_format="authority").data + assert _assemble_request_line(authority_request) == b"CONNECT address:22 HTTP/1.1" + + absolute_request = treq(first_line_format="absolute").data + assert _assemble_request_line(absolute_request) == b"GET http://address:22/path HTTP/1.1" + + with raises(RuntimeError): + _assemble_request_line(treq(first_line_format="invalid_form").data) + + +def test_assemble_request_headers(): + # https://github.com/mitmproxy/mitmproxy/issues/186 + r = treq(content=b"") + r.headers["Transfer-Encoding"] = "chunked" + c = _assemble_request_headers(r.data) + assert b"Transfer-Encoding" in c + + +def test_assemble_request_headers_host_header(): + r = treq() + r.headers = Headers() + c = _assemble_request_headers(r.data) + assert b"host" in c + + r.host = None + c = _assemble_request_headers(r.data) + assert b"host" not in c + + +def test_assemble_response_headers(): + # https://github.com/mitmproxy/mitmproxy/issues/186 + r = tresp(content=b"") + r.headers["Transfer-Encoding"] = "chunked" + c = _assemble_response_headers(r) + assert b"Transfer-Encoding" in c diff --git a/test/mitmproxy/net/http/http1/test_read.py b/test/mitmproxy/net/http/http1/test_read.py new file mode 100644 index 00000000..20997259 --- /dev/null +++ b/test/mitmproxy/net/http/http1/test_read.py @@ -0,0 +1,371 @@ +from io import BytesIO +from mock import Mock +import pytest + +from mitmproxy import exceptions +from mitmproxy.net.http import Headers +from mitmproxy.net.http.http1.read import ( + read_request, read_response, read_request_head, + read_response_head, read_body, connection_close, expected_http_body_size, _get_first_line, + _read_request_line, _parse_authority_form, _read_response_line, _check_http_version, + _read_headers, _read_chunked, get_header_tokens +) +from mitmproxy.test.tutils import treq, tresp, raises + + +def test_get_header_tokens(): + headers = Headers() + assert get_header_tokens(headers, "foo") == [] + headers["foo"] = "bar" + assert get_header_tokens(headers, "foo") == ["bar"] + headers["foo"] = "bar, voing" + assert get_header_tokens(headers, "foo") == ["bar", "voing"] + headers.set_all("foo", ["bar, voing", "oink"]) + assert get_header_tokens(headers, "foo") == ["bar", "voing", "oink"] + + +@pytest.mark.parametrize("input", [ + b"GET / HTTP/1.1\r\n\r\nskip", + b"GET / HTTP/1.1\r\n\r\nskip", + b"GET / HTTP/1.1\r\n\r\nskip", + b"GET / HTTP/1.1 \r\n\r\nskip", +]) +def test_read_request(input): + rfile = BytesIO(input) + r = read_request(rfile) + assert r.method == "GET" + assert r.content == b"" + assert r.http_version == "HTTP/1.1" + assert r.timestamp_end + assert rfile.read() == b"skip" + + +@pytest.mark.parametrize("input", [ + b"CONNECT :0 0", +]) +def test_read_request_error(input): + rfile = BytesIO(input) + raises(exceptions.HttpException, read_request, rfile) + + +def test_read_request_head(): + rfile = BytesIO( + b"GET / HTTP/1.1\r\n" + b"Content-Length: 4\r\n" + b"\r\n" + b"skip" + ) + rfile.reset_timestamps = Mock() + rfile.first_byte_timestamp = 42 + r = read_request_head(rfile) + assert r.method == "GET" + assert r.headers["Content-Length"] == 
"4" + assert r.content is None + assert rfile.reset_timestamps.called + assert r.timestamp_start == 42 + assert rfile.read() == b"skip" + + +@pytest.mark.parametrize("input", [ + b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", + b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", + b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", + b"HTTP/1.1 418 I'm a teapot \r\n\r\nbody", +]) +def test_read_response(input): + req = treq() + rfile = BytesIO(input) + r = read_response(rfile, req) + assert r.http_version == "HTTP/1.1" + assert r.status_code == 418 + assert r.reason == "I'm a teapot" + assert r.content == b"body" + assert r.timestamp_end + + +def test_read_response_head(): + rfile = BytesIO( + b"HTTP/1.1 418 I'm a teapot\r\n" + b"Content-Length: 4\r\n" + b"\r\n" + b"skip" + ) + rfile.reset_timestamps = Mock() + rfile.first_byte_timestamp = 42 + r = read_response_head(rfile) + assert r.status_code == 418 + assert r.headers["Content-Length"] == "4" + assert r.content is None + assert rfile.reset_timestamps.called + assert r.timestamp_start == 42 + assert rfile.read() == b"skip" + + +class TestReadBody: + def test_chunked(self): + rfile = BytesIO(b"3\r\nfoo\r\n0\r\n\r\nbar") + body = b"".join(read_body(rfile, None)) + assert body == b"foo" + assert rfile.read() == b"bar" + + def test_known_size(self): + rfile = BytesIO(b"foobar") + body = b"".join(read_body(rfile, 3)) + assert body == b"foo" + assert rfile.read() == b"bar" + + def test_known_size_limit(self): + rfile = BytesIO(b"foobar") + with raises(exceptions.HttpException): + b"".join(read_body(rfile, 3, 2)) + + def test_known_size_too_short(self): + rfile = BytesIO(b"foo") + with raises(exceptions.HttpException): + b"".join(read_body(rfile, 6)) + + def test_unknown_size(self): + rfile = BytesIO(b"foobar") + body = b"".join(read_body(rfile, -1)) + assert body == b"foobar" + + def test_unknown_size_limit(self): + rfile = BytesIO(b"foobar") + with raises(exceptions.HttpException): + b"".join(read_body(rfile, -1, 3)) + + def test_max_chunk_size(self): + rfile = BytesIO(b"123456") + assert list(read_body(rfile, -1, max_chunk_size=None)) == [b"123456"] + rfile = BytesIO(b"123456") + assert list(read_body(rfile, -1, max_chunk_size=1)) == [b"1", b"2", b"3", b"4", b"5", b"6"] + + +def test_connection_close(): + headers = Headers() + assert connection_close(b"HTTP/1.0", headers) + assert not connection_close(b"HTTP/1.1", headers) + + headers["connection"] = "keep-alive" + assert not connection_close(b"HTTP/1.1", headers) + + headers["connection"] = "close" + assert connection_close(b"HTTP/1.1", headers) + + headers["connection"] = "foobar" + assert connection_close(b"HTTP/1.0", headers) + assert not connection_close(b"HTTP/1.1", headers) + + +def test_expected_http_body_size(): + # Expect: 100-continue + assert expected_http_body_size( + treq(headers=Headers(expect="100-continue", content_length="42")) + ) == 0 + + # http://tools.ietf.org/html/rfc7230#section-3.3 + assert expected_http_body_size( + treq(method=b"HEAD"), + tresp(headers=Headers(content_length="42")) + ) == 0 + assert expected_http_body_size( + treq(method=b"CONNECT"), + tresp() + ) == 0 + for code in (100, 204, 304): + assert expected_http_body_size( + treq(), + tresp(status_code=code) + ) == 0 + + # chunked + assert expected_http_body_size( + treq(headers=Headers(transfer_encoding="chunked")), + ) is None + + # explicit length + for val in (b"foo", b"-7"): + with raises(exceptions.HttpSyntaxException): + expected_http_body_size( + treq(headers=Headers(content_length=val)) + ) + assert 
expected_http_body_size( + treq(headers=Headers(content_length="42")) + ) == 42 + + # no length + assert expected_http_body_size( + treq(headers=Headers()) + ) == 0 + assert expected_http_body_size( + treq(headers=Headers()), tresp(headers=Headers()) + ) == -1 + + +def test_get_first_line(): + rfile = BytesIO(b"foo\r\nbar") + assert _get_first_line(rfile) == b"foo" + + rfile = BytesIO(b"\r\nfoo\r\nbar") + assert _get_first_line(rfile) == b"foo" + + with raises(exceptions.HttpReadDisconnect): + rfile = BytesIO(b"") + _get_first_line(rfile) + + with raises(exceptions.HttpReadDisconnect): + rfile = Mock() + rfile.readline.side_effect = exceptions.TcpDisconnect + _get_first_line(rfile) + + +def test_read_request_line(): + def t(b): + return _read_request_line(BytesIO(b)) + + assert (t(b"GET / HTTP/1.1") == + ("relative", b"GET", None, None, None, b"/", b"HTTP/1.1")) + assert (t(b"OPTIONS * HTTP/1.1") == + ("relative", b"OPTIONS", None, None, None, b"*", b"HTTP/1.1")) + assert (t(b"CONNECT foo:42 HTTP/1.1") == + ("authority", b"CONNECT", None, b"foo", 42, None, b"HTTP/1.1")) + assert (t(b"GET http://foo:42/bar HTTP/1.1") == + ("absolute", b"GET", b"http", b"foo", 42, b"/bar", b"HTTP/1.1")) + + with raises(exceptions.HttpSyntaxException): + t(b"GET / WTF/1.1") + with raises(exceptions.HttpSyntaxException): + t(b"this is not http") + with raises(exceptions.HttpReadDisconnect): + t(b"") + + +def test_parse_authority_form(): + assert _parse_authority_form(b"foo:42") == (b"foo", 42) + with raises(exceptions.HttpSyntaxException): + _parse_authority_form(b"foo") + with raises(exceptions.HttpSyntaxException): + _parse_authority_form(b"foo:bar") + with raises(exceptions.HttpSyntaxException): + _parse_authority_form(b"foo:99999999") + with raises(exceptions.HttpSyntaxException): + _parse_authority_form(b"f\x00oo:80") + + +def test_read_response_line(): + def t(b): + return _read_response_line(BytesIO(b)) + + assert t(b"HTTP/1.1 200 OK") == (b"HTTP/1.1", 200, b"OK") + assert t(b"HTTP/1.1 200") == (b"HTTP/1.1", 200, b"") + + # https://github.com/mitmproxy/mitmproxy/issues/784 + assert t(b"HTTP/1.1 200 Non-Autoris\xc3\xa9") == (b"HTTP/1.1", 200, b"Non-Autoris\xc3\xa9") + + with raises(exceptions.HttpSyntaxException): + assert t(b"HTTP/1.1") + + with raises(exceptions.HttpSyntaxException): + t(b"HTTP/1.1 OK OK") + with raises(exceptions.HttpSyntaxException): + t(b"WTF/1.1 200 OK") + with raises(exceptions.HttpReadDisconnect): + t(b"") + + +def test_check_http_version(): + _check_http_version(b"HTTP/0.9") + _check_http_version(b"HTTP/1.0") + _check_http_version(b"HTTP/1.1") + _check_http_version(b"HTTP/2.0") + with raises(exceptions.HttpSyntaxException): + _check_http_version(b"WTF/1.0") + with raises(exceptions.HttpSyntaxException): + _check_http_version(b"HTTP/1.10") + with raises(exceptions.HttpSyntaxException): + _check_http_version(b"HTTP/1.b") + + +class TestReadHeaders: + @staticmethod + def _read(data): + return _read_headers(BytesIO(data)) + + def test_read_simple(self): + data = ( + b"Header: one\r\n" + b"Header2: two\r\n" + b"\r\n" + ) + headers = self._read(data) + assert headers.fields == ((b"Header", b"one"), (b"Header2", b"two")) + + def test_read_multi(self): + data = ( + b"Header: one\r\n" + b"Header: two\r\n" + b"\r\n" + ) + headers = self._read(data) + assert headers.fields == ((b"Header", b"one"), (b"Header", b"two")) + + def test_read_continued(self): + data = ( + b"Header: one\r\n" + b"\ttwo\r\n" + b"Header2: three\r\n" + b"\r\n" + ) + headers = self._read(data) + assert headers.fields 
== ((b"Header", b"one\r\n two"), (b"Header2", b"three")) + + def test_read_continued_err(self): + data = b"\tfoo: bar\r\n" + with raises(exceptions.HttpSyntaxException): + self._read(data) + + def test_read_err(self): + data = b"foo" + with raises(exceptions.HttpSyntaxException): + self._read(data) + + def test_read_empty_name(self): + data = b":foo" + with raises(exceptions.HttpSyntaxException): + self._read(data) + + def test_read_empty_value(self): + data = b"bar:" + headers = self._read(data) + assert headers.fields == ((b"bar", b""),) + + +def test_read_chunked(): + req = treq(content=None) + req.headers["Transfer-Encoding"] = "chunked" + + data = b"1\r\na\r\n0\r\n" + with raises(exceptions.HttpSyntaxException): + b"".join(_read_chunked(BytesIO(data))) + + data = b"1\r\na\r\n0\r\n\r\n" + assert b"".join(_read_chunked(BytesIO(data))) == b"a" + + data = b"\r\n\r\n1\r\na\r\n1\r\nb\r\n0\r\n\r\n" + assert b"".join(_read_chunked(BytesIO(data))) == b"ab" + + data = b"\r\n" + with raises("closed prematurely"): + b"".join(_read_chunked(BytesIO(data))) + + data = b"1\r\nfoo" + with raises("malformed chunked body"): + b"".join(_read_chunked(BytesIO(data))) + + data = b"foo\r\nfoo" + with raises(exceptions.HttpSyntaxException): + b"".join(_read_chunked(BytesIO(data))) + + data = b"5\r\naaaaa\r\n0\r\n\r\n" + with raises("too large"): + b"".join(_read_chunked(BytesIO(data), limit=2)) diff --git a/test/mitmproxy/net/http/http2/__init__.py b/test/mitmproxy/net/http/http2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/mitmproxy/net/http/http2/test_framereader.py b/test/mitmproxy/net/http/http2/test_framereader.py new file mode 100644 index 00000000..41b73189 --- /dev/null +++ b/test/mitmproxy/net/http/http2/test_framereader.py @@ -0,0 +1 @@ +# foobar diff --git a/test/mitmproxy/net/http/test_authentication.py b/test/mitmproxy/net/http/test_authentication.py new file mode 100644 index 00000000..11b6e2e0 --- /dev/null +++ b/test/mitmproxy/net/http/test_authentication.py @@ -0,0 +1,122 @@ +import binascii + +from mitmproxy.test import tutils +from mitmproxy.net.http import authentication, Headers + + +def test_parse_http_basic_auth(): + vals = ("basic", "foo", "bar") + assert authentication.parse_http_basic_auth( + authentication.assemble_http_basic_auth(*vals) + ) == vals + assert not authentication.parse_http_basic_auth("") + assert not authentication.parse_http_basic_auth("foo bar") + v = "basic " + binascii.b2a_base64(b"foo").decode("ascii") + assert not authentication.parse_http_basic_auth(v) + + +class TestPassManNonAnon: + + def test_simple(self): + p = authentication.PassManNonAnon() + assert not p.test("", "") + assert p.test("user", "") + + +class TestPassManHtpasswd: + + def test_file_errors(self): + tutils.raises( + "malformed htpasswd file", + authentication.PassManHtpasswd, + tutils.test_data.path("data/server.crt")) + + def test_simple(self): + pm = authentication.PassManHtpasswd(tutils.test_data.path("data/htpasswd")) + + vals = ("basic", "test", "test") + authentication.assemble_http_basic_auth(*vals) + assert pm.test("test", "test") + assert not pm.test("test", "foo") + assert not pm.test("foo", "test") + assert not pm.test("test", "") + assert not pm.test("", "") + + +class TestPassManSingleUser: + + def test_simple(self): + pm = authentication.PassManSingleUser("test", "test") + assert pm.test("test", "test") + assert not pm.test("test", "foo") + assert not pm.test("foo", "test") + + +class TestNullProxyAuth: + + def test_simple(self): + na = 
authentication.NullProxyAuth(authentication.PassManNonAnon()) + assert not na.auth_challenge_headers() + assert na.authenticate("foo") + na.clean({}) + + +class TestBasicProxyAuth: + + def test_simple(self): + ba = authentication.BasicProxyAuth(authentication.PassManNonAnon(), "test") + headers = Headers() + assert ba.auth_challenge_headers() + assert not ba.authenticate(headers) + + def test_authenticate_clean(self): + ba = authentication.BasicProxyAuth(authentication.PassManNonAnon(), "test") + + headers = Headers() + vals = ("basic", "foo", "bar") + headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) + assert ba.authenticate(headers) + + ba.clean(headers) + assert ba.AUTH_HEADER not in headers + + headers[ba.AUTH_HEADER] = "" + assert not ba.authenticate(headers) + + headers[ba.AUTH_HEADER] = "foo" + assert not ba.authenticate(headers) + + vals = ("foo", "foo", "bar") + headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) + assert not ba.authenticate(headers) + + ba = authentication.BasicProxyAuth(authentication.PassMan(), "test") + vals = ("basic", "foo", "bar") + headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) + assert not ba.authenticate(headers) + + +class Bunch: + pass + + +class TestAuthAction: + + def test_nonanonymous(self): + m = Bunch() + aa = authentication.NonanonymousAuthAction(None, "authenticator") + aa(None, m, None, None) + assert m.authenticator + + def test_singleuser(self): + m = Bunch() + aa = authentication.SingleuserAuthAction(None, "authenticator") + aa(None, m, "foo:bar", None) + assert m.authenticator + tutils.raises("invalid", aa, None, m, "foo", None) + + def test_httppasswd(self): + m = Bunch() + aa = authentication.HtpasswdAuthAction(None, "authenticator") + aa(None, m, tutils.test_data.path("data/htpasswd"), None) + assert m.authenticator diff --git a/test/mitmproxy/net/http/test_cookies.py b/test/mitmproxy/net/http/test_cookies.py new file mode 100644 index 00000000..8c9c0c32 --- /dev/null +++ b/test/mitmproxy/net/http/test_cookies.py @@ -0,0 +1,365 @@ +import time + +from mitmproxy.net.http import cookies +from mitmproxy.test.tutils import raises + +import mock + +cookie_pairs = [ + [ + "", + [] + ], + [ + "one=uno", + [["one", "uno"]] + ], + [ + "one", + [["one", None]] + ], + [ + "one=uno; two=due", + [["one", "uno"], ["two", "due"]] + ], + [ + 'one="uno"; two="\due"', + [["one", "uno"], ["two", "due"]] + ], + [ + 'one="un\\"o"', + [["one", 'un"o']] + ], + [ + 'one="uno,due"', + [["one", 'uno,due']] + ], + [ + "one=uno; two; three=tre", + [["one", "uno"], ["two", None], ["three", "tre"]] + ], + [ + "_lvs2=zHai1+Hq+Tc2vmc2r4GAbdOI5Jopg3EwsdUT9g=; " + "_rcc2=53VdltWl+Ov6ordflA==;", + [ + ["_lvs2", "zHai1+Hq+Tc2vmc2r4GAbdOI5Jopg3EwsdUT9g="], + ["_rcc2", "53VdltWl+Ov6ordflA=="] + ] + ] +] + + +def test_read_key(): + tokens = [ + [("foo", 0), ("foo", 3)], + [("foo", 1), ("oo", 3)], + [(" foo", 0), (" foo", 4)], + [(" foo", 1), ("foo", 4)], + [(" foo;", 1), ("foo", 4)], + [(" foo=", 1), ("foo", 4)], + [(" foo=bar", 1), ("foo", 4)], + ] + for q, a in tokens: + assert cookies._read_key(*q) == a + + +def test_read_quoted_string(): + tokens = [ + [('"foo" x', 0), ("foo", 5)], + [('"f\oo" x', 0), ("foo", 6)], + [(r'"f\\o" x', 0), (r"f\o", 6)], + [(r'"f\\" x', 0), (r"f" + '\\', 5)], + [('"fo\\\"" x', 0), ("fo\"", 6)], + [('"foo" x', 7), ("", 8)], + ] + for q, a in tokens: + assert cookies._read_quoted_string(*q) == a + + +def test_read_cookie_pairs(): + vals = [ + [ + "one", + [["one", None]] + ], + 
[ + "one=two", + [["one", "two"]] + ], + [ + "one=", + [["one", ""]] + ], + [ + 'one="two"', + [["one", "two"]] + ], + [ + 'one="two"; three=four', + [["one", "two"], ["three", "four"]] + ], + [ + 'one="two"; three=four; five', + [["one", "two"], ["three", "four"], ["five", None]] + ], + [ + 'one="\\"two"; three=four', + [["one", '"two'], ["three", "four"]] + ], + ] + for s, lst in vals: + ret, off = cookies._read_cookie_pairs(s) + assert ret == lst + + +def test_pairs_roundtrips(): + for s, expected in cookie_pairs: + ret, off = cookies._read_cookie_pairs(s) + assert ret == expected + + s2 = cookies._format_pairs(expected) + ret, off = cookies._read_cookie_pairs(s2) + assert ret == expected + + +def test_cookie_roundtrips(): + for s, expected in cookie_pairs: + ret = cookies.parse_cookie_header(s) + assert ret == expected + + s2 = cookies.format_cookie_header(expected) + ret = cookies.parse_cookie_header(s2) + assert ret == expected + + +def test_parse_set_cookie_pairs(): + pairs = [ + [ + "one=uno", + [[ + ["one", "uno"] + ]] + ], + [ + "one=un\x20", + [[ + ["one", "un\x20"] + ]] + ], + [ + "one=uno; foo", + [[ + ["one", "uno"], + ["foo", None] + ]] + ], + [ + "mun=1.390.f60; " + "expires=sun, 11-oct-2015 12:38:31 gmt; path=/; " + "domain=b.aol.com", + [[ + ["mun", "1.390.f60"], + ["expires", "sun, 11-oct-2015 12:38:31 gmt"], + ["path", "/"], + ["domain", "b.aol.com"] + ]] + ], + [ + r'rpb=190%3d1%2616726%3d1%2634832%3d1%2634874%3d1; ' + 'domain=.rubiconproject.com; ' + 'expires=mon, 11-may-2015 21:54:57 gmt; ' + 'path=/', + [[ + ['rpb', r'190%3d1%2616726%3d1%2634832%3d1%2634874%3d1'], + ['domain', '.rubiconproject.com'], + ['expires', 'mon, 11-may-2015 21:54:57 gmt'], + ['path', '/'] + ]] + ], + ] + for s, expected in pairs: + ret, off = cookies._read_set_cookie_pairs(s) + assert ret == expected + + s2 = cookies._format_set_cookie_pairs(expected[0]) + ret2, off = cookies._read_set_cookie_pairs(s2) + assert ret2 == expected + + +def test_parse_set_cookie_header(): + def set_cookie_equal(obs, exp): + assert obs[0] == exp[0] + assert obs[1] == exp[1] + assert obs[2].items(multi=True) == exp[2] + + vals = [ + [ + "", [] + ], + [ + ";", [] + ], + [ + "one=uno", + [ + ("one", "uno", ()) + ] + ], + [ + "one=uno; foo=bar", + [ + ("one", "uno", (("foo", "bar"),)) + ] + ], + [ + "one=uno; foo=bar; foo=baz", + [ + ("one", "uno", (("foo", "bar"), ("foo", "baz"))) + ] + ], + # Comma Separated Variant of Set-Cookie Headers + [ + "foo=bar, doo=dar", + [ + ("foo", "bar", ()), + ("doo", "dar", ()), + ] + ], + [ + "foo=bar; path=/, doo=dar; roo=rar; zoo=zar", + [ + ("foo", "bar", (("path", "/"),)), + ("doo", "dar", (("roo", "rar"), ("zoo", "zar"))), + ] + ], + [ + "foo=bar; expires=Mon, 24 Aug 2037", + [ + ("foo", "bar", (("expires", "Mon, 24 Aug 2037"),)), + ] + ], + [ + "foo=bar; expires=Mon, 24 Aug 2037 00:00:00 GMT, doo=dar", + [ + ("foo", "bar", (("expires", "Mon, 24 Aug 2037 00:00:00 GMT"),)), + ("doo", "dar", ()), + ] + ], + ] + for s, expected in vals: + ret = cookies.parse_set_cookie_header(s) + if expected: + for i in range(len(expected)): + set_cookie_equal(ret[i], expected[i]) + + s2 = cookies.format_set_cookie_header(ret) + ret2 = cookies.parse_set_cookie_header(s2) + for i in range(len(expected)): + set_cookie_equal(ret2[i], expected[i]) + else: + assert not ret + + +def test_refresh_cookie(): + + # Invalid expires format, sent to us by Reddit. 
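+    # refresh_set_cookie_header(header, delta) is expected to tolerate an
+    # Expires value it cannot parse and still return a usable header
+    # (asserted truthy below); the following case checks that a parseable
+    # Expires is actually shifted forward by the given number of seconds.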
+ c = "rfoo=bar; Domain=reddit.com; expires=Thu, 31 Dec 2037 23:59:59 GMT; Path=/" + assert cookies.refresh_set_cookie_header(c, 60) + + c = "MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure" + assert "00:21:38" in cookies.refresh_set_cookie_header(c, 60) + + c = "foo,bar" + with raises(ValueError): + cookies.refresh_set_cookie_header(c, 60) + + # https://github.com/mitmproxy/mitmproxy/issues/773 + c = ">=A" + assert cookies.refresh_set_cookie_header(c, 60) + + # https://github.com/mitmproxy/mitmproxy/issues/1118 + c = "foo:bar=bla" + assert cookies.refresh_set_cookie_header(c, 0) + c = "foo/bar=bla" + assert cookies.refresh_set_cookie_header(c, 0) + + +@mock.patch('time.time') +def test_get_expiration_ts(*args): + # Freeze time + now_ts = 17 + time.time.return_value = now_ts + + CA = cookies.CookieAttrs + F = cookies.get_expiration_ts + + assert F(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT")])) == 0 + assert F(CA([("Expires", "Mon, 24-Aug-2037 00:00:00 GMT")])) == 2134684800 + + assert F(CA([("Max-Age", "0")])) == now_ts + assert F(CA([("Max-Age", "31")])) == now_ts + 31 + + +def test_is_expired(): + CA = cookies.CookieAttrs + + # A cookie can be expired + # by setting the expire time in the past + assert cookies.is_expired(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT")])) + + # or by setting Max-Age to 0 + assert cookies.is_expired(CA([("Max-Age", "0")])) + + # or both + assert cookies.is_expired(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT"), ("Max-Age", "0")])) + + assert not cookies.is_expired(CA([("Expires", "Mon, 24-Aug-2037 00:00:00 GMT")])) + assert not cookies.is_expired(CA([("Max-Age", "1")])) + assert not cookies.is_expired(CA([("Expires", "Wed, 15-Jul-2037 00:00:00 GMT"), ("Max-Age", "1")])) + + assert not cookies.is_expired(CA([("Max-Age", "nan")])) + assert not cookies.is_expired(CA([("Expires", "false")])) + + +def test_group_cookies(): + CA = cookies.CookieAttrs + groups = [ + [ + "one=uno; foo=bar; foo=baz", + [ + ('one', 'uno', CA([])), + ('foo', 'bar', CA([])), + ('foo', 'baz', CA([])) + ] + ], + [ + "one=uno; Path=/; foo=bar; Max-Age=0; foo=baz; expires=24-08-1993", + [ + ('one', 'uno', CA([('Path', '/')])), + ('foo', 'bar', CA([('Max-Age', '0')])), + ('foo', 'baz', CA([('expires', '24-08-1993')])) + ] + ], + [ + "one=uno;", + [ + ('one', 'uno', CA([])) + ] + ], + [ + "one=uno; Path=/; Max-Age=0; Expires=24-08-1993", + [ + ('one', 'uno', CA([('Path', '/'), ('Max-Age', '0'), ('Expires', '24-08-1993')])) + ] + ], + [ + "path=val; Path=/", + [ + ('path', 'val', CA([('Path', '/')])) + ] + ] + ] + + for c, expected in groups: + observed = cookies.group_cookies(cookies.parse_cookie_header(c)) + assert observed == expected diff --git a/test/mitmproxy/net/http/test_encoding.py b/test/mitmproxy/net/http/test_encoding.py new file mode 100644 index 00000000..d8fa5e76 --- /dev/null +++ b/test/mitmproxy/net/http/test_encoding.py @@ -0,0 +1,73 @@ +import mock +import pytest + +from mitmproxy.net.http import encoding +from mitmproxy.test import tutils + + +@pytest.mark.parametrize("encoder", [ + 'identity', + 'none', +]) +def test_identity(encoder): + assert b"string" == encoding.decode(b"string", encoder) + assert b"string" == encoding.encode(b"string", encoder) + with tutils.raises(ValueError): + encoding.encode(b"string", "nonexistent encoding") + + +@pytest.mark.parametrize("encoder", [ + 'gzip', + 'br', + 'deflate', +]) +def test_encoders(encoder): + assert "" == encoding.decode("", encoder) + assert b"" == encoding.decode(b"", encoder) + + assert 
"string" == encoding.decode( + encoding.encode( + "string", + encoder + ), + encoder + ) + assert b"string" == encoding.decode( + encoding.encode( + b"string", + encoder + ), + encoder + ) + + with tutils.raises(ValueError): + encoding.decode(b"foobar", encoder) + + +def test_cache(): + decode_gzip = mock.MagicMock() + decode_gzip.return_value = b"decoded" + encode_gzip = mock.MagicMock() + encode_gzip.return_value = b"encoded" + + with mock.patch.dict(encoding.custom_decode, gzip=decode_gzip): + with mock.patch.dict(encoding.custom_encode, gzip=encode_gzip): + assert encoding.decode(b"encoded", "gzip") == b"decoded" + assert decode_gzip.call_count == 1 + + # should be cached + assert encoding.decode(b"encoded", "gzip") == b"decoded" + assert decode_gzip.call_count == 1 + + # the other way around as well + assert encoding.encode(b"decoded", "gzip") == b"encoded" + assert encode_gzip.call_count == 0 + + # different encoding + decode_gzip.return_value = b"bar" + assert encoding.encode(b"decoded", "deflate") != b"decoded" + assert encode_gzip.call_count == 0 + + # This is not in the cache anymore + assert encoding.encode(b"decoded", "gzip") == b"encoded" + assert encode_gzip.call_count == 1 diff --git a/test/mitmproxy/net/http/test_headers.py b/test/mitmproxy/net/http/test_headers.py new file mode 100644 index 00000000..8e0f770d --- /dev/null +++ b/test/mitmproxy/net/http/test_headers.py @@ -0,0 +1,106 @@ +import collections + +from mitmproxy.net.http.headers import Headers, parse_content_type, assemble_content_type +from mitmproxy.test.tutils import raises + + +class TestHeaders: + def _2host(self): + return Headers( + ( + (b"Host", b"example.com"), + (b"host", b"example.org") + ) + ) + + def test_init(self): + headers = Headers() + assert len(headers) == 0 + + headers = Headers([[b"Host", b"example.com"]]) + assert len(headers) == 1 + assert headers["Host"] == "example.com" + + headers = Headers(Host="example.com") + assert len(headers) == 1 + assert headers["Host"] == "example.com" + + headers = Headers( + [[b"Host", b"invalid"]], + Host="example.com" + ) + assert len(headers) == 1 + assert headers["Host"] == "example.com" + + headers = Headers( + [[b"Host", b"invalid"], [b"Accept", b"text/plain"]], + Host="example.com" + ) + assert len(headers) == 2 + assert headers["Host"] == "example.com" + assert headers["Accept"] == "text/plain" + + with raises(TypeError): + Headers([[b"Host", u"not-bytes"]]) + + def test_set(self): + headers = Headers() + headers[u"foo"] = u"1" + headers[b"bar"] = b"2" + headers["baz"] = b"3" + with raises(TypeError): + headers["foobar"] = 42 + assert len(headers) == 3 + + def test_bytes(self): + headers = Headers(Host="example.com") + assert bytes(headers) == b"Host: example.com\r\n" + + headers = Headers([ + [b"Host", b"example.com"], + [b"Accept", b"text/plain"] + ]) + assert bytes(headers) == b"Host: example.com\r\nAccept: text/plain\r\n" + + headers = Headers() + assert bytes(headers) == b"" + + def test_replace_simple(self): + headers = Headers(Host="example.com", Accept="text/plain") + replacements = headers.replace("Host: ", "X-Host: ") + assert replacements == 1 + assert headers["X-Host"] == "example.com" + assert "Host" not in headers + assert headers["Accept"] == "text/plain" + + def test_replace_multi(self): + headers = self._2host() + headers.replace(r"Host: example\.com", r"Host: example.de") + assert headers.get_all("Host") == ["example.de", "example.org"] + + def test_replace_remove_spacer(self): + headers = Headers(Host="example.com") + replacements 
= headers.replace(r"Host: ", "X-Host ") + assert replacements == 0 + assert headers["Host"] == "example.com" + + def test_replace_with_count(self): + headers = Headers(Host="foobarfoo.com", Accept="foo/bar") + replacements = headers.replace("foo", "bar", count=1) + assert replacements == 1 + + +def test_parse_content_type(): + p = parse_content_type + assert p("text/html") == ("text", "html", {}) + assert p("text") is None + + v = p("text/html; charset=UTF-8") + assert v == ('text', 'html', {'charset': 'UTF-8'}) + + +def test_assemble_content_type(): + p = assemble_content_type + assert p("text", "html", {}) == "text/html" + assert p("text", "html", {"charset": "utf8"}) == "text/html; charset=utf8" + assert p("text", "html", collections.OrderedDict([("charset", "utf8"), ("foo", "bar")])) == "text/html; charset=utf8; foo=bar" diff --git a/test/mitmproxy/net/http/test_message.py b/test/mitmproxy/net/http/test_message.py new file mode 100644 index 00000000..69d029d9 --- /dev/null +++ b/test/mitmproxy/net/http/test_message.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- + +from mitmproxy.test import tutils +from mitmproxy.net import http + + +def _test_passthrough_attr(message, attr): + assert getattr(message, attr) == getattr(message.data, attr) + setattr(message, attr, b"foo") + assert getattr(message.data, attr) == b"foo" + + +def _test_decoded_attr(message, attr): + assert getattr(message, attr) == getattr(message.data, attr).decode("utf8") + # Set str, get raw bytes + setattr(message, attr, "foo") + assert getattr(message.data, attr) == b"foo" + # Set raw bytes, get decoded + setattr(message.data, attr, b"BAR") # use uppercase so that we can also cover request.method + assert getattr(message, attr) == "BAR" + # Set bytes, get raw bytes + setattr(message, attr, b"baz") + assert getattr(message.data, attr) == b"baz" + + # Set UTF8 + setattr(message, attr, "Non-Autorisé") + assert getattr(message.data, attr) == b"Non-Autoris\xc3\xa9" + # Don't fail on garbage + setattr(message.data, attr, b"FOO\xBF\x00BAR") + assert getattr(message, attr).startswith("FOO") + assert getattr(message, attr).endswith("BAR") + # foo.bar = foo.bar should not cause any side effects. 
+ d = getattr(message, attr) + setattr(message, attr, d) + assert getattr(message.data, attr) == b"FOO\xBF\x00BAR" + + +class TestMessageData: + def test_eq_ne(self): + data = tutils.tresp(timestamp_start=42, timestamp_end=42).data + same = tutils.tresp(timestamp_start=42, timestamp_end=42).data + assert data == same + assert not data != same + + other = tutils.tresp(content=b"foo").data + assert not data == other + assert data != other + + assert data != 0 + + +class TestMessage: + + def test_init(self): + resp = tutils.tresp() + assert resp.data + + def test_eq_ne(self): + resp = tutils.tresp(timestamp_start=42, timestamp_end=42) + same = tutils.tresp(timestamp_start=42, timestamp_end=42) + assert resp == same + assert not resp != same + + other = tutils.tresp(timestamp_start=0, timestamp_end=0) + assert not resp == other + assert resp != other + + assert resp != 0 + + def test_serializable(self): + resp = tutils.tresp() + resp2 = http.Response.from_state(resp.get_state()) + assert resp == resp2 + + def test_content_length_update(self): + resp = tutils.tresp() + resp.content = b"foo" + assert resp.data.content == b"foo" + assert resp.headers["content-length"] == "3" + resp.content = b"" + assert resp.data.content == b"" + assert resp.headers["content-length"] == "0" + resp.raw_content = b"bar" + assert resp.data.content == b"bar" + assert resp.headers["content-length"] == "0" + + def test_headers(self): + _test_passthrough_attr(tutils.tresp(), "headers") + + def test_timestamp_start(self): + _test_passthrough_attr(tutils.tresp(), "timestamp_start") + + def test_timestamp_end(self): + _test_passthrough_attr(tutils.tresp(), "timestamp_end") + + def test_http_version(self): + _test_decoded_attr(tutils.tresp(), "http_version") + + def test_replace(self): + r = tutils.tresp() + r.content = b"foofootoo" + r.replace(b"foo", "gg") + assert r.content == b"ggggtoo" + + r.content = b"foofootoo" + r.replace(b"foo", "gg", count=1) + assert r.content == b"ggfootoo" + + +class TestMessageContentEncoding: + def test_simple(self): + r = tutils.tresp() + assert r.raw_content == b"message" + assert "content-encoding" not in r.headers + r.encode("gzip") + + assert r.headers["content-encoding"] + assert r.raw_content != b"message" + assert r.content == b"message" + assert r.raw_content != b"message" + + def test_modify(self): + r = tutils.tresp() + assert "content-encoding" not in r.headers + r.encode("gzip") + + r.content = b"foo" + assert r.raw_content != b"foo" + r.decode() + assert r.raw_content == b"foo" + + with tutils.raises(TypeError): + r.content = u"foo" + + def test_unknown_ce(self): + r = tutils.tresp() + r.headers["content-encoding"] = "zopfli" + r.raw_content = b"foo" + with tutils.raises(ValueError): + assert r.content + assert r.headers["content-encoding"] + assert r.get_content(strict=False) == b"foo" + + def test_cannot_decode(self): + r = tutils.tresp() + r.encode("gzip") + r.raw_content = b"foo" + with tutils.raises(ValueError): + assert r.content + assert r.headers["content-encoding"] + assert r.get_content(strict=False) == b"foo" + + with tutils.raises(ValueError): + r.decode() + assert r.raw_content == b"foo" + assert "content-encoding" in r.headers + + r.decode(strict=False) + assert r.content == b"foo" + assert "content-encoding" not in r.headers + + def test_none(self): + r = tutils.tresp(content=None) + assert r.content is None + r.content = b"foo" + assert r.content is not None + r.content = None + assert r.content is None + + def test_cannot_encode(self): + r = tutils.tresp() + 
r.encode("gzip") + r.content = None + assert r.headers["content-encoding"] + assert r.raw_content is None + + r.headers["content-encoding"] = "zopfli" + r.content = b"foo" + assert "content-encoding" not in r.headers + assert r.raw_content == b"foo" + + with tutils.raises(ValueError): + r.encode("zopfli") + assert r.raw_content == b"foo" + assert "content-encoding" not in r.headers + + +class TestMessageText: + def test_simple(self): + r = tutils.tresp(content=b'\xfc') + assert r.raw_content == b"\xfc" + assert r.content == b"\xfc" + assert r.text == u"ü" + + r.encode("gzip") + assert r.text == u"ü" + r.decode() + assert r.text == u"ü" + + r.headers["content-type"] = "text/html; charset=latin1" + r.content = b"\xc3\xbc" + assert r.text == u"ü" + r.headers["content-type"] = "text/html; charset=utf8" + assert r.text == u"ü" + + def test_guess_json(self): + r = tutils.tresp(content=b'"\xc3\xbc"') + r.headers["content-type"] = "application/json" + assert r.text == u'"ü"' + + def test_none(self): + r = tutils.tresp(content=None) + assert r.text is None + r.text = u"foo" + assert r.text is not None + r.text = None + assert r.text is None + + def test_modify(self): + r = tutils.tresp() + + r.text = u"ü" + assert r.raw_content == b"\xfc" + + r.headers["content-type"] = "text/html; charset=utf8" + r.text = u"ü" + assert r.raw_content == b"\xc3\xbc" + assert r.headers["content-length"] == "2" + + def test_unknown_ce(self): + r = tutils.tresp() + r.headers["content-type"] = "text/html; charset=wtf" + r.raw_content = b"foo" + with tutils.raises(ValueError): + assert r.text == u"foo" + assert r.get_text(strict=False) == u"foo" + + def test_cannot_decode(self): + r = tutils.tresp() + r.headers["content-type"] = "text/html; charset=utf8" + r.raw_content = b"\xFF" + with tutils.raises(ValueError): + assert r.text + + assert r.get_text(strict=False) == '\udcff' + + def test_cannot_encode(self): + r = tutils.tresp() + r.content = None + assert "content-type" not in r.headers + assert r.raw_content is None + + r.headers["content-type"] = "text/html; charset=latin1; foo=bar" + r.text = u"☃" + assert r.headers["content-type"] == "text/html; charset=utf-8; foo=bar" + assert r.raw_content == b'\xe2\x98\x83' + + r.headers["content-type"] = "gibberish" + r.text = u"☃" + assert r.headers["content-type"] == "text/plain; charset=utf-8" + assert r.raw_content == b'\xe2\x98\x83' + + del r.headers["content-type"] + r.text = u"☃" + assert r.headers["content-type"] == "text/plain; charset=utf-8" + assert r.raw_content == b'\xe2\x98\x83' + + r.headers["content-type"] = "text/html; charset=latin1" + r.text = u'\udcff' + assert r.headers["content-type"] == "text/html; charset=utf-8" + assert r.raw_content == b"\xFF" diff --git a/test/mitmproxy/net/http/test_multipart.py b/test/mitmproxy/net/http/test_multipart.py new file mode 100644 index 00000000..68ae6bbd --- /dev/null +++ b/test/mitmproxy/net/http/test_multipart.py @@ -0,0 +1,24 @@ +from mitmproxy.net.http import Headers +from mitmproxy.net.http import multipart + + +def test_decode(): + boundary = 'somefancyboundary' + headers = Headers( + content_type='multipart/form-data; boundary=' + boundary + ) + content = ( + "--{0}\n" + "Content-Disposition: form-data; name=\"field1\"\n\n" + "value1\n" + "--{0}\n" + "Content-Disposition: form-data; name=\"field2\"\n\n" + "value2\n" + "--{0}--".format(boundary).encode() + ) + + form = multipart.decode(headers, content) + + assert len(form) == 2 + assert form[0] == (b"field1", b"value1") + assert form[1] == (b"field2", b"value2") 
diff --git a/test/mitmproxy/net/http/test_request.py b/test/mitmproxy/net/http/test_request.py new file mode 100644 index 00000000..9c0ec333 --- /dev/null +++ b/test/mitmproxy/net/http/test_request.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- + +from mitmproxy.net.http import Headers +from mitmproxy.test.tutils import treq, raises +from .test_message import _test_decoded_attr, _test_passthrough_attr + + +class TestRequestData: + def test_init(self): + with raises(ValueError): + treq(headers="foobar") + + assert isinstance(treq(headers=()).headers, Headers) + + +class TestRequestCore: + """ + Tests for addons and the attributes that are directly proxied from the data structure + """ + def test_repr(self): + request = treq() + assert repr(request) == "Request(GET address:22/path)" + request.host = None + assert repr(request) == "Request(GET /path)" + + def replace(self): + r = treq() + r.path = b"foobarfoo" + r.replace(b"foo", "bar") + assert r.path == b"barbarbar" + + r.path = b"foobarfoo" + r.replace(b"foo", "bar", count=1) + assert r.path == b"barbarfoo" + + def test_first_line_format(self): + _test_passthrough_attr(treq(), "first_line_format") + + def test_method(self): + _test_decoded_attr(treq(), "method") + + def test_scheme(self): + _test_decoded_attr(treq(), "scheme") + + def test_port(self): + _test_passthrough_attr(treq(), "port") + + def test_path(self): + req = treq() + _test_decoded_attr(req, "path") + # path can also be None. + req.path = None + assert req.path is None + assert req.data.path is None + + def test_host(self): + request = treq() + assert request.host == request.data.host.decode("idna") + + # Test IDNA encoding + # Set str, get raw bytes + request.host = "ídna.example" + assert request.data.host == b"xn--dna-qma.example" + # Set raw bytes, get decoded + request.data.host = b"xn--idn-gla.example" + assert request.host == "idná.example" + # Set bytes, get raw bytes + request.host = b"xn--dn-qia9b.example" + assert request.data.host == b"xn--dn-qia9b.example" + # IDNA encoding is not bijective + request.host = "fußball" + assert request.host == "fussball" + + # Don't fail on garbage + request.data.host = b"foo\xFF\x00bar" + assert request.host.startswith("foo") + assert request.host.endswith("bar") + # foo.bar = foo.bar should not cause any side effects. + d = request.host + request.host = d + assert request.data.host == b"foo\xFF\x00bar" + + def test_host_header_update(self): + request = treq() + assert "host" not in request.headers + request.host = "example.com" + assert "host" not in request.headers + + request.headers["Host"] = "foo" + request.host = "example.org" + assert request.headers["Host"] == "example.org" + + +class TestRequestUtils: + """ + Tests for additional convenience methods. 
+ """ + def test_url(self): + request = treq() + assert request.url == "http://address:22/path" + + request.url = "https://otheraddress:42/foo" + assert request.scheme == "https" + assert request.host == "otheraddress" + assert request.port == 42 + assert request.path == "/foo" + + with raises(ValueError): + request.url = "not-a-url" + + def test_url_options(self): + request = treq(method=b"OPTIONS", path=b"*") + assert request.url == "http://address:22" + + def test_url_authority(self): + request = treq(first_line_format="authority") + assert request.url == "address:22" + + def test_pretty_host(self): + request = treq() + # Without host header + assert request.pretty_host == "address" + assert request.host == "address" + # Same port as self.port (22) + request.headers["host"] = "other:22" + assert request.pretty_host == "other" + # Different ports + request.headers["host"] = "other" + assert request.pretty_host == "address" + assert request.host == "address" + # Empty host + request.host = None + assert request.pretty_host is None + assert request.host is None + + # Invalid IDNA + request.headers["host"] = ".disqus.com:22" + assert request.pretty_host == ".disqus.com" + + def test_pretty_url(self): + request = treq() + # Without host header + assert request.url == "http://address:22/path" + assert request.pretty_url == "http://address:22/path" + # Same port as self.port (22) + request.headers["host"] = "other:22" + assert request.pretty_url == "http://other:22/path" + # Different ports + request.headers["host"] = "other" + assert request.pretty_url == "http://address:22/path" + + def test_pretty_url_options(self): + request = treq(method=b"OPTIONS", path=b"*") + assert request.pretty_url == "http://address:22" + + def test_pretty_url_authority(self): + request = treq(first_line_format="authority") + assert request.pretty_url == "address:22" + + def test_get_query(self): + request = treq() + assert not request.query + + request.url = "http://localhost:80/foo?bar=42" + assert dict(request.query) == {"bar": "42"} + + def test_set_query(self): + request = treq() + assert not request.query + request.query["foo"] = "bar" + assert request.query["foo"] == "bar" + assert request.path == "/path?foo=bar" + + def test_get_cookies_none(self): + request = treq() + request.headers = Headers() + assert not request.cookies + + def test_get_cookies_single(self): + request = treq() + request.headers = Headers(cookie="cookiename=cookievalue") + assert len(request.cookies) == 1 + assert request.cookies['cookiename'] == 'cookievalue' + + def test_get_cookies_double(self): + request = treq() + request.headers = Headers(cookie="cookiename=cookievalue;othercookiename=othercookievalue") + result = request.cookies + assert len(result) == 2 + assert result['cookiename'] == 'cookievalue' + assert result['othercookiename'] == 'othercookievalue' + + def test_get_cookies_withequalsign(self): + request = treq() + request.headers = Headers(cookie="cookiename=coo=kievalue;othercookiename=othercookievalue") + result = request.cookies + assert len(result) == 2 + assert result['cookiename'] == 'coo=kievalue' + assert result['othercookiename'] == 'othercookievalue' + + def test_set_cookies(self): + request = treq() + request.headers = Headers(cookie="cookiename=cookievalue") + result = request.cookies + result["cookiename"] = "foo" + assert request.cookies["cookiename"] == "foo" + + def test_get_path_components(self): + request = treq(path=b"/foo/bar") + assert request.path_components == ("foo", "bar") + + def 
test_set_path_components(self): + request = treq() + request.path_components = ["foo", "baz"] + assert request.path == "/foo/baz" + + request.path_components = [] + assert request.path == "/" + + request.path_components = ["foo", "baz"] + request.query["hello"] = "hello" + assert request.path_components == ("foo", "baz") + + request.path_components = ["abc"] + assert request.path == "/abc?hello=hello" + + def test_anticache(self): + request = treq() + request.headers["If-Modified-Since"] = "foo" + request.headers["If-None-Match"] = "bar" + request.anticache() + assert "If-Modified-Since" not in request.headers + assert "If-None-Match" not in request.headers + + def test_anticomp(self): + request = treq() + request.headers["Accept-Encoding"] = "foobar" + request.anticomp() + assert request.headers["Accept-Encoding"] == "identity" + + def test_constrain_encoding(self): + request = treq() + + h = request.headers.copy() + request.constrain_encoding() # no-op if there is no accept_encoding header. + assert request.headers == h + + request.headers["Accept-Encoding"] = "identity, gzip, foo" + request.constrain_encoding() + assert "foo" not in request.headers["Accept-Encoding"] + assert "gzip" in request.headers["Accept-Encoding"] + + def test_get_urlencoded_form(self): + request = treq(content=b"foobar=baz") + assert not request.urlencoded_form + + request.headers["Content-Type"] = "application/x-www-form-urlencoded" + assert list(request.urlencoded_form.items()) == [(b"foobar", b"baz")] + + def test_set_urlencoded_form(self): + request = treq() + request.urlencoded_form = [(b'foo', b'bar'), (b'rab', b'oof')] + assert request.headers["Content-Type"] == "application/x-www-form-urlencoded" + assert request.content + + def test_get_multipart_form(self): + request = treq(content=b"foobar") + assert not request.multipart_form + + request.headers["Content-Type"] = "multipart/form-data" + assert list(request.multipart_form.items()) == [] diff --git a/test/mitmproxy/net/http/test_response.py b/test/mitmproxy/net/http/test_response.py new file mode 100644 index 00000000..239fb6ef --- /dev/null +++ b/test/mitmproxy/net/http/test_response.py @@ -0,0 +1,145 @@ +import email + +import time + +from mitmproxy.net.http import Headers +from mitmproxy.net.http import Response +from mitmproxy.net.http.cookies import CookieAttrs +from mitmproxy.test.tutils import raises, tresp +from .test_message import _test_passthrough_attr, _test_decoded_attr + + +class TestResponseData: + def test_init(self): + with raises(ValueError): + tresp(headers="foobar") + + assert isinstance(tresp(headers=()).headers, Headers) + + +class TestResponseCore: + """ + Tests for addons and the attributes that are directly proxied from the data structure + """ + def test_repr(self): + response = tresp() + assert repr(response) == "Response(200 OK, unknown content type, 7b)" + response.content = None + assert repr(response) == "Response(200 OK, no content)" + + def test_make(self): + r = Response.make() + assert r.status_code == 200 + assert r.content == b"" + + r = Response.make(418, "teatime") + assert r.status_code == 418 + assert r.content == b"teatime" + assert r.headers["content-length"] == "7" + + Response.make(content=b"foo") + Response.make(content="foo") + with raises(TypeError): + Response.make(content=42) + + r = Response.make(headers=[(b"foo", b"bar")]) + assert r.headers["foo"] == "bar" + + r = Response.make(headers=({"foo": "baz"})) + assert r.headers["foo"] == "baz" + + with raises(TypeError): + Response.make(headers=42) + + def 
test_status_code(self): + _test_passthrough_attr(tresp(), "status_code") + + def test_reason(self): + _test_decoded_attr(tresp(), "reason") + + +class TestResponseUtils: + """ + Tests for additional convenience methods. + """ + def test_get_cookies_none(self): + resp = tresp() + resp.headers = Headers() + assert not resp.cookies + + def test_get_cookies_empty(self): + resp = tresp() + resp.headers = Headers(set_cookie="") + assert not resp.cookies + + def test_get_cookies_simple(self): + resp = tresp() + resp.headers = Headers(set_cookie="cookiename=cookievalue") + result = resp.cookies + assert len(result) == 1 + assert "cookiename" in result + assert result["cookiename"] == ("cookievalue", CookieAttrs()) + + def test_get_cookies_with_parameters(self): + resp = tresp() + cookie = "cookiename=cookievalue;domain=example.com;expires=Wed Oct 21 16:29:41 2015;path=/; HttpOnly" + resp.headers = Headers(set_cookie=cookie) + result = resp.cookies + assert len(result) == 1 + assert "cookiename" in result + assert result["cookiename"][0] == "cookievalue" + attrs = result["cookiename"][1] + assert len(attrs) == 4 + assert attrs["domain"] == "example.com" + assert attrs["expires"] == "Wed Oct 21 16:29:41 2015" + assert attrs["path"] == "/" + assert attrs["httponly"] is None + + def test_get_cookies_no_value(self): + resp = tresp() + resp.headers = Headers(set_cookie="cookiename=; Expires=Thu, 01-Jan-1970 00:00:01 GMT; path=/") + result = resp.cookies + assert len(result) == 1 + assert "cookiename" in result + assert result["cookiename"][0] == "" + assert len(result["cookiename"][1]) == 2 + + def test_get_cookies_twocookies(self): + resp = tresp() + resp.headers = Headers([ + [b"Set-Cookie", b"cookiename=cookievalue"], + [b"Set-Cookie", b"othercookie=othervalue"] + ]) + result = resp.cookies + assert len(result) == 2 + assert "cookiename" in result + assert result["cookiename"] == ("cookievalue", CookieAttrs()) + assert "othercookie" in result + assert result["othercookie"] == ("othervalue", CookieAttrs()) + + def test_set_cookies(self): + resp = tresp() + resp.cookies["foo"] = ("bar", {}) + + assert len(resp.cookies) == 1 + assert resp.cookies["foo"] == ("bar", CookieAttrs()) + + def test_refresh(self): + r = tresp() + n = time.time() + r.headers["date"] = email.utils.formatdate(n) + pre = r.headers["date"] + r.refresh(n) + assert pre == r.headers["date"] + r.refresh(n + 60) + + d = email.utils.parsedate_tz(r.headers["date"]) + d = email.utils.mktime_tz(d) + # Weird that this is not exact... + assert abs(60 - (d - n)) <= 1 + + cookie = "MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure" + r.headers["set-cookie"] = cookie + r.refresh() + # Cookie refreshing is tested in test_cookies, we just make sure that it's triggered here. 
+ assert cookie != r.headers["set-cookie"] diff --git a/test/mitmproxy/net/http/test_status_codes.py b/test/mitmproxy/net/http/test_status_codes.py new file mode 100644 index 00000000..098f8eef --- /dev/null +++ b/test/mitmproxy/net/http/test_status_codes.py @@ -0,0 +1,6 @@ +from mitmproxy.net.http import status_codes + + +def test_simple(): + assert status_codes.IM_A_TEAPOT == 418 + assert status_codes.RESPONSES[418] == "I'm a teapot" diff --git a/test/mitmproxy/net/http/test_url.py b/test/mitmproxy/net/http/test_url.py new file mode 100644 index 00000000..94b2eacc --- /dev/null +++ b/test/mitmproxy/net/http/test_url.py @@ -0,0 +1,102 @@ +from mitmproxy.test import tutils +from mitmproxy.net.http import url + + +def test_parse(): + with tutils.raises(ValueError): + url.parse("") + + s, h, po, pa = url.parse(b"http://foo.com:8888/test") + assert s == b"http" + assert h == b"foo.com" + assert po == 8888 + assert pa == b"/test" + + s, h, po, pa = url.parse("http://foo/bar") + assert s == b"http" + assert h == b"foo" + assert po == 80 + assert pa == b"/bar" + + s, h, po, pa = url.parse(b"http://user:pass@foo/bar") + assert s == b"http" + assert h == b"foo" + assert po == 80 + assert pa == b"/bar" + + s, h, po, pa = url.parse(b"http://foo") + assert pa == b"/" + + s, h, po, pa = url.parse(b"https://foo") + assert po == 443 + + with tutils.raises(ValueError): + url.parse(b"https://foo:bar") + + # Invalid IDNA + with tutils.raises(ValueError): + url.parse("http://\xfafoo") + # Invalid PATH + with tutils.raises(ValueError): + url.parse("http:/\xc6/localhost:56121") + # Null byte in host + with tutils.raises(ValueError): + url.parse("http://foo\0") + # Port out of range + _, _, port, _ = url.parse("http://foo:999999") + assert port == 80 + # Invalid IPv6 URL - see http://www.ietf.org/rfc/rfc2732.txt + with tutils.raises(ValueError): + url.parse('http://lo[calhost') + + +def test_unparse(): + assert url.unparse("http", "foo.com", 99, "") == "http://foo.com:99" + assert url.unparse("http", "foo.com", 80, "/bar") == "http://foo.com/bar" + assert url.unparse("https", "foo.com", 80, "") == "https://foo.com:80" + assert url.unparse("https", "foo.com", 443, "") == "https://foo.com" + + +surrogates = bytes(range(256)).decode("utf8", "surrogateescape") + +surrogates_quoted = ( + '%00%01%02%03%04%05%06%07%08%09%0A%0B%0C%0D%0E%0F' + '%10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E%1F' + '%20%21%22%23%24%25%26%27%28%29%2A%2B%2C-./' + '0123456789%3A%3B%3C%3D%3E%3F' + '%40ABCDEFGHIJKLMNO' + 'PQRSTUVWXYZ%5B%5C%5D%5E_' + '%60abcdefghijklmno' + 'pqrstuvwxyz%7B%7C%7D%7E%7F' + '%80%81%82%83%84%85%86%87%88%89%8A%8B%8C%8D%8E%8F' + '%90%91%92%93%94%95%96%97%98%99%9A%9B%9C%9D%9E%9F' + '%A0%A1%A2%A3%A4%A5%A6%A7%A8%A9%AA%AB%AC%AD%AE%AF' + '%B0%B1%B2%B3%B4%B5%B6%B7%B8%B9%BA%BB%BC%BD%BE%BF' + '%C0%C1%C2%C3%C4%C5%C6%C7%C8%C9%CA%CB%CC%CD%CE%CF' + '%D0%D1%D2%D3%D4%D5%D6%D7%D8%D9%DA%DB%DC%DD%DE%DF' + '%E0%E1%E2%E3%E4%E5%E6%E7%E8%E9%EA%EB%EC%ED%EE%EF' + '%F0%F1%F2%F3%F4%F5%F6%F7%F8%F9%FA%FB%FC%FD%FE%FF' +) + + +def test_encode(): + assert url.encode([('foo', 'bar')]) + assert url.encode([('foo', surrogates)]) + + +def test_decode(): + s = "one=two&three=four" + assert len(url.decode(s)) == 2 + assert url.decode(surrogates) + + +def test_quote(): + assert url.quote("foo") == "foo" + assert url.quote("foo bar") == "foo%20bar" + assert url.quote(surrogates) == surrogates_quoted + + +def test_unquote(): + assert url.unquote("foo") == "foo" + assert url.unquote("foo%20bar") == "foo bar" + assert url.unquote(surrogates_quoted) == 
surrogates diff --git a/test/mitmproxy/net/http/test_user_agents.py b/test/mitmproxy/net/http/test_user_agents.py new file mode 100644 index 00000000..9e391447 --- /dev/null +++ b/test/mitmproxy/net/http/test_user_agents.py @@ -0,0 +1,6 @@ +from mitmproxy.net.http import user_agents + + +def test_get_shortcut(): + assert user_agents.get_by_shortcut("c")[0] == "chrome" + assert not user_agents.get_by_shortcut("_") diff --git a/test/mitmproxy/net/test_check.py b/test/mitmproxy/net/test_check.py new file mode 100644 index 00000000..18a50157 --- /dev/null +++ b/test/mitmproxy/net/test_check.py @@ -0,0 +1,10 @@ +# coding=utf-8 + +from mitmproxy.net import check + + +def test_is_valid_host(): + assert not check.is_valid_host(b"") + assert check.is_valid_host(b"one.two") + assert not check.is_valid_host(b"one" * 255) + assert check.is_valid_host(b"one.two.") diff --git a/test/mitmproxy/net/test_imports.py b/test/mitmproxy/net/test_imports.py new file mode 100644 index 00000000..b88ef26d --- /dev/null +++ b/test/mitmproxy/net/test_imports.py @@ -0,0 +1 @@ +# These are actually tests! diff --git a/test/mitmproxy/net/test_socks.py b/test/mitmproxy/net/test_socks.py new file mode 100644 index 00000000..b6551faa --- /dev/null +++ b/test/mitmproxy/net/test_socks.py @@ -0,0 +1,189 @@ +import ipaddress +from io import BytesIO +from mitmproxy.net import socks +from mitmproxy.net import tcp +from mitmproxy.test import tutils + + +def test_client_greeting(): + raw = tutils.treader(b"\x05\x02\x00\xBE\xEF") + out = BytesIO() + msg = socks.ClientGreeting.from_file(raw) + msg.assert_socks5() + msg.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-1] + assert msg.ver == 5 + assert len(msg.methods) == 2 + assert 0xBE in msg.methods + assert 0xEF not in msg.methods + + +def test_client_greeting_assert_socks5(): + raw = tutils.treader(b"\x00\x00") + msg = socks.ClientGreeting.from_file(raw) + tutils.raises(socks.SocksError, msg.assert_socks5) + + raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100) + msg = socks.ClientGreeting.from_file(raw) + try: + msg.assert_socks5() + except socks.SocksError as e: + assert "Invalid SOCKS version" in str(e) + assert "HTTP" not in str(e) + else: + assert False + + raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100) + msg = socks.ClientGreeting.from_file(raw) + try: + msg.assert_socks5() + except socks.SocksError as e: + assert "Invalid SOCKS version" in str(e) + assert "HTTP" in str(e) + else: + assert False + + raw = tutils.treader(b"XX") + tutils.raises( + socks.SocksError, + socks.ClientGreeting.from_file, + raw, + fail_early=True) + + +def test_server_greeting(): + raw = tutils.treader(b"\x05\x02") + out = BytesIO() + msg = socks.ServerGreeting.from_file(raw) + msg.assert_socks5() + msg.to_file(out) + + assert out.getvalue() == raw.getvalue() + assert msg.ver == 5 + assert msg.method == 0x02 + + +def test_server_greeting_assert_socks5(): + raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100) + msg = socks.ServerGreeting.from_file(raw) + try: + msg.assert_socks5() + except socks.SocksError as e: + assert "Invalid SOCKS version" in str(e) + assert "HTTP" in str(e) + else: + assert False + + raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100) + msg = socks.ServerGreeting.from_file(raw) + try: + msg.assert_socks5() + except socks.SocksError as e: + assert "Invalid SOCKS version" in str(e) + assert "HTTP" not in str(e) + else: + assert False + + +def test_username_password_auth(): + raw = tutils.treader(b"\x01\x03usr\x03psd\xBE\xEF") + out = BytesIO() + auth = 
socks.UsernamePasswordAuth.from_file(raw) + auth.assert_authver1() + assert raw.read(2) == b"\xBE\xEF" + auth.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-2] + assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT + assert auth.username == "usr" + assert auth.password == "psd" + + +def test_username_password_auth_assert_ver1(): + raw = tutils.treader(b"\x02\x03usr\x03psd\xBE\xEF") + auth = socks.UsernamePasswordAuth.from_file(raw) + tutils.raises(socks.SocksError, auth.assert_authver1) + + +def test_username_password_auth_response(): + raw = tutils.treader(b"\x01\x00\xBE\xEF") + out = BytesIO() + auth = socks.UsernamePasswordAuthResponse.from_file(raw) + auth.assert_authver1() + assert raw.read(2) == b"\xBE\xEF" + auth.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-2] + assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT + assert auth.status == 0 + + +def test_username_password_auth_response_auth_assert_ver1(): + raw = tutils.treader(b"\x02\x00\xBE\xEF") + auth = socks.UsernamePasswordAuthResponse.from_file(raw) + tutils.raises(socks.SocksError, auth.assert_authver1) + + +def test_message(): + raw = tutils.treader(b"\x05\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF") + out = BytesIO() + msg = socks.Message.from_file(raw) + msg.assert_socks5() + assert raw.read(2) == b"\xBE\xEF" + msg.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-2] + assert msg.ver == 5 + assert msg.msg == 0x01 + assert msg.atyp == 0x03 + assert msg.addr == ("example.com", 0xDEAD) + + +def test_message_assert_socks5(): + raw = tutils.treader(b"\xEE\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF") + msg = socks.Message.from_file(raw) + tutils.raises(socks.SocksError, msg.assert_socks5) + + +def test_message_ipv4(): + # Test ATYP=0x01 (IPV4) + raw = tutils.treader(b"\x05\x01\x00\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") + out = BytesIO() + msg = socks.Message.from_file(raw) + left = raw.read(2) + assert left == b"\xBE\xEF" + msg.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-2] + assert msg.addr == ("127.0.0.1", 0xDEAD) + + +def test_message_ipv6(): + # Test ATYP=0x04 (IPV6) + ipv6_addr = u"2001:db8:85a3:8d3:1319:8a2e:370:7344" + + raw = tutils.treader( + b"\x05\x01\x00\x04" + + ipaddress.IPv6Address(ipv6_addr).packed + + b"\xDE\xAD\xBE\xEF") + out = BytesIO() + msg = socks.Message.from_file(raw) + assert raw.read(2) == b"\xBE\xEF" + msg.to_file(out) + + assert out.getvalue() == raw.getvalue()[:-2] + assert msg.addr.host == ipv6_addr + + +def test_message_invalid_rsv(): + raw = tutils.treader(b"\x05\x01\xFF\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") + tutils.raises(socks.SocksError, socks.Message.from_file, raw) + + +def test_message_unknown_atyp(): + raw = tutils.treader(b"\x05\x02\x00\x02\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") + tutils.raises(socks.SocksError, socks.Message.from_file, raw) + + m = socks.Message(5, 1, 0x02, tcp.Address(("example.com", 5050))) + tutils.raises(socks.SocksError, m.to_file, BytesIO()) diff --git a/test/mitmproxy/net/test_tcp.py b/test/mitmproxy/net/test_tcp.py new file mode 100644 index 00000000..8a7c692a --- /dev/null +++ b/test/mitmproxy/net/test_tcp.py @@ -0,0 +1,802 @@ +from io import BytesIO +import queue +import time +import socket +import random +import os +import threading +import mock + +from OpenSSL import SSL + +from mitmproxy import certs +from mitmproxy.net import tcp +from mitmproxy.test import tutils +from mitmproxy import exceptions + +from . 
import tservers + + +class EchoHandler(tcp.BaseHandler): + sni = None + + def handle_sni(self, connection): + self.sni = connection.get_servername() + + def handle(self): + v = self.rfile.readline() + self.wfile.write(v) + self.wfile.flush() + + +class ClientCipherListHandler(tcp.BaseHandler): + sni = None + + def handle(self): + self.wfile.write("%s" % self.connection.get_cipher_list()) + self.wfile.flush() + + +class HangHandler(tcp.BaseHandler): + + def handle(self): + # Hang as long as the client connection is alive + while True: + try: + self.connection.setblocking(0) + ret = self.connection.recv(1) + # Client connection is dead... + if ret == "" or ret == b"": + return + except socket.error: + pass + except SSL.WantReadError: + pass + except Exception: + return + time.sleep(0.1) + + +class ALPNHandler(tcp.BaseHandler): + sni = None + + def handle(self): + alp = self.get_alpn_proto_negotiated() + if alp: + self.wfile.write(alp) + else: + self.wfile.write(b"NONE") + self.wfile.flush() + + +class TestServer(tservers.ServerTestBase): + handler = EchoHandler + + def test_echo(self): + testval = b"echo!\n" + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + def test_thread_start_error(self): + with mock.patch.object(threading.Thread, "start", side_effect=threading.ThreadError("nonewthread")) as m: + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + assert not c.rfile.read(1) + assert m.called + assert "nonewthread" in self.q.get_nowait() + self.test_echo() + + +class TestServerBind(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + + def handle(self): + self.wfile.write(str(self.connection.getpeername()).encode()) + self.wfile.flush() + + def test_bind(self): + """ Test to bind to a given random port. Try again if the random port turned out to be blocked. """ + for i in range(20): + random_port = random.randrange(1024, 65535) + try: + c = tcp.TCPClient( + ("127.0.0.1", self.port), source_address=( + "127.0.0.1", random_port)) + with c.connect(): + assert c.rfile.readline() == str(("127.0.0.1", random_port)).encode() + return + except exceptions.TcpException: # port probably already in use + pass + + +class TestServerIPv6(tservers.ServerTestBase): + handler = EchoHandler + addr = tcp.Address(("localhost", 0), use_ipv6=True) + + def test_echo(self): + testval = b"echo!\n" + c = tcp.TCPClient(tcp.Address(("::1", self.port), use_ipv6=True)) + with c.connect(): + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + +class TestEcho(tservers.ServerTestBase): + handler = EchoHandler + + def test_echo(self): + testval = b"echo!\n" + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + +class HardDisconnectHandler(tcp.BaseHandler): + + def handle(self): + self.connection.close() + + +class TestFinishFail(tservers.ServerTestBase): + + """ + This tests a difficult-to-trigger exception in the .finish() method of + the handler. 
+ """ + handler = EchoHandler + + def test_disconnect_in_finish(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.wfile.write(b"foo\n") + c.wfile.flush = mock.Mock(side_effect=exceptions.TcpDisconnect) + c.finish() + + +class TestServerSSL(tservers.ServerTestBase): + handler = EchoHandler + ssl = dict( + cipher_list="AES256-SHA", + chain_file=tutils.test_data.path("data/server.crt") + ) + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(sni="foo.com", options=SSL.OP_ALL) + testval = b"echo!\n" + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + def test_get_current_cipher(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + assert not c.get_current_cipher() + c.convert_to_ssl(sni="foo.com") + ret = c.get_current_cipher() + assert ret + assert "AES" in ret[0] + + +class TestSSLv3Only(tservers.ServerTestBase): + handler = EchoHandler + ssl = dict( + request_client_cert=False, + v3_only=True + ) + + def test_failure(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + tutils.raises(exceptions.TlsException, c.convert_to_ssl, sni="foo.com") + + +class TestSSLUpstreamCertVerificationWBadServerCert(tservers.ServerTestBase): + handler = EchoHandler + + ssl = dict( + cert=tutils.test_data.path("data/verificationcerts/self-signed.crt"), + key=tutils.test_data.path("data/verificationcerts/self-signed.key") + ) + + def test_mode_default_should_pass(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + + # Verification errors should be saved even if connection isn't aborted + # aborted + assert c.ssl_verification_error + + testval = b"echo!\n" + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + def test_mode_none_should_pass(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(verify_options=SSL.VERIFY_NONE) + + # Verification errors should be saved even if connection isn't aborted + assert c.ssl_verification_error + + testval = b"echo!\n" + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + def test_mode_strict_should_fail(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + with tutils.raises(exceptions.InvalidCertificateException): + c.convert_to_ssl( + sni="example.mitmproxy.org", + verify_options=SSL.VERIFY_PEER, + ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") + ) + + assert c.ssl_verification_error + + # Unknown issuing certificate authority for first certificate + assert "errno: 18" in str(c.ssl_verification_error) + assert "depth: 0" in str(c.ssl_verification_error) + + +class TestSSLUpstreamCertVerificationWBadHostname(tservers.ServerTestBase): + handler = EchoHandler + + ssl = dict( + cert=tutils.test_data.path("data/verificationcerts/trusted-leaf.crt"), + key=tutils.test_data.path("data/verificationcerts/trusted-leaf.key") + ) + + def test_should_fail_without_sni(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + with tutils.raises(exceptions.TlsException): + c.convert_to_ssl( + verify_options=SSL.VERIFY_PEER, + ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") + ) + + def test_should_fail(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + with tutils.raises(exceptions.InvalidCertificateException): + c.convert_to_ssl( + sni="mitmproxy.org", + 
verify_options=SSL.VERIFY_PEER, + ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") + ) + assert c.ssl_verification_error + + +class TestSSLUpstreamCertVerificationWValidCertChain(tservers.ServerTestBase): + handler = EchoHandler + + ssl = dict( + cert=tutils.test_data.path("data/verificationcerts/trusted-leaf.crt"), + key=tutils.test_data.path("data/verificationcerts/trusted-leaf.key") + ) + + def test_mode_strict_w_pemfile_should_pass(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl( + sni="example.mitmproxy.org", + verify_options=SSL.VERIFY_PEER, + ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") + ) + + assert c.ssl_verification_error is None + + testval = b"echo!\n" + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + def test_mode_strict_w_cadir_should_pass(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl( + sni="example.mitmproxy.org", + verify_options=SSL.VERIFY_PEER, + ca_path=tutils.test_data.path("data/verificationcerts/") + ) + + assert c.ssl_verification_error is None + + testval = b"echo!\n" + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + + +class TestSSLClientCert(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + sni = None + + def handle_sni(self, connection): + self.sni = connection.get_servername() + + def handle(self): + self.wfile.write(b"%d\n" % self.clientcert.serial) + self.wfile.flush() + + ssl = dict( + request_client_cert=True, + v3_only=False + ) + + def test_clientcert(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl( + cert=tutils.test_data.path("data/clientcert/client.pem")) + assert c.rfile.readline().strip() == b"1" + + def test_clientcert_err(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + tutils.raises( + exceptions.TlsException, + c.convert_to_ssl, + cert=tutils.test_data.path("data/clientcert/make") + ) + + +class TestSNI(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + sni = None + + def handle_sni(self, connection): + self.sni = connection.get_servername() + + def handle(self): + self.wfile.write(self.sni) + self.wfile.flush() + + ssl = True + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(sni="foo.com") + assert c.sni == "foo.com" + assert c.rfile.readline() == b"foo.com" + + +class TestServerCipherList(tservers.ServerTestBase): + handler = ClientCipherListHandler + ssl = dict( + cipher_list='RC4-SHA' + ) + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(sni="foo.com") + assert c.rfile.readline() == b"['RC4-SHA']" + + +class TestServerCurrentCipher(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + sni = None + + def handle(self): + self.wfile.write(str(self.get_current_cipher()).encode()) + self.wfile.flush() + + ssl = dict( + cipher_list='RC4-SHA' + ) + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(sni="foo.com") + assert b"RC4-SHA" in c.rfile.readline() + + +class TestServerCipherListError(tservers.ServerTestBase): + handler = ClientCipherListHandler + ssl = dict( + cipher_list='bogus' + ) + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + tutils.raises("handshake error", c.convert_to_ssl, sni="foo.com") + + 
+class TestClientCipherListError(tservers.ServerTestBase): + handler = ClientCipherListHandler + ssl = dict( + cipher_list='RC4-SHA' + ) + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + tutils.raises( + "cipher specification", + c.convert_to_ssl, + sni="foo.com", + cipher_list="bogus" + ) + + +class TestSSLDisconnect(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + + def handle(self): + self.finish() + + ssl = True + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + # Excercise SSL.ZeroReturnError + c.rfile.read(10) + c.close() + tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") + tutils.raises(queue.Empty, self.q.get_nowait) + + +class TestSSLHardDisconnect(tservers.ServerTestBase): + handler = HardDisconnectHandler + ssl = True + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + # Exercise SSL.SysCallError + c.rfile.read(10) + c.close() + tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") + + +class TestDisconnect(tservers.ServerTestBase): + + def test_echo(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.rfile.read(10) + c.wfile.write(b"foo") + c.close() + c.close() + + +class TestServerTimeOut(tservers.ServerTestBase): + + class handler(tcp.BaseHandler): + + def handle(self): + self.timeout = False + self.settimeout(0.01) + try: + self.rfile.read(10) + except exceptions.TcpTimeout: + self.timeout = True + + def test_timeout(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + time.sleep(0.3) + assert self.last_handler.timeout + + +class TestTimeOut(tservers.ServerTestBase): + handler = HangHandler + + def test_timeout(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.settimeout(0.1) + assert c.gettimeout() == 0.1 + tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) + + +class TestALPNClient(tservers.ServerTestBase): + handler = ALPNHandler + ssl = dict( + alpn_select=b"bar" + ) + + if tcp.HAS_ALPN: + def test_alpn(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(alpn_protos=[b"foo", b"bar", b"fasel"]) + assert c.get_alpn_proto_negotiated() == b"bar" + assert c.rfile.readline().strip() == b"bar" + + def test_no_alpn(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + assert c.get_alpn_proto_negotiated() == b"" + assert c.rfile.readline().strip() == b"NONE" + + else: + def test_none_alpn(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl(alpn_protos=[b"foo", b"bar", b"fasel"]) + assert c.get_alpn_proto_negotiated() == b"" + assert c.rfile.readline() == b"NONE" + + +class TestNoSSLNoALPNClient(tservers.ServerTestBase): + handler = ALPNHandler + + def test_no_ssl_no_alpn(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + assert c.get_alpn_proto_negotiated() == b"" + assert c.rfile.readline().strip() == b"NONE" + + +class TestSSLTimeOut(tservers.ServerTestBase): + handler = HangHandler + ssl = True + + def test_timeout_client(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + c.settimeout(0.1) + tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) + + +class TestDHParams(tservers.ServerTestBase): + handler = HangHandler + ssl = dict( + dhparams=certs.CertStore.load_dhparam( + 
tutils.test_data.path("data/dhparam.pem"), + ), + cipher_list="DHE-RSA-AES256-SHA" + ) + + def test_dhparams(self): + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + ret = c.get_current_cipher() + assert ret[0] == "DHE-RSA-AES256-SHA" + + def test_create_dhparams(self): + with tutils.tmpdir() as d: + filename = os.path.join(d, "dhparam.pem") + certs.CertStore.load_dhparam(filename) + assert os.path.exists(filename) + + +class TestTCPClient: + + def test_conerr(self): + c = tcp.TCPClient(("127.0.0.1", 0)) + tutils.raises(exceptions.TcpException, c.connect) + + +class TestFileLike: + + def test_blocksize(self): + s = BytesIO(b"1234567890abcdefghijklmnopqrstuvwxyz") + s = tcp.Reader(s) + s.BLOCKSIZE = 2 + assert s.read(1) == b"1" + assert s.read(2) == b"23" + assert s.read(3) == b"456" + assert s.read(4) == b"7890" + d = s.read(-1) + assert d.startswith(b"abc") and d.endswith(b"xyz") + + def test_wrap(self): + s = BytesIO(b"foobar\nfoobar") + s.flush() + s = tcp.Reader(s) + assert s.readline() == b"foobar\n" + assert s.readline() == b"foobar" + # Test __getattr__ + assert s.isatty + + def test_limit(self): + s = BytesIO(b"foobar\nfoobar") + s = tcp.Reader(s) + assert s.readline(3) == b"foo" + + def test_limitless(self): + s = BytesIO(b"f" * (50 * 1024)) + s = tcp.Reader(s) + ret = s.read(-1) + assert len(ret) == 50 * 1024 + + def test_readlog(self): + s = BytesIO(b"foobar\nfoobar") + s = tcp.Reader(s) + assert not s.is_logging() + s.start_log() + assert s.is_logging() + s.readline() + assert s.get_log() == b"foobar\n" + s.read(1) + assert s.get_log() == b"foobar\nf" + s.start_log() + assert s.get_log() == b"" + s.read(1) + assert s.get_log() == b"o" + s.stop_log() + tutils.raises(ValueError, s.get_log) + + def test_writelog(self): + s = BytesIO() + s = tcp.Writer(s) + s.start_log() + assert s.is_logging() + s.write(b"x") + assert s.get_log() == b"x" + s.write(b"x") + assert s.get_log() == b"xx" + + def test_writer_flush_error(self): + s = BytesIO() + s = tcp.Writer(s) + o = mock.MagicMock() + o.flush = mock.MagicMock(side_effect=socket.error) + s.o = o + tutils.raises(exceptions.TcpDisconnect, s.flush) + + def test_reader_read_error(self): + s = BytesIO(b"foobar\nfoobar") + s = tcp.Reader(s) + o = mock.MagicMock() + o.read = mock.MagicMock(side_effect=socket.error) + s.o = o + tutils.raises(exceptions.TcpDisconnect, s.read, 10) + + def test_reset_timestamps(self): + s = BytesIO(b"foobar\nfoobar") + s = tcp.Reader(s) + s.first_byte_timestamp = 500 + s.reset_timestamps() + assert not s.first_byte_timestamp + + def test_first_byte_timestamp_updated_on_read(self): + s = BytesIO(b"foobar\nfoobar") + s = tcp.Reader(s) + s.read(1) + assert s.first_byte_timestamp + expected = s.first_byte_timestamp + s.read(5) + assert s.first_byte_timestamp == expected + + def test_first_byte_timestamp_updated_on_readline(self): + s = BytesIO(b"foobar\nfoobar\nfoobar") + s = tcp.Reader(s) + s.readline() + assert s.first_byte_timestamp + expected = s.first_byte_timestamp + s.readline() + assert s.first_byte_timestamp == expected + + def test_read_ssl_error(self): + s = mock.MagicMock() + s.read = mock.MagicMock(side_effect=SSL.Error()) + s = tcp.Reader(s) + tutils.raises(exceptions.TlsException, s.read, 1) + + def test_read_syscall_ssl_error(self): + s = mock.MagicMock() + s.read = mock.MagicMock(side_effect=SSL.SysCallError()) + s = tcp.Reader(s) + tutils.raises(exceptions.TlsException, s.read, 1) + + def test_reader_readline_disconnect(self): + o = mock.MagicMock() + o.read = 
mock.MagicMock(side_effect=socket.error) + s = tcp.Reader(o) + tutils.raises(exceptions.TcpDisconnect, s.readline, 10) + + def test_reader_incomplete_error(self): + s = BytesIO(b"foobar") + s = tcp.Reader(s) + tutils.raises(exceptions.TcpReadIncomplete, s.safe_read, 10) + + +class TestPeek(tservers.ServerTestBase): + handler = EchoHandler + + def _connect(self, c): + return c.connect() + + def test_peek(self): + testval = b"peek!\n" + c = tcp.TCPClient(("127.0.0.1", self.port)) + with self._connect(c): + c.wfile.write(testval) + c.wfile.flush() + + assert c.rfile.peek(4) == b"peek" + assert c.rfile.peek(6) == b"peek!\n" + assert c.rfile.readline() == testval + + c.close() + with tutils.raises(exceptions.NetlibException): + if c.rfile.peek(1) == b"": + # Workaround for Python 2 on Unix: + # Peeking a closed connection does not raise an exception here. + raise exceptions.NetlibException() + + +class TestPeekSSL(TestPeek): + ssl = True + + def _connect(self, c): + with c.connect() as conn: + c.convert_to_ssl() + return conn.pop() + + +class TestAddress: + def test_simple(self): + a = tcp.Address(("localhost", 80), True) + assert a.use_ipv6 + b = tcp.Address(("foo.com", 80), True) + assert not a == b + c = tcp.Address(("localhost", 80), True) + assert a == c + assert not a != c + assert repr(a) == "localhost:80" + + +class TestSSLKeyLogger(tservers.ServerTestBase): + handler = EchoHandler + ssl = dict( + cipher_list="AES256-SHA" + ) + + def test_log(self): + testval = b"echo!\n" + _logfun = tcp.log_ssl_key + + with tutils.tmpdir() as d: + logfile = os.path.join(d, "foo", "bar", "logfile") + tcp.log_ssl_key = tcp.SSLKeyLogger(logfile) + + c = tcp.TCPClient(("127.0.0.1", self.port)) + with c.connect(): + c.convert_to_ssl() + c.wfile.write(testval) + c.wfile.flush() + assert c.rfile.readline() == testval + c.finish() + + tcp.log_ssl_key.close() + with open(logfile, "rb") as f: + assert f.read().count(b"CLIENT_RANDOM") == 2 + + tcp.log_ssl_key = _logfun + + def test_create_logfun(self): + assert isinstance( + tcp.SSLKeyLogger.create_logfun("test"), + tcp.SSLKeyLogger) + assert not tcp.SSLKeyLogger.create_logfun(False) diff --git a/test/mitmproxy/net/test_wsgi.py b/test/mitmproxy/net/test_wsgi.py new file mode 100644 index 00000000..b4d6b53f --- /dev/null +++ b/test/mitmproxy/net/test_wsgi.py @@ -0,0 +1,106 @@ +from io import BytesIO +import sys +from mitmproxy.net import wsgi +from mitmproxy.net.http import Headers + + +def tflow(): + headers = Headers(test=b"value") + req = wsgi.Request("http", "GET", "/", "HTTP/1.1", headers, "") + return wsgi.Flow(("127.0.0.1", 8888), req) + + +class ExampleApp: + + def __init__(self): + self.called = False + + def __call__(self, environ, start_response): + self.called = True + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers) + return [b'Hello', b' world!\n'] + + +class TestWSGI: + + def test_make_environ(self): + w = wsgi.WSGIAdaptor(None, "foo", 80, "version") + tf = tflow() + assert w.make_environ(tf, None) + + tf.request.path = "/foo?bar=voing" + r = w.make_environ(tf, None) + assert r["QUERY_STRING"] == "bar=voing" + + def test_serve(self): + ta = ExampleApp() + w = wsgi.WSGIAdaptor(ta, "foo", 80, "version") + f = tflow() + f.request.host = "foo" + f.request.port = 80 + + wfile = BytesIO() + err = w.serve(f, wfile) + assert ta.called + assert not err + + val = wfile.getvalue() + assert b"Hello world" in val + assert b"Server:" in val + + def _serve(self, app): + w = wsgi.WSGIAdaptor(app, 
"foo", 80, "version") + f = tflow() + f.request.host = "foo" + f.request.port = 80 + wfile = BytesIO() + w.serve(f, wfile) + return wfile.getvalue() + + def test_serve_empty_body(self): + def app(environ, start_response): + status = '200 OK' + response_headers = [('Foo', 'bar')] + start_response(status, response_headers) + return [] + assert self._serve(app) + + def test_serve_double_start(self): + def app(environ, start_response): + try: + raise ValueError("foo") + except: + sys.exc_info() + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers) + start_response(status, response_headers) + assert b"Internal Server Error" in self._serve(app) + + def test_serve_single_err(self): + def app(environ, start_response): + try: + raise ValueError("foo") + except: + ei = sys.exc_info() + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers, ei) + yield b"" + assert b"Internal Server Error" in self._serve(app) + + def test_serve_double_err(self): + def app(environ, start_response): + try: + raise ValueError("foo") + except: + ei = sys.exc_info() + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers) + yield b"aaa" + start_response(status, response_headers, ei) + yield b"bbb" + assert b"Internal Server Error" in self._serve(app) diff --git a/test/mitmproxy/net/tools/getcertnames b/test/mitmproxy/net/tools/getcertnames new file mode 100644 index 00000000..d64e5ff5 --- /dev/null +++ b/test/mitmproxy/net/tools/getcertnames @@ -0,0 +1,27 @@ +#!/usr/bin/env python +import sys +sys.path.insert(0, "../../") +from mitmproxy.net import tcp + + +def get_remote_cert(host, port, sni): + c = tcp.TCPClient((host, port)) + c.connect() + c.convert_to_ssl(sni=sni) + return c.cert + +if len(sys.argv) > 2: + port = int(sys.argv[2]) +else: + port = 443 +if len(sys.argv) > 3: + sni = sys.argv[3] +else: + sni = None + +cert = get_remote_cert(sys.argv[1], port, sni) +print("CN:", cert.cn) +if cert.altnames: + print("SANs:") + for i in cert.altnames: + print("\t", i) diff --git a/test/mitmproxy/net/tservers.py b/test/mitmproxy/net/tservers.py new file mode 100644 index 00000000..ba10ef5e --- /dev/null +++ b/test/mitmproxy/net/tservers.py @@ -0,0 +1,113 @@ +import threading +import queue +import io +import OpenSSL + +from mitmproxy.net import tcp +from mitmproxy.test import tutils + + +class _ServerThread(threading.Thread): + + def __init__(self, server): + self.server = server + threading.Thread.__init__(self) + + def run(self): + self.server.serve_forever() + + def shutdown(self): + self.server.shutdown() + + +class _TServer(tcp.TCPServer): + + def __init__(self, ssl, q, handler_klass, addr, **kwargs): + """ + ssl: A dictionary of SSL parameters: + + cert, key, request_client_cert, cipher_list, + dhparams, v3_only + """ + tcp.TCPServer.__init__(self, addr) + + if ssl is True: + self.ssl = dict() + elif isinstance(ssl, dict): + self.ssl = ssl + else: + self.ssl = None + + self.q = q + self.handler_klass = handler_klass + if self.handler_klass is not None: + self.handler_klass.kwargs = kwargs + self.last_handler = None + + def handle_client_connection(self, request, client_address): + h = self.handler_klass(request, client_address, self) + self.last_handler = h + if self.ssl is not None: + cert = self.ssl.get( + "cert", + tutils.test_data.path("data/server.crt")) + raw_key = self.ssl.get( + "key", + tutils.test_data.path("data/server.key")) + 
key = OpenSSL.crypto.load_privatekey( + OpenSSL.crypto.FILETYPE_PEM, + open(raw_key, "rb").read()) + if self.ssl.get("v3_only", False): + method = OpenSSL.SSL.SSLv3_METHOD + options = OpenSSL.SSL.OP_NO_SSLv2 | OpenSSL.SSL.OP_NO_TLSv1 + else: + method = OpenSSL.SSL.SSLv23_METHOD + options = None + h.convert_to_ssl( + cert, key, + method=method, + options=options, + handle_sni=getattr(h, "handle_sni", None), + request_client_cert=self.ssl.get("request_client_cert", None), + cipher_list=self.ssl.get("cipher_list", None), + dhparams=self.ssl.get("dhparams", None), + chain_file=self.ssl.get("chain_file", None), + alpn_select=self.ssl.get("alpn_select", None) + ) + h.handle() + h.finish() + + def handle_error(self, connection, client_address, fp=None): + s = io.StringIO() + tcp.TCPServer.handle_error(self, connection, client_address, s) + self.q.put(s.getvalue()) + + +class ServerTestBase: + ssl = None + handler = None + addr = ("localhost", 0) + + @classmethod + def setup_class(cls, **kwargs): + cls.q = queue.Queue() + s = cls.makeserver(**kwargs) + cls.port = s.address.port + cls.server = _ServerThread(s) + cls.server.start() + + @classmethod + def makeserver(cls, **kwargs): + ssl = kwargs.pop('ssl', cls.ssl) + return _TServer(ssl, cls.q, cls.handler, cls.addr, **kwargs) + + @classmethod + def teardown_class(cls): + cls.server.shutdown() + + def teardown(self): + self.server.server.wait_for_silence() + + @property + def last_handler(self): + return self.server.server.last_handler diff --git a/test/mitmproxy/net/websockets/__init__.py b/test/mitmproxy/net/websockets/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/mitmproxy/net/websockets/test_frame.py b/test/mitmproxy/net/websockets/test_frame.py new file mode 100644 index 00000000..198be3d5 --- /dev/null +++ b/test/mitmproxy/net/websockets/test_frame.py @@ -0,0 +1,164 @@ +import os +import codecs +import pytest + +from mitmproxy.net import websockets +from mitmproxy.test import tutils + + +class TestFrameHeader: + + @pytest.mark.parametrize("input,expected", [ + (0, '0100'), + (125, '017D'), + (126, '017E007E'), + (127, '017E007F'), + (142, '017E008E'), + (65534, '017EFFFE'), + (65535, '017EFFFF'), + (65536, '017F0000000000010000'), + (8589934591, '017F00000001FFFFFFFF'), + (2 ** 64 - 1, '017FFFFFFFFFFFFFFFFF'), + ]) + def test_serialization_length(self, input, expected): + h = websockets.FrameHeader( + opcode=websockets.OPCODE.TEXT, + payload_length=input, + ) + assert bytes(h) == codecs.decode(expected, 'hex') + + def test_serialization_too_large(self): + h = websockets.FrameHeader( + payload_length=2 ** 64 + 1, + ) + with pytest.raises(ValueError): + bytes(h) + + @pytest.mark.parametrize("input,expected", [ + ('0100', 0), + ('017D', 125), + ('017E007E', 126), + ('017E007F', 127), + ('017E008E', 142), + ('017EFFFE', 65534), + ('017EFFFF', 65535), + ('017F0000000000010000', 65536), + ('017F00000001FFFFFFFF', 8589934591), + ('017FFFFFFFFFFFFFFFFF', 2 ** 64 - 1), + ]) + def test_deserialization_length(self, input, expected): + h = websockets.FrameHeader.from_file(tutils.treader(codecs.decode(input, 'hex'))) + assert h.payload_length == expected + + @pytest.mark.parametrize("input,expected", [ + ('0100', (False, None)), + ('018000000000', (True, '00000000')), + ('018012345678', (True, '12345678')), + ]) + def test_deserialization_masking(self, input, expected): + h = websockets.FrameHeader.from_file(tutils.treader(codecs.decode(input, 'hex'))) + assert h.mask == expected[0] + if h.mask: + assert h.masking_key == 
codecs.decode(expected[1], 'hex') + + def test_equality(self): + h = websockets.FrameHeader(mask=True, masking_key=b'1234') + h2 = websockets.FrameHeader(mask=True, masking_key=b'1234') + assert h == h2 + + h = websockets.FrameHeader(fin=True) + h2 = websockets.FrameHeader(fin=False) + assert h != h2 + + assert h != 'foobar' + + def test_roundtrip(self): + def round(*args, **kwargs): + h = websockets.FrameHeader(*args, **kwargs) + h2 = websockets.FrameHeader.from_file(tutils.treader(bytes(h))) + assert h == h2 + + round() + round(fin=True) + round(rsv1=True) + round(rsv2=True) + round(rsv3=True) + round(payload_length=1) + round(payload_length=100) + round(payload_length=1000) + round(payload_length=10000) + round(opcode=websockets.OPCODE.PING) + round(masking_key=b"test") + + def test_human_readable(self): + f = websockets.FrameHeader( + masking_key=b"test", + fin=True, + payload_length=10 + ) + assert repr(f) + + f = websockets.FrameHeader() + assert repr(f) + + def test_funky(self): + f = websockets.FrameHeader(masking_key=b"test", mask=False) + raw = bytes(f) + f2 = websockets.FrameHeader.from_file(tutils.treader(raw)) + assert not f2.mask + + def test_violations(self): + tutils.raises("opcode", websockets.FrameHeader, opcode=17) + tutils.raises("masking key", websockets.FrameHeader, masking_key=b"x") + + def test_automask(self): + f = websockets.FrameHeader(mask=True) + assert f.masking_key + + f = websockets.FrameHeader(masking_key=b"foob") + assert f.mask + + f = websockets.FrameHeader(masking_key=b"foob", mask=0) + assert not f.mask + assert f.masking_key + + +class TestFrame: + def test_equality(self): + f = websockets.Frame(payload=b'1234') + f2 = websockets.Frame(payload=b'1234') + assert f == f2 + + assert f != b'1234' + + def test_roundtrip(self): + def round(*args, **kwargs): + f = websockets.Frame(*args, **kwargs) + raw = bytes(f) + f2 = websockets.Frame.from_file(tutils.treader(raw)) + assert f == f2 + round(b"test") + round(b"test", fin=1) + round(b"test", rsv1=1) + round(b"test", opcode=websockets.OPCODE.PING) + round(b"test", masking_key=b"test") + + def test_human_readable(self): + f = websockets.Frame() + assert repr(f) + + f = websockets.Frame(b"foobar") + assert "foobar" in repr(f) + + @pytest.mark.parametrize("masked", [True, False]) + @pytest.mark.parametrize("length", [100, 50000, 150000]) + def test_serialization_bijection(self, masked, length): + frame = websockets.Frame( + os.urandom(length), + fin=True, + opcode=websockets.OPCODE.TEXT, + mask=int(masked), + masking_key=(os.urandom(4) if masked else None) + ) + serialized = bytes(frame) + assert frame == websockets.Frame.from_bytes(serialized) diff --git a/test/mitmproxy/net/websockets/test_masker.py b/test/mitmproxy/net/websockets/test_masker.py new file mode 100644 index 00000000..88360c3b --- /dev/null +++ b/test/mitmproxy/net/websockets/test_masker.py @@ -0,0 +1,23 @@ +import codecs +import pytest + +from mitmproxy.net import websockets + + +class TestMasker: + + @pytest.mark.parametrize("input,expected", [ + ([b"a"], '00'), + ([b"four"], '070d1616'), + ([b"fourf"], '070d161607'), + ([b"fourfive"], '070d1616070b1501'), + ([b"a", b"aasdfasdfa", b"asdf"], '000302170504021705040205120605'), + ([b"a" * 50, b"aasdfasdfa", b"asdf"], '00030205000302050003020500030205000302050003020500030205000302050003020500030205000302050003020500030205120605051206050500110702'), # noqa + ]) + def test_masker(self, input, expected): + m = websockets.Masker(b"abcd") + data = b"".join([m(t) for t in input]) + assert data == 
codecs.decode(expected, 'hex') + + data = websockets.Masker(b"abcd")(data) + assert data == b"".join(input) diff --git a/test/mitmproxy/net/websockets/test_utils.py b/test/mitmproxy/net/websockets/test_utils.py new file mode 100644 index 00000000..6704d39d --- /dev/null +++ b/test/mitmproxy/net/websockets/test_utils.py @@ -0,0 +1,105 @@ +import pytest + +from mitmproxy.net import http +from mitmproxy.net import websockets + + +class TestUtils: + + def test_client_handshake_headers(self): + h = websockets.client_handshake_headers(version='42') + assert h['sec-websocket-version'] == '42' + + h = websockets.client_handshake_headers(key='some-key') + assert h['sec-websocket-key'] == 'some-key' + + h = websockets.client_handshake_headers(protocol='foobar') + assert h['sec-websocket-protocol'] == 'foobar' + + h = websockets.client_handshake_headers(extensions='foo; bar') + assert h['sec-websocket-extensions'] == 'foo; bar' + + def test_server_handshake_headers(self): + h = websockets.server_handshake_headers('some-key') + assert h['sec-websocket-accept'] == '8iILEZtcVdtFD7MDlPKip9ec9nw=' + assert 'sec-websocket-protocol' not in h + assert 'sec-websocket-extensions' not in h + + h = websockets.server_handshake_headers('some-key', 'foobar', 'foo; bar') + assert h['sec-websocket-accept'] == '8iILEZtcVdtFD7MDlPKip9ec9nw=' + assert h['sec-websocket-protocol'] == 'foobar' + assert h['sec-websocket-extensions'] == 'foo; bar' + + @pytest.mark.parametrize("input,expected", [ + ([(b'connection', b'upgrade'), (b'upgrade', b'websocket'), (b'sec-websocket-key', b'foobar')], True), + ([(b'connection', b'upgrade'), (b'upgrade', b'websocket'), (b'sec-websocket-accept', b'foobar')], True), + ([(b'Connection', b'UpgRaDe'), (b'Upgrade', b'WebSocKeT'), (b'Sec-WebSockeT-KeY', b'foobar')], True), + ([(b'Connection', b'UpgRaDe'), (b'Upgrade', b'WebSocKeT'), (b'Sec-WebSockeT-AccePt', b'foobar')], True), + ([(b'connection', b'foo'), (b'upgrade', b'bar'), (b'sec-websocket-key', b'foobar')], False), + ([(b'connection', b'upgrade'), (b'upgrade', b'websocket')], False), + ([(b'connection', b'upgrade'), (b'sec-websocket-key', b'foobar')], False), + ([(b'upgrade', b'websocket'), (b'sec-websocket-key', b'foobar')], False), + ([], False), + ]) + def test_check_handshake(self, input, expected): + h = http.Headers(input) + assert websockets.check_handshake(h) == expected + + @pytest.mark.parametrize("input,expected", [ + ([(b'sec-websocket-version', b'13')], True), + ([(b'Sec-WebSockeT-VerSion', b'13')], True), + ([(b'sec-websocket-version', b'9')], False), + ([(b'sec-websocket-version', b'42')], False), + ([(b'sec-websocket-version', b'')], False), + ([], False), + ]) + def test_check_client_version(self, input, expected): + h = http.Headers(input) + assert websockets.check_client_version(h) == expected + + @pytest.mark.parametrize("input,expected", [ + ('foobar', b'AzhRPA4TNwR6I/riJheN0TfR7+I='), + (b'foobar', b'AzhRPA4TNwR6I/riJheN0TfR7+I='), + ]) + def test_create_server_nonce(self, input, expected): + assert websockets.create_server_nonce(input) == expected + + @pytest.mark.parametrize("input,expected", [ + ([(b'sec-websocket-extensions', b'foo; bar')], 'foo; bar'), + ([(b'Sec-WebSockeT-ExteNsionS', b'foo; bar')], 'foo; bar'), + ([(b'sec-websocket-extensions', b'')], ''), + ([], None), + ]) + def test_get_extensions(self, input, expected): + h = http.Headers(input) + assert websockets.get_extensions(h) == expected + + @pytest.mark.parametrize("input,expected", [ + ([(b'sec-websocket-protocol', b'foobar')], 'foobar'), + 
([(b'Sec-WebSockeT-ProTocoL', b'foobar')], 'foobar'), + ([(b'sec-websocket-protocol', b'')], ''), + ([], None), + ]) + def test_get_protocol(self, input, expected): + h = http.Headers(input) + assert websockets.get_protocol(h) == expected + + @pytest.mark.parametrize("input,expected", [ + ([(b'sec-websocket-key', b'foobar')], 'foobar'), + ([(b'Sec-WebSockeT-KeY', b'foobar')], 'foobar'), + ([(b'sec-websocket-key', b'')], ''), + ([], None), + ]) + def test_get_client_key(self, input, expected): + h = http.Headers(input) + assert websockets.get_client_key(h) == expected + + @pytest.mark.parametrize("input,expected", [ + ([(b'sec-websocket-accept', b'foobar')], 'foobar'), + ([(b'Sec-WebSockeT-AccepT', b'foobar')], 'foobar'), + ([(b'sec-websocket-accept', b'')], ''), + ([], None), + ]) + def test_get_server_accept(self, input, expected): + h = http.Headers(input) + assert websockets.get_server_accept(h) == expected diff --git a/test/mitmproxy/protocol/test_http1.py b/test/mitmproxy/protocol/test_http1.py index d18ff411..43c903f3 100644 --- a/test/mitmproxy/protocol/test_http1.py +++ b/test/mitmproxy/protocol/test_http1.py @@ -1,5 +1,5 @@ -from netlib.http import http1 -from netlib.tcp import TCPClient +from mitmproxy.net.http import http1 +from mitmproxy.net.tcp import TCPClient from mitmproxy.test.tutils import treq from .. import tutils, tservers diff --git a/test/mitmproxy/protocol/test_http2.py b/test/mitmproxy/protocol/test_http2.py index b624489f..4629d109 100644 --- a/test/mitmproxy/protocol/test_http2.py +++ b/test/mitmproxy/protocol/test_http2.py @@ -11,10 +11,10 @@ import h2 from mitmproxy import options from mitmproxy.proxy.config import ProxyConfig -import netlib -from ...netlib import tservers as netlib_tservers +import mitmproxy.net +from ...mitmproxy.net import tservers as net_tservers from mitmproxy import exceptions -from netlib.http import http1, http2 +from mitmproxy.net.http import http1, http2 from .. 
import tservers @@ -28,7 +28,7 @@ logging.getLogger("PIL.PngImagePlugin").setLevel(logging.WARNING) requires_alpn = pytest.mark.skipif( - not netlib.tcp.HAS_ALPN, + not mitmproxy.net.tcp.HAS_ALPN, reason='requires OpenSSL with ALPN support') @@ -37,10 +37,10 @@ requires_alpn = pytest.mark.skipif( # print(msg) -class _Http2ServerBase(netlib_tservers.ServerTestBase): +class _Http2ServerBase(net_tservers.ServerTestBase): ssl = dict(alpn_select=b'h2') - class handler(netlib.tcp.BaseHandler): + class handler(mitmproxy.net.tcp.BaseHandler): def handle(self): h2_conn = h2.connection.H2Connection(client_side=False, header_encoding=False) @@ -122,11 +122,11 @@ class _Http2TestBase: self.server.server.handle_server_event = self.handle_server_event def _setup_connection(self): - client = netlib.tcp.TCPClient(("127.0.0.1", self.proxy.port)) + client = mitmproxy.net.tcp.TCPClient(("127.0.0.1", self.proxy.port)) client.connect() # send CONNECT request - client.wfile.write(http1.assemble_request(netlib.http.Request( + client.wfile.write(http1.assemble_request(mitmproxy.net.http.Request( 'authority', b'CONNECT', b'', diff --git a/test/mitmproxy/protocol/test_websockets.py b/test/mitmproxy/protocol/test_websockets.py index f22e95d5..71cbb5f4 100644 --- a/test/mitmproxy/protocol/test_websockets.py +++ b/test/mitmproxy/protocol/test_websockets.py @@ -7,17 +7,17 @@ from mitmproxy import options from mitmproxy import exceptions from mitmproxy.proxy.config import ProxyConfig -import netlib -from netlib import http -from ...netlib import tservers as netlib_tservers +import mitmproxy.net +from mitmproxy.net import http +from ...mitmproxy.net import tservers as net_tservers from .. import tservers -from netlib import websockets +from mitmproxy.net import websockets -class _WebSocketsServerBase(netlib_tservers.ServerTestBase): +class _WebSocketsServerBase(net_tservers.ServerTestBase): - class handler(netlib.tcp.BaseHandler): + class handler(mitmproxy.net.tcp.BaseHandler): def handle(self): try: @@ -78,7 +78,7 @@ class _WebSocketsTestBase: self.server.server.handle_websockets = self.handle_websockets def _setup_connection(self): - client = netlib.tcp.TCPClient(("127.0.0.1", self.proxy.port)) + client = mitmproxy.net.tcp.TCPClient(("127.0.0.1", self.proxy.port)) client.connect() request = http.Request( diff --git a/test/mitmproxy/test_contentview.py b/test/mitmproxy/test_contentview.py index 2c5e1c85..b0e77ce1 100644 --- a/test/mitmproxy/test_contentview.py +++ b/test/mitmproxy/test_contentview.py @@ -1,7 +1,7 @@ import mock from mitmproxy.exceptions import ContentViewException -from netlib.http import Headers -from netlib.http import url +from mitmproxy.net.http import Headers +from mitmproxy.net.http import url from mitmproxy.types import multidict import mitmproxy.contentviews as cv diff --git a/test/mitmproxy/test_custom_contentview.py b/test/mitmproxy/test_custom_contentview.py index 889fb8b3..28f7fb33 100644 --- a/test/mitmproxy/test_custom_contentview.py +++ b/test/mitmproxy/test_custom_contentview.py @@ -1,5 +1,5 @@ import mitmproxy.contentviews as cv -from netlib.http import Headers +from mitmproxy.net.http import Headers def test_custom_views(): diff --git a/test/mitmproxy/test_examples.py b/test/mitmproxy/test_examples.py index 48193fe7..6972da0c 100644 --- a/test/mitmproxy/test_examples.py +++ b/test/mitmproxy/test_examples.py @@ -10,8 +10,8 @@ from mitmproxy.utils import data from mitmproxy import master from mitmproxy.test import tutils as netutils -from netlib.http import Headers -from netlib.http 
import cookies +from mitmproxy.net.http import Headers +from mitmproxy.net.http import cookies from . import tutils, mastertest diff --git a/test/mitmproxy/test_flow.py b/test/mitmproxy/test_flow.py index 14899be1..5b9f3835 100644 --- a/test/mitmproxy/test_flow.py +++ b/test/mitmproxy/test_flow.py @@ -2,7 +2,7 @@ import mock import io import mitmproxy.test.tutils -from netlib.http import Headers +from mitmproxy.net.http import Headers import mitmproxy.io from mitmproxy import flowfilter, options from mitmproxy.addons import state diff --git a/test/mitmproxy/test_flow_export.py b/test/mitmproxy/test_flow_export.py index 8ef2b7ee..df0ccb77 100644 --- a/test/mitmproxy/test_flow_export.py +++ b/test/mitmproxy/test_flow_export.py @@ -1,7 +1,7 @@ import re import mitmproxy.test.tutils -from netlib.http import Headers +from mitmproxy.net.http import Headers from mitmproxy import export # heh from . import tutils diff --git a/test/mitmproxy/test_proxy.py b/test/mitmproxy/test_proxy.py index c0d978d2..a9d9bb87 100644 --- a/test/mitmproxy/test_proxy.py +++ b/test/mitmproxy/test_proxy.py @@ -10,7 +10,7 @@ from mitmproxy.proxy.server import DummyServer, ProxyServer, ConnectionHandler from mitmproxy.proxy import config from mitmproxy import exceptions from pathod import test -from netlib.http import http1 +from mitmproxy.net.http import http1 from . import tutils diff --git a/test/mitmproxy/test_server.py b/test/mitmproxy/test_server.py index f7f13443..7dd738f4 100644 --- a/test/mitmproxy/test_server.py +++ b/test/mitmproxy/test_server.py @@ -8,14 +8,14 @@ from mitmproxy import options from mitmproxy.addons import script from mitmproxy import http from mitmproxy.proxy.config import HostMatcher, parse_server_spec -import netlib.http -from netlib import tcp -from netlib import socks +import mitmproxy.net.http +from mitmproxy.net import tcp +from mitmproxy.net import socks from mitmproxy import certs from mitmproxy import exceptions -from netlib.http import authentication -from netlib.http import http1 -from netlib.tcp import Address +from mitmproxy.net.http import authentication +from mitmproxy.net.http import http1 +from mitmproxy.net.tcp import Address from mitmproxy.test.tutils import raises from pathod import pathoc from pathod import pathod @@ -297,7 +297,7 @@ class TestHTTPAuth(tservers.HTTPProxyTest): h'%s'='%s' """ % ( self.server.port, - netlib.http.authentication.BasicProxyAuth.AUTH_HEADER, + mitmproxy.net.http.authentication.BasicProxyAuth.AUTH_HEADER, authentication.assemble_http_basic_auth("basic", "test", "test") )) assert ret.status_code == 202 @@ -314,7 +314,7 @@ class TestHTTPReverseAuth(tservers.ReverseProxyTest): '/p/202' h'%s'='%s' """ % ( - netlib.http.authentication.BasicWebsiteAuth.AUTH_HEADER, + mitmproxy.net.http.authentication.BasicWebsiteAuth.AUTH_HEADER, authentication.assemble_http_basic_auth("basic", "test", "test") )) assert ret.status_code == 202 @@ -438,7 +438,7 @@ class TestHTTPSUpstreamServerVerificationWBadCert(tservers.HTTPProxyTest): def test_verification_w_bad_cert(self): # We only test for a single invalid cert here. # Actual testing of different root-causes (invalid hostname, expired, ...) - # is done in netlib. + # is done in mitmproxy.net. 
self.config.options.ssl_insecure = False r = self._request() assert r.status_code == 502 diff --git a/test/netlib/__init__.py b/test/netlib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/netlib/data/clientcert/.gitignore b/test/netlib/data/clientcert/.gitignore deleted file mode 100644 index 07bc53d2..00000000 --- a/test/netlib/data/clientcert/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -client.crt -client.key -client.req diff --git a/test/netlib/data/clientcert/client.cnf b/test/netlib/data/clientcert/client.cnf deleted file mode 100644 index 5046a944..00000000 --- a/test/netlib/data/clientcert/client.cnf +++ /dev/null @@ -1,5 +0,0 @@ -[ ssl_client ] -basicConstraints = CA:FALSE -nsCertType = client -keyUsage = digitalSignature, keyEncipherment -extendedKeyUsage = clientAuth diff --git a/test/netlib/data/clientcert/client.pem b/test/netlib/data/clientcert/client.pem deleted file mode 100644 index 4927bca2..00000000 --- a/test/netlib/data/clientcert/client.pem +++ /dev/null @@ -1,42 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEAzCpoRjSTfIN24kkNap/GYmP9zVWj0Gk8R5BB/PvvN0OB1Zk0 -EEYPsWCcuhEdK0ehiDZX030doF0DOncKKa6mop/d0x2o+ts42peDhZM6JNUrm6d+ -ZWQVtio33mpp77UMhR093vaA+ExDnmE26kBTVijJ1+fRAVDXG/cmQINEri91Kk/G -3YJ5e45UrohGI5seBZ4vV0xbHtmczFRhYFlGOvYsoIe4Lvz/eFS2pIrTIpYQ2VM/ -SQQl+JFy+NlQRsWG2NrxtKOzMnnDE7YN4I3z5D5eZFo1EtwZ48LNCeSwrEOdfuzP -G5q5qbs5KpE/x85H9umuRwSCIArbMwBYV8a8JwIDAQABAoIBAFE3FV/IDltbmHEP -iky93hbJm+6QgKepFReKpRVTyqb7LaygUvueQyPWQMIriKTsy675nxo8DQr7tQsO -y3YlSZgra/xNMikIB6e82c7K8DgyrDQw/rCqjZB3Xt4VCqsWJDLXnQMSn98lx0g7 -d7Lbf8soUpKWXqfdVpSDTi4fibSX6kshXyfSTpcz4AdoncEpViUfU1xkEEmZrjT8 -1GcCsDC41xdNmzCpqRuZX7DKSFRoB+0hUzsC1oiqM7FD5kixonRd4F5PbRXImIzt -6YCsT2okxTA04jX7yByis7LlOLTlkmLtKQYuc3erOFvwx89s4vW+AeFei+GGNitn -tHfSwbECgYEA7SzV+nN62hAERHlg8cEQT4TxnsWvbronYWcc/ev44eHSPDWL5tPi -GHfSbW6YAq5Wa0I9jMWfXyhOYEC3MZTC5EEeLOB71qVrTwcy/sY66rOrcgjFI76Q -5JFHQ4wy3SWU50KxE0oWJO9LIowprG+pW1vzqC3VF0T7q0FqESrY4LUCgYEA3F7Z -80ndnCUlooJAb+Hfotv7peFf1o6+m1PTRcz1lLnVt5R5lXj86kn+tXEpYZo1RiGR -2rE2N0seeznWCooakHcsBN7/qmFIhhooJNF7yW+JP2I4P2UV5+tJ+8bcs/voUkQD -1x+rGOuMn8nvHBd2+Vharft8eGL2mgooPVI2XusCgYEAlMZpO3+w8pTVeHaDP2MR -7i/AuQ3cbCLNjSX3Y7jgGCFllWspZRRIYXzYPNkA9b2SbBnTLjjRLgnEkFBIGgvs -7O2EFjaCuDRvydUEQhjq4ErwIsopj7B8h0QyZcbOKTbn3uFQ3n68wVJx2Sv/ADHT -FIHrp/WIE96r19Niy34LKXkCgYB2W59VsuOKnMz01l5DeR5C+0HSWxS9SReIl2IO -yEFSKullWyJeLIgyUaGy0990430feKI8whcrZXYumuah7IDN/KOwzhCk8vEfzWao -N7bzfqtJVrh9HA7C7DVlO+6H4JFrtcoWPZUIomJ549w/yz6EN3ckoMC+a/Ck1TW9 -ka1QFwKBgQCywG6TrZz0UmOjyLQZ+8Q4uvZklSW5NAKBkNnyuQ2kd5rzyYgMPE8C -Er8T88fdVIKvkhDyHhwcI7n58xE5Gr7wkwsrk/Hbd9/ZB2GgAPY3cATskK1v1McU -YeX38CU0fUS4aoy26hWQXkViB47IGQ3jWo3ZCtzIJl8DI9/RsBWTnw== ------END RSA PRIVATE KEY----- ------BEGIN CERTIFICATE----- -MIICYDCCAckCAQEwDQYJKoZIhvcNAQEFBQAwKDESMBAGA1UEAxMJbWl0bXByb3h5 -MRIwEAYDVQQKEwltaXRtcHJveHkwHhcNMTMwMTIwMDEwODEzWhcNMTUxMDE3MDEw -ODEzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UE -ChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEAzCpoRjSTfIN24kkNap/GYmP9zVWj0Gk8R5BB/PvvN0OB1Zk0 -EEYPsWCcuhEdK0ehiDZX030doF0DOncKKa6mop/d0x2o+ts42peDhZM6JNUrm6d+ -ZWQVtio33mpp77UMhR093vaA+ExDnmE26kBTVijJ1+fRAVDXG/cmQINEri91Kk/G -3YJ5e45UrohGI5seBZ4vV0xbHtmczFRhYFlGOvYsoIe4Lvz/eFS2pIrTIpYQ2VM/ -SQQl+JFy+NlQRsWG2NrxtKOzMnnDE7YN4I3z5D5eZFo1EtwZ48LNCeSwrEOdfuzP -G5q5qbs5KpE/x85H9umuRwSCIArbMwBYV8a8JwIDAQABMA0GCSqGSIb3DQEBBQUA -A4GBAFvI+cd47B85PQ970n2dU/PlA2/Hb1ldrrXh2guR4hX6vYx/uuk5yRI/n0Rd 
-KOXJ3czO0bd2Fpe3ZoNpkW0pOSDej/Q+58ScuJd0gWCT/Sh1eRk6ZdC0kusOuWoY -bPOPMkG45LPgUMFOnZEsfJP6P5mZIxlbCvSMFC25nPHWlct7 ------END CERTIFICATE----- diff --git a/test/netlib/data/clientcert/make b/test/netlib/data/clientcert/make deleted file mode 100644 index d1caea81..00000000 --- a/test/netlib/data/clientcert/make +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -openssl genrsa -out client.key 2048 -openssl req -key client.key -new -out client.req -openssl x509 -req -days 365 -in client.req -signkey client.key -out client.crt -extfile client.cnf -extensions ssl_client -openssl x509 -req -days 1000 -in client.req -CA ~/.mitmproxy/mitmproxy-ca.pem -CAkey ~/.mitmproxy/mitmproxy-ca.pem -set_serial 00001 -out client.crt -extensions ssl_client -cat client.key client.crt > client.pem -openssl x509 -text -noout -in client.pem diff --git a/test/netlib/data/dercert b/test/netlib/data/dercert deleted file mode 100644 index 370252af..00000000 Binary files a/test/netlib/data/dercert and /dev/null differ diff --git a/test/netlib/data/dhparam.pem b/test/netlib/data/dhparam.pem deleted file mode 100644 index afb41672..00000000 --- a/test/netlib/data/dhparam.pem +++ /dev/null @@ -1,13 +0,0 @@ ------BEGIN DH PARAMETERS----- -MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 -O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv -j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ -Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB -chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC -ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq -o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX -IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv -A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 -6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I -rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= ------END DH PARAMETERS----- diff --git a/test/netlib/data/htpasswd b/test/netlib/data/htpasswd deleted file mode 100644 index 54c95b8c..00000000 --- a/test/netlib/data/htpasswd +++ /dev/null @@ -1 +0,0 @@ -test:$apr1$/LkYxy3x$WI4.YbiJlu537jLGEW2eu1 diff --git a/test/netlib/data/server.crt b/test/netlib/data/server.crt deleted file mode 100644 index 68f61bac..00000000 --- a/test/netlib/data/server.crt +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICOzCCAaQCCQDC7f5GsEpo9jANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQGEwJO -WjEOMAwGA1UECBMFT3RhZ28xEDAOBgNVBAcTB0R1bmVkaW4xDzANBgNVBAoTBm5l -dGxpYjEPMA0GA1UECxMGbmV0bGliMQ8wDQYDVQQDEwZuZXRsaWIwHhcNMTIwNjI0 -MjI0MTU0WhcNMjIwNjIyMjI0MTU0WjBiMQswCQYDVQQGEwJOWjEOMAwGA1UECBMF -T3RhZ28xEDAOBgNVBAcTB0R1bmVkaW4xDzANBgNVBAoTBm5ldGxpYjEPMA0GA1UE -CxMGbmV0bGliMQ8wDQYDVQQDEwZuZXRsaWIwgZ8wDQYJKoZIhvcNAQEBBQADgY0A -MIGJAoGBALJSVEl9y3QUSYuXTH0UjBOPQgS0nHmNWej9hjqnA0KWvEnGY+c6yQeP -/rmwswlKw1iVV5o8kRK9Wej88YWQl/hl/xruyeJgGic0+yqY/FcueZxRudwBcWu2 -7+46aEftwLLRF0GwHZxX/HwWME+TcCXGpXGSG2qs921M4iVeBn5hAgMBAAEwDQYJ -KoZIhvcNAQEFBQADgYEAODZCihEv2yr8zmmQZDrfqg2ChxAoOXWF5+W2F/0LAUBf -2bHP+K4XE6BJWmadX1xKngj7SWrhmmTDp1gBAvXURoDaScOkB1iOCOHoIyalscTR -0FvSHKqFF8fgSlfqS6eYaSbXU3zQolvwP+URzIVnGDqgQCWPtjMqLD3Kd5tuwos= ------END CERTIFICATE----- diff --git a/test/netlib/data/server.key b/test/netlib/data/server.key deleted file mode 100644 index b1b658ab..00000000 --- a/test/netlib/data/server.key +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIICXAIBAAKBgQCyUlRJfct0FEmLl0x9FIwTj0IEtJx5jVno/YY6pwNClrxJxmPn 
-OskHj/65sLMJSsNYlVeaPJESvVno/PGFkJf4Zf8a7sniYBonNPsqmPxXLnmcUbnc -AXFrtu/uOmhH7cCy0RdBsB2cV/x8FjBPk3AlxqVxkhtqrPdtTOIlXgZ+YQIDAQAB -AoGAQEpGcSiVTYhy64zk2sOprPOdTa0ALSK1I7cjycmk90D5KXAJXLho+f0ETVZT -dioqO6m8J7NmamcyHznyqcDzyNRqD2hEBDGVRJWmpOjIER/JwWLNNbpeVjsMHV8I -40P5rZMOhBPYlwECSC5NtMwaN472fyGNNze8u37IZKiER/ECQQDe1iY5AG3CgkP3 -tEZB3Vtzcn4PoOr3Utyn1YER34lPqAmeAsWUhmAVEfR3N1HDe1VFD9s2BidhBn1a -/Bgqxz4DAkEAzNw0m+uO0WkD7aEYRBW7SbXCX+3xsbVToIWC1jXFG+XDzSWn++c1 -DMXEElzEJxPDA+FzQUvRTml4P92bTAbGywJAS9H7wWtm7Ubbj33UZfbGdhqfz/uF -109naufXedhgZS0c0JnK1oV+Tc0FLEczV9swIUaK5O/lGDtYDcw3AN84NwJBAIw5 -/1jrOOtm8uVp6+5O4dBmthJsEZEPCZtLSG/Qhoe+EvUN3Zq0fL+tb7USAsKs6ERz -wizj9PWzhDhTPMYhrVkCQGIponZHx6VqiFyLgYUH9+gDTjBhYyI+6yMTYzcRweyL -9Suc2NkS3X2Lp+wCjvVZdwGtStp6Vo8z02b3giIsAIY= ------END RSA PRIVATE KEY----- diff --git a/test/netlib/data/text_cert b/test/netlib/data/text_cert deleted file mode 100644 index 36ca33b9..00000000 --- a/test/netlib/data/text_cert +++ /dev/null @@ -1,145 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIadTCCGd6gAwIBAgIGR09PUAFtMA0GCSqGSIb3DQEBBQUAMEYxCzAJBgNVBAYT -AlVTMRMwEQYDVQQKEwpHb29nbGUgSW5jMSIwIAYDVQQDExlHb29nbGUgSW50ZXJu -ZXQgQXV0aG9yaXR5MB4XDTEyMDExNzEyNTUwNFoXDTEzMDExNzEyNTUwNFowTDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExEzARBgNVBAoTCkdvb2ds -ZSBJbmMxEzARBgNVBAMTCmdvb2dsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0A -MIGJAoGBALofcxR2fud5cyFIeld9pj2vGB5GH0y9tmAYa5t33xbJguKKX/el3tXA -KMNiT1SZzu8ELJ1Ey0GcBAgHA9jVPQd0LGdbEtNIxjblAsWAD/FZlSt8X87h7C5w -2JSefOani0qgQqU6sTdsaCUGZ+Eu7D0lBfT5/Vnl2vV+zI3YmDlpAgMBAAGjghhm -MIIYYjAdBgNVHQ4EFgQUL3+JeC/oL9jZhTp3F550LautzV8wHwYDVR0jBBgwFoAU -v8Aw6/VDET5nup6R+/xq2uNrEiQwWwYDVR0fBFQwUjBQoE6gTIZKaHR0cDovL3d3 -dy5nc3RhdGljLmNvbS9Hb29nbGVJbnRlcm5ldEF1dGhvcml0eS9Hb29nbGVJbnRl -cm5ldEF1dGhvcml0eS5jcmwwZgYIKwYBBQUHAQEEWjBYMFYGCCsGAQUFBzAChkpo -dHRwOi8vd3d3LmdzdGF0aWMuY29tL0dvb2dsZUludGVybmV0QXV0aG9yaXR5L0dv -b2dsZUludGVybmV0QXV0aG9yaXR5LmNydDCCF1kGA1UdEQSCF1AwghdMggpnb29n -bGUuY29tggwqLmdvb2dsZS5jb22CCyouZ29vZ2xlLmFjggsqLmdvb2dsZS5hZIIL -Ki5nb29nbGUuYWWCCyouZ29vZ2xlLmFmggsqLmdvb2dsZS5hZ4ILKi5nb29nbGUu -YW2CCyouZ29vZ2xlLmFzggsqLmdvb2dsZS5hdIILKi5nb29nbGUuYXqCCyouZ29v -Z2xlLmJhggsqLmdvb2dsZS5iZYILKi5nb29nbGUuYmaCCyouZ29vZ2xlLmJnggsq -Lmdvb2dsZS5iaYILKi5nb29nbGUuYmqCCyouZ29vZ2xlLmJzggsqLmdvb2dsZS5i -eYILKi5nb29nbGUuY2GCDCouZ29vZ2xlLmNhdIILKi5nb29nbGUuY2OCCyouZ29v -Z2xlLmNkggsqLmdvb2dsZS5jZoILKi5nb29nbGUuY2eCCyouZ29vZ2xlLmNoggsq -Lmdvb2dsZS5jaYILKi5nb29nbGUuY2yCCyouZ29vZ2xlLmNtggsqLmdvb2dsZS5j -boIOKi5nb29nbGUuY28uYW+CDiouZ29vZ2xlLmNvLmJ3gg4qLmdvb2dsZS5jby5j -a4IOKi5nb29nbGUuY28uY3KCDiouZ29vZ2xlLmNvLmh1gg4qLmdvb2dsZS5jby5p -ZIIOKi5nb29nbGUuY28uaWyCDiouZ29vZ2xlLmNvLmltgg4qLmdvb2dsZS5jby5p -boIOKi5nb29nbGUuY28uamWCDiouZ29vZ2xlLmNvLmpwgg4qLmdvb2dsZS5jby5r -ZYIOKi5nb29nbGUuY28ua3KCDiouZ29vZ2xlLmNvLmxzgg4qLmdvb2dsZS5jby5t -YYIOKi5nb29nbGUuY28ubXqCDiouZ29vZ2xlLmNvLm56gg4qLmdvb2dsZS5jby50 -aIIOKi5nb29nbGUuY28udHqCDiouZ29vZ2xlLmNvLnVngg4qLmdvb2dsZS5jby51 -a4IOKi5nb29nbGUuY28udXqCDiouZ29vZ2xlLmNvLnZlgg4qLmdvb2dsZS5jby52 -aYIOKi5nb29nbGUuY28uemGCDiouZ29vZ2xlLmNvLnptgg4qLmdvb2dsZS5jby56 -d4IPKi5nb29nbGUuY29tLmFmgg8qLmdvb2dsZS5jb20uYWeCDyouZ29vZ2xlLmNv -bS5haYIPKi5nb29nbGUuY29tLmFygg8qLmdvb2dsZS5jb20uYXWCDyouZ29vZ2xl -LmNvbS5iZIIPKi5nb29nbGUuY29tLmJogg8qLmdvb2dsZS5jb20uYm6CDyouZ29v -Z2xlLmNvbS5ib4IPKi5nb29nbGUuY29tLmJygg8qLmdvb2dsZS5jb20uYnmCDyou -Z29vZ2xlLmNvbS5ieoIPKi5nb29nbGUuY29tLmNugg8qLmdvb2dsZS5jb20uY2+C -DyouZ29vZ2xlLmNvbS5jdYIPKi5nb29nbGUuY29tLmN5gg8qLmdvb2dsZS5jb20u -ZG+CDyouZ29vZ2xlLmNvbS5lY4IPKi5nb29nbGUuY29tLmVngg8qLmdvb2dsZS5j 
-b20uZXSCDyouZ29vZ2xlLmNvbS5maoIPKi5nb29nbGUuY29tLmdlgg8qLmdvb2ds -ZS5jb20uZ2iCDyouZ29vZ2xlLmNvbS5naYIPKi5nb29nbGUuY29tLmdygg8qLmdv -b2dsZS5jb20uZ3SCDyouZ29vZ2xlLmNvbS5oa4IPKi5nb29nbGUuY29tLmlxgg8q -Lmdvb2dsZS5jb20uam2CDyouZ29vZ2xlLmNvbS5qb4IPKi5nb29nbGUuY29tLmto -gg8qLmdvb2dsZS5jb20ua3eCDyouZ29vZ2xlLmNvbS5sYoIPKi5nb29nbGUuY29t -Lmx5gg8qLmdvb2dsZS5jb20ubXSCDyouZ29vZ2xlLmNvbS5teIIPKi5nb29nbGUu -Y29tLm15gg8qLmdvb2dsZS5jb20ubmGCDyouZ29vZ2xlLmNvbS5uZoIPKi5nb29n -bGUuY29tLm5ngg8qLmdvb2dsZS5jb20ubmmCDyouZ29vZ2xlLmNvbS5ucIIPKi5n -b29nbGUuY29tLm5ygg8qLmdvb2dsZS5jb20ub22CDyouZ29vZ2xlLmNvbS5wYYIP -Ki5nb29nbGUuY29tLnBlgg8qLmdvb2dsZS5jb20ucGiCDyouZ29vZ2xlLmNvbS5w -a4IPKi5nb29nbGUuY29tLnBsgg8qLmdvb2dsZS5jb20ucHKCDyouZ29vZ2xlLmNv -bS5weYIPKi5nb29nbGUuY29tLnFhgg8qLmdvb2dsZS5jb20ucnWCDyouZ29vZ2xl -LmNvbS5zYYIPKi5nb29nbGUuY29tLnNigg8qLmdvb2dsZS5jb20uc2eCDyouZ29v -Z2xlLmNvbS5zbIIPKi5nb29nbGUuY29tLnN2gg8qLmdvb2dsZS5jb20udGqCDyou -Z29vZ2xlLmNvbS50boIPKi5nb29nbGUuY29tLnRygg8qLmdvb2dsZS5jb20udHeC -DyouZ29vZ2xlLmNvbS51YYIPKi5nb29nbGUuY29tLnV5gg8qLmdvb2dsZS5jb20u -dmOCDyouZ29vZ2xlLmNvbS52ZYIPKi5nb29nbGUuY29tLnZuggsqLmdvb2dsZS5j -doILKi5nb29nbGUuY3qCCyouZ29vZ2xlLmRlggsqLmdvb2dsZS5kaoILKi5nb29n -bGUuZGuCCyouZ29vZ2xlLmRtggsqLmdvb2dsZS5keoILKi5nb29nbGUuZWWCCyou -Z29vZ2xlLmVzggsqLmdvb2dsZS5maYILKi5nb29nbGUuZm2CCyouZ29vZ2xlLmZy -ggsqLmdvb2dsZS5nYYILKi5nb29nbGUuZ2WCCyouZ29vZ2xlLmdnggsqLmdvb2ds -ZS5nbIILKi5nb29nbGUuZ22CCyouZ29vZ2xlLmdwggsqLmdvb2dsZS5ncoILKi5n -b29nbGUuZ3mCCyouZ29vZ2xlLmhrggsqLmdvb2dsZS5oboILKi5nb29nbGUuaHKC -CyouZ29vZ2xlLmh0ggsqLmdvb2dsZS5odYILKi5nb29nbGUuaWWCCyouZ29vZ2xl -Lmltgg0qLmdvb2dsZS5pbmZvggsqLmdvb2dsZS5pcYILKi5nb29nbGUuaXOCCyou -Z29vZ2xlLml0gg4qLmdvb2dsZS5pdC5hb4ILKi5nb29nbGUuamWCCyouZ29vZ2xl -Lmpvgg0qLmdvb2dsZS5qb2JzggsqLmdvb2dsZS5qcIILKi5nb29nbGUua2eCCyou -Z29vZ2xlLmtpggsqLmdvb2dsZS5reoILKi5nb29nbGUubGGCCyouZ29vZ2xlLmxp -ggsqLmdvb2dsZS5sa4ILKi5nb29nbGUubHSCCyouZ29vZ2xlLmx1ggsqLmdvb2ds -ZS5sdoILKi5nb29nbGUubWSCCyouZ29vZ2xlLm1lggsqLmdvb2dsZS5tZ4ILKi5n -b29nbGUubWuCCyouZ29vZ2xlLm1sggsqLmdvb2dsZS5tboILKi5nb29nbGUubXOC -CyouZ29vZ2xlLm11ggsqLmdvb2dsZS5tdoILKi5nb29nbGUubXeCCyouZ29vZ2xl -Lm5lgg4qLmdvb2dsZS5uZS5qcIIMKi5nb29nbGUubmV0ggsqLmdvb2dsZS5ubIIL -Ki5nb29nbGUubm+CCyouZ29vZ2xlLm5yggsqLmdvb2dsZS5udYIPKi5nb29nbGUu -b2ZmLmFpggsqLmdvb2dsZS5wa4ILKi5nb29nbGUucGyCCyouZ29vZ2xlLnBuggsq -Lmdvb2dsZS5wc4ILKi5nb29nbGUucHSCCyouZ29vZ2xlLnJvggsqLmdvb2dsZS5y -c4ILKi5nb29nbGUucnWCCyouZ29vZ2xlLnJ3ggsqLmdvb2dsZS5zY4ILKi5nb29n -bGUuc2WCCyouZ29vZ2xlLnNoggsqLmdvb2dsZS5zaYILKi5nb29nbGUuc2uCCyou -Z29vZ2xlLnNtggsqLmdvb2dsZS5zboILKi5nb29nbGUuc2+CCyouZ29vZ2xlLnN0 -ggsqLmdvb2dsZS50ZIILKi5nb29nbGUudGeCCyouZ29vZ2xlLnRrggsqLmdvb2ds -ZS50bIILKi5nb29nbGUudG2CCyouZ29vZ2xlLnRuggsqLmdvb2dsZS50b4ILKi5n -b29nbGUudHCCCyouZ29vZ2xlLnR0ggsqLmdvb2dsZS51c4ILKi5nb29nbGUudXqC -CyouZ29vZ2xlLnZnggsqLmdvb2dsZS52dYILKi5nb29nbGUud3OCCWdvb2dsZS5h -Y4IJZ29vZ2xlLmFkgglnb29nbGUuYWWCCWdvb2dsZS5hZoIJZ29vZ2xlLmFnggln -b29nbGUuYW2CCWdvb2dsZS5hc4IJZ29vZ2xlLmF0gglnb29nbGUuYXqCCWdvb2ds -ZS5iYYIJZ29vZ2xlLmJlgglnb29nbGUuYmaCCWdvb2dsZS5iZ4IJZ29vZ2xlLmJp -gglnb29nbGUuYmqCCWdvb2dsZS5ic4IJZ29vZ2xlLmJ5gglnb29nbGUuY2GCCmdv -b2dsZS5jYXSCCWdvb2dsZS5jY4IJZ29vZ2xlLmNkgglnb29nbGUuY2aCCWdvb2ds -ZS5jZ4IJZ29vZ2xlLmNogglnb29nbGUuY2mCCWdvb2dsZS5jbIIJZ29vZ2xlLmNt -gglnb29nbGUuY26CDGdvb2dsZS5jby5hb4IMZ29vZ2xlLmNvLmJ3ggxnb29nbGUu -Y28uY2uCDGdvb2dsZS5jby5jcoIMZ29vZ2xlLmNvLmh1ggxnb29nbGUuY28uaWSC -DGdvb2dsZS5jby5pbIIMZ29vZ2xlLmNvLmltggxnb29nbGUuY28uaW6CDGdvb2ds -ZS5jby5qZYIMZ29vZ2xlLmNvLmpwggxnb29nbGUuY28ua2WCDGdvb2dsZS5jby5r 
-coIMZ29vZ2xlLmNvLmxzggxnb29nbGUuY28ubWGCDGdvb2dsZS5jby5teoIMZ29v -Z2xlLmNvLm56ggxnb29nbGUuY28udGiCDGdvb2dsZS5jby50eoIMZ29vZ2xlLmNv -LnVnggxnb29nbGUuY28udWuCDGdvb2dsZS5jby51eoIMZ29vZ2xlLmNvLnZlggxn -b29nbGUuY28udmmCDGdvb2dsZS5jby56YYIMZ29vZ2xlLmNvLnptggxnb29nbGUu -Y28ueneCDWdvb2dsZS5jb20uYWaCDWdvb2dsZS5jb20uYWeCDWdvb2dsZS5jb20u -YWmCDWdvb2dsZS5jb20uYXKCDWdvb2dsZS5jb20uYXWCDWdvb2dsZS5jb20uYmSC -DWdvb2dsZS5jb20uYmiCDWdvb2dsZS5jb20uYm6CDWdvb2dsZS5jb20uYm+CDWdv -b2dsZS5jb20uYnKCDWdvb2dsZS5jb20uYnmCDWdvb2dsZS5jb20uYnqCDWdvb2ds -ZS5jb20uY26CDWdvb2dsZS5jb20uY2+CDWdvb2dsZS5jb20uY3WCDWdvb2dsZS5j -b20uY3mCDWdvb2dsZS5jb20uZG+CDWdvb2dsZS5jb20uZWOCDWdvb2dsZS5jb20u -ZWeCDWdvb2dsZS5jb20uZXSCDWdvb2dsZS5jb20uZmqCDWdvb2dsZS5jb20uZ2WC -DWdvb2dsZS5jb20uZ2iCDWdvb2dsZS5jb20uZ2mCDWdvb2dsZS5jb20uZ3KCDWdv -b2dsZS5jb20uZ3SCDWdvb2dsZS5jb20uaGuCDWdvb2dsZS5jb20uaXGCDWdvb2ds -ZS5jb20uam2CDWdvb2dsZS5jb20uam+CDWdvb2dsZS5jb20ua2iCDWdvb2dsZS5j -b20ua3eCDWdvb2dsZS5jb20ubGKCDWdvb2dsZS5jb20ubHmCDWdvb2dsZS5jb20u -bXSCDWdvb2dsZS5jb20ubXiCDWdvb2dsZS5jb20ubXmCDWdvb2dsZS5jb20ubmGC -DWdvb2dsZS5jb20ubmaCDWdvb2dsZS5jb20ubmeCDWdvb2dsZS5jb20ubmmCDWdv -b2dsZS5jb20ubnCCDWdvb2dsZS5jb20ubnKCDWdvb2dsZS5jb20ub22CDWdvb2ds -ZS5jb20ucGGCDWdvb2dsZS5jb20ucGWCDWdvb2dsZS5jb20ucGiCDWdvb2dsZS5j -b20ucGuCDWdvb2dsZS5jb20ucGyCDWdvb2dsZS5jb20ucHKCDWdvb2dsZS5jb20u -cHmCDWdvb2dsZS5jb20ucWGCDWdvb2dsZS5jb20ucnWCDWdvb2dsZS5jb20uc2GC -DWdvb2dsZS5jb20uc2KCDWdvb2dsZS5jb20uc2eCDWdvb2dsZS5jb20uc2yCDWdv -b2dsZS5jb20uc3aCDWdvb2dsZS5jb20udGqCDWdvb2dsZS5jb20udG6CDWdvb2ds -ZS5jb20udHKCDWdvb2dsZS5jb20udHeCDWdvb2dsZS5jb20udWGCDWdvb2dsZS5j -b20udXmCDWdvb2dsZS5jb20udmOCDWdvb2dsZS5jb20udmWCDWdvb2dsZS5jb20u -dm6CCWdvb2dsZS5jdoIJZ29vZ2xlLmN6gglnb29nbGUuZGWCCWdvb2dsZS5kaoIJ -Z29vZ2xlLmRrgglnb29nbGUuZG2CCWdvb2dsZS5keoIJZ29vZ2xlLmVlgglnb29n -bGUuZXOCCWdvb2dsZS5maYIJZ29vZ2xlLmZtgglnb29nbGUuZnKCCWdvb2dsZS5n -YYIJZ29vZ2xlLmdlgglnb29nbGUuZ2eCCWdvb2dsZS5nbIIJZ29vZ2xlLmdtggln -b29nbGUuZ3CCCWdvb2dsZS5ncoIJZ29vZ2xlLmd5gglnb29nbGUuaGuCCWdvb2ds -ZS5oboIJZ29vZ2xlLmhygglnb29nbGUuaHSCCWdvb2dsZS5odYIJZ29vZ2xlLmll -gglnb29nbGUuaW2CC2dvb2dsZS5pbmZvgglnb29nbGUuaXGCCWdvb2dsZS5pc4IJ -Z29vZ2xlLml0ggxnb29nbGUuaXQuYW+CCWdvb2dsZS5qZYIJZ29vZ2xlLmpvggtn -b29nbGUuam9ic4IJZ29vZ2xlLmpwgglnb29nbGUua2eCCWdvb2dsZS5raYIJZ29v -Z2xlLmt6gglnb29nbGUubGGCCWdvb2dsZS5saYIJZ29vZ2xlLmxrgglnb29nbGUu -bHSCCWdvb2dsZS5sdYIJZ29vZ2xlLmx2gglnb29nbGUubWSCCWdvb2dsZS5tZYIJ -Z29vZ2xlLm1ngglnb29nbGUubWuCCWdvb2dsZS5tbIIJZ29vZ2xlLm1ugglnb29n -bGUubXOCCWdvb2dsZS5tdYIJZ29vZ2xlLm12gglnb29nbGUubXeCCWdvb2dsZS5u -ZYIMZ29vZ2xlLm5lLmpwggpnb29nbGUubmV0gglnb29nbGUubmyCCWdvb2dsZS5u -b4IJZ29vZ2xlLm5ygglnb29nbGUubnWCDWdvb2dsZS5vZmYuYWmCCWdvb2dsZS5w -a4IJZ29vZ2xlLnBsgglnb29nbGUucG6CCWdvb2dsZS5wc4IJZ29vZ2xlLnB0ggln -b29nbGUucm+CCWdvb2dsZS5yc4IJZ29vZ2xlLnJ1gglnb29nbGUucneCCWdvb2ds -ZS5zY4IJZ29vZ2xlLnNlgglnb29nbGUuc2iCCWdvb2dsZS5zaYIJZ29vZ2xlLnNr -gglnb29nbGUuc22CCWdvb2dsZS5zboIJZ29vZ2xlLnNvgglnb29nbGUuc3SCCWdv -b2dsZS50ZIIJZ29vZ2xlLnRngglnb29nbGUudGuCCWdvb2dsZS50bIIJZ29vZ2xl -LnRtgglnb29nbGUudG6CCWdvb2dsZS50b4IJZ29vZ2xlLnRwgglnb29nbGUudHSC -CWdvb2dsZS51c4IJZ29vZ2xlLnV6gglnb29nbGUudmeCCWdvb2dsZS52dYIJZ29v -Z2xlLndzMA0GCSqGSIb3DQEBBQUAA4GBAJmZ9RyqpUzrP0UcJnHXoLu/AjIEsIvZ -Y9hq/9bLry8InfmvERYHr4hNetkOYlW0FeDZtCpWxdPUgJjmWgKAK6j0goOFavTV -GptkL8gha4p1QUsdLkd36/cvBXeBYSle787veo46N1k4V6Uv2gaDVkre786CNsHv -Q6MYZ5ClQ+kS ------END CERTIFICATE----- - diff --git a/test/netlib/data/text_cert_2 b/test/netlib/data/text_cert_2 deleted file mode 100644 index ffe8faae..00000000 --- a/test/netlib/data/text_cert_2 +++ /dev/null @@ -1,39 +0,0 @@ ------BEGIN 
CERTIFICATE----- -MIIGujCCBaKgAwIBAgIDAQlEMA0GCSqGSIb3DQEBBQUAMIGMMQswCQYDVQQGEwJJ -TDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0 -YWwgQ2VydGlmaWNhdGUgU2lnbmluZzE4MDYGA1UEAxMvU3RhcnRDb20gQ2xhc3Mg -MSBQcmltYXJ5IEludGVybWVkaWF0ZSBTZXJ2ZXIgQ0EwHhcNMTAwMTExMTkyNzM2 -WhcNMTEwMTEyMDkxNDU1WjCBtDEgMB4GA1UEDRMXMTI2ODMyLU1DeExzWTZUbjFn -bTdvOTAxCzAJBgNVBAYTAk5aMR4wHAYDVQQKExVQZXJzb25hIE5vdCBWYWxpZGF0 -ZWQxKTAnBgNVBAsTIFN0YXJ0Q29tIEZyZWUgQ2VydGlmaWNhdGUgTWVtYmVyMRgw -FgYDVQQDEw93d3cuaW5vZGUuY28ubnoxHjAcBgkqhkiG9w0BCQEWD2ppbUBpbm9k -ZS5jby5uejCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL6ghWlGhqg+ -V0P58R3SvLRiO9OrdekDxzmQbKwQcc05frnF5Z9vT6ga7YOuXVeXxhYCAo0nr6KI -+y/Lx+QHvP5W0nKbs+svzUQErq2ZZFwhh1e1LbVccrNwkHUzKOq0TTaVdU4k8kDQ -zzYF9tTZb+G5Hv1BJjpwYwe8P4cAiPJPrFFOKTySzHqiYsXlx+vR1l1e3zKavhd+ -LVSoLWWXb13yKODq6vnuiHjUJXl8CfVlBhoGotXU4JR5cbuGoW/8+rkwEdX+YoCv -VCqgdx9IkRFB6uWfN6ocUiFvhA0eknO+ewuVfRLiIaSDB8pNyUWVqu4ngFWtWO1O -YZg0I/32BkcCAwEAAaOCAvkwggL1MAkGA1UdEwQCMAAwCwYDVR0PBAQDAgOoMBMG -A1UdJQQMMAoGCCsGAQUFBwMBMB0GA1UdDgQWBBQfaL2Rj6r8iRlBTgppgE7ZZ5WT -UzAfBgNVHSMEGDAWgBTrQjTQmLCrn/Qbawj3zGQu7w4sRTAnBgNVHREEIDAegg93 -d3cuaW5vZGUuY28ubnqCC2lub2RlLmNvLm56MIIBQgYDVR0gBIIBOTCCATUwggEx -BgsrBgEEAYG1NwECATCCASAwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRz -c2wuY29tL3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRz -c2wuY29tL2ludGVybWVkaWF0ZS5wZGYwgbcGCCsGAQUFBwICMIGqMBQWDVN0YXJ0 -Q29tIEx0ZC4wAwIBARqBkUxpbWl0ZWQgTGlhYmlsaXR5LCBzZWUgc2VjdGlvbiAq -TGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0aWZpY2F0aW9u -IEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93d3cuc3RhcnRz -c2wuY29tL3BvbGljeS5wZGYwYQYDVR0fBFowWDAqoCigJoYkaHR0cDovL3d3dy5z -dGFydHNzbC5jb20vY3J0MS1jcmwuY3JsMCqgKKAmhiRodHRwOi8vY3JsLnN0YXJ0 -c3NsLmNvbS9jcnQxLWNybC5jcmwwgY4GCCsGAQUFBwEBBIGBMH8wOQYIKwYBBQUH -MAGGLWh0dHA6Ly9vY3NwLnN0YXJ0c3NsLmNvbS9zdWIvY2xhc3MxL3NlcnZlci9j -YTBCBggrBgEFBQcwAoY2aHR0cDovL3d3dy5zdGFydHNzbC5jb20vY2VydHMvc3Vi -LmNsYXNzMS5zZXJ2ZXIuY2EuY3J0MCMGA1UdEgQcMBqGGGh0dHA6Ly93d3cuc3Rh -cnRzc2wuY29tLzANBgkqhkiG9w0BAQUFAAOCAQEAivWID0KT8q1EzWzy+BecsFry -hQhuLFfAsPkHqpNd9OfkRStGBuJlLX+9DQ9TzjqutdY2buNBuDn71buZK+Y5fmjr -28rAT6+WMd+KnCl5WLT5IOS6Z9s3cec5TFQbmOGlepSS9Q6Ts9KsXOHHQvDkQeDq -OV2UqdgXIAyFm5efSL9JXPXntRausNu2s8F2B2rRJe4jPfnUy2LvY8OW1YvjUA++ -vpdWRdfUbJQp55mRfaYMPRnyUm30lAI27QaxgQPFOqDeZUm5llb5eFG/B3f87uhg -+Y1oEykbEvZrIFN4hithioQ0tb+57FKkkG2sW3uemNiQw2qrEo/GAMb1cI50Rg== ------END CERTIFICATE----- - diff --git a/test/netlib/data/text_cert_weird1 b/test/netlib/data/text_cert_weird1 deleted file mode 100644 index 72b09dcb..00000000 --- a/test/netlib/data/text_cert_weird1 +++ /dev/null @@ -1,31 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIFNDCCBBygAwIBAgIEDFJFNzANBgkqhkiG9w0BAQUFADCBjDELMAkGA1UEBhMC -REUxHjAcBgNVBAoTFVVuaXZlcnNpdGFldCBNdWVuc3RlcjE6MDgGA1UEAxMxWmVy -dGlmaXppZXJ1bmdzc3RlbGxlIFVuaXZlcnNpdGFldCBNdWVuc3RlciAtIEcwMjEh -MB8GCSqGSIb3DQEJARYSY2FAdW5pLW11ZW5zdGVyLmRlMB4XDTA4MDUyMDEyNDQy -NFoXDTEzMDUxOTEyNDQyNFowezELMAkGA1UEBhMCREUxHjAcBgNVBAoTFVVuaXZl -cnNpdGFldCBNdWVuc3RlcjEuMCwGA1UECxMlWmVudHJ1bSBmdWVyIEluZm9ybWF0 -aW9uc3ZlcmFyYmVpdHVuZzEcMBoGA1UEAxMTd3d3LnVuaS1tdWVuc3Rlci5kZTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMM0WlCj0ew+tyZ1GurBOqFn -AlChKk4S1F9oDzvp3FwOON4H8YFET7p9ZnoWtkfXSlGNMjekqy67dFlLt1sLusSo -tjNdaOrDLYmnGEgnYAT0RFBvErzIybJoD/Vu3NXyhes+L94R9mEMCwYXmSvG51H9 -c5CvguXBofMchDLCM/U6AYpwu3sST5orV3S1Rsa9sndj8sKJAcw195PYwl6EiEBb -M36ltDBlTYEUAg3Z+VSzB09J3U4vSvguVkDCz+szZh5RG3xlN9mlNfzhf4lHrNgV -0BRbKypa5Uuf81wbMcMMqTxKq+A9ysObpn9J3pNUym+Tn2oqHzGgvwZYB4tzXqUC 
-AwEAAaOCAawwggGoMAkGA1UdEwQCMAAwCwYDVR0PBAQDAgTwMBMGA1UdJQQMMAoG -CCsGAQUFBwMBMB0GA1UdDgQWBBQ3RFo8awewUTq5TpOFf3jOCEKihzAfBgNVHSME -GDAWgBS+nlGiyZJ8u2CL5rBoZHdaUhmhADAjBgNVHREEHDAagRh3d3dhZG1pbkB1 -bmktbXVlbnN0ZXIuZGUwewYDVR0fBHQwcjA3oDWgM4YxaHR0cDovL2NkcDEucGNh -LmRmbi5kZS93d3UtY2EvcHViL2NybC9nX2NhY3JsLmNybDA3oDWgM4YxaHR0cDov -L2NkcDIucGNhLmRmbi5kZS93d3UtY2EvcHViL2NybC9nX2NhY3JsLmNybDCBlgYI -KwYBBQUHAQEEgYkwgYYwQQYIKwYBBQUHMAKGNWh0dHA6Ly9jZHAxLnBjYS5kZm4u -ZGUvd3d1LWNhL3B1Yi9jYWNlcnQvZ19jYWNlcnQuY3J0MEEGCCsGAQUFBzAChjVo -dHRwOi8vY2RwMi5wY2EuZGZuLmRlL3d3dS1jYS9wdWIvY2FjZXJ0L2dfY2FjZXJ0 -LmNydDANBgkqhkiG9w0BAQUFAAOCAQEAFfNpagtcKUSDKss7TcqjYn99FQ4FtWjE -pGmzYL2zX2wsdCGoVQlGkieL9slbQVEUAnBuqM1LPzUNNe9kZpOPV3Rdhq4y8vyS -xkx3G1v5aGxfPUe8KM8yKIOHRqYefNronHJM0fw7KyjQ73xgbIEgkW+kNXaMLcrb -EPC36O2Zna8GP9FQxJRLgcfQCcYdRKGVn0EtRSkz2ym5Rbh/hrmJBbbC2yJGGMI0 -Vu5A9piK0EZPekZIUmhMQynD9QcMfWhTEFr7YZfx9ktxKDW4spnu7YrgICfZNcCm -tfxmnEAFt6a47u9P0w9lpY8+Sx9MNFfTePym+HP4TYha9bIBes+XnA== ------END CERTIFICATE----- - diff --git a/test/netlib/data/verificationcerts/9da13359.0 b/test/netlib/data/verificationcerts/9da13359.0 deleted file mode 100644 index b22e4d20..00000000 --- a/test/netlib/data/verificationcerts/9da13359.0 +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDXTCCAkWgAwIBAgIJAPAfPQGCV/Z4MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV -BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX -aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTAxMTY0ODAxWhcNMTgwODIxMTY0ODAxWjBF -MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 -ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0djFBN+F7c6 -HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7yNwhNacNJ -Arq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1eRo0mPLNS -8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrltwb5iFEI -1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7L1Bm7D1/ -3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABo1AwTjAdBgNVHQ4EFgQUgOcrtxBX -LxbpnOT65d+vpfyWUkgwHwYDVR0jBBgwFoAUgOcrtxBXLxbpnOT65d+vpfyWUkgw -DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAEE9bFmUCA+6cvESKPoi2 -TGSpV652d0xd2U66LpEXeiWRJFLz8YGgoJCx3QFGBscJDXxrLxrBBBV/tCpEqypo -pYIqsawH7M66jpOr83Us3M8JC2eFBZJocMpXxdytWqHik5VKZNx6VQFT8bS7+yVC -VoUKePhlgcg+pmo41qjqieBNKRMh/1tXS77DI1lgO5wZLVrLXcdqWuDpmaQOKJeq -G/nxytCW/YJA7bFn/8Gjy8DYypJSeeaKu7o3P3+ONJHdIMHb+MdcheDBS9AOFSeo -xI0D5EbO9F873O77l7nbD7B0X34HFN0nGczC4poexIpbDFG3hAPekwZ5KC6VwJLc -1Q== ------END CERTIFICATE----- diff --git a/test/netlib/data/verificationcerts/generate.py b/test/netlib/data/verificationcerts/generate.py deleted file mode 100644 index 6d4d8550..00000000 --- a/test/netlib/data/verificationcerts/generate.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Generate SSL test certificates. 
-""" -import subprocess -import shlex -import os -import shutil - - -ROOT_CA = "trusted-root" -SUBJECT = "/CN=example.mitmproxy.org/" - - -def do(args): - print("> %s" % args) - args = shlex.split(args) - output = subprocess.check_output(args) - return output - - -def genrsa(cert): - do("openssl genrsa -out {cert}.key 2048".format(cert=cert)) - - -def sign(cert): - do("openssl x509 -req -in {cert}.csr " - "-CA {root_ca}.crt " - "-CAkey {root_ca}.key " - "-CAcreateserial " - "-days 1024 " - "-out {cert}.crt".format(root_ca=ROOT_CA, cert=cert) - ) - - -def mkcert(cert, args): - genrsa(cert) - do("openssl req -new -nodes -batch " - "-key {cert}.key " - "{args} " - "-out {cert}.csr".format(cert=cert, args=args) - ) - sign(cert) - os.remove("{cert}.csr".format(cert=cert)) - - -# create trusted root CA -genrsa("trusted-root") -do("openssl req -x509 -new -nodes -batch " - "-key trusted-root.key " - "-days 1024 " - "-out trusted-root.crt" - ) -h = do("openssl x509 -hash -noout -in trusted-root.crt").decode("ascii").strip() -shutil.copyfile("trusted-root.crt", "{}.0".format(h)) - -# create trusted leaf cert. -mkcert("trusted-leaf", "-subj {}".format(SUBJECT)) - -# create self-signed cert -genrsa("self-signed") -do("openssl req -x509 -new -nodes -batch " - "-key self-signed.key " - "-subj {} " - "-days 1024 " - "-out self-signed.crt".format(SUBJECT) - ) diff --git a/test/netlib/data/verificationcerts/self-signed.crt b/test/netlib/data/verificationcerts/self-signed.crt deleted file mode 100644 index dce2a7e0..00000000 --- a/test/netlib/data/verificationcerts/self-signed.crt +++ /dev/null @@ -1,19 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDEzCCAfugAwIBAgIJAJ945xt1FRsfMA0GCSqGSIb3DQEBCwUAMCAxHjAcBgNV -BAMMFWV4YW1wbGUubWl0bXByb3h5Lm9yZzAeFw0xNTExMDExNjQ4MDJaFw0xODA4 -MjExNjQ4MDJaMCAxHjAcBgNVBAMMFWV4YW1wbGUubWl0bXByb3h5Lm9yZzCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALFxyzPfjgIghOMMnJlW80yB84xC -nJtko3tuyOdozgTCyha2W+NdIKPNZJtWrzN4P0B5PlozCDwfcSYffLs0WZs8LRWv -BfZX8+oX+14qQjKFsiqgO65cTLP3qlPySYPJQQ37vOP1Y5Yf8nQq2mwQdC18hLtT -QOANG6OFoSplpBLsYF+QeoMgqCTa6hrl/5GLmQoDRTjXkv3Sj379AUDMybuBqccm -q5EIqCrE4+xJ8JywJclAVn2YP14baiFrrYCsYYg4sS1Od6xFj+xtpLe7My3AYjB9 -/aeHd8vDiob0cqOW1TFwhqgJKuErfFyg8lZ2hJmStJKyfofWuY/gl/vnvX0CAwEA -AaNQME4wHQYDVR0OBBYEFB8d32zK8eqZIoKw4jXzYzhw4amPMB8GA1UdIwQYMBaA -FB8d32zK8eqZIoKw4jXzYzhw4amPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAJmo2oKv1OEjZ0Q4yELO6BAnHAkmBKpW+zmLyQa8idxtLVkI9uXk3iqY -GWugkmcUZCTVFRWv/QXQQSex+00IY3x2rdHbtuZwcyKiz2u8WEmfW1rOIwBaFJ1i -v7+SA2aZs6vepN2sE56X54c/YbwQooaKZtOb+djWXYMJrc/Ezj0J7oQIJTptYV8v -/3216yCHRp/KCL7yTLtiw25xKuXNu/gkcd8wZOY9rS2qMUD897MJF0MvgJoauRBd -d4XEYCNKkrIRmfqrkiRQfAZpvpoutH6NCk7KuQYcI0BlOHlsnHHcs/w72EEqHwFq -x6476tW/t8GJDZVD74+pNBcLifXxArE= ------END CERTIFICATE----- diff --git a/test/netlib/data/verificationcerts/self-signed.key b/test/netlib/data/verificationcerts/self-signed.key deleted file mode 100644 index 71a6ad6a..00000000 --- a/test/netlib/data/verificationcerts/self-signed.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAsXHLM9+OAiCE4wycmVbzTIHzjEKcm2Sje27I52jOBMLKFrZb -410go81km1avM3g/QHk+WjMIPB9xJh98uzRZmzwtFa8F9lfz6hf7XipCMoWyKqA7 -rlxMs/eqU/JJg8lBDfu84/Vjlh/ydCrabBB0LXyEu1NA4A0bo4WhKmWkEuxgX5B6 -gyCoJNrqGuX/kYuZCgNFONeS/dKPfv0BQMzJu4GpxyarkQioKsTj7EnwnLAlyUBW -fZg/XhtqIWutgKxhiDixLU53rEWP7G2kt7szLcBiMH39p4d3y8OKhvRyo5bVMXCG -qAkq4St8XKDyVnaEmZK0krJ+h9a5j+CX++e9fQIDAQABAoIBAQCT+FvGbych2PJX -0D2KlXqgE0IAdc/YuYymstSwPLKIP9N8KyfnKtK8Jdw+uYOyfRTp8/EuEJ5OXL3j 
-V6CRD++lRwIlseVb7y5EySjh9oVrUhgn+aSrGucPsHkGNeZeEmbAfWugARLBrvRl -MRMhyHrJL6wT9jIEZInmy9mA3G99IuFW3rS8UR1Yu7zyvhtjvop1xg/wfEUu24Ty -PvMfnwaDcZHCz2tmu2KJvaxSBAG3FKmAqeMvk1Gt5m2keKgw03M+EX0LrM8ybWqn -VwB8tnSyMBLVFLIXMpIiSfpji10+p9fdKFMRF++D6qVwyoxPiIq+yEJapxXiqLea -mkhtJW91AoGBAOvIb7bZvH4wYvi6txs2pygF3ZMjqg/fycnplrmYMrjeeDeeN4v1 -h/5tkN9TeTkHRaN3L7v49NEUDhDyuopLTNfWpYdv63U/BVzvgMm/guacTYkx9whB -OvQ2YekR/WKg7kuyrTZidTDz+mjU+1b8JaWGjiDc6vFwxZA7uWicaGGHAoGBAMCo -y/2AwFGwCR+5bET1nTTyxok6iKo4k6R/7DJe4Bq8VLifoyX3zDlGG/33KN3xVqBU -xnT9gkii1lfX2U+4iM+GOSPl0nG0hOEqEH+vFHszpHybDeNez3FEyIbgOzg6u7sV -NOy+P94L5EMQVEmWp5g6Vm3k9kr92Bd9UacKQPnbAoGAMN8KyMu41i8RVJze9zUM -0K7mjmkGBuRL3x4br7xsRwVVxbF1sfzig0oSjTewGLH5LTi3HC8uD2gowjqNj7yr -4NEM3lXEaDj305uRBkA70bD0IUvJ+FwM7DGZecXQz3Cr8+TFIlCmGc94R+Jddlot -M3IAY69mw0SsroiylYxV1mECgYAcSGtx8rXJCDO+sYTgdsI2ZLGasbogax/ZlWIC -XwU9R4qUc/MKft8/RTiUxvT76BMUhH2B7Tl0GlunF6vyVR/Yf1biGzoSsTKUr40u -gXBbSdCK7mRSjbecZEGf80keTxkCNPHJE4DiwxImej41c2V1JpNLnMI/bhaMFDyp -bgrt4wKBgHFzZgAgM1v07F038tAkIBGrYLukY1ZFBaZoGZ9xHfy/EmLJM3HCHLO5 -8wszMGhMTe2+39EeChwgj0kFaq1YnDiucU74BC57KR1tD59y7l6UnsQXTm4/32j8 -Or6i8GekBibCb97DzzOU0ZK//fNhHTXpDDXsYt5lJUWSmgW+S9Qp ------END RSA PRIVATE KEY----- diff --git a/test/netlib/data/verificationcerts/trusted-leaf.crt b/test/netlib/data/verificationcerts/trusted-leaf.crt deleted file mode 100644 index 6a92de92..00000000 --- a/test/netlib/data/verificationcerts/trusted-leaf.crt +++ /dev/null @@ -1,18 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIC4TCCAckCCQCj6D9oVylb8jANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB -VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 -cyBQdHkgTHRkMB4XDTE1MTEwMTE2NDgwMloXDTE4MDgyMTE2NDgwMlowIDEeMBwG -A1UEAwwVZXhhbXBsZS5taXRtcHJveHkub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEAy/L5JYHS7QFhSIsjmd6bJTgs2rdqEn6tsmPBVZKZ7SqCAVjW -hPpEu7Q23akmU6Zm9Fp/vENc3jzxQLlEKhrv7eWmFYSOrCYtbJOz3RQorlwjjfdY -LlNQh1wYUXQX3PN3r3dyYtt5vTtXKc8+aP4M4vX7qlbW+4j4LrQfmPjS0XOdYpu3 -wh+i1ZMIhZye3hpCjwnpjTf7/ff45ZFxtkoi1uzEC/+swr1RSvamY8Foe12Re17Z -5ij8ZB0NIdoSk1tDkY3sJ8iNi35+qartl0UYeG9IUXRwDRrPsEKpF4RxY1+X2bdZ -r6PKb/E4CA5JlMvS5SVmrvxjCVqTQBmTjXfxqwIDAQABMA0GCSqGSIb3DQEBCwUA -A4IBAQBmpSZJrTDvzSlo6P7P7x1LoETzHyVjwgPeqGYw6ndGXeJMN9rhhsFvRsiB -I/aHh58MIlSjti7paikDAoFHB3dBvFHR+JUa/ailWEbcZReWRSE3lV6wFiN3G3lU -OyofR7MKnPW7bv8hSqOLqP1mbupXuQFB5M6vPLRwg5VgiCHI/XBiTvzMamzvNAR3 -UHHZtsJkRqzogYm6K9YJaga7jteSx2nNo+ujLwrxeXsLChTyFMJGnVkp5IyKeNfc -qwlzNncb3y+4KnUdNkPEtuydgAxAfuyXufiFBYRcUWbQ5/9ycgF7131ySaj9f/Y2 -kMsv2jg+soKvwwVYCABsk1KSHtfz ------END CERTIFICATE----- diff --git a/test/netlib/data/verificationcerts/trusted-leaf.key b/test/netlib/data/verificationcerts/trusted-leaf.key deleted file mode 100644 index 783ebf1c..00000000 --- a/test/netlib/data/verificationcerts/trusted-leaf.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEAy/L5JYHS7QFhSIsjmd6bJTgs2rdqEn6tsmPBVZKZ7SqCAVjW -hPpEu7Q23akmU6Zm9Fp/vENc3jzxQLlEKhrv7eWmFYSOrCYtbJOz3RQorlwjjfdY -LlNQh1wYUXQX3PN3r3dyYtt5vTtXKc8+aP4M4vX7qlbW+4j4LrQfmPjS0XOdYpu3 -wh+i1ZMIhZye3hpCjwnpjTf7/ff45ZFxtkoi1uzEC/+swr1RSvamY8Foe12Re17Z -5ij8ZB0NIdoSk1tDkY3sJ8iNi35+qartl0UYeG9IUXRwDRrPsEKpF4RxY1+X2bdZ -r6PKb/E4CA5JlMvS5SVmrvxjCVqTQBmTjXfxqwIDAQABAoIBAQC956DWq+wbhA1x -3x1nSUBth8E8Z0z9q7dRRFHhvIBXth0X5ADcEa2umj/8ZmSpv2heX2ZRhugSh+yc -t+YgzrRacFwV7ThsU6A4WdBBK2Q19tWke4xAlpOFdtut/Mu7kXkAidiY9ISHD5o5 -9B/I48ZcD3AnTHUiAogV9OL3LbogDD4HasLt4mWkbq8U2thdjxMIvxdg36olJEuo -iAZrAUCPZEXuU89BtvPLUYioe9n90nzkyneGNS0SHxotlEc9ZYK9VTsivtXJb4wB 
-ptDMCp+TH3tjo8BTGnbnoZEybgyyOEd0UTzxK4DlxnvRVWexFY6NXwPFhIxKlB0Y -Bg8NkAkBAoGBAOiRnmbC5QkqrKrTkLx3fghIHPqgEXPPYgHLSuY3UjTlMb3APXpq -vzQnlCn3QuSse/1fWnQj+9vLVbx1XNgKjzk7dQhn5IUY+mGN4lLmoSnTebxvSQ43 -VAgTYjST9JFmJ3wK4KkWDsEsVao8LAx0h5JEQXUTT5xZpFA2MLztYbgfAoGBAOB/ -MvhLMAwlx8+m/zXMEPLk/KOd2dVZ4q5se8bAT/GiGsi8JUcPnCk140ZZabJqryAp -JFzUHIjfVsS9ejAfocDk1JeIm7Uus4um6fQEKIPMBxI/M/UAwYCXAG9ULXqilbO3 -pTdeeuraVKrTu1Z4ea6x4du1JWKcyDfYfsHepcT1AoGBAM2fskV5G7e3G2MOG3IG -1E/OMpEE5WlXenfLnjVdxDkwS4JRbgnGR7d9JurTyzkTp6ylmfwFtLDoXq15ttTs -wSUBBMCh2tIy+201XV2eu++XIpMQca84C/v352RFTH8hqtdpZqkY74KsCDGzcd6x -SQxxfM5efIzoVPb2crEX0MZRAoGAQ2EqFSfL9flo7UQ8GRN0itJ7mUgJV2WxCZT5 -2X9i/y0eSN1feuKOhjfsTPMNLEWk5kwy48GuBs6xpj8Qa10zGUgVHp4bzdeEgAfK -9DhDSLt1694YZBKkAUpRERj8xXAC6nvWFLZAwjhhbRw7gAqMywgMt/q4i85usYRD -F0ESE/kCgYBbc083PcLmlHbkn/d1i4IcLI6wFk+tZYIEVYDid7xDOgZOBcOTTyYB -BrDzNqbKNexKRt7QHVlwR+VOGMdN5P0hf7oH3SMW23OxBKoQe8pUSGF9a4DjCS1v -vCXMekifb9kIhhUWaG71L8+MaOzNBVAmk1+3NzPZgV/YxHjAWWhGHQ== ------END RSA PRIVATE KEY----- diff --git a/test/netlib/data/verificationcerts/trusted-root.crt b/test/netlib/data/verificationcerts/trusted-root.crt deleted file mode 100644 index b22e4d20..00000000 --- a/test/netlib/data/verificationcerts/trusted-root.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDXTCCAkWgAwIBAgIJAPAfPQGCV/Z4MA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV -BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX -aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTAxMTY0ODAxWhcNMTgwODIxMTY0ODAxWjBF -MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 -ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0djFBN+F7c6 -HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7yNwhNacNJ -Arq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1eRo0mPLNS -8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrltwb5iFEI -1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7L1Bm7D1/ -3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABo1AwTjAdBgNVHQ4EFgQUgOcrtxBX -LxbpnOT65d+vpfyWUkgwHwYDVR0jBBgwFoAUgOcrtxBXLxbpnOT65d+vpfyWUkgw -DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAEE9bFmUCA+6cvESKPoi2 -TGSpV652d0xd2U66LpEXeiWRJFLz8YGgoJCx3QFGBscJDXxrLxrBBBV/tCpEqypo -pYIqsawH7M66jpOr83Us3M8JC2eFBZJocMpXxdytWqHik5VKZNx6VQFT8bS7+yVC -VoUKePhlgcg+pmo41qjqieBNKRMh/1tXS77DI1lgO5wZLVrLXcdqWuDpmaQOKJeq -G/nxytCW/YJA7bFn/8Gjy8DYypJSeeaKu7o3P3+ONJHdIMHb+MdcheDBS9AOFSeo -xI0D5EbO9F873O77l7nbD7B0X34HFN0nGczC4poexIpbDFG3hAPekwZ5KC6VwJLc -1Q== ------END CERTIFICATE----- diff --git a/test/netlib/data/verificationcerts/trusted-root.key b/test/netlib/data/verificationcerts/trusted-root.key deleted file mode 100644 index 05483f77..00000000 --- a/test/netlib/data/verificationcerts/trusted-root.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEArp8LD34JhKCwcQbwIYQMg4+eCgLVN8fwB7+/qOfJbArPs0dj -FBN+F7c6HGvMr24BKUk5u8pn4dPtNurm/vPC8ovNGmcXz62BQJpcMX2veVdRsF7y -NwhNacNJArq+70zNMwYBznx0XUxMF6j6nVFf3AW6SU04ylT4Mp3SY/BUUDAdfl1e -Ro0mPLNS8rpsN+8YBw1Q7SCuBRVqpOgVIsL88svgQUSOlzvMZPBpG/cmB3BNKNrl -twb5iFEI1jAV7uSj5IcIuNO/246kfsDVPTFMJIzav/CUoidd5UNw+SoFDlzh8sA7 -L1Bm7D1/3KHYSKswGsSR3kynAl10w/SJKDtn8wIDAQABAoIBAFgMzjDzpqz/sbhs -fS0JPp4gDtqRbx3/bSMbJvNuXPxjvzNxLZ5z7cLbmyu1l7Jlz6QXzkrI1vTiPdzR -OcUY+RYANF252iHYJTKEIzS5YX/X7dL3LT9eqlpIJEqCC8Dygw3VW5fY3Xwl+sB7 -blNhMuro4HQRwi8UBUrQlcPa7Ui5BBi323Q6en+VjYctkqpJHzNKPSqPTbsdLaK+ -B0XuXxFatM09rmeRKZCL71Lk1T8N/l0hqEzej7zxgVD7vG/x1kMFN4T3yCmXCbPa -izGHYr1EBHglm4qMNWveXCZiVJ+wmwCjdjqvggyHiZFXE2N0OCrWPhxQPdqFf5y7 
-bUO9U2ECgYEA6GM1UzRnbVpjb20ezFy7dU7rlWM0nHBfG27M3bcXh4HnPpnvKp0/ -8a1WFi4kkRywrNXx8hFEd43vTbdObLpVXScXRKiY3MHmFk4k4hbWuTpmumCubQZO -AWlX6TE0HRKn1wQahgpQcxcWaDN2xJJmRQ1zVmlnNkT48/4kFgRxyykCgYEAwF08 -ngrF35oYoU/x+KKq2NXGeNUzoZMj568dE1oWW0ZFpqCi+DGT+hAbG3yUOBSaPqy9 -zn1obGo0YRlrayvtebz118kG7a/rzY02VcAPlT/GpEhvkZlXTwEK17zRJc1nJrfP -39QAZWZsaOru9NRIg/8HcdG3JPR2MhRD/De9GbsCgYAaiZnBUq6s8jGAu/lUZRKT -JtwIRzfu1XZG77Q9bXcmZlM99t41A5gVxTGbftF2MMyMMDJc7lPfQzocqd4u1GiD -Jr+le4tZSls4GNxlZS5IIL8ycW/5y0qFJr5/RrsoxsSb7UAKJothWTWZ2Karc/xx -zkNpjsfWjrHPSypbyU4lYQKBgFh1R5/BgnatjO/5LGNSok/uFkOQfxqo6BTtYOh6 -P9efO/5A1lBdtBeE+oIsSphzWO7DTtE6uB9Kw2V3Y/83hw+5RjABoG8Cu+OdMURD -eqb+WeFH8g45Pn31E8Bbcq34g5u5YR0jhz8Z13ZzuojZabNRPmIntxmGVSf4S78a -/plrAoGBANMHNng2lyr03nqnHrOM6NXD+60af0YR/YJ+2d/H40RnXxGJ4DXn7F00 -a4vJFPa97uq+xpd0HE+TE+NIrOdVDXPePD2qzBzMTsctGtj30vLzojMOT+Yf/nvO -WxTL5Q8GruJz2Dn0awSZO2z/3A8S1rmpuVZ/jT5NtRrvOSY6hmxF ------END RSA PRIVATE KEY----- diff --git a/test/netlib/data/verificationcerts/trusted-root.srl b/test/netlib/data/verificationcerts/trusted-root.srl deleted file mode 100644 index 4ad962ba..00000000 --- a/test/netlib/data/verificationcerts/trusted-root.srl +++ /dev/null @@ -1 +0,0 @@ -A3E83F6857295BF2 diff --git a/test/netlib/http/__init__.py b/test/netlib/http/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/netlib/http/http1/__init__.py b/test/netlib/http/http1/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/netlib/http/http1/test_assemble.py b/test/netlib/http/http1/test_assemble.py deleted file mode 100644 index dac5fdad..00000000 --- a/test/netlib/http/http1/test_assemble.py +++ /dev/null @@ -1,101 +0,0 @@ -from mitmproxy import exceptions -from netlib.http import Headers -from netlib.http.http1.assemble import ( - assemble_request, assemble_request_head, assemble_response, - assemble_response_head, _assemble_request_line, _assemble_request_headers, - _assemble_response_headers, - assemble_body) -from mitmproxy.test.tutils import treq, raises, tresp - - -def test_assemble_request(): - assert assemble_request(treq()) == ( - b"GET /path HTTP/1.1\r\n" - b"header: qvalue\r\n" - b"content-length: 7\r\n" - b"host: address:22\r\n" - b"\r\n" - b"content" - ) - - with raises(exceptions.HttpException): - assemble_request(treq(content=None)) - - -def test_assemble_request_head(): - c = assemble_request_head(treq(content=b"foo")) - assert b"GET" in c - assert b"qvalue" in c - assert b"content-length" in c - assert b"foo" not in c - - -def test_assemble_response(): - assert assemble_response(tresp()) == ( - b"HTTP/1.1 200 OK\r\n" - b"header-response: svalue\r\n" - b"content-length: 7\r\n" - b"\r\n" - b"message" - ) - - with raises(exceptions.HttpException): - assemble_response(tresp(content=None)) - - -def test_assemble_response_head(): - c = assemble_response_head(tresp()) - assert b"200" in c - assert b"svalue" in c - assert b"message" not in c - - -def test_assemble_body(): - c = list(assemble_body(Headers(), [b"body"])) - assert c == [b"body"] - - c = list(assemble_body(Headers(transfer_encoding="chunked"), [b"123456789a", b""])) - assert c == [b"a\r\n123456789a\r\n", b"0\r\n\r\n"] - - c = list(assemble_body(Headers(transfer_encoding="chunked"), [b"123456789a"])) - assert c == [b"a\r\n123456789a\r\n", b"0\r\n\r\n"] - - -def test_assemble_request_line(): - assert _assemble_request_line(treq().data) == b"GET /path HTTP/1.1" - - authority_request = treq(method=b"CONNECT", first_line_format="authority").data - assert 
_assemble_request_line(authority_request) == b"CONNECT address:22 HTTP/1.1" - - absolute_request = treq(first_line_format="absolute").data - assert _assemble_request_line(absolute_request) == b"GET http://address:22/path HTTP/1.1" - - with raises(RuntimeError): - _assemble_request_line(treq(first_line_format="invalid_form").data) - - -def test_assemble_request_headers(): - # https://github.com/mitmproxy/mitmproxy/issues/186 - r = treq(content=b"") - r.headers["Transfer-Encoding"] = "chunked" - c = _assemble_request_headers(r.data) - assert b"Transfer-Encoding" in c - - -def test_assemble_request_headers_host_header(): - r = treq() - r.headers = Headers() - c = _assemble_request_headers(r.data) - assert b"host" in c - - r.host = None - c = _assemble_request_headers(r.data) - assert b"host" not in c - - -def test_assemble_response_headers(): - # https://github.com/mitmproxy/mitmproxy/issues/186 - r = tresp(content=b"") - r.headers["Transfer-Encoding"] = "chunked" - c = _assemble_response_headers(r) - assert b"Transfer-Encoding" in c diff --git a/test/netlib/http/http1/test_read.py b/test/netlib/http/http1/test_read.py deleted file mode 100644 index eb96968c..00000000 --- a/test/netlib/http/http1/test_read.py +++ /dev/null @@ -1,371 +0,0 @@ -from io import BytesIO -from mock import Mock -import pytest - -from mitmproxy import exceptions -from netlib.http import Headers -from netlib.http.http1.read import ( - read_request, read_response, read_request_head, - read_response_head, read_body, connection_close, expected_http_body_size, _get_first_line, - _read_request_line, _parse_authority_form, _read_response_line, _check_http_version, - _read_headers, _read_chunked, get_header_tokens -) -from mitmproxy.test.tutils import treq, tresp, raises - - -def test_get_header_tokens(): - headers = Headers() - assert get_header_tokens(headers, "foo") == [] - headers["foo"] = "bar" - assert get_header_tokens(headers, "foo") == ["bar"] - headers["foo"] = "bar, voing" - assert get_header_tokens(headers, "foo") == ["bar", "voing"] - headers.set_all("foo", ["bar, voing", "oink"]) - assert get_header_tokens(headers, "foo") == ["bar", "voing", "oink"] - - -@pytest.mark.parametrize("input", [ - b"GET / HTTP/1.1\r\n\r\nskip", - b"GET / HTTP/1.1\r\n\r\nskip", - b"GET / HTTP/1.1\r\n\r\nskip", - b"GET / HTTP/1.1 \r\n\r\nskip", -]) -def test_read_request(input): - rfile = BytesIO(input) - r = read_request(rfile) - assert r.method == "GET" - assert r.content == b"" - assert r.http_version == "HTTP/1.1" - assert r.timestamp_end - assert rfile.read() == b"skip" - - -@pytest.mark.parametrize("input", [ - b"CONNECT :0 0", -]) -def test_read_request_error(input): - rfile = BytesIO(input) - raises(exceptions.HttpException, read_request, rfile) - - -def test_read_request_head(): - rfile = BytesIO( - b"GET / HTTP/1.1\r\n" - b"Content-Length: 4\r\n" - b"\r\n" - b"skip" - ) - rfile.reset_timestamps = Mock() - rfile.first_byte_timestamp = 42 - r = read_request_head(rfile) - assert r.method == "GET" - assert r.headers["Content-Length"] == "4" - assert r.content is None - assert rfile.reset_timestamps.called - assert r.timestamp_start == 42 - assert rfile.read() == b"skip" - - -@pytest.mark.parametrize("input", [ - b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", - b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", - b"HTTP/1.1 418 I'm a teapot\r\n\r\nbody", - b"HTTP/1.1 418 I'm a teapot \r\n\r\nbody", -]) -def test_read_response(input): - req = treq() - rfile = BytesIO(input) - r = read_response(rfile, req) - assert r.http_version == "HTTP/1.1" - 
assert r.status_code == 418 - assert r.reason == "I'm a teapot" - assert r.content == b"body" - assert r.timestamp_end - - -def test_read_response_head(): - rfile = BytesIO( - b"HTTP/1.1 418 I'm a teapot\r\n" - b"Content-Length: 4\r\n" - b"\r\n" - b"skip" - ) - rfile.reset_timestamps = Mock() - rfile.first_byte_timestamp = 42 - r = read_response_head(rfile) - assert r.status_code == 418 - assert r.headers["Content-Length"] == "4" - assert r.content is None - assert rfile.reset_timestamps.called - assert r.timestamp_start == 42 - assert rfile.read() == b"skip" - - -class TestReadBody: - def test_chunked(self): - rfile = BytesIO(b"3\r\nfoo\r\n0\r\n\r\nbar") - body = b"".join(read_body(rfile, None)) - assert body == b"foo" - assert rfile.read() == b"bar" - - def test_known_size(self): - rfile = BytesIO(b"foobar") - body = b"".join(read_body(rfile, 3)) - assert body == b"foo" - assert rfile.read() == b"bar" - - def test_known_size_limit(self): - rfile = BytesIO(b"foobar") - with raises(exceptions.HttpException): - b"".join(read_body(rfile, 3, 2)) - - def test_known_size_too_short(self): - rfile = BytesIO(b"foo") - with raises(exceptions.HttpException): - b"".join(read_body(rfile, 6)) - - def test_unknown_size(self): - rfile = BytesIO(b"foobar") - body = b"".join(read_body(rfile, -1)) - assert body == b"foobar" - - def test_unknown_size_limit(self): - rfile = BytesIO(b"foobar") - with raises(exceptions.HttpException): - b"".join(read_body(rfile, -1, 3)) - - def test_max_chunk_size(self): - rfile = BytesIO(b"123456") - assert list(read_body(rfile, -1, max_chunk_size=None)) == [b"123456"] - rfile = BytesIO(b"123456") - assert list(read_body(rfile, -1, max_chunk_size=1)) == [b"1", b"2", b"3", b"4", b"5", b"6"] - - -def test_connection_close(): - headers = Headers() - assert connection_close(b"HTTP/1.0", headers) - assert not connection_close(b"HTTP/1.1", headers) - - headers["connection"] = "keep-alive" - assert not connection_close(b"HTTP/1.1", headers) - - headers["connection"] = "close" - assert connection_close(b"HTTP/1.1", headers) - - headers["connection"] = "foobar" - assert connection_close(b"HTTP/1.0", headers) - assert not connection_close(b"HTTP/1.1", headers) - - -def test_expected_http_body_size(): - # Expect: 100-continue - assert expected_http_body_size( - treq(headers=Headers(expect="100-continue", content_length="42")) - ) == 0 - - # http://tools.ietf.org/html/rfc7230#section-3.3 - assert expected_http_body_size( - treq(method=b"HEAD"), - tresp(headers=Headers(content_length="42")) - ) == 0 - assert expected_http_body_size( - treq(method=b"CONNECT"), - tresp() - ) == 0 - for code in (100, 204, 304): - assert expected_http_body_size( - treq(), - tresp(status_code=code) - ) == 0 - - # chunked - assert expected_http_body_size( - treq(headers=Headers(transfer_encoding="chunked")), - ) is None - - # explicit length - for val in (b"foo", b"-7"): - with raises(exceptions.HttpSyntaxException): - expected_http_body_size( - treq(headers=Headers(content_length=val)) - ) - assert expected_http_body_size( - treq(headers=Headers(content_length="42")) - ) == 42 - - # no length - assert expected_http_body_size( - treq(headers=Headers()) - ) == 0 - assert expected_http_body_size( - treq(headers=Headers()), tresp(headers=Headers()) - ) == -1 - - -def test_get_first_line(): - rfile = BytesIO(b"foo\r\nbar") - assert _get_first_line(rfile) == b"foo" - - rfile = BytesIO(b"\r\nfoo\r\nbar") - assert _get_first_line(rfile) == b"foo" - - with raises(exceptions.HttpReadDisconnect): - rfile = BytesIO(b"") - 
_get_first_line(rfile) - - with raises(exceptions.HttpReadDisconnect): - rfile = Mock() - rfile.readline.side_effect = exceptions.TcpDisconnect - _get_first_line(rfile) - - -def test_read_request_line(): - def t(b): - return _read_request_line(BytesIO(b)) - - assert (t(b"GET / HTTP/1.1") == - ("relative", b"GET", None, None, None, b"/", b"HTTP/1.1")) - assert (t(b"OPTIONS * HTTP/1.1") == - ("relative", b"OPTIONS", None, None, None, b"*", b"HTTP/1.1")) - assert (t(b"CONNECT foo:42 HTTP/1.1") == - ("authority", b"CONNECT", None, b"foo", 42, None, b"HTTP/1.1")) - assert (t(b"GET http://foo:42/bar HTTP/1.1") == - ("absolute", b"GET", b"http", b"foo", 42, b"/bar", b"HTTP/1.1")) - - with raises(exceptions.HttpSyntaxException): - t(b"GET / WTF/1.1") - with raises(exceptions.HttpSyntaxException): - t(b"this is not http") - with raises(exceptions.HttpReadDisconnect): - t(b"") - - -def test_parse_authority_form(): - assert _parse_authority_form(b"foo:42") == (b"foo", 42) - with raises(exceptions.HttpSyntaxException): - _parse_authority_form(b"foo") - with raises(exceptions.HttpSyntaxException): - _parse_authority_form(b"foo:bar") - with raises(exceptions.HttpSyntaxException): - _parse_authority_form(b"foo:99999999") - with raises(exceptions.HttpSyntaxException): - _parse_authority_form(b"f\x00oo:80") - - -def test_read_response_line(): - def t(b): - return _read_response_line(BytesIO(b)) - - assert t(b"HTTP/1.1 200 OK") == (b"HTTP/1.1", 200, b"OK") - assert t(b"HTTP/1.1 200") == (b"HTTP/1.1", 200, b"") - - # https://github.com/mitmproxy/mitmproxy/issues/784 - assert t(b"HTTP/1.1 200 Non-Autoris\xc3\xa9") == (b"HTTP/1.1", 200, b"Non-Autoris\xc3\xa9") - - with raises(exceptions.HttpSyntaxException): - assert t(b"HTTP/1.1") - - with raises(exceptions.HttpSyntaxException): - t(b"HTTP/1.1 OK OK") - with raises(exceptions.HttpSyntaxException): - t(b"WTF/1.1 200 OK") - with raises(exceptions.HttpReadDisconnect): - t(b"") - - -def test_check_http_version(): - _check_http_version(b"HTTP/0.9") - _check_http_version(b"HTTP/1.0") - _check_http_version(b"HTTP/1.1") - _check_http_version(b"HTTP/2.0") - with raises(exceptions.HttpSyntaxException): - _check_http_version(b"WTF/1.0") - with raises(exceptions.HttpSyntaxException): - _check_http_version(b"HTTP/1.10") - with raises(exceptions.HttpSyntaxException): - _check_http_version(b"HTTP/1.b") - - -class TestReadHeaders: - @staticmethod - def _read(data): - return _read_headers(BytesIO(data)) - - def test_read_simple(self): - data = ( - b"Header: one\r\n" - b"Header2: two\r\n" - b"\r\n" - ) - headers = self._read(data) - assert headers.fields == ((b"Header", b"one"), (b"Header2", b"two")) - - def test_read_multi(self): - data = ( - b"Header: one\r\n" - b"Header: two\r\n" - b"\r\n" - ) - headers = self._read(data) - assert headers.fields == ((b"Header", b"one"), (b"Header", b"two")) - - def test_read_continued(self): - data = ( - b"Header: one\r\n" - b"\ttwo\r\n" - b"Header2: three\r\n" - b"\r\n" - ) - headers = self._read(data) - assert headers.fields == ((b"Header", b"one\r\n two"), (b"Header2", b"three")) - - def test_read_continued_err(self): - data = b"\tfoo: bar\r\n" - with raises(exceptions.HttpSyntaxException): - self._read(data) - - def test_read_err(self): - data = b"foo" - with raises(exceptions.HttpSyntaxException): - self._read(data) - - def test_read_empty_name(self): - data = b":foo" - with raises(exceptions.HttpSyntaxException): - self._read(data) - - def test_read_empty_value(self): - data = b"bar:" - headers = self._read(data) - assert 
headers.fields == ((b"bar", b""),) - - -def test_read_chunked(): - req = treq(content=None) - req.headers["Transfer-Encoding"] = "chunked" - - data = b"1\r\na\r\n0\r\n" - with raises(exceptions.HttpSyntaxException): - b"".join(_read_chunked(BytesIO(data))) - - data = b"1\r\na\r\n0\r\n\r\n" - assert b"".join(_read_chunked(BytesIO(data))) == b"a" - - data = b"\r\n\r\n1\r\na\r\n1\r\nb\r\n0\r\n\r\n" - assert b"".join(_read_chunked(BytesIO(data))) == b"ab" - - data = b"\r\n" - with raises("closed prematurely"): - b"".join(_read_chunked(BytesIO(data))) - - data = b"1\r\nfoo" - with raises("malformed chunked body"): - b"".join(_read_chunked(BytesIO(data))) - - data = b"foo\r\nfoo" - with raises(exceptions.HttpSyntaxException): - b"".join(_read_chunked(BytesIO(data))) - - data = b"5\r\naaaaa\r\n0\r\n\r\n" - with raises("too large"): - b"".join(_read_chunked(BytesIO(data), limit=2)) diff --git a/test/netlib/http/http2/__init__.py b/test/netlib/http/http2/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/test/netlib/http/http2/test_framereader.py b/test/netlib/http/http2/test_framereader.py deleted file mode 100644 index 41b73189..00000000 --- a/test/netlib/http/http2/test_framereader.py +++ /dev/null @@ -1 +0,0 @@ -# foobar diff --git a/test/netlib/http/test_authentication.py b/test/netlib/http/test_authentication.py deleted file mode 100644 index 5e04bbc5..00000000 --- a/test/netlib/http/test_authentication.py +++ /dev/null @@ -1,122 +0,0 @@ -import binascii - -from mitmproxy.test import tutils -from netlib.http import authentication, Headers - - -def test_parse_http_basic_auth(): - vals = ("basic", "foo", "bar") - assert authentication.parse_http_basic_auth( - authentication.assemble_http_basic_auth(*vals) - ) == vals - assert not authentication.parse_http_basic_auth("") - assert not authentication.parse_http_basic_auth("foo bar") - v = "basic " + binascii.b2a_base64(b"foo").decode("ascii") - assert not authentication.parse_http_basic_auth(v) - - -class TestPassManNonAnon: - - def test_simple(self): - p = authentication.PassManNonAnon() - assert not p.test("", "") - assert p.test("user", "") - - -class TestPassManHtpasswd: - - def test_file_errors(self): - tutils.raises( - "malformed htpasswd file", - authentication.PassManHtpasswd, - tutils.test_data.path("data/server.crt")) - - def test_simple(self): - pm = authentication.PassManHtpasswd(tutils.test_data.path("data/htpasswd")) - - vals = ("basic", "test", "test") - authentication.assemble_http_basic_auth(*vals) - assert pm.test("test", "test") - assert not pm.test("test", "foo") - assert not pm.test("foo", "test") - assert not pm.test("test", "") - assert not pm.test("", "") - - -class TestPassManSingleUser: - - def test_simple(self): - pm = authentication.PassManSingleUser("test", "test") - assert pm.test("test", "test") - assert not pm.test("test", "foo") - assert not pm.test("foo", "test") - - -class TestNullProxyAuth: - - def test_simple(self): - na = authentication.NullProxyAuth(authentication.PassManNonAnon()) - assert not na.auth_challenge_headers() - assert na.authenticate("foo") - na.clean({}) - - -class TestBasicProxyAuth: - - def test_simple(self): - ba = authentication.BasicProxyAuth(authentication.PassManNonAnon(), "test") - headers = Headers() - assert ba.auth_challenge_headers() - assert not ba.authenticate(headers) - - def test_authenticate_clean(self): - ba = authentication.BasicProxyAuth(authentication.PassManNonAnon(), "test") - - headers = Headers() - vals = ("basic", "foo", "bar") - 
headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) - assert ba.authenticate(headers) - - ba.clean(headers) - assert ba.AUTH_HEADER not in headers - - headers[ba.AUTH_HEADER] = "" - assert not ba.authenticate(headers) - - headers[ba.AUTH_HEADER] = "foo" - assert not ba.authenticate(headers) - - vals = ("foo", "foo", "bar") - headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) - assert not ba.authenticate(headers) - - ba = authentication.BasicProxyAuth(authentication.PassMan(), "test") - vals = ("basic", "foo", "bar") - headers[ba.AUTH_HEADER] = authentication.assemble_http_basic_auth(*vals) - assert not ba.authenticate(headers) - - -class Bunch: - pass - - -class TestAuthAction: - - def test_nonanonymous(self): - m = Bunch() - aa = authentication.NonanonymousAuthAction(None, "authenticator") - aa(None, m, None, None) - assert m.authenticator - - def test_singleuser(self): - m = Bunch() - aa = authentication.SingleuserAuthAction(None, "authenticator") - aa(None, m, "foo:bar", None) - assert m.authenticator - tutils.raises("invalid", aa, None, m, "foo", None) - - def test_httppasswd(self): - m = Bunch() - aa = authentication.HtpasswdAuthAction(None, "authenticator") - aa(None, m, tutils.test_data.path("data/htpasswd"), None) - assert m.authenticator diff --git a/test/netlib/http/test_cookies.py b/test/netlib/http/test_cookies.py deleted file mode 100644 index ca10a69c..00000000 --- a/test/netlib/http/test_cookies.py +++ /dev/null @@ -1,365 +0,0 @@ -import time - -from netlib.http import cookies -from mitmproxy.test.tutils import raises - -import mock - -cookie_pairs = [ - [ - "", - [] - ], - [ - "one=uno", - [["one", "uno"]] - ], - [ - "one", - [["one", None]] - ], - [ - "one=uno; two=due", - [["one", "uno"], ["two", "due"]] - ], - [ - 'one="uno"; two="\due"', - [["one", "uno"], ["two", "due"]] - ], - [ - 'one="un\\"o"', - [["one", 'un"o']] - ], - [ - 'one="uno,due"', - [["one", 'uno,due']] - ], - [ - "one=uno; two; three=tre", - [["one", "uno"], ["two", None], ["three", "tre"]] - ], - [ - "_lvs2=zHai1+Hq+Tc2vmc2r4GAbdOI5Jopg3EwsdUT9g=; " - "_rcc2=53VdltWl+Ov6ordflA==;", - [ - ["_lvs2", "zHai1+Hq+Tc2vmc2r4GAbdOI5Jopg3EwsdUT9g="], - ["_rcc2", "53VdltWl+Ov6ordflA=="] - ] - ] -] - - -def test_read_key(): - tokens = [ - [("foo", 0), ("foo", 3)], - [("foo", 1), ("oo", 3)], - [(" foo", 0), (" foo", 4)], - [(" foo", 1), ("foo", 4)], - [(" foo;", 1), ("foo", 4)], - [(" foo=", 1), ("foo", 4)], - [(" foo=bar", 1), ("foo", 4)], - ] - for q, a in tokens: - assert cookies._read_key(*q) == a - - -def test_read_quoted_string(): - tokens = [ - [('"foo" x', 0), ("foo", 5)], - [('"f\oo" x', 0), ("foo", 6)], - [(r'"f\\o" x', 0), (r"f\o", 6)], - [(r'"f\\" x', 0), (r"f" + '\\', 5)], - [('"fo\\\"" x', 0), ("fo\"", 6)], - [('"foo" x', 7), ("", 8)], - ] - for q, a in tokens: - assert cookies._read_quoted_string(*q) == a - - -def test_read_cookie_pairs(): - vals = [ - [ - "one", - [["one", None]] - ], - [ - "one=two", - [["one", "two"]] - ], - [ - "one=", - [["one", ""]] - ], - [ - 'one="two"', - [["one", "two"]] - ], - [ - 'one="two"; three=four', - [["one", "two"], ["three", "four"]] - ], - [ - 'one="two"; three=four; five', - [["one", "two"], ["three", "four"], ["five", None]] - ], - [ - 'one="\\"two"; three=four', - [["one", '"two'], ["three", "four"]] - ], - ] - for s, lst in vals: - ret, off = cookies._read_cookie_pairs(s) - assert ret == lst - - -def test_pairs_roundtrips(): - for s, expected in cookie_pairs: - ret, off = cookies._read_cookie_pairs(s) - assert ret == 
expected - - s2 = cookies._format_pairs(expected) - ret, off = cookies._read_cookie_pairs(s2) - assert ret == expected - - -def test_cookie_roundtrips(): - for s, expected in cookie_pairs: - ret = cookies.parse_cookie_header(s) - assert ret == expected - - s2 = cookies.format_cookie_header(expected) - ret = cookies.parse_cookie_header(s2) - assert ret == expected - - -def test_parse_set_cookie_pairs(): - pairs = [ - [ - "one=uno", - [[ - ["one", "uno"] - ]] - ], - [ - "one=un\x20", - [[ - ["one", "un\x20"] - ]] - ], - [ - "one=uno; foo", - [[ - ["one", "uno"], - ["foo", None] - ]] - ], - [ - "mun=1.390.f60; " - "expires=sun, 11-oct-2015 12:38:31 gmt; path=/; " - "domain=b.aol.com", - [[ - ["mun", "1.390.f60"], - ["expires", "sun, 11-oct-2015 12:38:31 gmt"], - ["path", "/"], - ["domain", "b.aol.com"] - ]] - ], - [ - r'rpb=190%3d1%2616726%3d1%2634832%3d1%2634874%3d1; ' - 'domain=.rubiconproject.com; ' - 'expires=mon, 11-may-2015 21:54:57 gmt; ' - 'path=/', - [[ - ['rpb', r'190%3d1%2616726%3d1%2634832%3d1%2634874%3d1'], - ['domain', '.rubiconproject.com'], - ['expires', 'mon, 11-may-2015 21:54:57 gmt'], - ['path', '/'] - ]] - ], - ] - for s, expected in pairs: - ret, off = cookies._read_set_cookie_pairs(s) - assert ret == expected - - s2 = cookies._format_set_cookie_pairs(expected[0]) - ret2, off = cookies._read_set_cookie_pairs(s2) - assert ret2 == expected - - -def test_parse_set_cookie_header(): - def set_cookie_equal(obs, exp): - assert obs[0] == exp[0] - assert obs[1] == exp[1] - assert obs[2].items(multi=True) == exp[2] - - vals = [ - [ - "", [] - ], - [ - ";", [] - ], - [ - "one=uno", - [ - ("one", "uno", ()) - ] - ], - [ - "one=uno; foo=bar", - [ - ("one", "uno", (("foo", "bar"),)) - ] - ], - [ - "one=uno; foo=bar; foo=baz", - [ - ("one", "uno", (("foo", "bar"), ("foo", "baz"))) - ] - ], - # Comma Separated Variant of Set-Cookie Headers - [ - "foo=bar, doo=dar", - [ - ("foo", "bar", ()), - ("doo", "dar", ()), - ] - ], - [ - "foo=bar; path=/, doo=dar; roo=rar; zoo=zar", - [ - ("foo", "bar", (("path", "/"),)), - ("doo", "dar", (("roo", "rar"), ("zoo", "zar"))), - ] - ], - [ - "foo=bar; expires=Mon, 24 Aug 2037", - [ - ("foo", "bar", (("expires", "Mon, 24 Aug 2037"),)), - ] - ], - [ - "foo=bar; expires=Mon, 24 Aug 2037 00:00:00 GMT, doo=dar", - [ - ("foo", "bar", (("expires", "Mon, 24 Aug 2037 00:00:00 GMT"),)), - ("doo", "dar", ()), - ] - ], - ] - for s, expected in vals: - ret = cookies.parse_set_cookie_header(s) - if expected: - for i in range(len(expected)): - set_cookie_equal(ret[i], expected[i]) - - s2 = cookies.format_set_cookie_header(ret) - ret2 = cookies.parse_set_cookie_header(s2) - for i in range(len(expected)): - set_cookie_equal(ret2[i], expected[i]) - else: - assert not ret - - -def test_refresh_cookie(): - - # Invalid expires format, sent to us by Reddit. 
- c = "rfoo=bar; Domain=reddit.com; expires=Thu, 31 Dec 2037 23:59:59 GMT; Path=/" - assert cookies.refresh_set_cookie_header(c, 60) - - c = "MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure" - assert "00:21:38" in cookies.refresh_set_cookie_header(c, 60) - - c = "foo,bar" - with raises(ValueError): - cookies.refresh_set_cookie_header(c, 60) - - # https://github.com/mitmproxy/mitmproxy/issues/773 - c = ">=A" - assert cookies.refresh_set_cookie_header(c, 60) - - # https://github.com/mitmproxy/mitmproxy/issues/1118 - c = "foo:bar=bla" - assert cookies.refresh_set_cookie_header(c, 0) - c = "foo/bar=bla" - assert cookies.refresh_set_cookie_header(c, 0) - - -@mock.patch('time.time') -def test_get_expiration_ts(*args): - # Freeze time - now_ts = 17 - time.time.return_value = now_ts - - CA = cookies.CookieAttrs - F = cookies.get_expiration_ts - - assert F(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT")])) == 0 - assert F(CA([("Expires", "Mon, 24-Aug-2037 00:00:00 GMT")])) == 2134684800 - - assert F(CA([("Max-Age", "0")])) == now_ts - assert F(CA([("Max-Age", "31")])) == now_ts + 31 - - -def test_is_expired(): - CA = cookies.CookieAttrs - - # A cookie can be expired - # by setting the expire time in the past - assert cookies.is_expired(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT")])) - - # or by setting Max-Age to 0 - assert cookies.is_expired(CA([("Max-Age", "0")])) - - # or both - assert cookies.is_expired(CA([("Expires", "Thu, 01-Jan-1970 00:00:00 GMT"), ("Max-Age", "0")])) - - assert not cookies.is_expired(CA([("Expires", "Mon, 24-Aug-2037 00:00:00 GMT")])) - assert not cookies.is_expired(CA([("Max-Age", "1")])) - assert not cookies.is_expired(CA([("Expires", "Wed, 15-Jul-2037 00:00:00 GMT"), ("Max-Age", "1")])) - - assert not cookies.is_expired(CA([("Max-Age", "nan")])) - assert not cookies.is_expired(CA([("Expires", "false")])) - - -def test_group_cookies(): - CA = cookies.CookieAttrs - groups = [ - [ - "one=uno; foo=bar; foo=baz", - [ - ('one', 'uno', CA([])), - ('foo', 'bar', CA([])), - ('foo', 'baz', CA([])) - ] - ], - [ - "one=uno; Path=/; foo=bar; Max-Age=0; foo=baz; expires=24-08-1993", - [ - ('one', 'uno', CA([('Path', '/')])), - ('foo', 'bar', CA([('Max-Age', '0')])), - ('foo', 'baz', CA([('expires', '24-08-1993')])) - ] - ], - [ - "one=uno;", - [ - ('one', 'uno', CA([])) - ] - ], - [ - "one=uno; Path=/; Max-Age=0; Expires=24-08-1993", - [ - ('one', 'uno', CA([('Path', '/'), ('Max-Age', '0'), ('Expires', '24-08-1993')])) - ] - ], - [ - "path=val; Path=/", - [ - ('path', 'val', CA([('Path', '/')])) - ] - ] - ] - - for c, expected in groups: - observed = cookies.group_cookies(cookies.parse_cookie_header(c)) - assert observed == expected diff --git a/test/netlib/http/test_encoding.py b/test/netlib/http/test_encoding.py deleted file mode 100644 index 89600709..00000000 --- a/test/netlib/http/test_encoding.py +++ /dev/null @@ -1,73 +0,0 @@ -import mock -import pytest - -from netlib.http import encoding -from mitmproxy.test import tutils - - -@pytest.mark.parametrize("encoder", [ - 'identity', - 'none', -]) -def test_identity(encoder): - assert b"string" == encoding.decode(b"string", encoder) - assert b"string" == encoding.encode(b"string", encoder) - with tutils.raises(ValueError): - encoding.encode(b"string", "nonexistent encoding") - - -@pytest.mark.parametrize("encoder", [ - 'gzip', - 'br', - 'deflate', -]) -def test_encoders(encoder): - assert "" == encoding.decode("", encoder) - assert b"" == encoding.decode(b"", encoder) - - assert "string" == 
encoding.decode( - encoding.encode( - "string", - encoder - ), - encoder - ) - assert b"string" == encoding.decode( - encoding.encode( - b"string", - encoder - ), - encoder - ) - - with tutils.raises(ValueError): - encoding.decode(b"foobar", encoder) - - -def test_cache(): - decode_gzip = mock.MagicMock() - decode_gzip.return_value = b"decoded" - encode_gzip = mock.MagicMock() - encode_gzip.return_value = b"encoded" - - with mock.patch.dict(encoding.custom_decode, gzip=decode_gzip): - with mock.patch.dict(encoding.custom_encode, gzip=encode_gzip): - assert encoding.decode(b"encoded", "gzip") == b"decoded" - assert decode_gzip.call_count == 1 - - # should be cached - assert encoding.decode(b"encoded", "gzip") == b"decoded" - assert decode_gzip.call_count == 1 - - # the other way around as well - assert encoding.encode(b"decoded", "gzip") == b"encoded" - assert encode_gzip.call_count == 0 - - # different encoding - decode_gzip.return_value = b"bar" - assert encoding.encode(b"decoded", "deflate") != b"decoded" - assert encode_gzip.call_count == 0 - - # This is not in the cache anymore - assert encoding.encode(b"decoded", "gzip") == b"encoded" - assert encode_gzip.call_count == 1 diff --git a/test/netlib/http/test_headers.py b/test/netlib/http/test_headers.py deleted file mode 100644 index cac77d57..00000000 --- a/test/netlib/http/test_headers.py +++ /dev/null @@ -1,106 +0,0 @@ -import collections - -from netlib.http.headers import Headers, parse_content_type, assemble_content_type -from mitmproxy.test.tutils import raises - - -class TestHeaders: - def _2host(self): - return Headers( - ( - (b"Host", b"example.com"), - (b"host", b"example.org") - ) - ) - - def test_init(self): - headers = Headers() - assert len(headers) == 0 - - headers = Headers([[b"Host", b"example.com"]]) - assert len(headers) == 1 - assert headers["Host"] == "example.com" - - headers = Headers(Host="example.com") - assert len(headers) == 1 - assert headers["Host"] == "example.com" - - headers = Headers( - [[b"Host", b"invalid"]], - Host="example.com" - ) - assert len(headers) == 1 - assert headers["Host"] == "example.com" - - headers = Headers( - [[b"Host", b"invalid"], [b"Accept", b"text/plain"]], - Host="example.com" - ) - assert len(headers) == 2 - assert headers["Host"] == "example.com" - assert headers["Accept"] == "text/plain" - - with raises(TypeError): - Headers([[b"Host", u"not-bytes"]]) - - def test_set(self): - headers = Headers() - headers[u"foo"] = u"1" - headers[b"bar"] = b"2" - headers["baz"] = b"3" - with raises(TypeError): - headers["foobar"] = 42 - assert len(headers) == 3 - - def test_bytes(self): - headers = Headers(Host="example.com") - assert bytes(headers) == b"Host: example.com\r\n" - - headers = Headers([ - [b"Host", b"example.com"], - [b"Accept", b"text/plain"] - ]) - assert bytes(headers) == b"Host: example.com\r\nAccept: text/plain\r\n" - - headers = Headers() - assert bytes(headers) == b"" - - def test_replace_simple(self): - headers = Headers(Host="example.com", Accept="text/plain") - replacements = headers.replace("Host: ", "X-Host: ") - assert replacements == 1 - assert headers["X-Host"] == "example.com" - assert "Host" not in headers - assert headers["Accept"] == "text/plain" - - def test_replace_multi(self): - headers = self._2host() - headers.replace(r"Host: example\.com", r"Host: example.de") - assert headers.get_all("Host") == ["example.de", "example.org"] - - def test_replace_remove_spacer(self): - headers = Headers(Host="example.com") - replacements = headers.replace(r"Host: ", "X-Host 
") - assert replacements == 0 - assert headers["Host"] == "example.com" - - def test_replace_with_count(self): - headers = Headers(Host="foobarfoo.com", Accept="foo/bar") - replacements = headers.replace("foo", "bar", count=1) - assert replacements == 1 - - -def test_parse_content_type(): - p = parse_content_type - assert p("text/html") == ("text", "html", {}) - assert p("text") is None - - v = p("text/html; charset=UTF-8") - assert v == ('text', 'html', {'charset': 'UTF-8'}) - - -def test_assemble_content_type(): - p = assemble_content_type - assert p("text", "html", {}) == "text/html" - assert p("text", "html", {"charset": "utf8"}) == "text/html; charset=utf8" - assert p("text", "html", collections.OrderedDict([("charset", "utf8"), ("foo", "bar")])) == "text/html; charset=utf8; foo=bar" diff --git a/test/netlib/http/test_message.py b/test/netlib/http/test_message.py deleted file mode 100644 index 2bc8824f..00000000 --- a/test/netlib/http/test_message.py +++ /dev/null @@ -1,271 +0,0 @@ -# -*- coding: utf-8 -*- - -from mitmproxy.test import tutils -from netlib import http - - -def _test_passthrough_attr(message, attr): - assert getattr(message, attr) == getattr(message.data, attr) - setattr(message, attr, b"foo") - assert getattr(message.data, attr) == b"foo" - - -def _test_decoded_attr(message, attr): - assert getattr(message, attr) == getattr(message.data, attr).decode("utf8") - # Set str, get raw bytes - setattr(message, attr, "foo") - assert getattr(message.data, attr) == b"foo" - # Set raw bytes, get decoded - setattr(message.data, attr, b"BAR") # use uppercase so that we can also cover request.method - assert getattr(message, attr) == "BAR" - # Set bytes, get raw bytes - setattr(message, attr, b"baz") - assert getattr(message.data, attr) == b"baz" - - # Set UTF8 - setattr(message, attr, "Non-Autorisé") - assert getattr(message.data, attr) == b"Non-Autoris\xc3\xa9" - # Don't fail on garbage - setattr(message.data, attr, b"FOO\xBF\x00BAR") - assert getattr(message, attr).startswith("FOO") - assert getattr(message, attr).endswith("BAR") - # foo.bar = foo.bar should not cause any side effects. 
- d = getattr(message, attr) - setattr(message, attr, d) - assert getattr(message.data, attr) == b"FOO\xBF\x00BAR" - - -class TestMessageData: - def test_eq_ne(self): - data = tutils.tresp(timestamp_start=42, timestamp_end=42).data - same = tutils.tresp(timestamp_start=42, timestamp_end=42).data - assert data == same - assert not data != same - - other = tutils.tresp(content=b"foo").data - assert not data == other - assert data != other - - assert data != 0 - - -class TestMessage: - - def test_init(self): - resp = tutils.tresp() - assert resp.data - - def test_eq_ne(self): - resp = tutils.tresp(timestamp_start=42, timestamp_end=42) - same = tutils.tresp(timestamp_start=42, timestamp_end=42) - assert resp == same - assert not resp != same - - other = tutils.tresp(timestamp_start=0, timestamp_end=0) - assert not resp == other - assert resp != other - - assert resp != 0 - - def test_serializable(self): - resp = tutils.tresp() - resp2 = http.Response.from_state(resp.get_state()) - assert resp == resp2 - - def test_content_length_update(self): - resp = tutils.tresp() - resp.content = b"foo" - assert resp.data.content == b"foo" - assert resp.headers["content-length"] == "3" - resp.content = b"" - assert resp.data.content == b"" - assert resp.headers["content-length"] == "0" - resp.raw_content = b"bar" - assert resp.data.content == b"bar" - assert resp.headers["content-length"] == "0" - - def test_headers(self): - _test_passthrough_attr(tutils.tresp(), "headers") - - def test_timestamp_start(self): - _test_passthrough_attr(tutils.tresp(), "timestamp_start") - - def test_timestamp_end(self): - _test_passthrough_attr(tutils.tresp(), "timestamp_end") - - def test_http_version(self): - _test_decoded_attr(tutils.tresp(), "http_version") - - def test_replace(self): - r = tutils.tresp() - r.content = b"foofootoo" - r.replace(b"foo", "gg") - assert r.content == b"ggggtoo" - - r.content = b"foofootoo" - r.replace(b"foo", "gg", count=1) - assert r.content == b"ggfootoo" - - -class TestMessageContentEncoding: - def test_simple(self): - r = tutils.tresp() - assert r.raw_content == b"message" - assert "content-encoding" not in r.headers - r.encode("gzip") - - assert r.headers["content-encoding"] - assert r.raw_content != b"message" - assert r.content == b"message" - assert r.raw_content != b"message" - - def test_modify(self): - r = tutils.tresp() - assert "content-encoding" not in r.headers - r.encode("gzip") - - r.content = b"foo" - assert r.raw_content != b"foo" - r.decode() - assert r.raw_content == b"foo" - - with tutils.raises(TypeError): - r.content = u"foo" - - def test_unknown_ce(self): - r = tutils.tresp() - r.headers["content-encoding"] = "zopfli" - r.raw_content = b"foo" - with tutils.raises(ValueError): - assert r.content - assert r.headers["content-encoding"] - assert r.get_content(strict=False) == b"foo" - - def test_cannot_decode(self): - r = tutils.tresp() - r.encode("gzip") - r.raw_content = b"foo" - with tutils.raises(ValueError): - assert r.content - assert r.headers["content-encoding"] - assert r.get_content(strict=False) == b"foo" - - with tutils.raises(ValueError): - r.decode() - assert r.raw_content == b"foo" - assert "content-encoding" in r.headers - - r.decode(strict=False) - assert r.content == b"foo" - assert "content-encoding" not in r.headers - - def test_none(self): - r = tutils.tresp(content=None) - assert r.content is None - r.content = b"foo" - assert r.content is not None - r.content = None - assert r.content is None - - def test_cannot_encode(self): - r = tutils.tresp() - 
r.encode("gzip") - r.content = None - assert r.headers["content-encoding"] - assert r.raw_content is None - - r.headers["content-encoding"] = "zopfli" - r.content = b"foo" - assert "content-encoding" not in r.headers - assert r.raw_content == b"foo" - - with tutils.raises(ValueError): - r.encode("zopfli") - assert r.raw_content == b"foo" - assert "content-encoding" not in r.headers - - -class TestMessageText: - def test_simple(self): - r = tutils.tresp(content=b'\xfc') - assert r.raw_content == b"\xfc" - assert r.content == b"\xfc" - assert r.text == u"ü" - - r.encode("gzip") - assert r.text == u"ü" - r.decode() - assert r.text == u"ü" - - r.headers["content-type"] = "text/html; charset=latin1" - r.content = b"\xc3\xbc" - assert r.text == u"ü" - r.headers["content-type"] = "text/html; charset=utf8" - assert r.text == u"ü" - - def test_guess_json(self): - r = tutils.tresp(content=b'"\xc3\xbc"') - r.headers["content-type"] = "application/json" - assert r.text == u'"ü"' - - def test_none(self): - r = tutils.tresp(content=None) - assert r.text is None - r.text = u"foo" - assert r.text is not None - r.text = None - assert r.text is None - - def test_modify(self): - r = tutils.tresp() - - r.text = u"ü" - assert r.raw_content == b"\xfc" - - r.headers["content-type"] = "text/html; charset=utf8" - r.text = u"ü" - assert r.raw_content == b"\xc3\xbc" - assert r.headers["content-length"] == "2" - - def test_unknown_ce(self): - r = tutils.tresp() - r.headers["content-type"] = "text/html; charset=wtf" - r.raw_content = b"foo" - with tutils.raises(ValueError): - assert r.text == u"foo" - assert r.get_text(strict=False) == u"foo" - - def test_cannot_decode(self): - r = tutils.tresp() - r.headers["content-type"] = "text/html; charset=utf8" - r.raw_content = b"\xFF" - with tutils.raises(ValueError): - assert r.text - - assert r.get_text(strict=False) == '\udcff' - - def test_cannot_encode(self): - r = tutils.tresp() - r.content = None - assert "content-type" not in r.headers - assert r.raw_content is None - - r.headers["content-type"] = "text/html; charset=latin1; foo=bar" - r.text = u"☃" - assert r.headers["content-type"] == "text/html; charset=utf-8; foo=bar" - assert r.raw_content == b'\xe2\x98\x83' - - r.headers["content-type"] = "gibberish" - r.text = u"☃" - assert r.headers["content-type"] == "text/plain; charset=utf-8" - assert r.raw_content == b'\xe2\x98\x83' - - del r.headers["content-type"] - r.text = u"☃" - assert r.headers["content-type"] == "text/plain; charset=utf-8" - assert r.raw_content == b'\xe2\x98\x83' - - r.headers["content-type"] = "text/html; charset=latin1" - r.text = u'\udcff' - assert r.headers["content-type"] == "text/html; charset=utf-8" - assert r.raw_content == b"\xFF" diff --git a/test/netlib/http/test_multipart.py b/test/netlib/http/test_multipart.py deleted file mode 100644 index 1d7e0062..00000000 --- a/test/netlib/http/test_multipart.py +++ /dev/null @@ -1,24 +0,0 @@ -from netlib.http import Headers -from netlib.http import multipart - - -def test_decode(): - boundary = 'somefancyboundary' - headers = Headers( - content_type='multipart/form-data; boundary=' + boundary - ) - content = ( - "--{0}\n" - "Content-Disposition: form-data; name=\"field1\"\n\n" - "value1\n" - "--{0}\n" - "Content-Disposition: form-data; name=\"field2\"\n\n" - "value2\n" - "--{0}--".format(boundary).encode() - ) - - form = multipart.decode(headers, content) - - assert len(form) == 2 - assert form[0] == (b"field1", b"value1") - assert form[1] == (b"field2", b"value2") diff --git 
a/test/netlib/http/test_request.py b/test/netlib/http/test_request.py deleted file mode 100644 index ecfc1ba6..00000000 --- a/test/netlib/http/test_request.py +++ /dev/null @@ -1,271 +0,0 @@ -# -*- coding: utf-8 -*- - -from netlib.http import Headers -from mitmproxy.test.tutils import treq, raises -from .test_message import _test_decoded_attr, _test_passthrough_attr - - -class TestRequestData: - def test_init(self): - with raises(ValueError): - treq(headers="foobar") - - assert isinstance(treq(headers=()).headers, Headers) - - -class TestRequestCore: - """ - Tests for addons and the attributes that are directly proxied from the data structure - """ - def test_repr(self): - request = treq() - assert repr(request) == "Request(GET address:22/path)" - request.host = None - assert repr(request) == "Request(GET /path)" - - def replace(self): - r = treq() - r.path = b"foobarfoo" - r.replace(b"foo", "bar") - assert r.path == b"barbarbar" - - r.path = b"foobarfoo" - r.replace(b"foo", "bar", count=1) - assert r.path == b"barbarfoo" - - def test_first_line_format(self): - _test_passthrough_attr(treq(), "first_line_format") - - def test_method(self): - _test_decoded_attr(treq(), "method") - - def test_scheme(self): - _test_decoded_attr(treq(), "scheme") - - def test_port(self): - _test_passthrough_attr(treq(), "port") - - def test_path(self): - req = treq() - _test_decoded_attr(req, "path") - # path can also be None. - req.path = None - assert req.path is None - assert req.data.path is None - - def test_host(self): - request = treq() - assert request.host == request.data.host.decode("idna") - - # Test IDNA encoding - # Set str, get raw bytes - request.host = "ídna.example" - assert request.data.host == b"xn--dna-qma.example" - # Set raw bytes, get decoded - request.data.host = b"xn--idn-gla.example" - assert request.host == "idná.example" - # Set bytes, get raw bytes - request.host = b"xn--dn-qia9b.example" - assert request.data.host == b"xn--dn-qia9b.example" - # IDNA encoding is not bijective - request.host = "fußball" - assert request.host == "fussball" - - # Don't fail on garbage - request.data.host = b"foo\xFF\x00bar" - assert request.host.startswith("foo") - assert request.host.endswith("bar") - # foo.bar = foo.bar should not cause any side effects. - d = request.host - request.host = d - assert request.data.host == b"foo\xFF\x00bar" - - def test_host_header_update(self): - request = treq() - assert "host" not in request.headers - request.host = "example.com" - assert "host" not in request.headers - - request.headers["Host"] = "foo" - request.host = "example.org" - assert request.headers["Host"] == "example.org" - - -class TestRequestUtils: - """ - Tests for additional convenience methods. 
- """ - def test_url(self): - request = treq() - assert request.url == "http://address:22/path" - - request.url = "https://otheraddress:42/foo" - assert request.scheme == "https" - assert request.host == "otheraddress" - assert request.port == 42 - assert request.path == "/foo" - - with raises(ValueError): - request.url = "not-a-url" - - def test_url_options(self): - request = treq(method=b"OPTIONS", path=b"*") - assert request.url == "http://address:22" - - def test_url_authority(self): - request = treq(first_line_format="authority") - assert request.url == "address:22" - - def test_pretty_host(self): - request = treq() - # Without host header - assert request.pretty_host == "address" - assert request.host == "address" - # Same port as self.port (22) - request.headers["host"] = "other:22" - assert request.pretty_host == "other" - # Different ports - request.headers["host"] = "other" - assert request.pretty_host == "address" - assert request.host == "address" - # Empty host - request.host = None - assert request.pretty_host is None - assert request.host is None - - # Invalid IDNA - request.headers["host"] = ".disqus.com:22" - assert request.pretty_host == ".disqus.com" - - def test_pretty_url(self): - request = treq() - # Without host header - assert request.url == "http://address:22/path" - assert request.pretty_url == "http://address:22/path" - # Same port as self.port (22) - request.headers["host"] = "other:22" - assert request.pretty_url == "http://other:22/path" - # Different ports - request.headers["host"] = "other" - assert request.pretty_url == "http://address:22/path" - - def test_pretty_url_options(self): - request = treq(method=b"OPTIONS", path=b"*") - assert request.pretty_url == "http://address:22" - - def test_pretty_url_authority(self): - request = treq(first_line_format="authority") - assert request.pretty_url == "address:22" - - def test_get_query(self): - request = treq() - assert not request.query - - request.url = "http://localhost:80/foo?bar=42" - assert dict(request.query) == {"bar": "42"} - - def test_set_query(self): - request = treq() - assert not request.query - request.query["foo"] = "bar" - assert request.query["foo"] == "bar" - assert request.path == "/path?foo=bar" - - def test_get_cookies_none(self): - request = treq() - request.headers = Headers() - assert not request.cookies - - def test_get_cookies_single(self): - request = treq() - request.headers = Headers(cookie="cookiename=cookievalue") - assert len(request.cookies) == 1 - assert request.cookies['cookiename'] == 'cookievalue' - - def test_get_cookies_double(self): - request = treq() - request.headers = Headers(cookie="cookiename=cookievalue;othercookiename=othercookievalue") - result = request.cookies - assert len(result) == 2 - assert result['cookiename'] == 'cookievalue' - assert result['othercookiename'] == 'othercookievalue' - - def test_get_cookies_withequalsign(self): - request = treq() - request.headers = Headers(cookie="cookiename=coo=kievalue;othercookiename=othercookievalue") - result = request.cookies - assert len(result) == 2 - assert result['cookiename'] == 'coo=kievalue' - assert result['othercookiename'] == 'othercookievalue' - - def test_set_cookies(self): - request = treq() - request.headers = Headers(cookie="cookiename=cookievalue") - result = request.cookies - result["cookiename"] = "foo" - assert request.cookies["cookiename"] == "foo" - - def test_get_path_components(self): - request = treq(path=b"/foo/bar") - assert request.path_components == ("foo", "bar") - - def 
test_set_path_components(self): - request = treq() - request.path_components = ["foo", "baz"] - assert request.path == "/foo/baz" - - request.path_components = [] - assert request.path == "/" - - request.path_components = ["foo", "baz"] - request.query["hello"] = "hello" - assert request.path_components == ("foo", "baz") - - request.path_components = ["abc"] - assert request.path == "/abc?hello=hello" - - def test_anticache(self): - request = treq() - request.headers["If-Modified-Since"] = "foo" - request.headers["If-None-Match"] = "bar" - request.anticache() - assert "If-Modified-Since" not in request.headers - assert "If-None-Match" not in request.headers - - def test_anticomp(self): - request = treq() - request.headers["Accept-Encoding"] = "foobar" - request.anticomp() - assert request.headers["Accept-Encoding"] == "identity" - - def test_constrain_encoding(self): - request = treq() - - h = request.headers.copy() - request.constrain_encoding() # no-op if there is no accept_encoding header. - assert request.headers == h - - request.headers["Accept-Encoding"] = "identity, gzip, foo" - request.constrain_encoding() - assert "foo" not in request.headers["Accept-Encoding"] - assert "gzip" in request.headers["Accept-Encoding"] - - def test_get_urlencoded_form(self): - request = treq(content=b"foobar=baz") - assert not request.urlencoded_form - - request.headers["Content-Type"] = "application/x-www-form-urlencoded" - assert list(request.urlencoded_form.items()) == [(b"foobar", b"baz")] - - def test_set_urlencoded_form(self): - request = treq() - request.urlencoded_form = [(b'foo', b'bar'), (b'rab', b'oof')] - assert request.headers["Content-Type"] == "application/x-www-form-urlencoded" - assert request.content - - def test_get_multipart_form(self): - request = treq(content=b"foobar") - assert not request.multipart_form - - request.headers["Content-Type"] = "multipart/form-data" - assert list(request.multipart_form.items()) == [] diff --git a/test/netlib/http/test_response.py b/test/netlib/http/test_response.py deleted file mode 100644 index 4a6fac62..00000000 --- a/test/netlib/http/test_response.py +++ /dev/null @@ -1,145 +0,0 @@ -import email - -import time - -from netlib.http import Headers -from netlib.http import Response -from netlib.http.cookies import CookieAttrs -from mitmproxy.test.tutils import raises, tresp -from .test_message import _test_passthrough_attr, _test_decoded_attr - - -class TestResponseData: - def test_init(self): - with raises(ValueError): - tresp(headers="foobar") - - assert isinstance(tresp(headers=()).headers, Headers) - - -class TestResponseCore: - """ - Tests for addons and the attributes that are directly proxied from the data structure - """ - def test_repr(self): - response = tresp() - assert repr(response) == "Response(200 OK, unknown content type, 7b)" - response.content = None - assert repr(response) == "Response(200 OK, no content)" - - def test_make(self): - r = Response.make() - assert r.status_code == 200 - assert r.content == b"" - - r = Response.make(418, "teatime") - assert r.status_code == 418 - assert r.content == b"teatime" - assert r.headers["content-length"] == "7" - - Response.make(content=b"foo") - Response.make(content="foo") - with raises(TypeError): - Response.make(content=42) - - r = Response.make(headers=[(b"foo", b"bar")]) - assert r.headers["foo"] == "bar" - - r = Response.make(headers=({"foo": "baz"})) - assert r.headers["foo"] == "baz" - - with raises(TypeError): - Response.make(headers=42) - - def test_status_code(self): - 
_test_passthrough_attr(tresp(), "status_code") - - def test_reason(self): - _test_decoded_attr(tresp(), "reason") - - -class TestResponseUtils: - """ - Tests for additional convenience methods. - """ - def test_get_cookies_none(self): - resp = tresp() - resp.headers = Headers() - assert not resp.cookies - - def test_get_cookies_empty(self): - resp = tresp() - resp.headers = Headers(set_cookie="") - assert not resp.cookies - - def test_get_cookies_simple(self): - resp = tresp() - resp.headers = Headers(set_cookie="cookiename=cookievalue") - result = resp.cookies - assert len(result) == 1 - assert "cookiename" in result - assert result["cookiename"] == ("cookievalue", CookieAttrs()) - - def test_get_cookies_with_parameters(self): - resp = tresp() - cookie = "cookiename=cookievalue;domain=example.com;expires=Wed Oct 21 16:29:41 2015;path=/; HttpOnly" - resp.headers = Headers(set_cookie=cookie) - result = resp.cookies - assert len(result) == 1 - assert "cookiename" in result - assert result["cookiename"][0] == "cookievalue" - attrs = result["cookiename"][1] - assert len(attrs) == 4 - assert attrs["domain"] == "example.com" - assert attrs["expires"] == "Wed Oct 21 16:29:41 2015" - assert attrs["path"] == "/" - assert attrs["httponly"] is None - - def test_get_cookies_no_value(self): - resp = tresp() - resp.headers = Headers(set_cookie="cookiename=; Expires=Thu, 01-Jan-1970 00:00:01 GMT; path=/") - result = resp.cookies - assert len(result) == 1 - assert "cookiename" in result - assert result["cookiename"][0] == "" - assert len(result["cookiename"][1]) == 2 - - def test_get_cookies_twocookies(self): - resp = tresp() - resp.headers = Headers([ - [b"Set-Cookie", b"cookiename=cookievalue"], - [b"Set-Cookie", b"othercookie=othervalue"] - ]) - result = resp.cookies - assert len(result) == 2 - assert "cookiename" in result - assert result["cookiename"] == ("cookievalue", CookieAttrs()) - assert "othercookie" in result - assert result["othercookie"] == ("othervalue", CookieAttrs()) - - def test_set_cookies(self): - resp = tresp() - resp.cookies["foo"] = ("bar", {}) - - assert len(resp.cookies) == 1 - assert resp.cookies["foo"] == ("bar", CookieAttrs()) - - def test_refresh(self): - r = tresp() - n = time.time() - r.headers["date"] = email.utils.formatdate(n) - pre = r.headers["date"] - r.refresh(n) - assert pre == r.headers["date"] - r.refresh(n + 60) - - d = email.utils.parsedate_tz(r.headers["date"]) - d = email.utils.mktime_tz(d) - # Weird that this is not exact... - assert abs(60 - (d - n)) <= 1 - - cookie = "MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure" - r.headers["set-cookie"] = cookie - r.refresh() - # Cookie refreshing is tested in test_cookies, we just make sure that it's triggered here. 
- assert cookie != r.headers["set-cookie"] diff --git a/test/netlib/http/test_status_codes.py b/test/netlib/http/test_status_codes.py deleted file mode 100644 index 9fea6b70..00000000 --- a/test/netlib/http/test_status_codes.py +++ /dev/null @@ -1,6 +0,0 @@ -from netlib.http import status_codes - - -def test_simple(): - assert status_codes.IM_A_TEAPOT == 418 - assert status_codes.RESPONSES[418] == "I'm a teapot" diff --git a/test/netlib/http/test_url.py b/test/netlib/http/test_url.py deleted file mode 100644 index 7cea6c58..00000000 --- a/test/netlib/http/test_url.py +++ /dev/null @@ -1,102 +0,0 @@ -from mitmproxy.test import tutils -from netlib.http import url - - -def test_parse(): - with tutils.raises(ValueError): - url.parse("") - - s, h, po, pa = url.parse(b"http://foo.com:8888/test") - assert s == b"http" - assert h == b"foo.com" - assert po == 8888 - assert pa == b"/test" - - s, h, po, pa = url.parse("http://foo/bar") - assert s == b"http" - assert h == b"foo" - assert po == 80 - assert pa == b"/bar" - - s, h, po, pa = url.parse(b"http://user:pass@foo/bar") - assert s == b"http" - assert h == b"foo" - assert po == 80 - assert pa == b"/bar" - - s, h, po, pa = url.parse(b"http://foo") - assert pa == b"/" - - s, h, po, pa = url.parse(b"https://foo") - assert po == 443 - - with tutils.raises(ValueError): - url.parse(b"https://foo:bar") - - # Invalid IDNA - with tutils.raises(ValueError): - url.parse("http://\xfafoo") - # Invalid PATH - with tutils.raises(ValueError): - url.parse("http:/\xc6/localhost:56121") - # Null byte in host - with tutils.raises(ValueError): - url.parse("http://foo\0") - # Port out of range - _, _, port, _ = url.parse("http://foo:999999") - assert port == 80 - # Invalid IPv6 URL - see http://www.ietf.org/rfc/rfc2732.txt - with tutils.raises(ValueError): - url.parse('http://lo[calhost') - - -def test_unparse(): - assert url.unparse("http", "foo.com", 99, "") == "http://foo.com:99" - assert url.unparse("http", "foo.com", 80, "/bar") == "http://foo.com/bar" - assert url.unparse("https", "foo.com", 80, "") == "https://foo.com:80" - assert url.unparse("https", "foo.com", 443, "") == "https://foo.com" - - -surrogates = bytes(range(256)).decode("utf8", "surrogateescape") - -surrogates_quoted = ( - '%00%01%02%03%04%05%06%07%08%09%0A%0B%0C%0D%0E%0F' - '%10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E%1F' - '%20%21%22%23%24%25%26%27%28%29%2A%2B%2C-./' - '0123456789%3A%3B%3C%3D%3E%3F' - '%40ABCDEFGHIJKLMNO' - 'PQRSTUVWXYZ%5B%5C%5D%5E_' - '%60abcdefghijklmno' - 'pqrstuvwxyz%7B%7C%7D%7E%7F' - '%80%81%82%83%84%85%86%87%88%89%8A%8B%8C%8D%8E%8F' - '%90%91%92%93%94%95%96%97%98%99%9A%9B%9C%9D%9E%9F' - '%A0%A1%A2%A3%A4%A5%A6%A7%A8%A9%AA%AB%AC%AD%AE%AF' - '%B0%B1%B2%B3%B4%B5%B6%B7%B8%B9%BA%BB%BC%BD%BE%BF' - '%C0%C1%C2%C3%C4%C5%C6%C7%C8%C9%CA%CB%CC%CD%CE%CF' - '%D0%D1%D2%D3%D4%D5%D6%D7%D8%D9%DA%DB%DC%DD%DE%DF' - '%E0%E1%E2%E3%E4%E5%E6%E7%E8%E9%EA%EB%EC%ED%EE%EF' - '%F0%F1%F2%F3%F4%F5%F6%F7%F8%F9%FA%FB%FC%FD%FE%FF' -) - - -def test_encode(): - assert url.encode([('foo', 'bar')]) - assert url.encode([('foo', surrogates)]) - - -def test_decode(): - s = "one=two&three=four" - assert len(url.decode(s)) == 2 - assert url.decode(surrogates) - - -def test_quote(): - assert url.quote("foo") == "foo" - assert url.quote("foo bar") == "foo%20bar" - assert url.quote(surrogates) == surrogates_quoted - - -def test_unquote(): - assert url.unquote("foo") == "foo" - assert url.unquote("foo%20bar") == "foo bar" - assert url.unquote(surrogates_quoted) == surrogates diff --git 
a/test/netlib/http/test_user_agents.py b/test/netlib/http/test_user_agents.py deleted file mode 100644 index 0bf1bba7..00000000 --- a/test/netlib/http/test_user_agents.py +++ /dev/null @@ -1,6 +0,0 @@ -from netlib.http import user_agents - - -def test_get_shortcut(): - assert user_agents.get_by_shortcut("c")[0] == "chrome" - assert not user_agents.get_by_shortcut("_") diff --git a/test/netlib/test_check.py b/test/netlib/test_check.py deleted file mode 100644 index 6a1388f4..00000000 --- a/test/netlib/test_check.py +++ /dev/null @@ -1,10 +0,0 @@ -# coding=utf-8 - -from netlib import check - - -def test_is_valid_host(): - assert not check.is_valid_host(b"") - assert check.is_valid_host(b"one.two") - assert not check.is_valid_host(b"one" * 255) - assert check.is_valid_host(b"one.two.") diff --git a/test/netlib/test_imports.py b/test/netlib/test_imports.py deleted file mode 100644 index b88ef26d..00000000 --- a/test/netlib/test_imports.py +++ /dev/null @@ -1 +0,0 @@ -# These are actually tests! diff --git a/test/netlib/test_socks.py b/test/netlib/test_socks.py deleted file mode 100644 index 0603f34b..00000000 --- a/test/netlib/test_socks.py +++ /dev/null @@ -1,189 +0,0 @@ -import ipaddress -from io import BytesIO -from netlib import socks -from netlib import tcp -from mitmproxy.test import tutils - - -def test_client_greeting(): - raw = tutils.treader(b"\x05\x02\x00\xBE\xEF") - out = BytesIO() - msg = socks.ClientGreeting.from_file(raw) - msg.assert_socks5() - msg.to_file(out) - - assert out.getvalue() == raw.getvalue()[:-1] - assert msg.ver == 5 - assert len(msg.methods) == 2 - assert 0xBE in msg.methods - assert 0xEF not in msg.methods - - -def test_client_greeting_assert_socks5(): - raw = tutils.treader(b"\x00\x00") - msg = socks.ClientGreeting.from_file(raw) - tutils.raises(socks.SocksError, msg.assert_socks5) - - raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100) - msg = socks.ClientGreeting.from_file(raw) - try: - msg.assert_socks5() - except socks.SocksError as e: - assert "Invalid SOCKS version" in str(e) - assert "HTTP" not in str(e) - else: - assert False - - raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100) - msg = socks.ClientGreeting.from_file(raw) - try: - msg.assert_socks5() - except socks.SocksError as e: - assert "Invalid SOCKS version" in str(e) - assert "HTTP" in str(e) - else: - assert False - - raw = tutils.treader(b"XX") - tutils.raises( - socks.SocksError, - socks.ClientGreeting.from_file, - raw, - fail_early=True) - - -def test_server_greeting(): - raw = tutils.treader(b"\x05\x02") - out = BytesIO() - msg = socks.ServerGreeting.from_file(raw) - msg.assert_socks5() - msg.to_file(out) - - assert out.getvalue() == raw.getvalue() - assert msg.ver == 5 - assert msg.method == 0x02 - - -def test_server_greeting_assert_socks5(): - raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100) - msg = socks.ServerGreeting.from_file(raw) - try: - msg.assert_socks5() - except socks.SocksError as e: - assert "Invalid SOCKS version" in str(e) - assert "HTTP" in str(e) - else: - assert False - - raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100) - msg = socks.ServerGreeting.from_file(raw) - try: - msg.assert_socks5() - except socks.SocksError as e: - assert "Invalid SOCKS version" in str(e) - assert "HTTP" not in str(e) - else: - assert False - - -def test_username_password_auth(): - raw = tutils.treader(b"\x01\x03usr\x03psd\xBE\xEF") - out = BytesIO() - auth = socks.UsernamePasswordAuth.from_file(raw) - auth.assert_authver1() - assert raw.read(2) == b"\xBE\xEF" - auth.to_file(out) - 
- assert out.getvalue() == raw.getvalue()[:-2] - assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT - assert auth.username == "usr" - assert auth.password == "psd" - - -def test_username_password_auth_assert_ver1(): - raw = tutils.treader(b"\x02\x03usr\x03psd\xBE\xEF") - auth = socks.UsernamePasswordAuth.from_file(raw) - tutils.raises(socks.SocksError, auth.assert_authver1) - - -def test_username_password_auth_response(): - raw = tutils.treader(b"\x01\x00\xBE\xEF") - out = BytesIO() - auth = socks.UsernamePasswordAuthResponse.from_file(raw) - auth.assert_authver1() - assert raw.read(2) == b"\xBE\xEF" - auth.to_file(out) - - assert out.getvalue() == raw.getvalue()[:-2] - assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT - assert auth.status == 0 - - -def test_username_password_auth_response_auth_assert_ver1(): - raw = tutils.treader(b"\x02\x00\xBE\xEF") - auth = socks.UsernamePasswordAuthResponse.from_file(raw) - tutils.raises(socks.SocksError, auth.assert_authver1) - - -def test_message(): - raw = tutils.treader(b"\x05\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF") - out = BytesIO() - msg = socks.Message.from_file(raw) - msg.assert_socks5() - assert raw.read(2) == b"\xBE\xEF" - msg.to_file(out) - - assert out.getvalue() == raw.getvalue()[:-2] - assert msg.ver == 5 - assert msg.msg == 0x01 - assert msg.atyp == 0x03 - assert msg.addr == ("example.com", 0xDEAD) - - -def test_message_assert_socks5(): - raw = tutils.treader(b"\xEE\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF") - msg = socks.Message.from_file(raw) - tutils.raises(socks.SocksError, msg.assert_socks5) - - -def test_message_ipv4(): - # Test ATYP=0x01 (IPV4) - raw = tutils.treader(b"\x05\x01\x00\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") - out = BytesIO() - msg = socks.Message.from_file(raw) - left = raw.read(2) - assert left == b"\xBE\xEF" - msg.to_file(out) - - assert out.getvalue() == raw.getvalue()[:-2] - assert msg.addr == ("127.0.0.1", 0xDEAD) - - -def test_message_ipv6(): - # Test ATYP=0x04 (IPV6) - ipv6_addr = u"2001:db8:85a3:8d3:1319:8a2e:370:7344" - - raw = tutils.treader( - b"\x05\x01\x00\x04" + - ipaddress.IPv6Address(ipv6_addr).packed + - b"\xDE\xAD\xBE\xEF") - out = BytesIO() - msg = socks.Message.from_file(raw) - assert raw.read(2) == b"\xBE\xEF" - msg.to_file(out) - - assert out.getvalue() == raw.getvalue()[:-2] - assert msg.addr.host == ipv6_addr - - -def test_message_invalid_rsv(): - raw = tutils.treader(b"\x05\x01\xFF\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") - tutils.raises(socks.SocksError, socks.Message.from_file, raw) - - -def test_message_unknown_atyp(): - raw = tutils.treader(b"\x05\x02\x00\x02\x7f\x00\x00\x01\xDE\xAD\xBE\xEF") - tutils.raises(socks.SocksError, socks.Message.from_file, raw) - - m = socks.Message(5, 1, 0x02, tcp.Address(("example.com", 5050))) - tutils.raises(socks.SocksError, m.to_file, BytesIO()) diff --git a/test/netlib/test_tcp.py b/test/netlib/test_tcp.py deleted file mode 100644 index 594ee21c..00000000 --- a/test/netlib/test_tcp.py +++ /dev/null @@ -1,802 +0,0 @@ -from io import BytesIO -import queue -import time -import socket -import random -import os -import threading -import mock - -from OpenSSL import SSL - -from mitmproxy import certs -from netlib import tcp -from mitmproxy.test import tutils -from mitmproxy import exceptions - -from . 
import tservers - - -class EchoHandler(tcp.BaseHandler): - sni = None - - def handle_sni(self, connection): - self.sni = connection.get_servername() - - def handle(self): - v = self.rfile.readline() - self.wfile.write(v) - self.wfile.flush() - - -class ClientCipherListHandler(tcp.BaseHandler): - sni = None - - def handle(self): - self.wfile.write("%s" % self.connection.get_cipher_list()) - self.wfile.flush() - - -class HangHandler(tcp.BaseHandler): - - def handle(self): - # Hang as long as the client connection is alive - while True: - try: - self.connection.setblocking(0) - ret = self.connection.recv(1) - # Client connection is dead... - if ret == "" or ret == b"": - return - except socket.error: - pass - except SSL.WantReadError: - pass - except Exception: - return - time.sleep(0.1) - - -class ALPNHandler(tcp.BaseHandler): - sni = None - - def handle(self): - alp = self.get_alpn_proto_negotiated() - if alp: - self.wfile.write(alp) - else: - self.wfile.write(b"NONE") - self.wfile.flush() - - -class TestServer(tservers.ServerTestBase): - handler = EchoHandler - - def test_echo(self): - testval = b"echo!\n" - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - def test_thread_start_error(self): - with mock.patch.object(threading.Thread, "start", side_effect=threading.ThreadError("nonewthread")) as m: - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - assert not c.rfile.read(1) - assert m.called - assert "nonewthread" in self.q.get_nowait() - self.test_echo() - - -class TestServerBind(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - - def handle(self): - self.wfile.write(str(self.connection.getpeername()).encode()) - self.wfile.flush() - - def test_bind(self): - """ Test to bind to a given random port. Try again if the random port turned out to be blocked. """ - for i in range(20): - random_port = random.randrange(1024, 65535) - try: - c = tcp.TCPClient( - ("127.0.0.1", self.port), source_address=( - "127.0.0.1", random_port)) - with c.connect(): - assert c.rfile.readline() == str(("127.0.0.1", random_port)).encode() - return - except exceptions.TcpException: # port probably already in use - pass - - -class TestServerIPv6(tservers.ServerTestBase): - handler = EchoHandler - addr = tcp.Address(("localhost", 0), use_ipv6=True) - - def test_echo(self): - testval = b"echo!\n" - c = tcp.TCPClient(tcp.Address(("::1", self.port), use_ipv6=True)) - with c.connect(): - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - -class TestEcho(tservers.ServerTestBase): - handler = EchoHandler - - def test_echo(self): - testval = b"echo!\n" - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - -class HardDisconnectHandler(tcp.BaseHandler): - - def handle(self): - self.connection.close() - - -class TestFinishFail(tservers.ServerTestBase): - - """ - This tests a difficult-to-trigger exception in the .finish() method of - the handler. 
- """ - handler = EchoHandler - - def test_disconnect_in_finish(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.wfile.write(b"foo\n") - c.wfile.flush = mock.Mock(side_effect=exceptions.TcpDisconnect) - c.finish() - - -class TestServerSSL(tservers.ServerTestBase): - handler = EchoHandler - ssl = dict( - cipher_list="AES256-SHA", - chain_file=tutils.test_data.path("data/server.crt") - ) - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(sni="foo.com", options=SSL.OP_ALL) - testval = b"echo!\n" - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - def test_get_current_cipher(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - assert not c.get_current_cipher() - c.convert_to_ssl(sni="foo.com") - ret = c.get_current_cipher() - assert ret - assert "AES" in ret[0] - - -class TestSSLv3Only(tservers.ServerTestBase): - handler = EchoHandler - ssl = dict( - request_client_cert=False, - v3_only=True - ) - - def test_failure(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - tutils.raises(exceptions.TlsException, c.convert_to_ssl, sni="foo.com") - - -class TestSSLUpstreamCertVerificationWBadServerCert(tservers.ServerTestBase): - handler = EchoHandler - - ssl = dict( - cert=tutils.test_data.path("data/verificationcerts/self-signed.crt"), - key=tutils.test_data.path("data/verificationcerts/self-signed.key") - ) - - def test_mode_default_should_pass(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - - # Verification errors should be saved even if connection isn't aborted - # aborted - assert c.ssl_verification_error - - testval = b"echo!\n" - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - def test_mode_none_should_pass(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(verify_options=SSL.VERIFY_NONE) - - # Verification errors should be saved even if connection isn't aborted - assert c.ssl_verification_error - - testval = b"echo!\n" - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - def test_mode_strict_should_fail(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - with tutils.raises(exceptions.InvalidCertificateException): - c.convert_to_ssl( - sni="example.mitmproxy.org", - verify_options=SSL.VERIFY_PEER, - ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") - ) - - assert c.ssl_verification_error - - # Unknown issuing certificate authority for first certificate - assert "errno: 18" in str(c.ssl_verification_error) - assert "depth: 0" in str(c.ssl_verification_error) - - -class TestSSLUpstreamCertVerificationWBadHostname(tservers.ServerTestBase): - handler = EchoHandler - - ssl = dict( - cert=tutils.test_data.path("data/verificationcerts/trusted-leaf.crt"), - key=tutils.test_data.path("data/verificationcerts/trusted-leaf.key") - ) - - def test_should_fail_without_sni(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - with tutils.raises(exceptions.TlsException): - c.convert_to_ssl( - verify_options=SSL.VERIFY_PEER, - ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") - ) - - def test_should_fail(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - with tutils.raises(exceptions.InvalidCertificateException): - c.convert_to_ssl( - sni="mitmproxy.org", - 
verify_options=SSL.VERIFY_PEER, - ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") - ) - assert c.ssl_verification_error - - -class TestSSLUpstreamCertVerificationWValidCertChain(tservers.ServerTestBase): - handler = EchoHandler - - ssl = dict( - cert=tutils.test_data.path("data/verificationcerts/trusted-leaf.crt"), - key=tutils.test_data.path("data/verificationcerts/trusted-leaf.key") - ) - - def test_mode_strict_w_pemfile_should_pass(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl( - sni="example.mitmproxy.org", - verify_options=SSL.VERIFY_PEER, - ca_pemfile=tutils.test_data.path("data/verificationcerts/trusted-root.crt") - ) - - assert c.ssl_verification_error is None - - testval = b"echo!\n" - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - def test_mode_strict_w_cadir_should_pass(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl( - sni="example.mitmproxy.org", - verify_options=SSL.VERIFY_PEER, - ca_path=tutils.test_data.path("data/verificationcerts/") - ) - - assert c.ssl_verification_error is None - - testval = b"echo!\n" - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - - -class TestSSLClientCert(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - sni = None - - def handle_sni(self, connection): - self.sni = connection.get_servername() - - def handle(self): - self.wfile.write(b"%d\n" % self.clientcert.serial) - self.wfile.flush() - - ssl = dict( - request_client_cert=True, - v3_only=False - ) - - def test_clientcert(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl( - cert=tutils.test_data.path("data/clientcert/client.pem")) - assert c.rfile.readline().strip() == b"1" - - def test_clientcert_err(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - tutils.raises( - exceptions.TlsException, - c.convert_to_ssl, - cert=tutils.test_data.path("data/clientcert/make") - ) - - -class TestSNI(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - sni = None - - def handle_sni(self, connection): - self.sni = connection.get_servername() - - def handle(self): - self.wfile.write(self.sni) - self.wfile.flush() - - ssl = True - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(sni="foo.com") - assert c.sni == "foo.com" - assert c.rfile.readline() == b"foo.com" - - -class TestServerCipherList(tservers.ServerTestBase): - handler = ClientCipherListHandler - ssl = dict( - cipher_list='RC4-SHA' - ) - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(sni="foo.com") - assert c.rfile.readline() == b"['RC4-SHA']" - - -class TestServerCurrentCipher(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - sni = None - - def handle(self): - self.wfile.write(str(self.get_current_cipher()).encode()) - self.wfile.flush() - - ssl = dict( - cipher_list='RC4-SHA' - ) - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(sni="foo.com") - assert b"RC4-SHA" in c.rfile.readline() - - -class TestServerCipherListError(tservers.ServerTestBase): - handler = ClientCipherListHandler - ssl = dict( - cipher_list='bogus' - ) - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - tutils.raises("handshake error", c.convert_to_ssl, sni="foo.com") - - 
-class TestClientCipherListError(tservers.ServerTestBase): - handler = ClientCipherListHandler - ssl = dict( - cipher_list='RC4-SHA' - ) - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - tutils.raises( - "cipher specification", - c.convert_to_ssl, - sni="foo.com", - cipher_list="bogus" - ) - - -class TestSSLDisconnect(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - - def handle(self): - self.finish() - - ssl = True - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - # Excercise SSL.ZeroReturnError - c.rfile.read(10) - c.close() - tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") - tutils.raises(queue.Empty, self.q.get_nowait) - - -class TestSSLHardDisconnect(tservers.ServerTestBase): - handler = HardDisconnectHandler - ssl = True - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - # Exercise SSL.SysCallError - c.rfile.read(10) - c.close() - tutils.raises(exceptions.TcpDisconnect, c.wfile.write, b"foo") - - -class TestDisconnect(tservers.ServerTestBase): - - def test_echo(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.rfile.read(10) - c.wfile.write(b"foo") - c.close() - c.close() - - -class TestServerTimeOut(tservers.ServerTestBase): - - class handler(tcp.BaseHandler): - - def handle(self): - self.timeout = False - self.settimeout(0.01) - try: - self.rfile.read(10) - except exceptions.TcpTimeout: - self.timeout = True - - def test_timeout(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - time.sleep(0.3) - assert self.last_handler.timeout - - -class TestTimeOut(tservers.ServerTestBase): - handler = HangHandler - - def test_timeout(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.settimeout(0.1) - assert c.gettimeout() == 0.1 - tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) - - -class TestALPNClient(tservers.ServerTestBase): - handler = ALPNHandler - ssl = dict( - alpn_select=b"bar" - ) - - if tcp.HAS_ALPN: - def test_alpn(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(alpn_protos=[b"foo", b"bar", b"fasel"]) - assert c.get_alpn_proto_negotiated() == b"bar" - assert c.rfile.readline().strip() == b"bar" - - def test_no_alpn(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - assert c.get_alpn_proto_negotiated() == b"" - assert c.rfile.readline().strip() == b"NONE" - - else: - def test_none_alpn(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl(alpn_protos=[b"foo", b"bar", b"fasel"]) - assert c.get_alpn_proto_negotiated() == b"" - assert c.rfile.readline() == b"NONE" - - -class TestNoSSLNoALPNClient(tservers.ServerTestBase): - handler = ALPNHandler - - def test_no_ssl_no_alpn(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - assert c.get_alpn_proto_negotiated() == b"" - assert c.rfile.readline().strip() == b"NONE" - - -class TestSSLTimeOut(tservers.ServerTestBase): - handler = HangHandler - ssl = True - - def test_timeout_client(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - c.settimeout(0.1) - tutils.raises(exceptions.TcpTimeout, c.rfile.read, 10) - - -class TestDHParams(tservers.ServerTestBase): - handler = HangHandler - ssl = dict( - dhparams=certs.CertStore.load_dhparam( - 
tutils.test_data.path("data/dhparam.pem"), - ), - cipher_list="DHE-RSA-AES256-SHA" - ) - - def test_dhparams(self): - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - ret = c.get_current_cipher() - assert ret[0] == "DHE-RSA-AES256-SHA" - - def test_create_dhparams(self): - with tutils.tmpdir() as d: - filename = os.path.join(d, "dhparam.pem") - certs.CertStore.load_dhparam(filename) - assert os.path.exists(filename) - - -class TestTCPClient: - - def test_conerr(self): - c = tcp.TCPClient(("127.0.0.1", 0)) - tutils.raises(exceptions.TcpException, c.connect) - - -class TestFileLike: - - def test_blocksize(self): - s = BytesIO(b"1234567890abcdefghijklmnopqrstuvwxyz") - s = tcp.Reader(s) - s.BLOCKSIZE = 2 - assert s.read(1) == b"1" - assert s.read(2) == b"23" - assert s.read(3) == b"456" - assert s.read(4) == b"7890" - d = s.read(-1) - assert d.startswith(b"abc") and d.endswith(b"xyz") - - def test_wrap(self): - s = BytesIO(b"foobar\nfoobar") - s.flush() - s = tcp.Reader(s) - assert s.readline() == b"foobar\n" - assert s.readline() == b"foobar" - # Test __getattr__ - assert s.isatty - - def test_limit(self): - s = BytesIO(b"foobar\nfoobar") - s = tcp.Reader(s) - assert s.readline(3) == b"foo" - - def test_limitless(self): - s = BytesIO(b"f" * (50 * 1024)) - s = tcp.Reader(s) - ret = s.read(-1) - assert len(ret) == 50 * 1024 - - def test_readlog(self): - s = BytesIO(b"foobar\nfoobar") - s = tcp.Reader(s) - assert not s.is_logging() - s.start_log() - assert s.is_logging() - s.readline() - assert s.get_log() == b"foobar\n" - s.read(1) - assert s.get_log() == b"foobar\nf" - s.start_log() - assert s.get_log() == b"" - s.read(1) - assert s.get_log() == b"o" - s.stop_log() - tutils.raises(ValueError, s.get_log) - - def test_writelog(self): - s = BytesIO() - s = tcp.Writer(s) - s.start_log() - assert s.is_logging() - s.write(b"x") - assert s.get_log() == b"x" - s.write(b"x") - assert s.get_log() == b"xx" - - def test_writer_flush_error(self): - s = BytesIO() - s = tcp.Writer(s) - o = mock.MagicMock() - o.flush = mock.MagicMock(side_effect=socket.error) - s.o = o - tutils.raises(exceptions.TcpDisconnect, s.flush) - - def test_reader_read_error(self): - s = BytesIO(b"foobar\nfoobar") - s = tcp.Reader(s) - o = mock.MagicMock() - o.read = mock.MagicMock(side_effect=socket.error) - s.o = o - tutils.raises(exceptions.TcpDisconnect, s.read, 10) - - def test_reset_timestamps(self): - s = BytesIO(b"foobar\nfoobar") - s = tcp.Reader(s) - s.first_byte_timestamp = 500 - s.reset_timestamps() - assert not s.first_byte_timestamp - - def test_first_byte_timestamp_updated_on_read(self): - s = BytesIO(b"foobar\nfoobar") - s = tcp.Reader(s) - s.read(1) - assert s.first_byte_timestamp - expected = s.first_byte_timestamp - s.read(5) - assert s.first_byte_timestamp == expected - - def test_first_byte_timestamp_updated_on_readline(self): - s = BytesIO(b"foobar\nfoobar\nfoobar") - s = tcp.Reader(s) - s.readline() - assert s.first_byte_timestamp - expected = s.first_byte_timestamp - s.readline() - assert s.first_byte_timestamp == expected - - def test_read_ssl_error(self): - s = mock.MagicMock() - s.read = mock.MagicMock(side_effect=SSL.Error()) - s = tcp.Reader(s) - tutils.raises(exceptions.TlsException, s.read, 1) - - def test_read_syscall_ssl_error(self): - s = mock.MagicMock() - s.read = mock.MagicMock(side_effect=SSL.SysCallError()) - s = tcp.Reader(s) - tutils.raises(exceptions.TlsException, s.read, 1) - - def test_reader_readline_disconnect(self): - o = mock.MagicMock() - o.read = 
mock.MagicMock(side_effect=socket.error) - s = tcp.Reader(o) - tutils.raises(exceptions.TcpDisconnect, s.readline, 10) - - def test_reader_incomplete_error(self): - s = BytesIO(b"foobar") - s = tcp.Reader(s) - tutils.raises(exceptions.TcpReadIncomplete, s.safe_read, 10) - - -class TestPeek(tservers.ServerTestBase): - handler = EchoHandler - - def _connect(self, c): - return c.connect() - - def test_peek(self): - testval = b"peek!\n" - c = tcp.TCPClient(("127.0.0.1", self.port)) - with self._connect(c): - c.wfile.write(testval) - c.wfile.flush() - - assert c.rfile.peek(4) == b"peek" - assert c.rfile.peek(6) == b"peek!\n" - assert c.rfile.readline() == testval - - c.close() - with tutils.raises(exceptions.NetlibException): - if c.rfile.peek(1) == b"": - # Workaround for Python 2 on Unix: - # Peeking a closed connection does not raise an exception here. - raise exceptions.NetlibException() - - -class TestPeekSSL(TestPeek): - ssl = True - - def _connect(self, c): - with c.connect() as conn: - c.convert_to_ssl() - return conn.pop() - - -class TestAddress: - def test_simple(self): - a = tcp.Address(("localhost", 80), True) - assert a.use_ipv6 - b = tcp.Address(("foo.com", 80), True) - assert not a == b - c = tcp.Address(("localhost", 80), True) - assert a == c - assert not a != c - assert repr(a) == "localhost:80" - - -class TestSSLKeyLogger(tservers.ServerTestBase): - handler = EchoHandler - ssl = dict( - cipher_list="AES256-SHA" - ) - - def test_log(self): - testval = b"echo!\n" - _logfun = tcp.log_ssl_key - - with tutils.tmpdir() as d: - logfile = os.path.join(d, "foo", "bar", "logfile") - tcp.log_ssl_key = tcp.SSLKeyLogger(logfile) - - c = tcp.TCPClient(("127.0.0.1", self.port)) - with c.connect(): - c.convert_to_ssl() - c.wfile.write(testval) - c.wfile.flush() - assert c.rfile.readline() == testval - c.finish() - - tcp.log_ssl_key.close() - with open(logfile, "rb") as f: - assert f.read().count(b"CLIENT_RANDOM") == 2 - - tcp.log_ssl_key = _logfun - - def test_create_logfun(self): - assert isinstance( - tcp.SSLKeyLogger.create_logfun("test"), - tcp.SSLKeyLogger) - assert not tcp.SSLKeyLogger.create_logfun(False) diff --git a/test/netlib/test_wsgi.py b/test/netlib/test_wsgi.py deleted file mode 100644 index 5c61f81c..00000000 --- a/test/netlib/test_wsgi.py +++ /dev/null @@ -1,106 +0,0 @@ -from io import BytesIO -import sys -from netlib import wsgi -from netlib.http import Headers - - -def tflow(): - headers = Headers(test=b"value") - req = wsgi.Request("http", "GET", "/", "HTTP/1.1", headers, "") - return wsgi.Flow(("127.0.0.1", 8888), req) - - -class ExampleApp: - - def __init__(self): - self.called = False - - def __call__(self, environ, start_response): - self.called = True - status = '200 OK' - response_headers = [('Content-type', 'text/plain')] - start_response(status, response_headers) - return [b'Hello', b' world!\n'] - - -class TestWSGI: - - def test_make_environ(self): - w = wsgi.WSGIAdaptor(None, "foo", 80, "version") - tf = tflow() - assert w.make_environ(tf, None) - - tf.request.path = "/foo?bar=voing" - r = w.make_environ(tf, None) - assert r["QUERY_STRING"] == "bar=voing" - - def test_serve(self): - ta = ExampleApp() - w = wsgi.WSGIAdaptor(ta, "foo", 80, "version") - f = tflow() - f.request.host = "foo" - f.request.port = 80 - - wfile = BytesIO() - err = w.serve(f, wfile) - assert ta.called - assert not err - - val = wfile.getvalue() - assert b"Hello world" in val - assert b"Server:" in val - - def _serve(self, app): - w = wsgi.WSGIAdaptor(app, "foo", 80, "version") - f = 
-        f.request.host = "foo"
-        f.request.port = 80
-        wfile = BytesIO()
-        w.serve(f, wfile)
-        return wfile.getvalue()
-
-    def test_serve_empty_body(self):
-        def app(environ, start_response):
-            status = '200 OK'
-            response_headers = [('Foo', 'bar')]
-            start_response(status, response_headers)
-            return []
-        assert self._serve(app)
-
-    def test_serve_double_start(self):
-        def app(environ, start_response):
-            try:
-                raise ValueError("foo")
-            except:
-                sys.exc_info()
-            status = '200 OK'
-            response_headers = [('Content-type', 'text/plain')]
-            start_response(status, response_headers)
-            start_response(status, response_headers)
-        assert b"Internal Server Error" in self._serve(app)
-
-    def test_serve_single_err(self):
-        def app(environ, start_response):
-            try:
-                raise ValueError("foo")
-            except:
-                ei = sys.exc_info()
-            status = '200 OK'
-            response_headers = [('Content-type', 'text/plain')]
-            start_response(status, response_headers, ei)
-            yield b""
-        assert b"Internal Server Error" in self._serve(app)
-
-    def test_serve_double_err(self):
-        def app(environ, start_response):
-            try:
-                raise ValueError("foo")
-            except:
-                ei = sys.exc_info()
-            status = '200 OK'
-            response_headers = [('Content-type', 'text/plain')]
-            start_response(status, response_headers)
-            yield b"aaa"
-            start_response(status, response_headers, ei)
-            yield b"bbb"
-        assert b"Internal Server Error" in self._serve(app)
diff --git a/test/netlib/tools/getcertnames b/test/netlib/tools/getcertnames
deleted file mode 100644
index 0882fccd..00000000
--- a/test/netlib/tools/getcertnames
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-import sys
-sys.path.insert(0, "../../")
-from netlib import tcp
-
-
-def get_remote_cert(host, port, sni):
-    c = tcp.TCPClient((host, port))
-    c.connect()
-    c.convert_to_ssl(sni=sni)
-    return c.cert
-
-if len(sys.argv) > 2:
-    port = int(sys.argv[2])
-else:
-    port = 443
-if len(sys.argv) > 3:
-    sni = sys.argv[3]
-else:
-    sni = None
-
-cert = get_remote_cert(sys.argv[1], port, sni)
-print("CN:", cert.cn)
-if cert.altnames:
-    print("SANs:")
-    for i in cert.altnames:
-        print("\t", i)
diff --git a/test/netlib/tservers.py b/test/netlib/tservers.py
deleted file mode 100644
index 2fae8ba6..00000000
--- a/test/netlib/tservers.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import threading
-import queue
-import io
-import OpenSSL
-
-from netlib import tcp
-from mitmproxy.test import tutils
-
-
-class _ServerThread(threading.Thread):
-
-    def __init__(self, server):
-        self.server = server
-        threading.Thread.__init__(self)
-
-    def run(self):
-        self.server.serve_forever()
-
-    def shutdown(self):
-        self.server.shutdown()
-
-
-class _TServer(tcp.TCPServer):
-
-    def __init__(self, ssl, q, handler_klass, addr, **kwargs):
-        """
-            ssl: A dictionary of SSL parameters:
-
-                    cert, key, request_client_cert, cipher_list,
-                    dhparams, v3_only
-        """
-        tcp.TCPServer.__init__(self, addr)
-
-        if ssl is True:
-            self.ssl = dict()
-        elif isinstance(ssl, dict):
-            self.ssl = ssl
-        else:
-            self.ssl = None
-
-        self.q = q
-        self.handler_klass = handler_klass
-        if self.handler_klass is not None:
-            self.handler_klass.kwargs = kwargs
-        self.last_handler = None
-
-    def handle_client_connection(self, request, client_address):
-        h = self.handler_klass(request, client_address, self)
-        self.last_handler = h
-        if self.ssl is not None:
-            cert = self.ssl.get(
-                "cert",
-                tutils.test_data.path("data/server.crt"))
-            raw_key = self.ssl.get(
-                "key",
-                tutils.test_data.path("data/server.key"))
-            key = OpenSSL.crypto.load_privatekey(
-                OpenSSL.crypto.FILETYPE_PEM,
-                open(raw_key, "rb").read())
-            if self.ssl.get("v3_only", False):
-                method = OpenSSL.SSL.SSLv3_METHOD
-                options = OpenSSL.SSL.OP_NO_SSLv2 | OpenSSL.SSL.OP_NO_TLSv1
-            else:
-                method = OpenSSL.SSL.SSLv23_METHOD
-                options = None
-            h.convert_to_ssl(
-                cert, key,
-                method=method,
-                options=options,
-                handle_sni=getattr(h, "handle_sni", None),
-                request_client_cert=self.ssl.get("request_client_cert", None),
-                cipher_list=self.ssl.get("cipher_list", None),
-                dhparams=self.ssl.get("dhparams", None),
-                chain_file=self.ssl.get("chain_file", None),
-                alpn_select=self.ssl.get("alpn_select", None)
-            )
-        h.handle()
-        h.finish()
-
-    def handle_error(self, connection, client_address, fp=None):
-        s = io.StringIO()
-        tcp.TCPServer.handle_error(self, connection, client_address, s)
-        self.q.put(s.getvalue())
-
-
-class ServerTestBase:
-    ssl = None
-    handler = None
-    addr = ("localhost", 0)
-
-    @classmethod
-    def setup_class(cls, **kwargs):
-        cls.q = queue.Queue()
-        s = cls.makeserver(**kwargs)
-        cls.port = s.address.port
-        cls.server = _ServerThread(s)
-        cls.server.start()
-
-    @classmethod
-    def makeserver(cls, **kwargs):
-        ssl = kwargs.pop('ssl', cls.ssl)
-        return _TServer(ssl, cls.q, cls.handler, cls.addr, **kwargs)
-
-    @classmethod
-    def teardown_class(cls):
-        cls.server.shutdown()
-
-    def teardown(self):
-        self.server.server.wait_for_silence()
-
-    @property
-    def last_handler(self):
-        return self.server.server.last_handler
diff --git a/test/netlib/websockets/__init__.py b/test/netlib/websockets/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/test/netlib/websockets/test_frame.py b/test/netlib/websockets/test_frame.py
deleted file mode 100644
index 3b7c9ed4..00000000
--- a/test/netlib/websockets/test_frame.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import os
-import codecs
-import pytest
-
-from netlib import websockets
-from mitmproxy.test import tutils
-
-
-class TestFrameHeader:
-
-    @pytest.mark.parametrize("input,expected", [
-        (0, '0100'),
-        (125, '017D'),
-        (126, '017E007E'),
-        (127, '017E007F'),
-        (142, '017E008E'),
-        (65534, '017EFFFE'),
-        (65535, '017EFFFF'),
-        (65536, '017F0000000000010000'),
-        (8589934591, '017F00000001FFFFFFFF'),
-        (2 ** 64 - 1, '017FFFFFFFFFFFFFFFFF'),
-    ])
-    def test_serialization_length(self, input, expected):
-        h = websockets.FrameHeader(
-            opcode=websockets.OPCODE.TEXT,
-            payload_length=input,
-        )
-        assert bytes(h) == codecs.decode(expected, 'hex')
-
-    def test_serialization_too_large(self):
-        h = websockets.FrameHeader(
-            payload_length=2 ** 64 + 1,
-        )
-        with pytest.raises(ValueError):
-            bytes(h)
-
-    @pytest.mark.parametrize("input,expected", [
-        ('0100', 0),
-        ('017D', 125),
-        ('017E007E', 126),
-        ('017E007F', 127),
-        ('017E008E', 142),
-        ('017EFFFE', 65534),
-        ('017EFFFF', 65535),
-        ('017F0000000000010000', 65536),
-        ('017F00000001FFFFFFFF', 8589934591),
-        ('017FFFFFFFFFFFFFFFFF', 2 ** 64 - 1),
-    ])
-    def test_deserialization_length(self, input, expected):
-        h = websockets.FrameHeader.from_file(tutils.treader(codecs.decode(input, 'hex')))
-        assert h.payload_length == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ('0100', (False, None)),
-        ('018000000000', (True, '00000000')),
-        ('018012345678', (True, '12345678')),
-    ])
-    def test_deserialization_masking(self, input, expected):
-        h = websockets.FrameHeader.from_file(tutils.treader(codecs.decode(input, 'hex')))
-        assert h.mask == expected[0]
-        if h.mask:
-            assert h.masking_key == codecs.decode(expected[1], 'hex')
-
-    def test_equality(self):
-        h = websockets.FrameHeader(mask=True, masking_key=b'1234')
-        h2 = websockets.FrameHeader(mask=True, masking_key=b'1234')
-        assert h == h2
-
-        h = websockets.FrameHeader(fin=True)
-        h2 = websockets.FrameHeader(fin=False)
-        assert h != h2
-
-        assert h != 'foobar'
-
-    def test_roundtrip(self):
-        def round(*args, **kwargs):
-            h = websockets.FrameHeader(*args, **kwargs)
-            h2 = websockets.FrameHeader.from_file(tutils.treader(bytes(h)))
-            assert h == h2
-
-        round()
-        round(fin=True)
-        round(rsv1=True)
-        round(rsv2=True)
-        round(rsv3=True)
-        round(payload_length=1)
-        round(payload_length=100)
-        round(payload_length=1000)
-        round(payload_length=10000)
-        round(opcode=websockets.OPCODE.PING)
-        round(masking_key=b"test")
-
-    def test_human_readable(self):
-        f = websockets.FrameHeader(
-            masking_key=b"test",
-            fin=True,
-            payload_length=10
-        )
-        assert repr(f)
-
-        f = websockets.FrameHeader()
-        assert repr(f)
-
-    def test_funky(self):
-        f = websockets.FrameHeader(masking_key=b"test", mask=False)
-        raw = bytes(f)
-        f2 = websockets.FrameHeader.from_file(tutils.treader(raw))
-        assert not f2.mask
-
-    def test_violations(self):
-        tutils.raises("opcode", websockets.FrameHeader, opcode=17)
-        tutils.raises("masking key", websockets.FrameHeader, masking_key=b"x")
-
-    def test_automask(self):
-        f = websockets.FrameHeader(mask=True)
-        assert f.masking_key
-
-        f = websockets.FrameHeader(masking_key=b"foob")
-        assert f.mask
-
-        f = websockets.FrameHeader(masking_key=b"foob", mask=0)
-        assert not f.mask
-        assert f.masking_key
-
-
-class TestFrame:
-    def test_equality(self):
-        f = websockets.Frame(payload=b'1234')
-        f2 = websockets.Frame(payload=b'1234')
-        assert f == f2
-
-        assert f != b'1234'
-
-    def test_roundtrip(self):
-        def round(*args, **kwargs):
-            f = websockets.Frame(*args, **kwargs)
-            raw = bytes(f)
-            f2 = websockets.Frame.from_file(tutils.treader(raw))
-            assert f == f2
-        round(b"test")
-        round(b"test", fin=1)
-        round(b"test", rsv1=1)
-        round(b"test", opcode=websockets.OPCODE.PING)
-        round(b"test", masking_key=b"test")
-
-    def test_human_readable(self):
-        f = websockets.Frame()
-        assert repr(f)
-
-        f = websockets.Frame(b"foobar")
-        assert "foobar" in repr(f)
-
-    @pytest.mark.parametrize("masked", [True, False])
-    @pytest.mark.parametrize("length", [100, 50000, 150000])
-    def test_serialization_bijection(self, masked, length):
-        frame = websockets.Frame(
-            os.urandom(length),
-            fin=True,
-            opcode=websockets.OPCODE.TEXT,
-            mask=int(masked),
-            masking_key=(os.urandom(4) if masked else None)
-        )
-        serialized = bytes(frame)
-        assert frame == websockets.Frame.from_bytes(serialized)
diff --git a/test/netlib/websockets/test_masker.py b/test/netlib/websockets/test_masker.py
deleted file mode 100644
index 23a9b876..00000000
--- a/test/netlib/websockets/test_masker.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import codecs
-import pytest
-
-from netlib import websockets
-
-
-class TestMasker:
-
-    @pytest.mark.parametrize("input,expected", [
-        ([b"a"], '00'),
-        ([b"four"], '070d1616'),
-        ([b"fourf"], '070d161607'),
-        ([b"fourfive"], '070d1616070b1501'),
-        ([b"a", b"aasdfasdfa", b"asdf"], '000302170504021705040205120605'),
-        ([b"a" * 50, b"aasdfasdfa", b"asdf"], '00030205000302050003020500030205000302050003020500030205000302050003020500030205000302050003020500030205120605051206050500110702'), # noqa
-    ])
-    def test_masker(self, input, expected):
-        m = websockets.Masker(b"abcd")
-        data = b"".join([m(t) for t in input])
-        assert data == codecs.decode(expected, 'hex')
-
-        data = websockets.Masker(b"abcd")(data)
-        assert data == b"".join(input)
diff --git a/test/netlib/websockets/test_utils.py b/test/netlib/websockets/test_utils.py
deleted file mode 100644
index f6f1e40a..00000000
--- a/test/netlib/websockets/test_utils.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import pytest
-
-from netlib import http
-from netlib import websockets
-
-
-class TestUtils:
-
-    def test_client_handshake_headers(self):
-        h = websockets.client_handshake_headers(version='42')
-        assert h['sec-websocket-version'] == '42'
-
-        h = websockets.client_handshake_headers(key='some-key')
-        assert h['sec-websocket-key'] == 'some-key'
-
-        h = websockets.client_handshake_headers(protocol='foobar')
-        assert h['sec-websocket-protocol'] == 'foobar'
-
-        h = websockets.client_handshake_headers(extensions='foo; bar')
-        assert h['sec-websocket-extensions'] == 'foo; bar'
-
-    def test_server_handshake_headers(self):
-        h = websockets.server_handshake_headers('some-key')
-        assert h['sec-websocket-accept'] == '8iILEZtcVdtFD7MDlPKip9ec9nw='
-        assert 'sec-websocket-protocol' not in h
-        assert 'sec-websocket-extensions' not in h
-
-        h = websockets.server_handshake_headers('some-key', 'foobar', 'foo; bar')
-        assert h['sec-websocket-accept'] == '8iILEZtcVdtFD7MDlPKip9ec9nw='
-        assert h['sec-websocket-protocol'] == 'foobar'
-        assert h['sec-websocket-extensions'] == 'foo; bar'
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'connection', b'upgrade'), (b'upgrade', b'websocket'), (b'sec-websocket-key', b'foobar')], True),
-        ([(b'connection', b'upgrade'), (b'upgrade', b'websocket'), (b'sec-websocket-accept', b'foobar')], True),
-        ([(b'Connection', b'UpgRaDe'), (b'Upgrade', b'WebSocKeT'), (b'Sec-WebSockeT-KeY', b'foobar')], True),
-        ([(b'Connection', b'UpgRaDe'), (b'Upgrade', b'WebSocKeT'), (b'Sec-WebSockeT-AccePt', b'foobar')], True),
-        ([(b'connection', b'foo'), (b'upgrade', b'bar'), (b'sec-websocket-key', b'foobar')], False),
-        ([(b'connection', b'upgrade'), (b'upgrade', b'websocket')], False),
-        ([(b'connection', b'upgrade'), (b'sec-websocket-key', b'foobar')], False),
-        ([(b'upgrade', b'websocket'), (b'sec-websocket-key', b'foobar')], False),
-        ([], False),
-    ])
-    def test_check_handshake(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.check_handshake(h) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'sec-websocket-version', b'13')], True),
-        ([(b'Sec-WebSockeT-VerSion', b'13')], True),
-        ([(b'sec-websocket-version', b'9')], False),
-        ([(b'sec-websocket-version', b'42')], False),
-        ([(b'sec-websocket-version', b'')], False),
-        ([], False),
-    ])
-    def test_check_client_version(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.check_client_version(h) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ('foobar', b'AzhRPA4TNwR6I/riJheN0TfR7+I='),
-        (b'foobar', b'AzhRPA4TNwR6I/riJheN0TfR7+I='),
-    ])
-    def test_create_server_nonce(self, input, expected):
-        assert websockets.create_server_nonce(input) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'sec-websocket-extensions', b'foo; bar')], 'foo; bar'),
-        ([(b'Sec-WebSockeT-ExteNsionS', b'foo; bar')], 'foo; bar'),
-        ([(b'sec-websocket-extensions', b'')], ''),
-        ([], None),
-    ])
-    def test_get_extensions(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.get_extensions(h) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'sec-websocket-protocol', b'foobar')], 'foobar'),
-        ([(b'Sec-WebSockeT-ProTocoL', b'foobar')], 'foobar'),
-        ([(b'sec-websocket-protocol', b'')], ''),
-        ([], None),
-    ])
-    def test_get_protocol(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.get_protocol(h) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'sec-websocket-key', b'foobar')], 'foobar'),
-        ([(b'Sec-WebSockeT-KeY', b'foobar')], 'foobar'),
-        ([(b'sec-websocket-key', b'')], ''),
-        ([], None),
-    ])
-    def test_get_client_key(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.get_client_key(h) == expected
-
-    @pytest.mark.parametrize("input,expected", [
-        ([(b'sec-websocket-accept', b'foobar')], 'foobar'),
-        ([(b'Sec-WebSockeT-AccepT', b'foobar')], 'foobar'),
-        ([(b'sec-websocket-accept', b'')], ''),
-        ([], None),
-    ])
-    def test_get_server_accept(self, input, expected):
-        h = http.Headers(input)
-        assert websockets.get_server_accept(h) == expected
diff --git a/test/pathod/test_language_http2.py b/test/pathod/test_language_http2.py
index 11e100ac..7d3a8e60 100644
--- a/test/pathod/test_language_http2.py
+++ b/test/pathod/test_language_http2.py
@@ -1,7 +1,7 @@
 import io
 
-from netlib import tcp
-from netlib.http import user_agents
+from mitmproxy.net import tcp
+from mitmproxy.net.http import user_agents
 
 from pathod import language
 from pathod.language import http2
diff --git a/test/pathod/test_language_websocket.py b/test/pathod/test_language_websocket.py
index 9d533d98..49fbd4e6 100644
--- a/test/pathod/test_language_websocket.py
+++ b/test/pathod/test_language_websocket.py
@@ -1,6 +1,6 @@
 from pathod import language
 from pathod.language import websockets
-import netlib.websockets
+import mitmproxy.net.websockets
 
 from . import tutils
 
@@ -62,7 +62,7 @@ class TestWebsocketFrame:
 
     def test_flags(self):
         wf = parse_request("wf:fin:mask:rsv1:rsv2:rsv3")
-        frm = netlib.websockets.Frame.from_bytes(tutils.render(wf))
+        frm = mitmproxy.net.websockets.Frame.from_bytes(tutils.render(wf))
         assert frm.header.fin
         assert frm.header.mask
         assert frm.header.rsv1
@@ -70,7 +70,7 @@ class TestWebsocketFrame:
         assert frm.header.rsv3
 
         wf = parse_request("wf:-fin:-mask:-rsv1:-rsv2:-rsv3")
-        frm = netlib.websockets.Frame.from_bytes(tutils.render(wf))
+        frm = mitmproxy.net.websockets.Frame.from_bytes(tutils.render(wf))
         assert not frm.header.fin
         assert not frm.header.mask
         assert not frm.header.rsv1
@@ -80,15 +80,15 @@ class TestWebsocketFrame:
     def fr(self, spec, **kwargs):
         settings = language.base.Settings(**kwargs)
         wf = parse_request(spec)
-        return netlib.websockets.Frame.from_bytes(tutils.render(wf, settings))
+        return mitmproxy.net.websockets.Frame.from_bytes(tutils.render(wf, settings))
 
     def test_construction(self):
         assert self.fr("wf:c1").header.opcode == 1
         assert self.fr("wf:c0").header.opcode == 0
         assert self.fr("wf:cbinary").header.opcode ==\
-            netlib.websockets.OPCODE.BINARY
+            mitmproxy.net.websockets.OPCODE.BINARY
         assert self.fr("wf:ctext").header.opcode ==\
-            netlib.websockets.OPCODE.TEXT
+            mitmproxy.net.websockets.OPCODE.TEXT
 
     def test_rawbody(self):
         frm = self.fr("wf:mask:r'foo'")
diff --git a/test/pathod/test_pathoc.py b/test/pathod/test_pathoc.py
index 98dc9825..90177ff6 100644
--- a/test/pathod/test_pathoc.py
+++ b/test/pathod/test_pathoc.py
@@ -1,9 +1,9 @@
 import io
 
 from mock import Mock
-from netlib import http
-from netlib import tcp
-from netlib.http import http1
+from mitmproxy.net import http
+from mitmproxy.net import tcp
+from mitmproxy.net.http import http1
 from mitmproxy.test.tutils import raises
 
 from mitmproxy import exceptions
diff --git a/test/pathod/test_pathod.py b/test/pathod/test_pathod.py
index 402cd638..8ee7a2fd 100644
--- a/test/pathod/test_pathod.py
+++ b/test/pathod/test_pathod.py
@@ -1,7 +1,7 @@
 import io
 
 from pathod import pathod
-from netlib import tcp
+from mitmproxy.net import tcp
 from mitmproxy import exceptions
 
 from . import tutils
diff --git a/test/pathod/test_protocols_http2.py b/test/pathod/test_protocols_http2.py
index a7303115..d77702a3 100644
--- a/test/pathod/test_protocols_http2.py
+++ b/test/pathod/test_protocols_http2.py
@@ -2,12 +2,12 @@ import mock
 import codecs
 import hyperframe
 
-from netlib import tcp, http
+from mitmproxy.net import tcp, http
 from mitmproxy.test.tutils import raises
-from netlib.http import http2
+from mitmproxy.net.http import http2
 from mitmproxy import exceptions
 
-from ..netlib import tservers as netlib_tservers
+from ..mitmproxy.net import tservers as net_tservers
 
 from pathod.protocols.http2 import HTTP2StateProtocol, TCPHandler
 
@@ -66,7 +66,7 @@ class TestProtocol:
         assert mock_server_method.called
 
 
-class TestCheckALPNMatch(netlib_tservers.ServerTestBase):
+class TestCheckALPNMatch(net_tservers.ServerTestBase):
     handler = EchoHandler
     ssl = dict(
         alpn_select=b'h2',
@@ -82,7 +82,7 @@ class TestCheckALPNMatch(netlib_tservers.ServerTestBase):
         assert protocol.check_alpn()
 
 
-class TestCheckALPNMismatch(netlib_tservers.ServerTestBase):
+class TestCheckALPNMismatch(net_tservers.ServerTestBase):
    handler = EchoHandler
    ssl = dict(
        alpn_select=None,
@@ -99,7 +99,7 @@ class TestCheckALPNMismatch(netlib_tservers.ServerTestBase):
             protocol.check_alpn()
 
 
-class TestPerformServerConnectionPreface(netlib_tservers.ServerTestBase):
+class TestPerformServerConnectionPreface(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
 
         def handle(self):
@@ -136,7 +136,7 @@ class TestPerformServerConnectionPreface(netlib_tservers.ServerTestBase):
             protocol.perform_server_connection_preface(force=True)
 
 
-class TestPerformClientConnectionPreface(netlib_tservers.ServerTestBase):
+class TestPerformClientConnectionPreface(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
 
         def handle(self):
@@ -197,7 +197,7 @@ class TestserverstreamIds:
         assert self.protocol.current_stream_id == 6
 
 
-class TestApplySettings(netlib_tservers.ServerTestBase):
+class TestApplySettings(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
         def handle(self):
             # check settings acknowledgement
@@ -290,7 +290,7 @@ class TestCreateBody:
         assert bytes[2] == codecs.decode('0000020001000000013432', 'hex_codec')
 
 
-class TestReadRequest(netlib_tservers.ServerTestBase):
+class TestReadRequest(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
 
        def handle(self):
@@ -320,7 +320,7 @@ class TestReadRequest(netlib_tservers.ServerTestBase):
         assert req.content == b'foobar'
 
 
-class TestReadRequestRelative(netlib_tservers.ServerTestBase):
+class TestReadRequestRelative(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
         def handle(self):
             self.wfile.write(
@@ -343,7 +343,7 @@ class TestReadRequestRelative(netlib_tservers.ServerTestBase):
         assert req.path == "*"
 
 
-class TestReadRequestAbsolute(netlib_tservers.ServerTestBase):
+class TestReadRequestAbsolute(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
         def handle(self):
             self.wfile.write(
@@ -367,7 +367,7 @@ class TestReadRequestAbsolute(netlib_tservers.ServerTestBase):
         assert req.port == 22
 
 
-class TestReadResponse(netlib_tservers.ServerTestBase):
+class TestReadResponse(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
         def handle(self):
             self.wfile.write(
@@ -396,7 +396,7 @@ class TestReadResponse(netlib_tservers.ServerTestBase):
         assert resp.timestamp_end
 
 
-class TestReadEmptyResponse(netlib_tservers.ServerTestBase):
+class TestReadEmptyResponse(net_tservers.ServerTestBase):
     class handler(tcp.BaseHandler):
         def handle(self):
             self.wfile.write(
diff --git a/test/pathod/tutils.py b/test/pathod/tutils.py
index 16dec187..ccc3df43 100644
--- a/test/pathod/tutils.py
+++ b/test/pathod/tutils.py
@@ -7,7 +7,7 @@
 import urllib
 
 from mitmproxy.utils import data
-from netlib import tcp
+from mitmproxy.net import tcp
 from mitmproxy.test import tutils
 from pathod import language
 
diff --git a/tox.ini b/tox.ini
index 8747d2f0..ff1484d0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -19,4 +19,4 @@ commands = sphinx-build -W -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
 
 [testenv:lint]
 deps = flake8>=2.6.2, <3.1
-commands = flake8 --jobs 8 --count mitmproxy netlib pathod examples test
+commands = flake8 --jobs 8 --count mitmproxy pathod examples test
-- 
cgit v1.2.3