author     Maximilian Hils <git@maximilianhils.com>  2016-02-15 14:58:46 +0100
committer  Maximilian Hils <git@maximilianhils.com>  2016-02-15 14:58:46 +0100
commit     33fa49277a821b9d38e8c9bf0bcf2adcfa2f6f04 (patch)
tree       31914a601302579ff817504019296fd7e9e46765 /examples
parent     36f34f701991b5d474c005ec45e3b66e20f326a8 (diff)
download   mitmproxy-33fa49277a821b9d38e8c9bf0bcf2adcfa2f6f04.tar.gz
           mitmproxy-33fa49277a821b9d38e8c9bf0bcf2adcfa2f6f04.tar.bz2
           mitmproxy-33fa49277a821b9d38e8c9bf0bcf2adcfa2f6f04.zip
move mitmproxy
Diffstat (limited to 'examples')
-rw-r--r--  examples/README                    30
-rw-r--r--  examples/add_header.py              2
-rw-r--r--  examples/change_upstream_proxy.py  24
-rw-r--r--  examples/custom_contentviews.py    68
-rw-r--r--  examples/dns_spoofing.py           50
-rw-r--r--  examples/dup_and_replay.py          4
-rw-r--r--  examples/filt.py                   16
-rwxr-xr-x  examples/flowbasic                 44
-rw-r--r--  examples/flowwriter.py             20
-rw-r--r--  examples/har_extractor.py         253
-rw-r--r--  examples/iframe_injector.py        27
-rwxr-xr-x  examples/mitmproxywrapper.py      166
-rw-r--r--  examples/modify_form.py             5
-rw-r--r--  examples/modify_querystring.py      6
-rw-r--r--  examples/modify_response_body.py   18
-rw-r--r--  examples/nonblocking.py             9
-rw-r--r--  examples/proxapp.py                24
-rwxr-xr-x  examples/read_dumpfile             20
-rw-r--r--  examples/redirect_requests.py      22
-rw-r--r--  examples/sslstrip.py               40
-rwxr-xr-x  examples/stickycookies             42
-rw-r--r--  examples/stream.py                  5
-rw-r--r--  examples/stream_modify.py          20
-rw-r--r--  examples/stub.py                   79
-rw-r--r--  examples/tcp_message.py            24
-rw-r--r--  examples/tls_passthrough.py       136
-rw-r--r--  examples/upsidedownternet.py       17
27 files changed, 0 insertions, 1171 deletions
diff --git a/examples/README b/examples/README
deleted file mode 100644
index b4dec8e5..00000000
--- a/examples/README
+++ /dev/null
@@ -1,30 +0,0 @@
-Some inline scripts may require additional dependencies, which can be installed using
-`pip install mitmproxy[examples]`.
-
-
-# inline script examples
-add_header.py             Simple script that just adds a header to every request.
-change_upstream_proxy.py  Dynamically change the upstream proxy.
-dns_spoofing.py           Use mitmproxy in a DNS spoofing scenario.
-dup_and_replay.py         Duplicates each request, changes it, and then replays the modified request.
-filt.py                   Use mitmproxy's filter expressions in your script.
-flowwriter.py             Only write selected flows into a mitmproxy dumpfile.
-iframe_injector.py        Inject a configurable iframe into pages.
-modify_form.py            Modify all form submissions to add a parameter.
-modify_querystring.py     Modify all query strings to add a parameter.
-modify_response_body.py   Replace arbitrary strings in all responses.
-nonblocking.py            Demonstrate parallel processing with a blocking script.
-proxapp.py                How to embed a WSGI app in a mitmproxy server.
-redirect_requests.py      Redirect requests or directly reply to them.
-stub.py                   Script stub with a method definition for every event.
-upsidedownternet.py       Rewrites traffic to turn images upside down.
-
-
-# libmproxy examples
-flowbasic      Basic use of mitmproxy as a library.
-stickycookies  An example of writing a custom proxy with libmproxy.
-
-
-# misc
-read_dumpfile        Read a dumpfile generated by mitmproxy.
-mitmproxywrapper.py  Wrap a mitmproxy run with proxy enable/disable on OS X.
diff --git a/examples/add_header.py b/examples/add_header.py
deleted file mode 100644
index cf1b53cc..00000000
--- a/examples/add_header.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def response(context, flow):
- flow.response.headers["newheader"] = "foo"
diff --git a/examples/change_upstream_proxy.py b/examples/change_upstream_proxy.py
deleted file mode 100644
index 9c454897..00000000
--- a/examples/change_upstream_proxy.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# This script demonstrates how mitmproxy can switch to a second/different upstream proxy
-# in upstream proxy mode.
-#
-# Usage: mitmdump -U http://default-upstream-proxy.local:8080/ -s change_upstream_proxy.py
-#
-# If you want to change the target server, you should modify flow.request.host and flow.request.port
-
-
-def proxy_address(flow):
- # Poor man's loadbalancing: route every second domain through the alternative proxy.
- if hash(flow.request.host) % 2 == 1:
- return ("localhost", 8082)
- else:
- return ("localhost", 8081)
-
-
-def request(context, flow):
- if flow.request.method == "CONNECT":
- # If the decision is done by domain, one could also modify the server address here.
- # We do it after CONNECT here to have the request data available as well.
- return
- address = proxy_address(flow)
- if flow.live:
-        flow.live.change_upstream_proxy_server(address)
\ No newline at end of file
diff --git a/examples/custom_contentviews.py b/examples/custom_contentviews.py
deleted file mode 100644
index 17920e51..00000000
--- a/examples/custom_contentviews.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import string
-import lxml.html
-import lxml.etree
-from libmproxy import utils, contentviews
-
-
-class ViewPigLatin(contentviews.View):
- name = "pig_latin_HTML"
- prompt = ("pig latin HTML", "l")
- content_types = ["text/html"]
-
- def __call__(self, data, **metadata):
- if utils.isXML(data):
- parser = lxml.etree.HTMLParser(
- strip_cdata=True,
- remove_blank_text=True
- )
- d = lxml.html.fromstring(data, parser=parser)
- docinfo = d.getroottree().docinfo
-
- def piglify(src):
- words = string.split(src)
- ret = ''
- for word in words:
- idx = -1
- while word[idx] in string.punctuation and (idx * -1) != len(word): idx -= 1
- if word[0].lower() in 'aeiou':
- if idx == -1:
- ret += word[0:] + "hay"
- else:
- ret += word[0:len(word) + idx + 1] + "hay" + word[idx + 1:]
- else:
- if idx == -1:
- ret += word[1:] + word[0] + "ay"
- else:
- ret += word[1:len(word) + idx + 1] + word[0] + "ay" + word[idx + 1:]
- ret += ' '
- return ret.strip()
-
- def recurse(root):
- if hasattr(root, 'text') and root.text:
- root.text = piglify(root.text)
- if hasattr(root, 'tail') and root.tail:
- root.tail = piglify(root.tail)
-
- if len(root):
- for child in root:
- recurse(child)
-
- recurse(d)
-
- s = lxml.etree.tostring(
- d,
- pretty_print=True,
- doctype=docinfo.doctype
- )
- return "HTML", contentviews.format_text(s)
-
-
-pig_view = ViewPigLatin()
-
-
-def start(context, argv):
- context.add_contentview(pig_view)
-
-
-def stop(context):
- context.remove_contentview(pig_view)
diff --git a/examples/dns_spoofing.py b/examples/dns_spoofing.py
deleted file mode 100644
index 7eb79695..00000000
--- a/examples/dns_spoofing.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-This inline script makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
-connections to mitmproxy. The way this works is that we rely on either the TLS Server Name Indication (SNI) or the
-Host header of the HTTP request.
-Of course, this is not foolproof - if an HTTPS connection comes without SNI, we don't
-know the actual target and cannot construct a certificate that looks valid.
-Similarly, if there's no Host header or a spoofed Host header, we're out of luck as well.
-Using transparent mode is the better option most of the time.
-
-Usage:
- mitmproxy
- -p 443
- -s dns_spoofing.py
- # Used as the target location if neither SNI nor host header are present.
- -R http://example.com/
- mitmdump
- -p 80
- -R http://localhost:443/
-
- (Setting up a single proxy instance and using iptables to redirect to it
- works as well)
-"""
-import re
-
-
-# This regex splits the host header into host and port.
-# Handles the edge case of IPv6 addresses containing colons.
-# https://bugzilla.mozilla.org/show_bug.cgi?id=45891
-parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
-
-
-def request(context, flow):
- if flow.client_conn.ssl_established:
- flow.request.scheme = "https"
- sni = flow.client_conn.connection.get_servername()
- port = 443
- else:
- flow.request.scheme = "http"
- sni = None
- port = 80
-
- host_header = flow.request.pretty_host
- m = parse_host_header.match(host_header)
- if m:
- host_header = m.group("host").strip("[]")
- if m.group("port"):
- port = int(m.group("port"))
-
- flow.request.host = sni or host_header
-    flow.request.port = port
\ No newline at end of file
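
The host header parsing above is plain re, so it can be exercised outside mitmproxy. A minimal standalone sketch (the sample header values and the 443 fallback are illustrative, not part of the original script):

import re

# Same pattern as in dns_spoofing.py: either a host without colons or a
# bracketed IPv6 literal, optionally followed by ":port".
parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")

for header in ("example.com", "example.com:8443", "[2001:db8::1]:443"):
    m = parse_host_header.match(header)
    host = m.group("host").strip("[]")
    port = int(m.group("port")) if m.group("port") else 443  # 443 as a demo default
    print(host, port)
# example.com 443
# example.com 8443
# 2001:db8::1 443
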
diff --git a/examples/dup_and_replay.py b/examples/dup_and_replay.py
deleted file mode 100644
index 9ba91d3b..00000000
--- a/examples/dup_and_replay.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def request(context, flow):
- f = context.duplicate_flow(flow)
- f.request.path = "/changed"
- context.replay_request(f)
diff --git a/examples/filt.py b/examples/filt.py
deleted file mode 100644
index d2daf9a2..00000000
--- a/examples/filt.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# This script demonstrates how to use mitmproxy's filter pattern in inline scripts.
-# Usage: mitmdump -s "filt.py FILTER"
-
-from libmproxy import filt
-
-
-def start(context, argv):
- if len(argv) != 2:
- raise ValueError("Usage: -s 'filt.py FILTER'")
- context.filter = filt.parse(argv[1])
-
-
-def response(context, flow):
- if flow.match(context.filter):
- print("Flow matches filter:")
- print(flow)
diff --git a/examples/flowbasic b/examples/flowbasic
deleted file mode 100755
index 78b9eff7..00000000
--- a/examples/flowbasic
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-"""
- This example shows how to build a proxy based on mitmproxy's Flow
- primitives.
-
- Heads Up: In the majority of cases, you want to use inline scripts.
-
- Note that request and response messages are not automatically replied to,
- so we need to implement handlers to do this.
-"""
-from libmproxy import flow
-from libmproxy.proxy import ProxyServer, ProxyConfig
-
-
-class MyMaster(flow.FlowMaster):
- def run(self):
- try:
- flow.FlowMaster.run(self)
- except KeyboardInterrupt:
- self.shutdown()
-
- def handle_request(self, f):
- f = flow.FlowMaster.handle_request(self, f)
- if f:
- f.reply()
- return f
-
- def handle_response(self, f):
- f = flow.FlowMaster.handle_response(self, f)
- if f:
- f.reply()
- print(f)
- return f
-
-
-config = ProxyConfig(
- port=8080,
- # use ~/.mitmproxy/mitmproxy-ca.pem as default CA file.
- cadir="~/.mitmproxy/"
-)
-state = flow.State()
-server = ProxyServer(config)
-m = MyMaster(server, state)
-m.run()
diff --git a/examples/flowwriter.py b/examples/flowwriter.py
deleted file mode 100644
index be2f285e..00000000
--- a/examples/flowwriter.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import random
-import sys
-
-from libmproxy.flow import FlowWriter
-
-
-def start(context, argv):
- if len(argv) != 2:
-        raise ValueError('Usage: -s "flowwriter.py filename"')
-
- if argv[1] == "-":
- f = sys.stdout
- else:
- f = open(argv[1], "wb")
- context.flow_writer = FlowWriter(f)
-
-
-def response(context, flow):
- if random.choice([True, False]):
- context.flow_writer.add(flow)
diff --git a/examples/har_extractor.py b/examples/har_extractor.py
deleted file mode 100644
index 4e905438..00000000
--- a/examples/har_extractor.py
+++ /dev/null
@@ -1,253 +0,0 @@
-"""
-
- This inline script utilizes harparser.HAR from
- https://github.com/JustusW/harparser to generate a HAR log object.
-"""
-from harparser import HAR
-
-from datetime import datetime
-
-
-class _HARLog(HAR.log):
- # The attributes need to be registered here for them to actually be
- # available later via self. This is due to HAREncodable linking __getattr__
- # to __getitem__. Anything that is set only in __init__ will just be added
- # as key/value pair to self.__classes__.
- __page_list__ = []
- __page_count__ = 0
- __page_ref__ = {}
-
- def __init__(self, page_list):
- self.__page_list__ = page_list
- self.__page_count__ = 0
- self.__page_ref__ = {}
-
- HAR.log.__init__(self, {"version": "1.2",
- "creator": {"name": "MITMPROXY HARExtractor",
- "version": "0.1",
- "comment": ""},
- "pages": [],
- "entries": []})
-
- def reset(self):
- self.__init__(self.__page_list__)
-
- def add(self, obj):
- if isinstance(obj, HAR.pages):
- self['pages'].append(obj)
- if isinstance(obj, HAR.entries):
- self['entries'].append(obj)
-
- def create_page_id(self):
- self.__page_count__ += 1
- return "autopage_%s" % str(self.__page_count__)
-
- def set_page_ref(self, page, ref):
- self.__page_ref__[page] = ref
-
- def get_page_ref(self, page):
- return self.__page_ref__.get(page, None)
-
- def get_page_list(self):
- return self.__page_list__
-
-
-def start(context, argv):
- """
-    On start we create a HARLog instance. You will have to adapt this to
-    suit your actual HAR generation needs, as it will probably be
-    necessary to cluster logs by IP or reset them from time to time.
- """
- context.dump_file = None
- if len(argv) > 1:
- context.dump_file = argv[1]
- else:
- raise ValueError(
- 'Usage: -s "har_extractor.py filename" '
- '(- will output to stdout, filenames ending with .zhar '
- 'will result in compressed har)'
- )
- context.HARLog = _HARLog(['https://github.com'])
- context.seen_server = set()
-
-
-def response(context, flow):
- """
- Called when a server response has been received. At the time of this
- message both a request and a response are present and completely done.
- """
- # Values are converted from float seconds to int milliseconds later.
- ssl_time = -.001
- connect_time = -.001
- if flow.server_conn not in context.seen_server:
-        # Calculate the connect_time for this server_conn. Afterwards, add it to
-        # the seen list to avoid the connect_time being present in entries
-        # that use an existing connection.
- connect_time = flow.server_conn.timestamp_tcp_setup - \
- flow.server_conn.timestamp_start
- context.seen_server.add(flow.server_conn)
-
- if flow.server_conn.timestamp_ssl_setup is not None:
- # Get the ssl_time for this server_conn as the difference between
- # the start of the successful tcp setup and the successful ssl
- # setup. If no ssl setup has been made it is left as -1 since it
- # doesn't apply to this connection.
- ssl_time = flow.server_conn.timestamp_ssl_setup - \
- flow.server_conn.timestamp_tcp_setup
-
- # Calculate the raw timings from the different timestamps present in the
-    # request and response objects. For lack of a way to measure it, DNS timings
-    # cannot be calculated. The same goes for HAR blocked: MITMProxy will open
-    # a server connection as soon as it receives the host and port from the
-    # client connection. So the time that would be HAR blocked is actually spent
-    # between request.timestamp_end and response.timestamp_start, and thus
-    # correlates to HAR wait instead.
- timings_raw = {
- 'send': flow.request.timestamp_end - flow.request.timestamp_start,
- 'wait': flow.response.timestamp_start - flow.request.timestamp_end,
- 'receive': flow.response.timestamp_end - flow.response.timestamp_start,
- 'connect': connect_time,
- 'ssl': ssl_time
- }
-
- # HAR timings are integers in ms, so we have to re-encode the raw timings to
- # that format.
- timings = dict([(key, int(1000 * value))
- for key, value in timings_raw.iteritems()])
-
- # The full_time is the sum of all timings. Timings set to -1 will be ignored
- # as per spec.
- full_time = 0
- for item in timings.values():
- if item > -1:
- full_time += item
-
-    started_date_time = datetime.utcfromtimestamp(
-        flow.request.timestamp_start
-    ).isoformat()
-
- request_query_string = [{"name": k, "value": v}
- for k, v in flow.request.get_query()]
- request_http_version = flow.request.http_version
- # Cookies are shaped as tuples by MITMProxy.
- request_cookies = [{"name": k.strip(), "value": v[0]}
- for k, v in (flow.request.get_cookies() or {}).iteritems()]
- request_headers = [{"name": k, "value": v} for k, v in flow.request.headers]
- request_headers_size = len(str(flow.request.headers))
- request_body_size = len(flow.request.content)
-
- response_http_version = flow.response.http_version
- # Cookies are shaped as tuples by MITMProxy.
- response_cookies = [{"name": k.strip(), "value": v[0]}
- for k, v in (flow.response.get_cookies() or {}).iteritems()]
- response_headers = [{"name": k, "value": v}
- for k, v in flow.response.headers]
- response_headers_size = len(str(flow.response.headers))
- response_body_size = len(flow.response.content)
- response_body_decoded_size = len(flow.response.get_decoded_content())
- response_body_compression = response_body_decoded_size - response_body_size
- response_mime_type = flow.response.headers.get('Content-Type', '')
- response_redirect_url = flow.response.headers.get('Location', '')
-
- entry = HAR.entries(
- {
- "startedDateTime": started_date_time,
- "time": full_time,
- "request": {
- "method": flow.request.method,
- "url": flow.request.url,
- "httpVersion": request_http_version,
- "cookies": request_cookies,
- "headers": request_headers,
- "queryString": request_query_string,
- "headersSize": request_headers_size,
- "bodySize": request_body_size,
- },
- "response": {
- "status": flow.response.status_code,
- "statusText": flow.response.msg,
- "httpVersion": response_http_version,
- "cookies": response_cookies,
- "headers": response_headers,
- "content": {
- "size": response_body_size,
- "compression": response_body_compression,
- "mimeType": response_mime_type},
- "redirectURL": response_redirect_url,
- "headersSize": response_headers_size,
- "bodySize": response_body_size,
- },
- "cache": {},
- "timings": timings,
- })
-
- # If the current url is in the page list of context.HARLog or does not have
-    # a referrer, we add it as a new pages object.
- if flow.request.url in context.HARLog.get_page_list() or flow.request.headers.get(
- 'Referer',
- None) is None:
- page_id = context.HARLog.create_page_id()
- context.HARLog.add(
- HAR.pages({
- "startedDateTime": entry['startedDateTime'],
- "id": page_id,
- "title": flow.request.url,
- })
- )
- context.HARLog.set_page_ref(flow.request.url, page_id)
- entry['pageref'] = page_id
-
-    # Look up the referer in the page_ref of context.HARLog to point this entry's
- # pageref attribute to the right pages object, then set it as a new
- # reference to build a reference tree.
- elif context.HARLog.get_page_ref(flow.request.headers.get('Referer')) is not None:
- entry['pageref'] = context.HARLog.get_page_ref(
- flow.request.headers['Referer']
- )
- context.HARLog.set_page_ref(
- flow.request.headers['Referer'], entry['pageref']
- )
-
- context.HARLog.add(entry)
-
-
-def done(context):
- """
- Called once on script shutdown, after any other events.
- """
-    import pprint
- import json
-
- json_dump = context.HARLog.json()
- compressed_json_dump = context.HARLog.compress()
-
- if context.dump_file == '-':
- context.log(pprint.pformat(json.loads(json_dump)))
- elif context.dump_file.endswith('.zhar'):
-        open(context.dump_file, "w").write(compressed_json_dump)
- else:
-        open(context.dump_file, "w").write(json_dump)
- context.log(
- "HAR log finished with %s bytes (%s bytes compressed)" % (
- len(json_dump), len(compressed_json_dump)
- )
- )
- context.log(
- "Compression rate is %s%%" % str(
- 100. * len(compressed_json_dump) / len(json_dump)
- )
- )
-
-
-def print_attributes(obj, filter_string=None, hide_privates=False):
- """
- Useful helper method to quickly get all attributes of an object and its
- values.
- """
- for attr in dir(obj):
- if hide_privates and "__" in attr:
- continue
- if filter_string is not None and filter_string not in attr:
- continue
- value = getattr(obj, attr)
- print("%s.%s" % ('obj', attr), value, type(value))
diff --git a/examples/iframe_injector.py b/examples/iframe_injector.py
deleted file mode 100644
index 29de9b63..00000000
--- a/examples/iframe_injector.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Usage: mitmdump -s "iframe_injector.py url"
-# (this script works best with --anticache)
-from bs4 import BeautifulSoup
-from libmproxy.models import decoded
-
-
-def start(context, argv):
- if len(argv) != 2:
- raise ValueError('Usage: -s "iframe_injector.py url"')
- context.iframe_url = argv[1]
-
-
-def response(context, flow):
- if flow.request.host in context.iframe_url:
- return
- with decoded(flow.response): # Remove content encoding (gzip, ...)
- html = BeautifulSoup(flow.response.content)
- if html.body:
- iframe = html.new_tag(
- "iframe",
- src=context.iframe_url,
- frameborder=0,
- height=0,
- width=0)
- html.body.insert(0, iframe)
- flow.response.content = str(html)
- context.log("Iframe inserted.")
diff --git a/examples/mitmproxywrapper.py b/examples/mitmproxywrapper.py
deleted file mode 100755
index 7ea10715..00000000
--- a/examples/mitmproxywrapper.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python
-#
-# Helper tool to enable/disable OS X proxy and wrap mitmproxy
-#
-# Get usage information with:
-#
-# mitmproxywrapper.py -h
-#
-
-import subprocess
-import re
-import argparse
-import contextlib
-import os
-import sys
-
-
-class Wrapper(object):
-
- def __init__(self, port, extra_arguments=None):
- self.port = port
- self.extra_arguments = extra_arguments
-
- def run_networksetup_command(self, *arguments):
- return subprocess.check_output(
- ['sudo', 'networksetup'] + list(arguments))
-
- def proxy_state_for_service(self, service):
- state = self.run_networksetup_command(
- '-getwebproxy',
- service).splitlines()
- return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state])
-
- def enable_proxy_for_service(self, service):
- print('Enabling proxy on {}...'.format(service))
- for subcommand in ['-setwebproxy', '-setsecurewebproxy']:
- self.run_networksetup_command(
- subcommand, service, '127.0.0.1', str(
- self.port))
-
- def disable_proxy_for_service(self, service):
- print('Disabling proxy on {}...'.format(service))
- for subcommand in ['-setwebproxystate', '-setsecurewebproxystate']:
- self.run_networksetup_command(subcommand, service, 'Off')
-
- def interface_name_to_service_name_map(self):
- order = self.run_networksetup_command('-listnetworkserviceorder')
- mapping = re.findall(
- r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$',
- order,
- re.MULTILINE)
- return dict([(b, a) for (a, b) in mapping])
-
- def run_command_with_input(self, command, input):
- popen = subprocess.Popen(
- command,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE)
- (stdout, stderr) = popen.communicate(input)
- return stdout
-
-    def primary_interface_name(self):
- scutil_script = 'get State:/Network/Global/IPv4\nd.show\n'
- stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
- interface, = re.findall(r'PrimaryInterface\s*:\s*(.+)', stdout)
- return interface
-
- def primary_service_name(self):
- return self.interface_name_to_service_name_map()[
-            self.primary_interface_name()]
-
- def proxy_enabled_for_service(self, service):
- return self.proxy_state_for_service(service)['Enabled'] == 'Yes'
-
- def toggle_proxy(self):
- new_state = not self.proxy_enabled_for_service(
- self.primary_service_name())
- for service_name in self.connected_service_names():
- if self.proxy_enabled_for_service(service_name) and not new_state:
- self.disable_proxy_for_service(service_name)
- elif not self.proxy_enabled_for_service(service_name) and new_state:
- self.enable_proxy_for_service(service_name)
-
- def connected_service_names(self):
- scutil_script = 'list\n'
- stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
- service_ids = re.findall(r'State:/Network/Service/(.+)/IPv4', stdout)
-
- service_names = []
- for service_id in service_ids:
- scutil_script = 'show Setup:/Network/Service/{}\n'.format(
- service_id)
- stdout = self.run_command_with_input(
- '/usr/sbin/scutil',
- scutil_script)
- service_name, = re.findall(r'UserDefinedName\s*:\s*(.+)', stdout)
- service_names.append(service_name)
-
- return service_names
-
- def wrap_mitmproxy(self):
- with self.wrap_proxy():
- cmd = ['mitmproxy', '-p', str(self.port)]
- if self.extra_arguments:
- cmd.extend(self.extra_arguments)
- subprocess.check_call(cmd)
-
- def wrap_honeyproxy(self):
- with self.wrap_proxy():
- popen = subprocess.Popen('honeyproxy.sh')
- try:
- popen.wait()
- except KeyboardInterrupt:
- popen.terminate()
-
- @contextlib.contextmanager
- def wrap_proxy(self):
- connected_service_names = self.connected_service_names()
- for service_name in connected_service_names:
- if not self.proxy_enabled_for_service(service_name):
- self.enable_proxy_for_service(service_name)
-
- yield
-
- for service_name in connected_service_names:
- if self.proxy_enabled_for_service(service_name):
- self.disable_proxy_for_service(service_name)
-
- @classmethod
- def ensure_superuser(cls):
- if os.getuid() != 0:
- print('Relaunching with sudo...')
- os.execv('/usr/bin/sudo', ['/usr/bin/sudo'] + sys.argv)
-
- @classmethod
- def main(cls):
- parser = argparse.ArgumentParser(
- description='Helper tool for OS X proxy configuration and mitmproxy.',
- epilog='Any additional arguments will be passed on unchanged to mitmproxy.')
- parser.add_argument(
- '-t',
- '--toggle',
- action='store_true',
- help='just toggle the proxy configuration')
-# parser.add_argument('--honeyproxy', action='store_true', help='run honeyproxy instead of mitmproxy')
- parser.add_argument(
- '-p',
- '--port',
- type=int,
- help='override the default port of 8080',
- default=8080)
- args, extra_arguments = parser.parse_known_args()
-
- wrapper = cls(port=args.port, extra_arguments=extra_arguments)
-
- if args.toggle:
- wrapper.toggle_proxy()
-# elif args.honeyproxy:
-# wrapper.wrap_honeyproxy()
- else:
- wrapper.wrap_mitmproxy()
-
-
-if __name__ == '__main__':
- Wrapper.ensure_superuser()
- Wrapper.main()
diff --git a/examples/modify_form.py b/examples/modify_form.py
deleted file mode 100644
index 3e9d15c0..00000000
--- a/examples/modify_form.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def request(context, flow):
- if "application/x-www-form-urlencoded" in flow.request.headers.get("content-type", ""):
- form = flow.request.get_form_urlencoded()
- form["mitmproxy"] = ["rocks"]
- flow.request.set_form_urlencoded(form)
diff --git a/examples/modify_querystring.py b/examples/modify_querystring.py
deleted file mode 100644
index 7f31a48f..00000000
--- a/examples/modify_querystring.py
+++ /dev/null
@@ -1,6 +0,0 @@
-
-def request(context, flow):
- q = flow.request.get_query()
- if q:
- q["mitmproxy"] = ["rocks"]
- flow.request.set_query(q)
diff --git a/examples/modify_response_body.py b/examples/modify_response_body.py
deleted file mode 100644
index a35e1525..00000000
--- a/examples/modify_response_body.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Usage: mitmdump -s "modify_response_body.py mitmproxy bananas"
-# (this script works best with --anticache)
-from libmproxy.models import decoded
-
-
-def start(context, argv):
- if len(argv) != 3:
-        raise ValueError('Usage: -s "modify_response_body.py old new"')
- # You may want to use Python's argparse for more sophisticated argument
- # parsing.
- context.old, context.new = argv[1], argv[2]
-
-
-def response(context, flow):
- with decoded(flow.response): # automatically decode gzipped responses.
- flow.response.content = flow.response.content.replace(
- context.old,
- context.new)
diff --git a/examples/nonblocking.py b/examples/nonblocking.py
deleted file mode 100644
index 7bc9c07b..00000000
--- a/examples/nonblocking.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import time
-from libmproxy.script import concurrent
-
-
-@concurrent # Remove this and see what happens
-def request(context, flow):
- print("handle request: %s%s" % (flow.request.host, flow.request.path))
- time.sleep(5)
- print("start request: %s%s" % (flow.request.host, flow.request.path))
diff --git a/examples/proxapp.py b/examples/proxapp.py
deleted file mode 100644
index 4d8e7b58..00000000
--- a/examples/proxapp.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""
-This example shows how to graft a WSGI app onto mitmproxy. In this
-instance, we're using the Flask framework (http://flask.pocoo.org/) to expose
-a single simplest-possible page.
-"""
-from flask import Flask
-
-app = Flask("proxapp")
-
-
-@app.route('/')
-def hello_world():
- return 'Hello World!'
-
-
-# Register the app using the magic domain "proxapp" on port 80. Requests to
-# this domain and port combination will now be routed to the WSGI app instance.
-def start(context, argv):
- context.app_registry.add(app, "proxapp", 80)
-
- # SSL works too, but the magic domain needs to be resolvable from the mitmproxy machine due to mitmproxy's design.
-    # mitmproxy will connect to said domain and serve its certificate (unless --no-upstream-cert is set)
- # but won't send any data.
- context.app_registry.add(app, "example.com", 443)
diff --git a/examples/read_dumpfile b/examples/read_dumpfile
deleted file mode 100755
index b329c0e1..00000000
--- a/examples/read_dumpfile
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-#
-# Simple script showing how to read a mitmproxy dump file
-#
-
-from libmproxy import flow
-import pprint
-import sys
-
-with open(sys.argv[1], "rb") as logfile:
- freader = flow.FlowReader(logfile)
- pp = pprint.PrettyPrinter(indent=4)
- try:
- for f in freader.stream():
- print(f)
- print(f.request.host)
- pp.pprint(f.get_state())
- print("")
- except flow.FlowReadError as v:
-        print("Flow file corrupted. Stopped loading.")
diff --git a/examples/redirect_requests.py b/examples/redirect_requests.py
deleted file mode 100644
index a3145083..00000000
--- a/examples/redirect_requests.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""
-This example shows two ways to redirect flows to other destinations.
-"""
-from libmproxy.models import HTTPResponse
-from netlib.http import Headers
-
-def request(context, flow):
- # pretty_host takes the "Host" header of the request into account,
- # which is useful in transparent mode where we usually only have the IP
- # otherwise.
-
- # Method 1: Answer with a locally generated response
- if flow.request.pretty_host.endswith("example.com"):
- resp = HTTPResponse(
- "HTTP/1.1", 200, "OK",
- Headers(Content_Type="text/html"),
- "helloworld")
- flow.reply(resp)
-
- # Method 2: Redirect the request to a different server
- if flow.request.pretty_host.endswith("example.org"):
- flow.request.host = "mitmproxy.org"
diff --git a/examples/sslstrip.py b/examples/sslstrip.py
deleted file mode 100644
index 369427a2..00000000
--- a/examples/sslstrip.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from netlib.http import decoded
-import re
-from six.moves import urllib
-
-def start(context, argv):
-
-    # set of SSL/TLS-capable hosts
-    context.secure_hosts = set()
-
-def request(context, flow):
-
-    flow.request.headers.pop('If-Modified-Since', None)
-    flow.request.headers.pop('Cache-Control', None)
-
-    # proxy connections to SSL-enabled hosts
-    if flow.request.pretty_host in context.secure_hosts:
-        flow.request.scheme = 'https'
-        flow.request.port = 443
-
-def response(context, flow):
-
-    with decoded(flow.response):
-        flow.response.headers.pop('Strict-Transport-Security', None)
-        flow.response.headers.pop('Public-Key-Pins', None)
-
-        # strip links in the response body
-        flow.response.content = flow.response.content.replace('https://', 'http://')
-
-        # strip links in the 'Location' header
-        if flow.response.headers.get('Location', '').startswith('https://'):
-            location = flow.response.headers['Location']
-            hostname = urllib.parse.urlparse(location).hostname
-            if hostname:
-                context.secure_hosts.add(hostname)
-            flow.response.headers['Location'] = location.replace('https://', 'http://', 1)
-
-        # strip the secure flag from 'Set-Cookie' headers
-        cookies = flow.response.headers.get_all('Set-Cookie')
-        cookies = [re.sub(r';\s*secure\s*', '', s, flags=re.IGNORECASE) for s in cookies]
- flow.response.headers.set_all('Set-Cookie', cookies)
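
The Set-Cookie rewrite at the end of sslstrip.py is an ordinary regex substitution and can be sanity-checked on its own. A standalone sketch (the sample cookie value is made up):

import re

cookie = "session=abc123; Path=/; Secure; HttpOnly"
stripped = re.sub(r';\s*secure\s*', '', cookie, flags=re.IGNORECASE)
print(stripped)  # session=abc123; Path=/; HttpOnly
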
diff --git a/examples/stickycookies b/examples/stickycookies
deleted file mode 100755
index 7e84f71c..00000000
--- a/examples/stickycookies
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-"""
-This example builds on mitmproxy's base proxying infrastructure to
-implement functionality similar to the "sticky cookies" option.
-
-Heads Up: In the majority of cases, you want to use inline scripts.
-"""
-import os
-from libmproxy import controller, proxy
-from libmproxy.proxy.server import ProxyServer
-
-
-class StickyMaster(controller.Master):
- def __init__(self, server):
- controller.Master.__init__(self, server)
- self.stickyhosts = {}
-
- def run(self):
- try:
- return controller.Master.run(self)
- except KeyboardInterrupt:
- self.shutdown()
-
- def handle_request(self, flow):
- hid = (flow.request.host, flow.request.port)
- if "cookie" in flow.request.headers:
- self.stickyhosts[hid] = flow.request.headers.get_all("cookie")
- elif hid in self.stickyhosts:
- flow.request.headers.set_all("cookie", self.stickyhosts[hid])
- flow.reply()
-
- def handle_response(self, flow):
- hid = (flow.request.host, flow.request.port)
- if "set-cookie" in flow.response.headers:
- self.stickyhosts[hid] = flow.response.headers.get_all("set-cookie")
- flow.reply()
-
-
-config = proxy.ProxyConfig(port=8080)
-server = ProxyServer(config)
-m = StickyMaster(server)
-m.run()
diff --git a/examples/stream.py b/examples/stream.py
deleted file mode 100644
index 3adbe437..00000000
--- a/examples/stream.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def responseheaders(context, flow):
- """
- Enables streaming for all responses.
- """
- flow.response.stream = True
diff --git a/examples/stream_modify.py b/examples/stream_modify.py
deleted file mode 100644
index aa395c03..00000000
--- a/examples/stream_modify.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-This inline script modifies a streamed response.
-If you do not need streaming, see the modify_response_body example.
-Be aware that content replacement isn't trivial:
- - If the transfer encoding isn't chunked, you cannot simply change the content length.
-    - If you want to replace all occurrences of "foobar", make sure to catch the cases
- where one chunk ends with [...]foo" and the next starts with "bar[...].
-"""
-
-
-def modify(chunks):
- """
- chunks is a generator that can be used to iterate over all chunks.
- """
- for chunk in chunks:
- yield chunk.replace("foo", "bar")
-
-
-def responseheaders(context, flow):
- flow.response.stream = modify
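
The cross-chunk caveat in the stream_modify.py docstring can be handled with a small carry-over buffer: replace in carry+chunk, then hold back the last len(needle)-1 bytes, since they may begin a match that only completes in the next chunk. A standalone sketch of that idea (a plain generator, not wired into mitmproxy; it assumes the replacement does not itself end in a prefix of the needle):

def replace_streaming(chunks, needle=b"foo", replacement=b"bar"):
    """Replace needle in a chunked stream, catching matches split across chunks."""
    keep = len(needle) - 1
    carry = b""
    for chunk in chunks:
        data = (carry + chunk).replace(needle, replacement)
        # Hold back the tail: it may be the start of a needle that continues
        # in the next chunk.
        carry, data = (data[-keep:], data[:-keep]) if keep else (b"", data)
        yield data
    if carry:
        yield carry

print(b"".join(replace_streaming([b"xxfo", b"oyy"])))  # b'xxbaryy'
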
diff --git a/examples/stub.py b/examples/stub.py
deleted file mode 100644
index 516b71a5..00000000
--- a/examples/stub.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
- This is a script stub, with definitions for all events.
-"""
-
-
-def start(context, argv):
- """
- Called once on script startup, before any other events.
- """
- context.log("start")
-
-
-def clientconnect(context, root_layer):
- """
- Called when a client initiates a connection to the proxy. Note that a
-    connection can correspond to multiple HTTP requests.
- """
- context.log("clientconnect")
-
-
-def request(context, flow):
- """
- Called when a client request has been received.
- """
- context.log("request")
-
-
-def serverconnect(context, server_conn):
- """
- Called when the proxy initiates a connection to the target server. Note that a
-    connection can correspond to multiple HTTP requests.
- """
- context.log("serverconnect")
-
-
-def responseheaders(context, flow):
- """
- Called when the response headers for a server response have been received,
- but the response body has not been processed yet. Can be used to tell mitmproxy
- to stream the response.
- """
- context.log("responseheaders")
-
-
-def response(context, flow):
- """
- Called when a server response has been received.
- """
- context.log("response")
-
-
-def error(context, flow):
- """
-    Called when a flow error has occurred, e.g. invalid server responses, or
- interrupted connections. This is distinct from a valid server HTTP error
- response, which is simply a response with an HTTP error code.
- """
- context.log("error")
-
-
-def serverdisconnect(context, server_conn):
- """
- Called when the proxy closes the connection to the target server.
- """
- context.log("serverdisconnect")
-
-
-def clientdisconnect(context, root_layer):
- """
- Called when a client disconnects from the proxy.
- """
- context.log("clientdisconnect")
-
-
-def done(context):
- """
- Called once on script shutdown, after any other events.
- """
- context.log("done")
diff --git a/examples/tcp_message.py b/examples/tcp_message.py
deleted file mode 100644
index c63368e4..00000000
--- a/examples/tcp_message.py
+++ /dev/null
@@ -1,24 +0,0 @@
-'''
-tcp_message Inline Script Hook API Demonstration
-------------------------------------------------
-
-* modifies packets containing "foo" to "bar"
-* prints various details for each packet.
-
-example cmdline invocation:
-mitmdump -T --host --tcp ".*" -q -s examples/tcp_message.py
-'''
-from netlib.utils import clean_bin
-
-def tcp_message(ctx, tcp_msg):
- modified_msg = tcp_msg.message.replace("foo", "bar")
-
-    is_modified = modified_msg != tcp_msg.message
- tcp_msg.message = modified_msg
-
- print("[tcp_message{}] from {} {} to {} {}:\r\n{}".format(
- " (modified)" if is_modified else "",
- "client" if tcp_msg.sender == tcp_msg.client_conn else "server",
- tcp_msg.sender.address,
- "server" if tcp_msg.receiver == tcp_msg.server_conn else "client",
- tcp_msg.receiver.address, clean_bin(tcp_msg.message)))
diff --git a/examples/tls_passthrough.py b/examples/tls_passthrough.py
deleted file mode 100644
index 0d41b725..00000000
--- a/examples/tls_passthrough.py
+++ /dev/null
@@ -1,136 +0,0 @@
-"""
-This inline script allows conditional TLS Interception based
-on a user-defined strategy.
-
-Example:
-
- > mitmdump -s tls_passthrough.py
-
- 1. curl --proxy http://localhost:8080 https://example.com --insecure
- // works - we'll also see the contents in mitmproxy
-
- 2. curl --proxy http://localhost:8080 https://example.com --insecure
- // still works - we'll also see the contents in mitmproxy
-
- 3. curl --proxy http://localhost:8080 https://example.com
- // fails with a certificate error, which we will also see in mitmproxy
-
- 4. curl --proxy http://localhost:8080 https://example.com
- // works again, but mitmproxy does not intercept and we do *not* see the contents
-
-Authors: Maximilian Hils, Matthew Tuusberg
-"""
-from __future__ import (absolute_import, print_function, division)
-import collections
-import random
-
-from enum import Enum
-
-from libmproxy.exceptions import TlsProtocolException
-from libmproxy.protocol import TlsLayer, RawTCPLayer
-
-
-class InterceptionResult(Enum):
- success = True
- failure = False
- skipped = None
-
-
-class _TlsStrategy(object):
- """
- Abstract base class for interception strategies.
- """
- def __init__(self):
- # A server_address -> interception results mapping
- self.history = collections.defaultdict(lambda: collections.deque(maxlen=200))
-
- def should_intercept(self, server_address):
- """
- Returns:
- True, if we should attempt to intercept the connection.
- False, if we want to employ pass-through instead.
- """
- raise NotImplementedError()
-
- def record_success(self, server_address):
- self.history[server_address].append(InterceptionResult.success)
-
- def record_failure(self, server_address):
- self.history[server_address].append(InterceptionResult.failure)
-
- def record_skipped(self, server_address):
- self.history[server_address].append(InterceptionResult.skipped)
-
-
-class ConservativeStrategy(_TlsStrategy):
- """
- Conservative Interception Strategy - only intercept if there haven't been any failed attempts
- in the history.
- """
-
- def should_intercept(self, server_address):
- if InterceptionResult.failure in self.history[server_address]:
- return False
- return True
-
-
-class ProbabilisticStrategy(_TlsStrategy):
- """
- Fixed probability that we intercept a given connection.
- """
- def __init__(self, p):
- self.p = p
- super(ProbabilisticStrategy, self).__init__()
-
- def should_intercept(self, server_address):
- return random.uniform(0, 1) < self.p
-
-
-class TlsFeedback(TlsLayer):
- """
- Monkey-patch _establish_tls_with_client to get feedback if TLS could be established
- successfully on the client connection (which may fail due to cert pinning).
- """
-
- def _establish_tls_with_client(self):
- server_address = self.server_conn.address
- tls_strategy = self.script_context.tls_strategy
-
- try:
- super(TlsFeedback, self)._establish_tls_with_client()
- except TlsProtocolException as e:
- tls_strategy.record_failure(server_address)
- raise e
- else:
- tls_strategy.record_success(server_address)
-
-
-# inline script hooks below.
-
-
-def start(context, argv):
- if len(argv) == 2:
- context.tls_strategy = ProbabilisticStrategy(float(argv[1]))
- else:
- context.tls_strategy = ConservativeStrategy()
-
-
-def next_layer(context, next_layer):
- """
- This hook does the actual magic - if the next layer is planned to be a TLS layer,
- we check if we want to enter pass-through mode instead.
- """
- if isinstance(next_layer, TlsLayer) and next_layer._client_tls:
- server_address = next_layer.server_conn.address
-
- if context.tls_strategy.should_intercept(server_address):
- # We try to intercept.
- # Monkey-Patch the layer to get feedback from the TLSLayer if interception worked.
- next_layer.__class__ = TlsFeedback
- next_layer.script_context = context
- else:
- # We don't intercept - reply with a pass-through layer and add a "skipped" entry.
- context.log("TLS passthrough for %s" % repr(next_layer.server_conn.address), "info")
- next_layer_replacement = RawTCPLayer(next_layer.ctx, logging=False)
- next_layer.reply(next_layer_replacement)
- context.tls_strategy.record_skipped(server_address)
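
The TlsFeedback class above works by reassigning the instance's __class__ so that the already-constructed layer picks up the overridden method. The pattern itself is plain Python and can be shown in isolation (the class names here are made up for the illustration):

class Layer(object):
    def establish(self):
        print("performing the handshake")

class FeedbackLayer(Layer):
    def establish(self):
        try:
            super(FeedbackLayer, self).establish()
        except Exception:
            print("handshake failed, record failure")
            raise
        else:
            print("handshake succeeded, record success")

layer = Layer()
layer.__class__ = FeedbackLayer  # same object, but establish() is now wrapped
layer.establish()
# performing the handshake
# handshake succeeded, record success
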
diff --git a/examples/upsidedownternet.py b/examples/upsidedownternet.py
deleted file mode 100644
index f2e73047..00000000
--- a/examples/upsidedownternet.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import cStringIO
-from PIL import Image
-from libmproxy.models import decoded
-
-
-def response(context, flow):
- if flow.response.headers.get("content-type", "").startswith("image"):
- with decoded(flow.response): # automatically decode gzipped responses.
- try:
- s = cStringIO.StringIO(flow.response.content)
- img = Image.open(s).rotate(180)
- s2 = cStringIO.StringIO()
- img.save(s2, "png")
- flow.response.content = s2.getvalue()
- flow.response.headers["content-type"] = "image/png"
-        except Exception:  # Unknown image types etc.
- pass