Diffstat (limited to 'examples/complex')
-rw-r--r--   examples/complex/dup_and_replay.py   14
-rw-r--r--   examples/complex/har_dump.py          8
-rw-r--r--   examples/complex/sslstrip.py          2
-rwxr-xr-x   examples/complex/xss_scanner.py       4
4 files changed, 17 insertions, 11 deletions
diff --git a/examples/complex/dup_and_replay.py b/examples/complex/dup_and_replay.py
index 2baa1ea6..adcebff3 100644
--- a/examples/complex/dup_and_replay.py
+++ b/examples/complex/dup_and_replay.py
@@ -2,7 +2,13 @@ from mitmproxy import ctx


 def request(flow):
-    f = flow.copy()
-    ctx.master.view.add(f)
-    f.request.path = "/changed"
-    ctx.master.replay_request(f, block=True)
+    # Avoid an infinite loop by not replaying already replayed requests
+    if flow.request.is_replay:
+        return
+    flow = flow.copy()
+    # Only interactive tools have a view. If we have one, add a duplicate entry
+    # for our flow.
+    if "view" in ctx.master.addons:
+        ctx.master.commands.call("view.add", [flow])
+    flow.request.path = "/changed"
+    ctx.master.commands.call("replay.client", [flow])
diff --git a/examples/complex/har_dump.py b/examples/complex/har_dump.py
index 9e287a19..040c7d28 100644
--- a/examples/complex/har_dump.py
+++ b/examples/complex/har_dump.py
@@ -20,11 +20,11 @@ from mitmproxy import ctx
 from mitmproxy.utils import strutils
 from mitmproxy.net.http import cookies

-HAR = {}  # type: typing.Dict
+HAR: typing.Dict = {}

 # A list of server seen till now is maintained so we can avoid
 # using 'connect' time for entries that use an existing connection.
-SERVERS_SEEN = set()  # type: typing.Set[connections.ServerConnection]
+SERVERS_SEEN: typing.Set[connections.ServerConnection] = set()


 def load(l):
@@ -155,12 +155,12 @@ def done():
     Called once on script shutdown, after any other events.
     """
     if ctx.options.hardump:
-        json_dump = json.dumps(HAR, indent=2)  # type: str
+        json_dump: str = json.dumps(HAR, indent=2)
         if ctx.options.hardump == '-':
             mitmproxy.ctx.log(json_dump)
         else:
-            raw = json_dump.encode()  # type: bytes
+            raw: bytes = json_dump.encode()
             if ctx.options.hardump.endswith('.zhar'):
                 raw = zlib.compress(raw, 9)
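The remaining changes are mechanical: PEP 484 type comments become PEP 526 variable annotations, which require Python 3.6+ but keep the type information in the assignment itself. A minimal sketch of the pattern (the names are invented for illustration):

import typing

# Old style: the annotation lives in a comment that only type checkers read.
seen_hosts = set()  # type: typing.Set[str]

# New style: the annotation is part of the assignment (PEP 526, Python 3.6+).
seen_hosts: typing.Set[str] = set()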
diff --git a/examples/complex/sslstrip.py b/examples/complex/sslstrip.py
index c3f8c4f7..c862536f 100644
--- a/examples/complex/sslstrip.py
+++ b/examples/complex/sslstrip.py
@@ -9,7 +9,7 @@ import typing  # noqa
 from mitmproxy import http

 # set of SSL/TLS capable hosts
-secure_hosts = set()  # type: typing.Set[str]
+secure_hosts: typing.Set[str] = set()


 def request(flow: http.HTTPFlow) -> None:
diff --git a/examples/complex/xss_scanner.py b/examples/complex/xss_scanner.py
index 0c0dd0f3..55fc2fe7 100755
--- a/examples/complex/xss_scanner.py
+++ b/examples/complex/xss_scanner.py
@@ -95,7 +95,7 @@ def find_unclaimed_URLs(body: str, requestUrl: bytes) -> None:
         return None

     class ScriptURLExtractor(HTMLParser):
-        script_URLs = []  # type: List[str]
+        script_URLs: List[str] = []

         def handle_starttag(self, tag, attrs):
             if (tag == "script" or tag == "iframe") and "src" in [name for name, value in attrs]:
@@ -254,7 +254,7 @@ def paths_to_text(html: str, string: str) -> List[str]:
     class PathHTMLParser(HTMLParser):
         currentPath = ""
-        paths = []  # type: List[str]
+        paths: List[str] = []

         def handle_starttag(self, tag, attrs):
             self.currentPath += ("/" + tag)
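Class attributes take the same annotation syntax, as in the two parser classes above. A small standalone sketch with an invented parser class; note that, exactly as with the old comment form, the annotated default is still a single list object shared by every instance:

from html.parser import HTMLParser
from typing import List


class LinkExtractor(HTMLParser):
    # PEP 526 annotation on a class attribute; the [] default is shared
    # across instances, just as it was with the type-comment version.
    links: List[str] = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            for name, value in attrs:
                if name == "href" and value is not None:
                    self.links.append(value)


parser = LinkExtractor()
parser.feed('<a href="https://example.com/">example</a>')
print(parser.links)  # ['https://example.com/']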