Diffstat (limited to 'examples/complex')
-rw-r--r--  examples/complex/dns_spoofing.py  |  2
-rw-r--r--  examples/complex/har_dump.py      |  4
-rw-r--r--  examples/complex/stream.py        |  2
-rwxr-xr-x  examples/complex/xss_scanner.py   | 17
4 files changed, 15 insertions, 10 deletions
diff --git a/examples/complex/dns_spoofing.py b/examples/complex/dns_spoofing.py
index 632783a7..e28934ab 100644
--- a/examples/complex/dns_spoofing.py
+++ b/examples/complex/dns_spoofing.py
@@ -33,7 +33,7 @@ parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
class Rerouter:
def request(self, flow):
- if flow.client_conn.ssl_established:
+ if flow.client_conn.tls_established:
flow.request.scheme = "https"
sni = flow.client_conn.connection.get_servername()
port = 443
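
For reference, a minimal sketch (not taken from this patch) of how the renamed flag is typically read in an addon; it assumes the same mitmproxy API used in dns_spoofing.py above, where client connections expose tls_established:

from mitmproxy import http

def request(flow: http.HTTPFlow) -> None:
    # Keep the upstream scheme and port consistent with what the client negotiated.
    if flow.client_conn.tls_established:
        flow.request.scheme = "https"
        flow.request.port = 443
    else:
        flow.request.scheme = "http"
        flow.request.port = 80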
diff --git a/examples/complex/har_dump.py b/examples/complex/har_dump.py
index 21bcc341..66a81a7d 100644
--- a/examples/complex/har_dump.py
+++ b/examples/complex/har_dump.py
@@ -58,8 +58,8 @@ def response(flow):
connect_time = (flow.server_conn.timestamp_tcp_setup -
flow.server_conn.timestamp_start)
- if flow.server_conn.timestamp_ssl_setup is not None:
- ssl_time = (flow.server_conn.timestamp_ssl_setup -
+ if flow.server_conn.timestamp_tls_setup is not None:
+ ssl_time = (flow.server_conn.timestamp_tls_setup -
flow.server_conn.timestamp_tcp_setup)
SERVERS_SEEN.add(flow.server_conn)
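
A self-contained sketch of the timing math used above, assuming mitmproxy's ServerConnection timestamps (timestamp_start, timestamp_tcp_setup, and a timestamp_tls_setup that is None for plain TCP connections); the -1 sentinel follows the HAR convention for timings that do not apply:

def connection_timings(server_conn):
    # Seconds spent on the TCP handshake.
    connect_time = server_conn.timestamp_tcp_setup - server_conn.timestamp_start
    # Seconds spent on the TLS handshake, or -1 when no TLS was negotiated.
    tls_time = -1
    if server_conn.timestamp_tls_setup is not None:
        tls_time = server_conn.timestamp_tls_setup - server_conn.timestamp_tcp_setup
    return connect_time, tls_time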
diff --git a/examples/complex/stream.py b/examples/complex/stream.py
index 1993cf7f..ae365ec5 100644
--- a/examples/complex/stream.py
+++ b/examples/complex/stream.py
@@ -1,6 +1,6 @@
def responseheaders(flow):
"""
Enables streaming for all responses.
- This is equivalent to passing `--stream 0` to mitmproxy.
+ This is equivalent to passing `--set stream_large_bodies=1` to mitmproxy.
"""
flow.response.stream = True
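
If streaming every response is too coarse, a hedged variant (not part of this example) can key the decision off the announced body size; it assumes the responseheaders hook fires before the body is read, as the docstring above implies:

STREAM_THRESHOLD = 1024 * 1024  # 1 MiB; arbitrary cutoff for this sketch

def responseheaders(flow):
    # Content-Length may be missing (e.g. chunked responses); treat that as small.
    size = int(flow.response.headers.get("content-length", 0))
    flow.response.stream = size > STREAM_THRESHOLD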
diff --git a/examples/complex/xss_scanner.py b/examples/complex/xss_scanner.py
index a0572d5d..4b35c6c1 100755
--- a/examples/complex/xss_scanner.py
+++ b/examples/complex/xss_scanner.py
@@ -85,14 +85,19 @@ def get_cookies(flow: http.HTTPFlow) -> Cookies:
def find_unclaimed_URLs(body: Union[str, bytes], requestUrl: bytes) -> None:
""" Look for unclaimed URLs in script tags and log them if found"""
+ def getValue(attrs: List[Tuple[str, str]], attrName: str) -> str:
+ for name, value in attrs:
+ if attrName == name:
+ return value
+
class ScriptURLExtractor(HTMLParser):
script_URLs = []
def handle_starttag(self, tag, attrs):
- if tag == "script" and "src" in [name for name, value in attrs]:
- for name, value in attrs:
- if name == "src":
- self.script_URLs.append(value)
+ if (tag == "script" or tag == "iframe") and "src" in [name for name, value in attrs]:
+ self.script_URLs.append(getValue(attrs, "src"))
+ if tag == "link" and getValue(attrs, "rel") == "stylesheet" and "href" in [name for name, value in attrs]:
+ self.script_URLs.append(getValue(attrs, "href"))
parser = ScriptURLExtractor()
try:
@@ -105,7 +110,7 @@ def find_unclaimed_URLs(body: Union[str, bytes], requestUrl: bytes) -> None:
try:
gethostbyname(domain)
except gaierror:
- ctx.log.error("XSS found in %s due to unclaimed URL \"%s\" in script tag." % (requestUrl, url))
+ ctx.log.error("XSS found in %s due to unclaimed URL \"%s\"." % (requestUrl, url))
def test_end_of_URL_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
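
The extractor above can also be exercised on its own; the following self-contained sketch mirrors the same idea (names are illustrative, not from xss_scanner.py) and keeps the URL list on the instance rather than the class so separate parses do not share state:

from html.parser import HTMLParser
from typing import List, Tuple

def get_value(attrs: List[Tuple[str, str]], attr_name: str) -> str:
    for name, value in attrs:
        if name == attr_name:
            return value
    return ""

class URLExtractor(HTMLParser):
    def __init__(self):
        super().__init__()
        self.urls = []

    def handle_starttag(self, tag, attrs):
        names = [name for name, _ in attrs]
        if tag in ("script", "iframe") and "src" in names:
            self.urls.append(get_value(attrs, "src"))
        if tag == "link" and get_value(attrs, "rel") == "stylesheet" and "href" in names:
            self.urls.append(get_value(attrs, "href"))

parser = URLExtractor()
parser.feed('<script src="//cdn.example.com/a.js"></script>'
            '<link rel="stylesheet" href="//cdn.example.com/a.css">')
print(parser.urls)  # ['//cdn.example.com/a.js', '//cdn.example.com/a.css']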
@@ -198,7 +203,7 @@ def get_SQLi_data(new_body: str, original_body: str, request_URL: str, injection
}
for dbms, regexes in DBMS_ERRORS.items():
for regex in regexes:
- if re.search(regex, new_body) and not re.search(regex, original_body):
+ if re.search(regex, new_body, re.IGNORECASE) and not re.search(regex, original_body, re.IGNORECASE):
return SQLiData(request_URL,
injection_point,
regex,