about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorMaximilian Hils <git@maximilianhils.com>2018-09-07 10:32:40 +0200
committerGitHub <noreply@github.com>2018-09-07 10:32:40 +0200
commit53e776187673a89192cdecd2d806077579f062e1 (patch)
treeced428d5f83424f5c27a8f3e2f1161eba8b77807
parentb7aa325bfe9e392f5f5089a0b6be648e7ccaf6d7 (diff)
parentdcd8ba34ab652d7f8678637377c30e26f7efdd25 (diff)
downloadmitmproxy-53e776187673a89192cdecd2d806077579f062e1.tar.gz
mitmproxy-53e776187673a89192cdecd2d806077579f062e1.tar.bz2
mitmproxy-53e776187673a89192cdecd2d806077579f062e1.zip
Merge pull request #3294 from elijahbal/master
Fix XSS scanner failure in test_xss_scanner.py
-rwxr-xr-x  examples/complex/xss_scanner.py  6
-rw-r--r--  test/examples/test_xss_scanner.py  75
2 files changed, 43 insertions, 38 deletions
diff --git a/examples/complex/xss_scanner.py b/examples/complex/xss_scanner.py
index 55fc2fe7..cdaaf478 100755
--- a/examples/complex/xss_scanner.py
+++ b/examples/complex/xss_scanner.py
@@ -37,9 +37,9 @@ Line: 1029zxcs'd"ao<ac>so[sb]po(pc)se;sl/bsl\eq=3847asd
from html.parser import HTMLParser
from typing import Dict, Union, Tuple, Optional, List, NamedTuple
-from socket import gaierror, gethostbyname
from urllib.parse import urlparse
import re
+import socket
import requests
@@ -109,8 +109,8 @@ def find_unclaimed_URLs(body: str, requestUrl: bytes) -> None:
url_parser = urlparse(url)
domain = url_parser.netloc
try:
- gethostbyname(domain)
- except gaierror:
+ socket.gethostbyname(domain)
+ except socket.gaierror:
ctx.log.error("XSS found in %s due to unclaimed URL \"%s\"." % (requestUrl, url))
diff --git a/test/examples/test_xss_scanner.py b/test/examples/test_xss_scanner.py
index 1d723d53..25237c4f 100644
--- a/test/examples/test_xss_scanner.py
+++ b/test/examples/test_xss_scanner.py
@@ -252,8 +252,7 @@ class TestXSSScanner():
self.text = html
return MockResponse("<html></html>")
- def test_test_end_of_url_injection(self, monkeypatch):
- monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
+ def test_test_end_of_url_injection(self, get_request_vuln):
xss_info = xss.test_end_of_URL_injection("<html></html>", "https://example.com/index.html", {})[0]
expected_xss_info = xss.XSSData('https://example.com/index.html/1029zxcs\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd',
'End of URL',
@@ -263,8 +262,7 @@ class TestXSSScanner():
assert xss_info == expected_xss_info
assert sqli_info is None
- def test_test_referer_injection(self, monkeypatch):
- monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
+ def test_test_referer_injection(self, get_request_vuln):
xss_info = xss.test_referer_injection("<html></html>", "https://example.com/", {})[0]
expected_xss_info = xss.XSSData('https://example.com/',
'Referer',
@@ -274,8 +272,7 @@ class TestXSSScanner():
assert xss_info == expected_xss_info
assert sqli_info is None
- def test_test_user_agent_injection(self, monkeypatch):
- monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
+ def test_test_user_agent_injection(self, get_request_vuln):
xss_info = xss.test_user_agent_injection("<html></html>", "https://example.com/", {})[0]
expected_xss_info = xss.XSSData('https://example.com/',
'User Agent',
@@ -285,8 +282,8 @@ class TestXSSScanner():
assert xss_info == expected_xss_info
assert sqli_info is None
- def test_test_query_injection(self, monkeypatch):
- monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
+ def test_test_query_injection(self, get_request_vuln):
+
xss_info = xss.test_query_injection("<html></html>", "https://example.com/vuln.php?cmd=ls", {})[0]
expected_xss_info = xss.XSSData('https://example.com/vuln.php?cmd=1029zxcs\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd',
'Query',
@@ -296,16 +293,8 @@ class TestXSSScanner():
assert xss_info == expected_xss_info
assert sqli_info is None
- def mocked_socket_gethostbyname(domain):
- claimed_domains = ["google.com"]
- if domain not in claimed_domains:
- from socket import gaierror
- raise gaierror("[Errno -2] Name or service not known")
- else:
- return '216.58.221.46'
-
- @pytest.fixture
- def logger(self):
+ @pytest.fixture(scope='function')
+ def logger(self, monkeypatch):
class Logger():
def __init__(self):
self.args = []
@@ -315,12 +304,32 @@ class TestXSSScanner():
def error(self, str):
self.args.append(str)
- return Logger()
- def test_find_unclaimed_URLs(self, monkeypatch, logger):
- logger.args = []
+ logger = Logger()
monkeypatch.setattr("mitmproxy.ctx.log", logger)
- monkeypatch.setattr("socket.gethostbyname", self.mocked_socket_gethostbyname)
+ yield logger
+
+ @pytest.fixture(scope='function')
+ def get_request_vuln(self, monkeypatch):
+ monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
+
+ @pytest.fixture(scope='function')
+ def get_request_invuln(self, monkeypatch):
+ monkeypatch.setattr(requests, 'get', self.mocked_requests_invuln)
+
+ @pytest.fixture(scope='function')
+ def mock_gethostbyname(self, monkeypatch):
+ def gethostbyname(domain):
+ claimed_domains = ["google.com"]
+ if domain not in claimed_domains:
+ from socket import gaierror
+ raise gaierror("[Errno -2] Name or service not known")
+ else:
+ return '216.58.221.46'
+
+ monkeypatch.setattr("socket.gethostbyname", gethostbyname)
+
+ def test_find_unclaimed_URLs(self, logger, mock_gethostbyname):
xss.find_unclaimed_URLs("<html><script src=\"http://google.com\"></script></html>",
"https://example.com")
assert logger.args == []
@@ -329,14 +338,12 @@ class TestXSSScanner():
assert logger.args[0] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
xss.find_unclaimed_URLs("<html><iframe src=\"http://unclaimedDomainName.com\"></iframe></html>",
"https://example.com")
- assert logger.args[0] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
+ assert logger.args[1] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
xss.find_unclaimed_URLs("<html><link rel=\"stylesheet\" href=\"http://unclaimedDomainName.com\"></html>",
"https://example.com")
- assert logger.args[0] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
+ assert logger.args[2] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
- def test_log_XSS_data(self, monkeypatch, logger):
- logger.args = []
- monkeypatch.setattr("mitmproxy.ctx.log", logger)
+ def test_log_XSS_data(self, logger):
xss.log_XSS_data(None)
assert logger.args == []
# self, url: str, injection_point: str, exploit: str, line: str
@@ -350,9 +357,7 @@ class TestXSSScanner():
assert logger.args[3] == 'Suggested Exploit: String'
assert logger.args[4] == 'Line: Line of HTML'
- def test_log_SQLi_data(self, monkeypatch, logger):
- logger.args = []
- monkeypatch.setattr("mitmproxy.ctx.log", logger)
+ def test_log_SQLi_data(self, logger):
xss.log_SQLi_data(None)
assert logger.args == []
xss.log_SQLi_data(xss.SQLiData('https://example.com',
@@ -371,11 +376,11 @@ class TestXSSScanner():
# It only uses the request cookies
assert xss.get_cookies(mocked_flow) == {"cookieName2": "cookieValue2"}
- def test_response(self, monkeypatch, logger):
- logger.args = []
- monkeypatch.setattr("mitmproxy.ctx.log", logger)
- monkeypatch.setattr(requests, 'get', self.mocked_requests_invuln)
- mocked_flow = tflow.tflow(req=tutils.treq(path=b"index.html?q=1"), resp=tutils.tresp(content=b'<html></html>'))
+ def test_response(self, get_request_invuln, logger):
+ mocked_flow = tflow.tflow(
+ req=tutils.treq(path=b"index.html?q=1"),
+ resp=tutils.tresp(content=b'<html></html>')
+ )
xss.response(mocked_flow)
assert logger.args == []