about summary refs log tree commit diff stats
path: root/mitmproxy/io
diff options
context:
space:
mode:
authormadt1m <blackjuniper@protonmail.com>2018-06-23 03:36:58 +0200
committermadt1m <blackjuniper@protonmail.com>2018-06-23 03:36:58 +0200
commitb686073f56667f7caf11c9013d592a8f091ccd9d (patch)
tree62c110e294712f5978d1318162a56a08833f872c /mitmproxy/io
parente842aa3798ee71812cc68ce0f3b7360edbc023c3 (diff)
downloadmitmproxy-b686073f56667f7caf11c9013d592a8f091ccd9d.tar.gz
mitmproxy-b686073f56667f7caf11c9013d592a8f091ccd9d.tar.bz2
mitmproxy-b686073f56667f7caf11c9013d592a8f091ccd9d.zip
Hooking the view to load from new DB handler
Diffstat (limited to 'mitmproxy/io')
-rw-r--r--  mitmproxy/io/__init__.py  3
-rw-r--r--  mitmproxy/io/db.py  22
2 files changed, 18 insertions(+), 7 deletions(-)
diff --git a/mitmproxy/io/__init__.py b/mitmproxy/io/__init__.py
index 540e6871..854d2505 100644
--- a/mitmproxy/io/__init__.py
+++ b/mitmproxy/io/__init__.py
@@ -1,7 +1,8 @@
from .io import FlowWriter, FlowReader, FilteredFlowWriter, read_flows_from_paths
+from .db import DbHandler
__all__ = [
- "FlowWriter", "FlowReader", "FilteredFlowWriter", "read_flows_from_paths"
+ "FlowWriter", "FlowReader", "FilteredFlowWriter", "read_flows_from_paths", "DbHandler"
]
diff --git a/mitmproxy/io/db.py b/mitmproxy/io/db.py
index f7e13e91..b59650ab 100644
--- a/mitmproxy/io/db.py
+++ b/mitmproxy/io/db.py
@@ -1,5 +1,9 @@
import sqlite3
+import os
+
from mitmproxy.io import protobuf
+from mitmproxy.http import HTTPFlow
+from mitmproxy import exceptions
class DbHandler:
@@ -8,7 +12,11 @@ class DbHandler:
This class is wrapping up connection to SQLITE DB.
"""
- def __init__(self, db_path="tmp.sqlite"):
+ def __init__(self, db_path="/tmp/tmp.sqlite"):
+ if os.path.isfile(db_path):
+ self.db_path = db_path
+ else:
+ raise IOError("Invalid path!")
self.db_path = db_path
self._con = sqlite3.connect(self.db_path)
self._c = self._con.cursor()
@@ -17,16 +25,18 @@ class DbHandler:
def _create_db(self):
with self._con:
self._con.execute('CREATE TABLE IF NOT EXISTS FLOWS('
- 'id INTEGER PRIMARY KEY AUTOINCREMENT,'
+ 'id INTEGER PRIMARY KEY,'
'pbuf_blob BLOB)')
def store(self, flows):
blobs = []
for flow in flows:
- blobs.append(protobuf.dumps(flow))
+ blobs.append((protobuf.dumps(flow),))
with self._con:
- self._con.executemany('INSERT INTO FLOWS values (?)', blobs)
+ self._con.executemany('INSERT INTO FLOWS (pbuf_blob) values (?)', blobs)
def load(self):
- self._c.execute('SELECT * FROM FLOWS')
- return self._c.fetchall()
+ flows = []
+ self._c.execute('SELECT pbuf_blob FROM FLOWS')
+ for row in self._c.fetchall():
+ flows.append(HTTPFlow.from_state(protobuf.loads(row[0])))