Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 5b3c8d8

Browse files
committed
first implementation of asynchronous inter-protocol communication between the sqlmap RESTful API and the sqlmap engine with SQLite
1 parent 7d01eb7 commit 5b3c8d8

2 files changed

Lines changed: 47 additions & 43 deletions

File tree

lib/core/option.py

Lines changed: 10 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
import socket
1414
import string
1515
import sys
16+
import sqlite3
1617
import threading
1718
import time
1819
import urllib2
@@ -1793,29 +1794,21 @@ def _mergeOptions(inputOptions, overrideOptions):
17931794
if hasattr(conf, key) and conf[key] is None:
17941795
conf[key] = value
17951796

1796-
# Logger recorder object, which keeps the log structure
17971797
class LogRecorder(logging.StreamHandler):
1798-
"""
1799-
Logging handler class which only records CUSTOM_LOGGING.PAYLOAD entries
1800-
to a global list.
1801-
"""
1802-
loghist = []
1803-
18041798
def emit(self, record):
18051799
"""
1806-
Simply record the emitted events.
1800+
Record emitted events to a temporary database for asynchronous I/O
1801+
communication with the parent process
18071802
"""
1808-
self.loghist.append({'levelname': record.levelname,
1809-
'text': record.msg % record.args if record.args else record.msg,
1810-
'id': len(self.loghist) + 1})
1811-
1812-
if conf.fdLog:
1813-
# TODO: this is very heavy operation and slows down a lot the
1814-
# whole execution of the sqlmap engine, find an alternative
1815-
os.write(conf.fdLog, base64pickle(self.loghist))
1803+
connection = sqlite3.connect(conf.ipc, isolation_level=None)
1804+
cursor = connection.cursor()
1805+
cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
1806+
(time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg))
1807+
cursor.close()
1808+
connection.close()
18161809

18171810
def _setRestAPILog():
1818-
if hasattr(conf, "fdLog") and conf.fdLog:
1811+
if hasattr(conf, "ipc"):
18191812
logger.removeHandler(LOGGER_HANDLER)
18201813
LOGGER_RECORDER = LogRecorder()
18211814
logger.addHandler(LOGGER_RECORDER)

lib/utils/api.py

Lines changed: 37 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
import os
99
import shutil
10+
import sqlite3
1011
import tempfile
1112

1213
from subprocess import PIPE
@@ -39,7 +40,6 @@
3940

4041
# Local global variables
4142
adminid = ""
42-
pipes = dict()
4343
procs = dict()
4444
tasks = AttribDict()
4545

@@ -115,6 +115,19 @@ def task_new():
115115
taskid = hexencode(os.urandom(16))
116116
tasks[taskid] = init_options()
117117

118+
# Initialize the temporary database for asynchronous I/O with the
119+
# sqlmap engine (child processes)
120+
_, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db", text=False)
121+
connection = sqlite3.connect(ipc_filepath, isolation_level=None)
122+
cursor = connection.cursor()
123+
cursor.execute("DROP TABLE IF EXISTS logs")
124+
cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
125+
cursor.close()
126+
connection.close()
127+
128+
# Set the temporary database to use for asynchronous I/O communication
129+
tasks[taskid].ipc = ipc_filepath
130+
118131
return jsonize({"taskid": taskid})
119132

120133
@get("/task/<taskid>/destroy")
@@ -242,7 +255,6 @@ def scan_start(taskid):
242255
"""
243256
global tasks
244257
global procs
245-
global pipes
246258

247259
if taskid not in tasks:
248260
abort(500, "Invalid task ID")
@@ -253,16 +265,11 @@ def scan_start(taskid):
253265
tasks[taskid][key] = value
254266

255267
# Overwrite output directory (oDir) value to a temporary directory
256-
tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmap-")
268+
tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmaptask-")
257269

258270
# Launch sqlmap engine in a separate thread
259271
logger.debug("starting a scan for task ID %s" % taskid)
260272

261-
pipes[taskid] = os.pipe()
262-
263-
# Provide sqlmap engine with the writable pipe for logging
264-
tasks[taskid]["fdLog"] = pipes[taskid][1]
265-
266273
# Launch sqlmap engine
267274
procs[taskid] = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)
268275

@@ -273,7 +280,6 @@ def scan_output(taskid):
273280
"""
274281
Read the standard output of sqlmap core execution
275282
"""
276-
global pipes
277283
global tasks
278284

279285
if taskid not in tasks:
@@ -303,46 +309,51 @@ def scan_delete(taskid):
303309
@get("/scan/<taskid>/log/<start>/<end>")
304310
def scan_log_limited(taskid, start, end):
305311
"""
306-
Retrieve the log messages
312+
Retrieve a subset of log messages
307313
"""
308-
log = None
314+
json_log_messages = {}
309315

310316
if taskid not in tasks:
311317
abort(500, "Invalid task ID")
312318

319+
# Temporary "protection" against SQL injection FTW ;)
313320
if not start.isdigit() or not end.isdigit() or end <= start:
314321
abort(500, "Invalid start or end value, must be digits")
315322

316-
start = max(0, int(start) - 1)
323+
start = max(1, int(start))
317324
end = max(1, int(end))
318-
pickledLog = os.read(pipes[taskid][0], 100000)
319325

320-
try:
321-
log = base64unpickle(pickledLog)
322-
log = log[slice(start, end)]
323-
except (KeyError, IndexError, TypeError), e:
324-
logger.error("handled exception when trying to unpickle logger dictionary in scan_log_limited(): %s" % str(e))
326+
# Read a subset of log messages from the temporary I/O database
327+
connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
328+
cursor = connection.cursor()
329+
cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= %d AND id <= %d" % (start, end))
330+
db_log_messages = cursor.fetchall()
331+
332+
for (id_, time_, level, message) in db_log_messages:
333+
json_log_messages[id_] = {"time": time_, "level": level, "message": message}
325334

326-
return jsonize({"log": log})
335+
return jsonize({"log": json_log_messages})
327336

328337
@get("/scan/<taskid>/log")
329338
def scan_log(taskid):
330339
"""
331340
Retrieve the log messages
332341
"""
333-
log = None
342+
json_log_messages = {}
334343

335344
if taskid not in tasks:
336345
abort(500, "Invalid task ID")
337346

338-
pickledLog = os.read(pipes[taskid][0], 100000)
347+
# Read all log messages from the temporary I/O database
348+
connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
349+
cursor = connection.cursor()
350+
cursor.execute("SELECT id, time, level, message FROM logs")
351+
db_log_messages = cursor.fetchall()
339352

340-
try:
341-
log = base64unpickle(pickledLog)
342-
except (KeyError, IndexError, TypeError), e:
343-
logger.error("handled exception when trying to unpickle logger dictionary in scan_log(): %s" % str(e))
353+
for (id_, time_, level, message) in db_log_messages:
354+
json_log_messages[id_] = {"time": time_, "level": level, "message": message}
344355

345-
return jsonize({"log": log})
356+
return jsonize({"log": json_log_messages})
346357

347358
# Function to handle files inside the output directory
348359
@get("/download/<taskid>/<target>/<filename:path>")

0 commit comments

Comments
 (0)