Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 195d174

Browse files
committed
First test of stdout/stderr redirection to a database when sqlmap is executed from the RESTful API (#297)
1 parent e150316 commit 195d174

3 files changed

Lines changed: 122 additions & 55 deletions

File tree

_sqlmap.py

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
from lib.core.testing import smokeTest
3737
from lib.core.testing import liveTest
3838
from lib.parse.cmdline import cmdLineParser
39+
from lib.utils.api import StdDbOut
3940

4041
def modulePath():
4142
"""
@@ -53,16 +54,22 @@ def main():
5354
try:
5455
paths.SQLMAP_ROOT_PATH = modulePath()
5556
setPaths()
56-
banner()
57-
58-
dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True)
59-
dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True)
6057

6158
# Store original command line options for possible later restoration
6259
cmdLineOptions.update(cmdLineParser().__dict__)
63-
6460
init(cmdLineOptions)
6561

62+
if hasattr(conf, "ipc_database"):
63+
# Overwrite system standard output and standard error to write
64+
# to a temporary I/O database
65+
sys.stdout = StdDbOut(type_="stdout")
66+
sys.stderr = StdDbOut(type_="stderr")
67+
68+
banner()
69+
70+
dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True)
71+
dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True)
72+
6673
if conf.profile:
6774
profile()
6875
elif conf.smokeTest:
@@ -115,6 +122,13 @@ def main():
115122
except KeyboardInterrupt:
116123
pass
117124

125+
if hasattr(conf, "ipc_database"):
126+
try:
127+
conf.ipc_database_cursor.close()
128+
conf.ipc_database_connection.close()
129+
except KeyboardInterrupt:
130+
pass
131+
118132
# Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
119133
if conf.get("threads", 0) > 1 or conf.get("dnsServer"):
120134
os._exit(0)

lib/core/option.py

Lines changed: 2 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,6 @@
8787
from lib.core.exception import SqlmapUnsupportedDBMSException
8888
from lib.core.exception import SqlmapUserQuitException
8989
from lib.core.log import FORMATTER
90-
from lib.core.log import LOGGER_HANDLER
9190
from lib.core.optiondict import optDict
9291
from lib.core.purge import purge
9392
from lib.core.settings import ACCESS_ALIASES
@@ -137,6 +136,7 @@
137136
from lib.request.rangehandler import HTTPRangeHandler
138137
from lib.request.redirecthandler import SmartRedirectHandler
139138
from lib.request.templates import getPageTemplate
139+
from lib.utils.api import setRestAPILog
140140
from lib.utils.crawler import crawl
141141
from lib.utils.deps import checkDependencies
142142
from lib.utils.google import Google
@@ -1794,25 +1794,6 @@ def _mergeOptions(inputOptions, overrideOptions):
17941794
if hasattr(conf, key) and conf[key] is None:
17951795
conf[key] = value
17961796

1797-
class LogRecorder(logging.StreamHandler):
1798-
def emit(self, record):
1799-
"""
1800-
Record emitted events to temporary database for asynchronous I/O
1801-
communication with the parent process
1802-
"""
1803-
connection = sqlite3.connect(conf.ipc, isolation_level=None)
1804-
cursor = connection.cursor()
1805-
cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
1806-
(time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg))
1807-
cursor.close()
1808-
connection.close()
1809-
1810-
def _setRestAPILog():
1811-
if hasattr(conf, "ipc"):
1812-
logger.removeHandler(LOGGER_HANDLER)
1813-
LOGGER_RECORDER = LogRecorder()
1814-
logger.addHandler(LOGGER_RECORDER)
1815-
18161797
def _setTrafficOutputFP():
18171798
if conf.trafficFile:
18181799
infoMsg = "setting file for logging HTTP traffic"
@@ -2084,7 +2065,7 @@ def init(inputOptions=AttribDict(), overrideOptions=False):
20842065
_mergeOptions(inputOptions, overrideOptions)
20852066
_useWizardInterface()
20862067
setVerbosity()
2087-
_setRestAPILog()
2068+
setRestAPILog()
20882069
_saveCmdline()
20892070
_setRequestFromFile()
20902071
_cleanupOptions()

lib/utils/api.py

Lines changed: 101 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,13 @@
55
See the file 'doc/COPYING' for copying permission
66
"""
77

8+
import logging
89
import os
910
import shutil
1011
import sqlite3
12+
import sys
1113
import tempfile
14+
import time
1215

1316
from subprocess import PIPE
1417

@@ -17,10 +20,12 @@
1720
from lib.core.convert import base64unpickle
1821
from lib.core.convert import hexencode
1922
from lib.core.convert import jsonize
23+
from lib.core.data import conf
2024
from lib.core.data import paths
2125
from lib.core.data import logger
2226
from lib.core.datatype import AttribDict
2327
from lib.core.defaults import _defaults
28+
from lib.core.log import LOGGER_HANDLER
2429
from lib.core.optiondict import optDict
2530
from lib.core.subprocessng import Popen as execute
2631
from lib.core.subprocessng import send_all
@@ -43,6 +48,56 @@
4348
procs = dict()
4449
tasks = AttribDict()
4550

51+
# Wrapper functions
52+
class StdDbOut(object):
53+
encoding = "UTF-8"
54+
55+
def __init__(self, type_="stdout"):
56+
# Overwrite system standard output and standard error to write
57+
# to a temporary I/O database
58+
self.type = type_
59+
60+
if self.type == "stdout":
61+
sys.stdout = self
62+
else:
63+
sys.stderr = self
64+
65+
def write(self, string):
66+
if self.type == "stdout":
67+
conf.ipc_database_cursor.execute("INSERT INTO stdout VALUES(NULL, ?, ?)", (time.strftime("%X"), string))
68+
else:
69+
conf.ipc_database_cursor.execute("INSERT INTO stderr VALUES(NULL, ?, ?)", (time.strftime("%X"), string))
70+
71+
def flush(self):
72+
pass
73+
74+
def close(self):
75+
pass
76+
77+
def seek(self):
78+
pass
79+
80+
class LogRecorder(logging.StreamHandler):
81+
def emit(self, record):
82+
"""
83+
Record emitted events to temporary database for asynchronous I/O
84+
communication with the parent process
85+
"""
86+
conf.ipc_database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?)",
87+
(time.strftime("%X"), record.levelname,
88+
record.msg % record.args if record.args else record.msg))
89+
90+
def setRestAPILog():
91+
if hasattr(conf, "ipc_database"):
92+
conf.ipc_database_connection = sqlite3.connect(conf.ipc_database, timeout=1, isolation_level=None)
93+
conf.ipc_database_cursor = conf.ipc_database_connection.cursor()
94+
95+
# Set a logging handler that writes log messages to a temporary
96+
# I/O database
97+
logger.removeHandler(LOGGER_HANDLER)
98+
LOGGER_RECORDER = LogRecorder()
99+
logger.addHandler(LOGGER_RECORDER)
100+
46101
# Generic functions
47102
def is_admin(taskid):
48103
global adminid
@@ -110,23 +165,25 @@ def task_new():
110165
"""
111166
Create new task ID
112167
"""
168+
global procs
113169
global tasks
114170

115171
taskid = hexencode(os.urandom(16))
116172
tasks[taskid] = init_options()
173+
procs[taskid] = AttribDict()
174+
175+
_, ipc_database_filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)
117176

118177
# Initiate the temporary database for asynchronous I/O with the
119-
# sqlmap engine (children processes)
120-
_, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db", text=False)
121-
connection = sqlite3.connect(ipc_filepath, isolation_level=None)
122-
cursor = connection.cursor()
123-
cursor.execute("DROP TABLE IF EXISTS logs")
124-
cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
125-
cursor.close()
126-
connection.close()
178+
# sqlmap engine
179+
procs[taskid].ipc_database_connection = sqlite3.connect(ipc_database_filepath, timeout=1, isolation_level=None)
180+
procs[taskid].ipc_database_cursor = procs[taskid].ipc_database_connection.cursor()
181+
procs[taskid].ipc_database_cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
182+
procs[taskid].ipc_database_cursor.execute("CREATE TABLE stdout(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, message TEXT)")
183+
procs[taskid].ipc_database_cursor.execute("CREATE TABLE stderr(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, message TEXT)")
127184

128185
# Set the temporary database to use for asynchronous I/O communication
129-
tasks[taskid].ipc = ipc_filepath
186+
tasks[taskid].ipc_database = ipc_database_filepath
130187

131188
return jsonize({"taskid": taskid})
132189

@@ -195,13 +252,14 @@ def cleanup(taskid):
195252

196253
if is_admin(taskid):
197254
for task, options in tasks.items():
198-
if "oDir" in options and options.oDir is not None:
199-
shutil.rmtree(options.oDir)
255+
shutil.rmtree(options.oDir)
256+
shutil.rmtree(options.ipc_database)
200257

201258
admin_task = tasks[adminid]
202259
tasks = AttribDict()
203260
tasks[adminid] = admin_task
204261

262+
205263
return jsonize({"success": True})
206264
else:
207265
abort(401)
@@ -259,19 +317,18 @@ def scan_start(taskid):
259317
if taskid not in tasks:
260318
abort(500, "Invalid task ID")
261319

262-
# Initialize sqlmap engine's options with user's provided options
263-
# within the JSON request
320+
# Initialize sqlmap engine's options with user's provided options, if any
264321
for key, value in request.json.items():
265322
tasks[taskid][key] = value
266323

267-
# Overwrite output directory (oDir) value to a temporary directory
268-
tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmaptask-")
324+
# Overwrite output directory value to a temporary directory
325+
tasks[taskid].oDir = tempfile.mkdtemp(prefix="sqlmapoutput-")
269326

270327
# Launch sqlmap engine in a separate thread
271328
logger.debug("starting a scan for task ID %s" % taskid)
272329

273330
# Launch sqlmap engine
274-
procs[taskid] = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)
331+
procs[taskid].child = execute("python sqlmap.py --pickled-options %s" % base64pickle(tasks[taskid]), shell=True, stdin=PIPE)
275332

276333
return jsonize({"success": True})
277334

@@ -280,15 +337,30 @@ def scan_output(taskid):
280337
"""
281338
Read the standard output of sqlmap core execution
282339
"""
340+
global procs
283341
global tasks
284342

343+
json_stdout_message = []
344+
json_stderr_message = []
345+
285346
if taskid not in tasks:
286347
abort(500, "Invalid task ID")
287348

288-
stdout = recv_some(procs[taskid], t=1, e=0, stderr=0)
289-
stderr = recv_some(procs[taskid], t=1, e=0, stderr=1)
349+
# Read all stdout messages from the temporary I/O database
350+
procs[taskid].ipc_database_cursor.execute("SELECT message FROM stdout")
351+
db_stdout_messages = procs[taskid].ipc_database_cursor.fetchall()
352+
353+
for message in db_stdout_messages:
354+
json_stdout_message.append(message)
290355

291-
return jsonize({"stdout": stdout, "stderr": stderr})
356+
# Read all stderr messages from the temporary I/O database
357+
procs[taskid].ipc_database_cursor.execute("SELECT message FROM stderr")
358+
db_stderr_messages = procs[taskid].ipc_database_cursor.fetchall()
359+
360+
for message in db_stderr_messages:
361+
json_stderr_message.append(message)
362+
363+
return jsonize({"stdout": json_stdout_message, "stderr": json_stderr_message})
292364

293365
@get("/scan/<taskid>/delete")
294366
def scan_delete(taskid):
@@ -300,8 +372,8 @@ def scan_delete(taskid):
300372
if taskid not in tasks:
301373
abort(500, "Invalid task ID")
302374

303-
if "oDir" in tasks[taskid] and tasks[taskid].oDir is not None:
304-
shutil.rmtree(tasks[taskid].oDir)
375+
shutil.rmtree(tasks[taskid].oDir)
376+
shutil.rmtree(tasks[taskid].ipc_database)
305377

306378
return jsonize({"success": True})
307379

@@ -311,6 +383,8 @@ def scan_log_limited(taskid, start, end):
311383
"""
312384
Retrieve a subset of log messages
313385
"""
386+
global procs
387+
314388
json_log_messages = {}
315389

316390
if taskid not in tasks:
@@ -324,10 +398,8 @@ def scan_log_limited(taskid, start, end):
324398
end = max(1, int(end))
325399

326400
# Read a subset of log messages from the temporary I/O database
327-
connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
328-
cursor = connection.cursor()
329-
cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= %d AND id <= %d" % (start, end))
330-
db_log_messages = cursor.fetchall()
401+
procs[taskid].ipc_database_cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= %d AND id <= %d" % (start, end))
402+
db_log_messages = procs[taskid].ipc_database_cursor.fetchall()
331403

332404
for (id_, time_, level, message) in db_log_messages:
333405
json_log_messages[id_] = {"time": time_, "level": level, "message": message}
@@ -339,16 +411,16 @@ def scan_log(taskid):
339411
"""
340412
Retrieve the log messages
341413
"""
414+
global procs
415+
342416
json_log_messages = {}
343417

344418
if taskid not in tasks:
345419
abort(500, "Invalid task ID")
346420

347421
# Read all log messages from the temporary I/O database
348-
connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
349-
cursor = connection.cursor()
350-
cursor.execute("SELECT id, time, level, message FROM logs")
351-
db_log_messages = cursor.fetchall()
422+
procs[taskid].ipc_database_cursor.execute("SELECT id, time, level, message FROM logs")
423+
db_log_messages = procs[taskid].ipc_database_cursor.fetchall()
352424

353425
for (id_, time_, level, message) in db_log_messages:
354426
json_log_messages[id_] = {"time": time_, "level": level, "message": message}

0 commit comments

Comments (0)