77
88import os
99import shutil
10+ import sqlite3
1011import tempfile
1112
1213from subprocess import PIPE
3940
# Module-level state shared by the REST API handlers
adminid = ""          # admin session identifier
procs = {}            # taskid -> spawned sqlmap engine process
tasks = AttribDict()  # taskid -> per-task option storage
def task_new():
    """
    Create a new task and return its ID.

    Allocates a random task ID, initializes its option store, and creates
    a per-task temporary SQLite database used for asynchronous I/O with
    the sqlmap engine (children processes).
    """
    taskid = hexencode(os.urandom(16))
    tasks[taskid] = init_options()

    # Initiate the temporary database for asynchronous I/O with the
    # sqlmap engine (children processes)
    ipc_fd, ipc_filepath = tempfile.mkstemp(prefix="sqlmapipc-", suffix=".db", text=False)
    # mkstemp() returns an open OS-level descriptor; close it immediately,
    # otherwise one descriptor leaks per created task (sqlite3 opens the
    # file by path on its own)
    os.close(ipc_fd)
    connection = sqlite3.connect(ipc_filepath, isolation_level=None)
    cursor = connection.cursor()
    cursor.execute("DROP TABLE IF EXISTS logs")
    cursor.execute("CREATE TABLE logs(id INTEGER PRIMARY KEY AUTOINCREMENT, time TEXT, level TEXT, message TEXT)")
    cursor.close()
    connection.close()

    # Set the temporary database to use for asynchronous I/O communication
    tasks[taskid].ipc = ipc_filepath

    return jsonize({"taskid": taskid})
120133@get ("/task/<taskid>/destroy" )
@@ -242,7 +255,6 @@ def scan_start(taskid):
242255 """
243256 global tasks
244257 global procs
245- global pipes
246258
247259 if taskid not in tasks :
248260 abort (500 , "Invalid task ID" )
@@ -253,16 +265,11 @@ def scan_start(taskid):
253265 tasks [taskid ][key ] = value
254266
255267 # Overwrite output directory (oDir) value to a temporary directory
256- tasks [taskid ].oDir = tempfile .mkdtemp (prefix = "sqlmap -" )
268+ tasks [taskid ].oDir = tempfile .mkdtemp (prefix = "sqlmaptask -" )
257269
258270 # Launch sqlmap engine in a separate thread
259271 logger .debug ("starting a scan for task ID %s" % taskid )
260272
261- pipes [taskid ] = os .pipe ()
262-
263- # Provide sqlmap engine with the writable pipe for logging
264- tasks [taskid ]["fdLog" ] = pipes [taskid ][1 ]
265-
266273 # Launch sqlmap engine
267274 procs [taskid ] = execute ("python sqlmap.py --pickled-options %s" % base64pickle (tasks [taskid ]), shell = True , stdin = PIPE , stdout = PIPE , stderr = PIPE , close_fds = False )
268275
@@ -273,7 +280,6 @@ def scan_output(taskid):
273280 """
274281 Read the standard output of sqlmap core execution
275282 """
276- global pipes
277283 global tasks
278284
279285 if taskid not in tasks :
@@ -303,46 +309,51 @@ def scan_delete(taskid):
@get("/scan/<taskid>/log/<start>/<end>")
def scan_log_limited(taskid, start, end):
    """
    Retrieve a subset of log messages for the given task.

    ``start`` and ``end`` arrive as URL path strings and select the
    inclusive range of log row IDs to return from the task's temporary
    I/O database.
    """
    json_log_messages = {}

    if taskid not in tasks:
        abort(500, "Invalid task ID")

    if not start.isdigit() or not end.isdigit():
        abort(500, "Invalid start or end value, must be digits")

    # Convert BEFORE comparing: the original compared the raw strings,
    # so e.g. start="9", end="10" was wrongly rejected ("10" <= "9"
    # lexicographically)
    start = max(1, int(start))
    end = max(1, int(end))
    if end <= start:
        abort(500, "Invalid start or end value, end must be greater than start")

    # Read a subset of log messages from the temporary I/O database.
    # Use DB-API parameter binding (no string interpolation into SQL)
    # and make sure the connection is closed even on error
    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
    try:
        cursor = connection.cursor()
        cursor.execute("SELECT id, time, level, message FROM logs WHERE id >= ? AND id <= ?", (start, end))
        db_log_messages = cursor.fetchall()
    finally:
        connection.close()

    for (id_, time_, level, message) in db_log_messages:
        json_log_messages[id_] = {"time": time_, "level": level, "message": message}

    return jsonize({"log": json_log_messages})
327336
@get("/scan/<taskid>/log")
def scan_log(taskid):
    """
    Retrieve all log messages for the given task from its temporary
    I/O database.
    """
    json_log_messages = {}

    if taskid not in tasks:
        abort(500, "Invalid task ID")

    # Read all log messages from the temporary I/O database; close the
    # connection even on error (the original leaked one connection per
    # request)
    connection = sqlite3.connect(tasks[taskid].ipc, isolation_level=None)
    try:
        cursor = connection.cursor()
        cursor.execute("SELECT id, time, level, message FROM logs")
        db_log_messages = cursor.fetchall()
    finally:
        connection.close()

    for (id_, time_, level, message) in db_log_messages:
        json_log_messages[id_] = {"time": time_, "level": level, "message": message}

    return jsonize({"log": json_log_messages})
346357
347358# Function to handle files inside the output directory
348359@get ("/download/<taskid>/<target>/<filename:path>" )
0 commit comments