Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit fdcde2a

Browse files
committed
In case of bulk file, crawl-scan-crawl-scan...
1 parent 6679d6f commit fdcde2a

4 files changed

Lines changed: 39 additions & 31 deletions

File tree

lib/core/option.py

Lines changed: 3 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,6 @@
133133
from lib.parse.configfile import configFileParser
134134
from lib.parse.payloads import loadBoundaries
135135
from lib.parse.payloads import loadPayloads
136-
from lib.parse.sitemap import parseSitemap
137136
from lib.request.basic import checkCharEncoding
138137
from lib.request.basicauthhandler import SmartHTTPBasicAuthHandler
139138
from lib.request.chunkedhandler import ChunkedHandler
@@ -338,25 +337,6 @@ def _setCrawler():
338337

339338
if not conf.bulkFile:
340339
crawl(conf.url)
341-
else:
342-
targets = getFileItems(conf.bulkFile)
343-
344-
for i in xrange(len(targets)):
345-
try:
346-
target = targets[i]
347-
348-
if not re.search(r"(?i)\Ahttp[s]*://", target):
349-
target = "http://%s" % target
350-
351-
crawl(target)
352-
353-
if conf.verbose in (1, 2):
354-
status = "%d/%d links visited (%d%%)" % (i + 1, len(targets), round(100.0 * (i + 1) / len(targets)))
355-
dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)
356-
except Exception as ex:
357-
if not isinstance(ex, SqlmapUserQuitException):
358-
errMsg = "problem occurred while crawling at '%s' ('%s')" % (target, getSafeExString(ex))
359-
logger.error(errMsg)
360340

361341
def _doSearch():
362342
"""
@@ -1939,7 +1919,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
19391919
kb.mergeCookies = None
19401920
kb.multipleCtrlC = False
19411921
kb.negativeLogic = False
1942-
kb.normalizeCrawlingChoice = None
19431922
kb.nullConnection = None
19441923
kb.oldMsf = None
19451924
kb.orderByColumns = None
@@ -1993,7 +1972,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
19931972
kb.reduceTests = None
19941973
kb.tlsSNI = {}
19951974
kb.stickyDBMS = False
1996-
kb.storeCrawlingChoice = None
19971975
kb.storeHashesChoice = None
19981976
kb.suppressResumeInfo = False
19991977
kb.tableFrom = None
@@ -2013,11 +1991,14 @@ def _setKnowledgeBaseAttributes(flushAll=True):
20131991
kb.xpCmdshellAvailable = False
20141992

20151993
if flushAll:
1994+
kb.checkSitemap = None
20161995
kb.headerPaths = {}
20171996
kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
1997+
kb.normalizeCrawlingChoice = None
20181998
kb.passwordMgr = None
20191999
kb.preprocessFunctions = []
20202000
kb.skipVulnHost = None
2001+
kb.storeCrawlingChoice = None
20212002
kb.tamperFunctions = []
20222003
kb.targets = OrderedSet()
20232004
kb.testedParams = set()

lib/core/settings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
from thirdparty.six import unichr as _unichr
1919

2020
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
21-
VERSION = "1.3.11.4"
21+
VERSION = "1.3.11.5"
2222
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
2323
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
2424
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

lib/utils/crawler.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -133,10 +133,12 @@ def crawlThread():
133133
threadData.shared.deeper = set()
134134
threadData.shared.unprocessed = set([target])
135135

136-
message = "do you want to check for the existence of "
137-
message += "site's sitemap(.xml) [y/N] "
136+
if kb.checkSitemap is None:
137+
message = "do you want to check for the existence of "
138+
message += "site's sitemap(.xml) [y/N] "
139+
kb.checkSitemap = readInput(message, default='N', boolean=True)
138140

139-
if readInput(message, default='N', boolean=True):
141+
if kb.checkSitemap:
140142
found = True
141143
items = None
142144
url = _urllib.parse.urljoin(target, "/sitemap.xml")
@@ -158,10 +160,9 @@ def crawlThread():
158160
threadData.shared.unprocessed.update(items)
159161
logger.info("%s links found" % ("no" if not items else len(items)))
160162

161-
infoMsg = "starting crawler"
162-
if conf.bulkFile:
163-
infoMsg += " for target URL '%s'" % target
164-
logger.info(infoMsg)
163+
if not conf.bulkFile:
164+
infoMsg = "starting crawler for target URL '%s'" % target
165+
logger.info(infoMsg)
165166

166167
for i in xrange(conf.crawlDepth):
167168
threadData.shared.count = 0

sqlmap.py

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@
4545
from lib.core.common import dataToStdout
4646
from lib.core.common import filterNone
4747
from lib.core.common import getDaysFromLastUpdate
48+
from lib.core.common import getFileItems
4849
from lib.core.common import getSafeExString
4950
from lib.core.common import maskSensitiveData
5051
from lib.core.common import openFile
@@ -57,6 +58,7 @@
5758
from lib.core.common import MKSTEMP_PREFIX
5859
from lib.core.common import setColor
5960
from lib.core.common import unhandledExceptionMessage
61+
from lib.core.compat import xrange
6062
from lib.core.exception import SqlmapBaseException
6163
from lib.core.exception import SqlmapShellQuitException
6264
from lib.core.exception import SqlmapSilentQuitException
@@ -73,6 +75,7 @@
7375
from lib.core.settings import UNICODE_ENCODING
7476
from lib.core.settings import VERSION
7577
from lib.parse.cmdline import cmdLineParser
78+
from lib.utils.crawler import crawl
7679
from thirdparty import six
7780
except KeyboardInterrupt:
7881
errMsg = "user aborted"
@@ -177,7 +180,30 @@ def main():
177180
profile()
178181
else:
179182
try:
180-
start()
183+
if conf.crawlDepth and conf.bulkFile:
184+
targets = getFileItems(conf.bulkFile)
185+
186+
for i in xrange(len(targets)):
187+
try:
188+
kb.targets.clear()
189+
target = targets[i]
190+
191+
if not re.search(r"(?i)\Ahttp[s]*://", target):
192+
target = "http://%s" % target
193+
194+
infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (target, i + 1, len(targets))
195+
logger.info(infoMsg)
196+
197+
crawl(target)
198+
except Exception as ex:
199+
if not isinstance(ex, SqlmapUserQuitException):
200+
errMsg = "problem occurred while crawling '%s' ('%s')" % (target, getSafeExString(ex))
201+
logger.error(errMsg)
202+
else:
203+
if kb.targets:
204+
start()
205+
else:
206+
start()
181207
except Exception as ex:
182208
os._exitcode = 1
183209

0 commit comments

Comments (0)