Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit bb18c4d

Browse files
committed
Removing -x as I doubt that anybody uses it
1 parent ce9618c commit bb18c4d

7 files changed

Lines changed: 37 additions & 72 deletions

File tree

lib/core/option.py

Lines changed: 8 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -336,13 +336,10 @@ def _setCrawler():
336336
if not conf.crawlDepth:
337337
return
338338

339-
if not any((conf.bulkFile, conf.sitemapUrl)):
339+
if not conf.bulkFile:
340340
crawl(conf.url)
341341
else:
342-
if conf.bulkFile:
343-
targets = getFileItems(conf.bulkFile)
344-
else:
345-
targets = list(parseSitemap(conf.sitemapUrl))
342+
targets = getFileItems(conf.bulkFile)
346343

347344
for i in xrange(len(targets)):
348345
try:
@@ -443,23 +440,6 @@ def _setBulkMultipleTargets():
443440
warnMsg = "no usable links found (with GET parameters)"
444441
logger.warn(warnMsg)
445442

446-
def _setSitemapTargets():
447-
if not conf.sitemapUrl:
448-
return
449-
450-
infoMsg = "parsing sitemap '%s'" % conf.sitemapUrl
451-
logger.info(infoMsg)
452-
453-
found = False
454-
for item in parseSitemap(conf.sitemapUrl):
455-
if re.match(r"[^ ]+\?(.+)", item, re.I):
456-
found = True
457-
kb.targets.add((item.strip(), None, None, None, None))
458-
459-
if not found and not conf.forms and not conf.crawlDepth:
460-
warnMsg = "no usable links found (with GET parameters)"
461-
logger.warn(warnMsg)
462-
463443
def _findPageForms():
464444
if not conf.forms or conf.crawlDepth:
465445
return
@@ -471,15 +451,13 @@ def _findPageForms():
471451
infoMsg = "searching for forms"
472452
logger.info(infoMsg)
473453

474-
if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)):
454+
if not any((conf.bulkFile, conf.googleDork)):
475455
page, _, _ = Request.queryPage(content=True, ignoreSecondOrder=True)
476456
if findPageForms(page, conf.url, True, True):
477457
found = True
478458
else:
479459
if conf.bulkFile:
480460
targets = getFileItems(conf.bulkFile)
481-
elif conf.sitemapUrl:
482-
targets = list(parseSitemap(conf.sitemapUrl))
483461
elif conf.googleDork:
484462
targets = [_[0] for _ in kb.targets]
485463
kb.targets.clear()
@@ -1653,16 +1631,13 @@ def _cleanupOptions():
16531631
if conf.fileDest:
16541632
conf.fileDest = ntToPosixSlashes(normalizePath(conf.fileDest))
16551633

1656-
if conf.sitemapUrl and not conf.sitemapUrl.lower().startswith("http"):
1657-
conf.sitemapUrl = "http%s://%s" % ('s' if conf.forceSSL else '', conf.sitemapUrl)
1658-
16591634
if conf.msfPath:
16601635
conf.msfPath = ntToPosixSlashes(normalizePath(conf.msfPath))
16611636

16621637
if conf.tmpPath:
16631638
conf.tmpPath = ntToPosixSlashes(normalizePath(conf.tmpPath))
16641639

1665-
if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.forms, conf.crawlDepth)):
1640+
if any((conf.googleDork, conf.logFile, conf.bulkFile, conf.forms, conf.crawlDepth)):
16661641
conf.multipleTargets = True
16671642

16681643
if conf.optimize:
@@ -2508,8 +2483,8 @@ def _basicOptionValidation():
25082483
errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS
25092484
raise SqlmapSyntaxException(errMsg)
25102485

2511-
if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile, conf.sitemapUrl)):
2512-
errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
2486+
if conf.forms and not any((conf.url, conf.googleDork, conf.bulkFile)):
2487+
errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g' or '-m'"
25132488
raise SqlmapSyntaxException(errMsg)
25142489

25152490
if conf.crawlExclude and not conf.crawlDepth:
@@ -2610,7 +2585,7 @@ def _basicOptionValidation():
26102585
errMsg = "value for option '--union-char' must be an alpha-numeric value (e.g. 1)"
26112586
raise SqlmapSyntaxException(errMsg)
26122587

2613-
if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.sitemapUrl, conf.listTampers)):
2588+
if conf.hashFile and any((conf.direct, conf.url, conf.logFile, conf.bulkFile, conf.googleDork, conf.configFile, conf.requestFile, conf.updateAll, conf.smokeTest, conf.liveTest, conf.wizard, conf.dependencies, conf.purge, conf.listTampers)):
26142589
errMsg = "option '--crack' should be used as a standalone"
26152590
raise SqlmapSyntaxException(errMsg)
26162591

@@ -2677,7 +2652,7 @@ def init():
26772652

26782653
parseTargetDirect()
26792654

2680-
if any((conf.url, conf.logFile, conf.bulkFile, conf.sitemapUrl, conf.requestFile, conf.googleDork, conf.liveTest)):
2655+
if any((conf.url, conf.logFile, conf.bulkFile, conf.requestFile, conf.googleDork, conf.liveTest)):
26812656
_setHostname()
26822657
_setHTTPTimeout()
26832658
_setHTTPExtraHeaders()
@@ -2692,7 +2667,6 @@ def init():
26922667
_setSafeVisit()
26932668
_doSearch()
26942669
_setBulkMultipleTargets()
2695-
_setSitemapTargets()
26962670
_checkTor()
26972671
_setCrawler()
26982672
_findPageForms()

lib/core/optiondict.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
"sessionFile": "string",
2020
"googleDork": "string",
2121
"configFile": "string",
22-
"sitemapUrl": "string",
2322
},
2423

2524
"Request": {

lib/core/settings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
from thirdparty.six import unichr as _unichr
1919

2020
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
21-
VERSION = "1.3.11.2"
21+
VERSION = "1.3.11.3"
2222
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
2323
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
2424
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

lib/parse/cmdline.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -130,9 +130,6 @@ def cmdLineParser(argv=None):
130130
target.add_argument("-l", dest="logFile",
131131
help="Parse target(s) from Burp or WebScarab proxy log file")
132132

133-
target.add_argument("-x", dest="sitemapUrl",
134-
help="Parse target(s) from remote sitemap(.xml) file")
135-
136133
target.add_argument("-m", dest="bulkFile",
137134
help="Scan multiple targets given in a textual file ")
138135

@@ -994,8 +991,8 @@ def _format_action_invocation(self, action):
994991
if args.dummy:
995992
args.url = args.url or DUMMY_URL
996993

997-
if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)):
998-
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). "
994+
if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.listTampers, args.hashFile)):
995+
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, --list-tampers, --wizard, --update, --purge or --dependencies). "
999996
errMsg += "Use -h for basic and -hh for advanced help\n"
1000997
parser.error(errMsg)
1001998

lib/parse/configfile.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,14 +79,14 @@ def configFileParser(configFile):
7979

8080
mandatory = False
8181

82-
for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"):
82+
for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "wizard"):
8383
if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option):
8484
mandatory = True
8585
break
8686

8787
if not mandatory:
8888
errMsg = "missing a mandatory option in the configuration file "
89-
errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)"
89+
errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile or wizard)"
9090
raise SqlmapMissingMandatoryOptionException(errMsg)
9191

9292
for family, optionData in optDict.items():

lib/utils/crawler.py

Lines changed: 24 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -133,31 +133,30 @@ def crawlThread():
133133
threadData.shared.deeper = set()
134134
threadData.shared.unprocessed = set([target])
135135

136-
if not conf.sitemapUrl:
137-
message = "do you want to check for the existence of "
138-
message += "site's sitemap(.xml) [y/N] "
139-
140-
if readInput(message, default='N', boolean=True):
141-
found = True
142-
items = None
143-
url = _urllib.parse.urljoin(target, "/sitemap.xml")
144-
try:
145-
items = parseSitemap(url)
146-
except SqlmapConnectionException as ex:
147-
if "page not found" in getSafeExString(ex):
148-
found = False
149-
logger.warn("'sitemap.xml' not found")
150-
except:
151-
pass
152-
finally:
153-
if found:
154-
if items:
155-
for item in items:
156-
if re.search(r"(.*?)\?(.+)", item):
157-
threadData.shared.value.add(item)
158-
if conf.crawlDepth > 1:
159-
threadData.shared.unprocessed.update(items)
160-
logger.info("%s links found" % ("no" if not items else len(items)))
136+
message = "do you want to check for the existence of "
137+
message += "site's sitemap(.xml) [y/N] "
138+
139+
if readInput(message, default='N', boolean=True):
140+
found = True
141+
items = None
142+
url = _urllib.parse.urljoin(target, "/sitemap.xml")
143+
try:
144+
items = parseSitemap(url)
145+
except SqlmapConnectionException as ex:
146+
if "page not found" in getSafeExString(ex):
147+
found = False
148+
logger.warn("'sitemap.xml' not found")
149+
except:
150+
pass
151+
finally:
152+
if found:
153+
if items:
154+
for item in items:
155+
if re.search(r"(.*?)\?(.+)", item):
156+
threadData.shared.value.add(item)
157+
if conf.crawlDepth > 1:
158+
threadData.shared.unprocessed.update(items)
159+
logger.info("%s links found" % ("no" if not items else len(items)))
161160

162161
infoMsg = "starting crawler"
163162
if conf.bulkFile:

sqlmap.conf

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,6 @@ requestFile =
3232
# Example: +ext:php +inurl:"&id=" +intext:"powered by "
3333
googleDork =
3434

35-
# Parse target(s) from remote sitemap(.xml) file.
36-
# Example: http://192.168.1.121/sitemap.xml
37-
sitemapUrl =
38-
3935

4036
# These options can be used to specify how to connect to the target URL.
4137
[Request]

0 commit comments

Comments
 (0)