Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 1e7f2d6

Browse files
committed
Implements #1215
1 parent 26bec72 commit 1e7f2d6

5 files changed

Lines changed: 23 additions & 1 deletion

File tree

lib/core/option.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2234,6 +2234,13 @@ def _basicOptionValidation():
22342234
errMsg = "invalid regular expression '%s' ('%s')" % (conf.regexp, ex)
22352235
raise SqlmapSyntaxException(errMsg)
22362236

2237+
if conf.crawlExclude:
2238+
try:
2239+
re.compile(conf.crawlExclude)
2240+
except re.error, ex:
2241+
errMsg = "invalid regular expression '%s' ('%s')" % (conf.crawlExclude, ex)
2242+
raise SqlmapSyntaxException(errMsg)
2243+
22372244
if conf.dumpTable and conf.dumpAll:
22382245
errMsg = "switch '--dump' is incompatible with switch '--dump-all'"
22392246
raise SqlmapSyntaxException(errMsg)
@@ -2250,6 +2257,10 @@ def _basicOptionValidation():
22502257
errMsg = "switch '--forms' requires usage of option '-u' ('--url'), '-g', '-m' or '-x'"
22512258
raise SqlmapSyntaxException(errMsg)
22522259

2260+
if conf.crawlExclude and not conf.crawlDepth:
2261+
errMsg = "option '--crawl-exclude' requires usage of switch '--crawl'"
2262+
raise SqlmapSyntaxException(errMsg)
2263+
22532264
if conf.csrfUrl and not conf.csrfToken:
22542265
errMsg = "option '--csrf-url' requires usage of option '--csrf-token'"
22552266
raise SqlmapSyntaxException(errMsg)

lib/core/optiondict.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -188,6 +188,7 @@
188188
"batch": "boolean",
189189
"charset": "string",
190190
"crawlDepth": "integer",
191+
"crawlExclude": "string",
191192
"csvDel": "string",
192193
"dumpFormat": "string",
193194
"eta": "boolean",

lib/parse/cmdline.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -606,7 +606,10 @@ def cmdLineParser():
606606
help="Force character encoding used for data retrieval")
607607

608608
general.add_option("--crawl", dest="crawlDepth", type="int",
609-
help="Crawl the website starting from the target URL")
609+
help="Crawl the website starting from the target URL")
610+
611+
general.add_option("--crawl-exclude", dest="crawlExclude",
612+
help="Regexp to exclude pages from crawling (e.g. \"logout\")")
610613

611614
general.add_option("--csv-del", dest="csvDel",
612615
help="Delimiting character used in CSV output "

lib/utils/crawler.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,10 @@ def crawlThread():
4848
current = threadData.shared.unprocessed.pop()
4949
if current in visited:
5050
continue
51+
elif conf.crawlExclude and re.search(conf.crawlExclude, current):
52+
dbgMsg = "skipping '%s'" % current
53+
logger.debug(dbgMsg)
54+
continue
5155
else:
5256
visited.add(current)
5357
else:

sqlmap.conf

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -647,6 +647,9 @@ charset =
647647
# Default: 0
648648
crawlDepth = 0
649649

650+
# Regexp to exclude pages from crawling (e.g. "logout").
651+
crawlExclude =
652+
650653
# Delimiting character used in CSV output.
651654
# Default: ,
652655
csvDel = ,

0 commit comments

Comments (0)