Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit b2afa87

Browse files
committed
read page responses in chunks and trim unnecessary content (especially for large table dumps in full UNION/inband cases)
1 parent 2223c88 commit b2afa87

2 files changed

Lines changed: 28 additions & 5 deletions

File tree

lib/core/settings.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -475,3 +475,9 @@
475475

476476
# Maximum length of a single DNS label (RFC 1035 syntax rules)
# Reference: http://www.tcpipguide.com/free/t_DNSLabelsNamesandSyntaxRules.htm
MAX_DNS_LABEL = 63

# Connection chunk size (processing large responses in chunks to avoid MemoryError crashes - e.g. large table dump in full UNION/inband injections)
MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024

# Mark used for trimming unnecessary content in large chunks
LARGE_CHUNK_TRIM_MARKER = "__TRIMMED_CONTENT__"

lib/request/connect.py

Lines changed: 22 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -52,12 +52,14 @@
5252
from lib.core.exception import sqlmapSyntaxException
5353
from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
5454
from lib.core.settings import HTTP_SILENT_TIMEOUT
55+
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
5556
from lib.core.settings import META_REFRESH_REGEX
56-
from lib.core.settings import IS_WIN
5757
from lib.core.settings import MIN_TIME_RESPONSES
58-
from lib.core.settings import WARN_TIME_STDEV
58+
from lib.core.settings import IS_WIN
59+
from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
5960
from lib.core.settings import UNENCODED_ORIGINAL_VALUE
6061
from lib.core.settings import URI_HTTP_HEADER
62+
from lib.core.settings import WARN_TIME_STDEV
6163
from lib.request.basic import decodePage
6264
from lib.request.basic import forgeHeaders
6365
from lib.request.basic import processResponse
@@ -117,6 +119,21 @@ def __retryProxy(**kwargs):
117119
kwargs['retrying'] = True
118120
return Connect.__getPageProxy(**kwargs)
119121

122+
@staticmethod
def __connReadProxy(conn):
    """
    Reads the response body of a connection in chunks of
    MAX_CONNECTION_CHUNK_SIZE bytes (instead of a single conn.read())
    to avoid MemoryError crashes on very large responses - e.g. large
    table dumps in full UNION/inband injection cases.

    Whenever a full-sized chunk is read, content lying between an
    injection stop marker and the following start marker is replaced
    with LARGE_CHUNK_TRIM_MARKER, as it is not needed for extraction.

    Returns the (possibly trimmed) page content as a string.
    """

    parts = []

    # Hoisted out of the loop - pattern and replacement are invariant.
    # NOTE(review): assumes kb.chars.stop/start contain no regex
    # metacharacters (sqlmap generates them as plain alphanumeric
    # strings) - otherwise they would need re.escape(); confirm
    trimRegex = r"(?si)%s.+?%s" % (kb.chars.stop, kb.chars.start)
    trimReplacement = "%s%s%s" % (kb.chars.stop, LARGE_CHUNK_TRIM_MARKER, kb.chars.start)

    while True:
        chunk = conn.read(MAX_CONNECTION_CHUNK_SIZE)

        if len(chunk) == MAX_CONNECTION_CHUNK_SIZE:
            warnMsg = "large response detected. This could take a while"
            singleTimeWarnMessage(warnMsg)

            # NOTE: a stop..start span crossing a chunk boundary is not
            # trimmed, because each chunk is processed independently
            parts.append(re.sub(trimRegex, trimReplacement, chunk))
        else:
            # a short (or empty) read means the response is exhausted
            parts.append(chunk)
            break

    # single join instead of repeated += concatenation (the original was
    # quadratic on multi-chunk responses - exactly the large-response case
    # this method exists for)
    return "".join(parts)
136+
120137
@staticmethod
121138
def getPage(**kwargs):
122139
"""
@@ -205,7 +222,7 @@ def getPage(**kwargs):
205222

206223
multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
207224
conn = multipartOpener.open(unicodeencode(url), multipart)
208-
page = conn.read()
225+
page = Connect.__connReadProxy(conn)
209226
responseHeaders = conn.info()
210227
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
211228
page = decodePage(page, responseHeaders.get(HTTPHEADER.CONTENT_ENCODING), responseHeaders.get(HTTPHEADER.CONTENT_TYPE))
@@ -306,11 +323,11 @@ def getPage(**kwargs):
306323
# Get HTTP response
307324
if hasattr(conn, 'redurl'):
308325
page = threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
309-
else conn.read()
326+
else Connect.__connReadProxy(conn)
310327
skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
311328
code = conn.redcode
312329
else:
313-
page = conn.read()
330+
page = Connect.__connReadProxy(conn)
314331

315332
code = code or conn.code
316333
responseHeaders = conn.info()

0 commit comments

Comments
 (0)