Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 9bce226

Browse files
committed
Minor bug fix and adjustment to handle Keep-Alive also when running against Google (-g)
1 parent 6f03a9a commit 9bce226

3 files changed

Lines changed: 24 additions & 17 deletions

File tree

lib/core/option.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -265,18 +265,26 @@ def __setGoogleDorking():
265265
the results and save the testable hosts into the knowledge base.
266266
"""
267267

268-
global proxyHandler
269-
270268
if not conf.googleDork:
271269
return
272270

271+
global keepAliveHandler
272+
global proxyHandler
273+
273274
debugMsg = "initializing Google dorking requests"
274275
logger.debug(debugMsg)
275276

276277
logMsg = "first request to Google to get the session cookie"
277278
logger.info(logMsg)
278279

279-
googleObj = Google(proxyHandler)
280+
handlers = [ proxyHandler ]
281+
282+
# Use Keep-Alive (persistent HTTP connection) only if a proxy is not set
283+
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
284+
if conf.keepAlive and not conf.proxy:
285+
handlers.append(keepAliveHandler)
286+
287+
googleObj = Google(handlers)
280288
googleObj.getCookie()
281289

282290
matches = googleObj.search(conf.googleDork)

lib/request/basic.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,6 @@ def parseResponse(page, headers):
8888
if absFilePath not in kb.absFilePaths:
8989
kb.absFilePaths.add(absFilePath)
9090

91-
9291
def decodePage(page, contentEncoding, contentType):
9392
"""
9493
Decode compressed/charset HTTP response

lib/utils/google.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -41,10 +41,13 @@ class Google:
4141
line option '-g <google dork>'
4242
"""
4343

44-
def __init__(self, proxy):
44+
def __init__(self, handlers):
4545
self.__matches = []
4646
self.__cj = cookielib.LWPCookieJar()
47-
self.opener = urllib2.build_opener(proxy, urllib2.HTTPCookieProcessor(self.__cj))
47+
48+
handlers.append(urllib2.HTTPCookieProcessor(self.__cj))
49+
50+
self.opener = urllib2.build_opener(*handlers)
4851
self.opener.addheaders = conf.httpHeaders
4952

5053
def __parsePage(self, page):
@@ -83,7 +86,7 @@ def getCookie(self):
8386
_ = conn.info()
8487
except urllib2.HTTPError, e:
8588
_ = e.info()
86-
except urllib2.URLError, e:
89+
except urllib2.URLError, _:
8790
errMsg = "unable to connect to Google"
8891
raise sqlmapConnectionException, errMsg
8992

@@ -107,19 +110,16 @@ def search(self, googleDork):
107110
try:
108111
conn = self.opener.open(url)
109112

110-
requestMsg = "HTTP request:\nGET %s HTTP/1.1" % url
111-
#requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
112-
#requestMsg += "\n%s" % requestHeaders
113+
requestMsg = "HTTP request:\nGET %s HTTP/1.1\n" % url
114+
requestMsg += "\n".join(["%s: %s" % (header, value) for header, value in conn.headers.items()])
113115
requestMsg += "\n"
114116
logger.log(9, requestMsg)
115117

116-
page = conn.read()
117-
code = conn.code
118-
status = conn.msg
118+
page = conn.read()
119+
code = conn.code
120+
status = conn.msg
119121
responseHeaders = conn.info()
120-
121-
encoding = responseHeaders.get("Content-Encoding")
122-
page = decodePage(page, encoding)
122+
page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
123123

124124
responseMsg = "HTTP response (%s - %d):\n" % (status, code)
125125

@@ -137,7 +137,7 @@ def search(self, googleDork):
137137
warnMsg += "to get error page information (%d)" % e.code
138138
logger.warn(warnMsg)
139139
return None
140-
except (urllib2.URLError, socket.error, socket.timeout), e:
140+
except (urllib2.URLError, socket.error, socket.timeout), _:
141141
errMsg = "unable to connect to Google"
142142
raise sqlmapConnectionException, errMsg
143143

0 commit comments

Comments
 (0)