Mirror of https://github.com/sqlmapproject/sqlmap.git, synced 2025-12-08 21:51:29 +00:00
sqlmap 0.8-rc3: Merge from Miroslav Stampar's branch, fixing a bug triggered when verbosity > 2 and another major bug in the urlencoding/urldecoding of POST data and cookies, adding the --drop-set-cookie option, implementing automatic decoding of gzip and deflate HTTP responses, adding support for selecting the Google dork result page (--gpage), and performing a minor code cleanup.
@@ -22,17 +22,17 @@ with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import gzip
import os
import re
import StringIO
import zlib

from lib.core.data import conf
from lib.core.data import kb
from lib.parse.headers import headersParser
from lib.parse.html import htmlParser


def forgeHeaders(cookie, ua):
    """
    Prepare HTTP Cookie and HTTP User-Agent headers to use when performing
@@ -51,17 +51,12 @@ def forgeHeaders(cookie, ua):

    return headers


def parseResponse(page, headers):
    """
    @param page: the page to parse to feed the knowledge base htmlFp
    (back-end DBMS fingerprint based upon DBMS error messages return
    through the web application) list and absFilePaths (absolute file
    paths) set.

    @todo: in the future parse the page content scrolling an XML file to
    identify the dynamic language used and, most, the absolute path,
    like for DBMS error messages (ERRORS_XML), see above.
    """

    if headers:
@@ -73,11 +68,29 @@ def parseResponse(page, headers):
    # Detect injectable page absolute system path
    # NOTE: this regular expression works if the remote web application
    # is written in PHP and debug/error messages are enabled.
    absFilePathsRegExp = ( " in <b>(.*?)</b> on line", "([\w]\:[\/\\\\]+)" )
    absFilePathsRegExp = ( r" in <b>(.*?)</b> on line", r"\b[A-Za-z]:(\\[\w.\\]*)?", r"/[/\w.]+" )

    for absFilePathRegExp in absFilePathsRegExp:
        absFilePaths = re.findall(absFilePathRegExp, page, re.I)
        reobj = re.compile(absFilePathRegExp)

        for match in reobj.finditer(page):
            absFilePath = match.group()

        for absFilePath in absFilePaths:
            if absFilePath not in kb.absFilePaths:
                kb.absFilePaths.add(os.path.dirname(absFilePath))
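As a hedged aside (not part of the diff): a minimal sketch of what the two new path patterns pick out of a made-up PHP warning string, showing why they cover both Windows and Unix absolute paths.

import re

page = ("Warning: include() failed in C:\\wamp\\www\\shop\\cart.php on line 12, "
        "include_path=/var/www/html/includes")

for pattern in (r"\b[A-Za-z]:(\\[\w.\\]*)?", r"/[/\w.]+"):
    for match in re.finditer(pattern, page):
        # prints C:\wamp\www\shop\cart.php, then /var/www/html/includes
        print match.group()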
def decodePage(page, encoding):
    """
    Decode gzip/deflate HTTP response
    """

    if str(encoding).lower() in ('gzip', 'x-gzip', 'deflate'):
        if encoding == 'deflate':
            # http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations
            data = StringIO.StringIO(zlib.decompress(page, -15))
        else:
            data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(page))

        page = data.read()

    return page
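A quick way to sanity-check the new decoding logic, assuming decodePage is importable from lib.request.basic as the connect.py imports below suggest; the compressed bodies are built locally rather than fetched over HTTP.

import gzip
import zlib
import StringIO

from lib.request.basic import decodePage

body = "<html><body>id=1</body></html>"

# Build a gzip-encoded body, as a server would with Content-Encoding: gzip
buf = StringIO.StringIO()
gz = gzip.GzipFile(fileobj=buf, mode="wb")
gz.write(body)
gz.close()

# Build a raw deflate body: a zlib stream with the 2-byte header and the
# 4-byte checksum stripped, which is what zlib.decompress(page, -15) expects
deflated = zlib.compress(body)[2:-4]

assert decodePage(buf.getvalue(), "gzip") == body
assert decodePage(deflated, "deflate") == body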
lib/request/certhandler.py (new file)
@@ -0,0 +1,12 @@
import httplib
import urllib2

class HTTPSCertAuthHandler(urllib2.HTTPSHandler):
    def __init__(self, key_file, cert_file):
        urllib2.HTTPSHandler.__init__(self)
        self.key_file = key_file
        self.cert_file = cert_file

    def https_open(self, req):
        return self.do_open(self.getConnection, req)

    def getConnection(self, host):
        return httplib.HTTPSConnection(host, key_file=self.key_file, cert_file=self.cert_file)
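A hedged usage sketch (not part of the commit) of how a handler like this is typically wired into urllib2 so that every HTTPS request presents a client certificate; the key/certificate paths and target URL are placeholders.

import urllib2

from lib.request.certhandler import HTTPSCertAuthHandler

handler = HTTPSCertAuthHandler("/path/to/client.key", "/path/to/client.crt")
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)

# Every request made through urllib2 now uses the client key/certificate pair
page = urllib2.urlopen("https://target.example/index.php?id=1").read()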
@@ -22,15 +22,12 @@ with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import re

from lib.core.data import conf
from lib.core.data import logger
from lib.core.session import setMatchRatio


def comparison(page, headers=None, getSeqMatcher=False):
    regExpResults = None
@@ -73,15 +70,16 @@ def comparison(page, headers=None, getSeqMatcher=False):

    # If the url is stable and we did not set yet the match ratio and the
    # current injected value changes the url page content
    if conf.matchRatio == None:
        if conf.md5hash != None and ratio > 0.6 and ratio < 1:
    if conf.matchRatio is None:
        if conf.md5hash is not None and ratio > 0.6 and ratio < 1:
            logger.debug("setting match ratio to %.3f" % ratio)
            conf.matchRatio = ratio
        elif conf.md5hash == None or ( conf.md5hash != None and ratio < 0.6 ):

        elif conf.md5hash is None or ( conf.md5hash is not None and ratio < 0.6 ):
            logger.debug("setting match ratio to default value 0.900")
            conf.matchRatio = 0.900

    if conf.matchRatio != None:
    if conf.matchRatio is not None:
        setMatchRatio()

    # If it has been requested to return the ratio and not a comparison
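The == None / != None comparisons replaced throughout this commit are more than style: "is None" tests identity, while "== None" invokes equality and can be fooled by objects that overload it. A contrived illustration of the difference, unrelated to sqlmap's own classes:

class Chameleon(object):
    def __eq__(self, other):
        return True        # claims to be equal to everything, including None

obj = Chameleon()
print obj == None    # True  -- misleading
print obj is None    # False -- what the code actually wants to know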
@@ -93,7 +91,7 @@ def comparison(page, headers=None, getSeqMatcher=False):
    # hash of the original one
    # NOTE: old implementation, it did not handle automatically the fact
    # that the url could be not stable (due to VIEWSTATE, counter, etc.)
    #elif conf.md5hash != None:
    #elif conf.md5hash is not None:
    #    return conf.md5hash == md5hash(page)

    # If the url is not stable it returns sequence matcher between the
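The ratio used above comes from comparing the original page with the page returned for an injected request. A minimal illustration with difflib, assuming (as the getSeqMatcher naming suggests) a difflib.SequenceMatcher-style 0-to-1 ratio; the page strings are invented:

import difflib

original = "<html><body>1 record found: luther</body></html>"
injected = "<html><body>0 records found</body></html>"

ratio = difflib.SequenceMatcher(None, original, injected).ratio()
print "%.3f" % ratio   # 1.0 only when the two pages are identical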
@@ -22,8 +22,6 @@ with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import httplib
import re
import socket
@@ -33,11 +31,13 @@ import urlparse
import traceback

from lib.contrib import multipartpost
from lib.core.common import sanitizeCookie
from lib.core.convert import urlencode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import sqlmapConnectionException
from lib.request.basic import decodePage
from lib.request.basic import forgeHeaders
from lib.request.basic import parseResponse
from lib.request.comparison import comparison
@@ -48,12 +48,10 @@ class Connect:
    This class defines methods used to perform HTTP requests
    """

    @staticmethod
    def __getPageProxy(**kwargs):
        return Connect.getPage(**kwargs)

    @staticmethod
    def getPage(**kwargs):
        """
@@ -61,7 +59,7 @@ class Connect:
        the target url page content
        """

        if conf.delay != None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
        if conf.delay is not None and isinstance(conf.delay, (int, float)) and conf.delay > 0:
            time.sleep(conf.delay)

        url = kwargs.get('url', conf.url).replace(" ", "%20")
@@ -85,23 +83,24 @@ class Connect:
        else:
            requestMsg += "%s" % urlparse.urlsplit(url)[2] or "/"

        if silent is True:
        if silent:
            socket.setdefaulttimeout(3)

        if direct:
            if "?" in url:
                url, params = url.split("?")
                params = urlencode(params).replace("%%", "%")
                params = urlencode(params)
                url = "%s?%s" % (url, params)
                requestMsg += "?%s" % params

            if post:
                post = urlencode(post).replace("%%", "%")

        elif multipart:
            multipartOpener = urllib2.build_opener(multipartpost.MultipartPostHandler)
            conn = multipartOpener.open(url, multipart)
            page = conn.read()
            responseHeaders = conn.info()

            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)

            return page
@@ -110,7 +109,7 @@ class Connect:
            get = conf.parameters["GET"]

        if get:
            get = urlencode(get).replace("%%", "%")
            get = urlencode(get)
            url = "%s?%s" % (url, get)
            requestMsg += "?%s" % get
@@ -118,18 +117,11 @@ class Connect:
        if conf.parameters.has_key("POST") and not post:
            post = conf.parameters["POST"]

        post = urlencode(post).replace("%%", "%")

        requestMsg += " HTTP/1.1"

        if cookie:
            # TODO: sure about encoding the cookie?
            #cookie = urlencode(cookie).replace("%%", "%")
            cookie = cookie.replace("%%", "%")

        try:
            # Perform HTTP request
            headers = forgeHeaders(cookie, ua)
            headers = forgeHeaders(sanitizeCookie(cookie), ua)
            req = urllib2.Request(url, post, headers)
            conn = urllib2.urlopen(req)
@@ -141,14 +133,15 @@ class Connect:

            requestHeaders = "\n".join(["%s: %s" % (header, value) for header, value in req.header_items()])

            for _, cookie in enumerate(conf.cj):
                if not cookieStr:
                    cookieStr = "Cookie: "

                cookie = str(cookie)
                index = cookie.index(" for ")

                cookieStr += "%s; " % cookie[8:index]

            if not conf.dropSetCookie:
                for _, cookie in enumerate(conf.cj):
                    if not cookieStr:
                        cookieStr = "Cookie: "

                    cookie = str(cookie)
                    index = cookie.index(" for ")

                    cookieStr += "%s; " % cookie[8:index]

            if not req.has_header("Cookie") and cookieStr:
                requestHeaders += "\n%s" % cookieStr[:-2]
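The cookie[8:index] slicing above relies on how the jar's cookies stringify; conf.cj appears to be a cookielib jar, so str(cookie) yields something like "<Cookie PHPSESSID=abc123 for target.example/>", and the slice from character 8 up to " for " is exactly the name=value pair echoed back in the Cookie header. A small standalone illustration with a made-up cookie string (with --drop-set-cookie set, the whole loop is skipped and no jar cookies are sent at all):

text = "<Cookie PHPSESSID=abc123 for target.example/>"
index = text.index(" for ")
print text[8:index]   # PHPSESSID=abc123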
@@ -171,6 +164,9 @@ class Connect:
            status = conn.msg
            responseHeaders = conn.info()

            encoding = responseHeaders.get("Content-Encoding")
            page = decodePage(page, encoding)

        except urllib2.HTTPError, e:
            if e.code == 401:
                exceptionMsg = "not authorized, try to provide right HTTP "
@@ -208,7 +204,7 @@ class Connect:

                return None, None

            if silent is True:
            if silent:
                return None, None

            elif conf.retriesCount < conf.retries:
@@ -240,7 +236,6 @@ class Connect:

        return page, responseHeaders


    @staticmethod
    def queryPage(value=None, place=None, content=False, getSeqMatcher=False, silent=False):
        """
@@ -22,8 +22,6 @@ with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import re
import time
@@ -44,7 +42,6 @@ from lib.techniques.blind.inference import bisection
from lib.utils.resume import queryOutputLength
from lib.utils.resume import resume


def __goInference(payload, expression, charsetType=None, firstChar=None, lastChar=None):
    start = time.time()
@@ -67,7 +64,6 @@ def __goInference(payload, expression, charsetType=None, firstChar=None, lastCha

    return value


def __goInferenceFields(expression, expressionFields, expressionFieldsList, payload, expected=None, num=None, resumeValue=True, charsetType=None, firstChar=None, lastChar=None):
    outputs = []
    origExpr = None
@@ -87,7 +83,7 @@ def __goInferenceFields(expression, expressionFields, expressionFieldsList, payl
        else:
            expressionReplaced = expression.replace(expressionFields, field, 1)

        if resumeValue == True:
        if resumeValue:
            output = resume(expressionReplaced, payload)

        if not output or ( expected == "int" and not output.isdigit() ):
@@ -105,7 +101,6 @@ def __goInferenceFields(expression, expressionFields, expressionFieldsList, payl

    return outputs


def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, resumeValue=True, unpack=True, charsetType=None, firstChar=None, lastChar=None):
    """
    Retrieve the output of a SQL query characted by character taking
@@ -124,15 +119,15 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
    untilLimitChar = None
    untilOrderChar = None

    if resumeValue == True:
    if resumeValue:
        output = resume(expression, payload)
    else:
        output = None

    if output and ( expected == None or ( expected == "int" and output.isdigit() ) ):
    if output and ( expected is None or ( expected == "int" and output.isdigit() ) ):
        return output

    if unpack == False:
    if not unpack:
        return __goInference(payload, expression, charsetType, firstChar, lastChar)

    if kb.dbmsDetected:
@@ -205,7 +200,7 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
        if not stopLimit or stopLimit <= 1:
            if kb.dbms == "Oracle" and expression.endswith("FROM DUAL"):
                test = "n"
            elif batch == True:
            elif batch:
                test = "y"
            else:
                message = "can the SQL query provided return "
@@ -221,7 +216,7 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
            untilOrderChar = countedExpression.index(" ORDER BY ")
            countedExpression = countedExpression[:untilOrderChar]

        if resumeValue == True:
        if resumeValue:
            count = resume(countedExpression, payload)

        if not stopLimit:
@@ -231,7 +226,7 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r
            if count and count.isdigit() and int(count) > 0:
                count = int(count)

                if batch == True:
                if batch:
                    stopLimit = count
                else:
                    message = "the SQL query provided can return "
@@ -314,7 +309,6 @@ def __goInferenceProxy(expression, fromUser=False, expected=None, batch=False, r

    return returnValue


def __goInband(expression, expected=None, sort=True, resumeValue=True, unpack=True):
    """
    Retrieve the output of a SQL query taking advantage of an inband SQL
@@ -330,7 +324,7 @@ def __goInband(expression, expected=None, sort=True, resumeValue=True, unpack=Tr
                  and expression in kb.resumedQueries[conf.url].keys()
                )

    if condition and resumeValue == True:
    if condition and resumeValue:
        output = resume(expression, None)

        if not output or ( expected == "int" and not output.isdigit() ):
@@ -344,7 +338,6 @@ def __goInband(expression, expected=None, sort=True, resumeValue=True, unpack=Tr

    return data


def getValue(expression, blind=True, inband=True, fromUser=False, expected=None, batch=False, unpack=True, sort=True, resumeValue=True, charsetType=None, firstChar=None, lastChar=None):
    """
    Called each time sqlmap inject a SQL query on the SQL injection
@@ -382,7 +375,6 @@ def getValue(expression, blind=True, inband=True, fromUser=False, expected=None,

    return value


def goStacked(expression, silent=False):
    expression = cleanQuery(expression)
@@ -22,8 +22,6 @@ with sqlmap; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""

import httplib
import socket
import urllib
@@ -31,11 +29,9 @@ import urllib2

from lib.core.settings import PYVERSION

if PYVERSION >= "2.6":
    import ssl


class ProxyHTTPConnection(httplib.HTTPConnection):
    _ports = {"http" : 80, "https" : 443}
@@ -65,7 +61,6 @@ class ProxyHTTPConnection(httplib.HTTPConnection):

        httplib.HTTPConnection.request(self, method, rest, body, headers)


    def connect(self):
        httplib.HTTPConnection.connect(self)
@@ -91,7 +86,6 @@ class ProxyHTTPConnection(httplib.HTTPConnection):
            if line == "\r\n":
                break


class ProxyHTTPSConnection(ProxyHTTPConnection):
    default_port = 443
@@ -111,7 +105,6 @@ class ProxyHTTPSConnection(ProxyHTTPConnection):
        sslobj = socket.ssl(self.sock, self.key_file, self.cert_file)
        self.sock = httplib.FakeSocket(self.sock, sslobj)


class ProxyHTTPHandler(urllib2.HTTPHandler):
    def __init__(self, proxy=None, debuglevel=0):
        self.proxy = proxy
@@ -124,7 +117,6 @@ class ProxyHTTPHandler(urllib2.HTTPHandler):

        return urllib2.HTTPHandler.do_open(self, ProxyHTTPConnection, req)


class ProxyHTTPSHandler(urllib2.HTTPSHandler):
    def __init__(self, proxy=None, debuglevel=0):
        self.proxy = proxy