Mirror of https://github.com/sqlmapproject/sqlmap.git
Minor code restyling
@@ -646,7 +646,7 @@ def dataToStdout(data, forceOutput=False):
output = data.encode('ascii', errors="replace")

if output != data:
warnMsg = "cannot properly display Unicode characters "
warnMsg += "inside Windows OS command prompt "
warnMsg += "(http://bugs.python.org/issue1602). All "
warnMsg += "similar occurances will result in "

@@ -1891,7 +1891,7 @@ def adjustTimeDelay(lastQueryDuration, lowerStdLimit):
if all([x == candidate for x in kb.delayCandidates]) and candidate < conf.timeSec:
print

warnMsg = "adjusting time delay to %d second%s " % (candidate, 's' if candidate > 1 else '')
warnMsg += "(due to good response times)"
logger.warn(warnMsg)

@@ -2402,7 +2402,7 @@ def unhandledExceptionMessage():
Returns detailed message about occured unhandled exception
"""

errMsg = "unhandled exception in %s, retry your " % VERSION_STRING
errMsg += "run with the latest development version from the Subversion "
errMsg += "repository. If the exception persists, please send by e-mail "
errMsg += "to %s the following text " % ML

@@ -24,7 +24,7 @@ firebirdTypes = {
"37":"VARCHAR"
}

sybaseTypes = {
"14":"floatn",
"8":"float",
"15":"datetimn",

@@ -55,7 +55,7 @@ sybaseTypes = {
"20":"image",
}

mysqlPrivs = {
1:"select_priv",
2:"insert_priv",
3:"update_priv",

@@ -84,7 +84,7 @@ mysqlPrivs = {
26:"create_user_priv",
}

pgsqlPrivs = {
1:"createdb",
2:"super",
3:"catupd",

@@ -34,7 +34,7 @@ class Dump:

def __init__(self):
self.__outputFile = None
self.__outputFP = None

def __write(self, data, n=True):
text = "%s%s" % (data, "\n" if n else " ")

@@ -300,7 +300,7 @@ class Dump:

def dbTableValues(self, tableValues):
replication = None
rtable = None

if tableValues is None:
return

@@ -321,18 +321,18 @@ class Dump:
dumpFileName = "%s%s%s.csv" % (dumpDbPath, os.sep, table)
dumpFP = openFile(dumpFileName, "wb")

count = int(tableValues["__infos__"]["count"])
separator = str()
field = 1
fields = len(tableValues) - 1

columns = tableValues.keys()
columns.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)

for column in columns:
if column != "__infos__":
info = tableValues[column]
lines = "-" * (int(info["length"]) + 2)
separator += "+%s" % lines

separator += "+"

@@ -381,9 +381,9 @@ class Dump:

for column in columns:
if column != "__infos__":
info = tableValues[column]
maxlength = int(info["length"])
blank = " " * (maxlength - len(column))

self.__write("| %s%s" % (column, blank), n=False)

@@ -458,7 +458,7 @@ class Dump:
else:
colConsiderStr = " '%s' was" % column

msg = "Column%s found in the " % colConsiderStr
msg += "following databases:"
self.__write(msg)
@@ -8,94 +8,94 @@ See the file 'doc/COPYING' for copying permission
"""

class PRIORITY:
LOWEST = -100
LOWER = -50
LOW = -10
NORMAL = 0
HIGH = 10
HIGHER = 50
HIGHEST = 100

class SORTORDER:
FIRST = 0
SECOND = 1
THIRD = 2
FOURTH = 3
FIFTH = 4
LAST = 100

class DBMS:
ACCESS = "Microsoft Access"
FIREBIRD = "Firebird"
MAXDB = "SAP MaxDB"
MSSQL = "Microsoft SQL Server"
MYSQL = "MySQL"
ORACLE = "Oracle"
PGSQL = "PostgreSQL"
SQLITE = "SQLite"
SYBASE = "Sybase"

class OS:
LINUX = "Linux"
WINDOWS = "Windows"

class PLACE:
GET = "GET"
POST = "POST"
SOAP = "SOAP"
URI = "URI"
COOKIE = "Cookie"
UA = "User-Agent"
REFERER = "Referer"

class HTTPMETHOD:
GET = "GET"
POST = "POST"
HEAD = "HEAD"

class NULLCONNECTION:
HEAD = "HEAD"
RANGE = "Range"

class HASH:
MYSQL = r'(?i)\A\*[0-9a-f]{40}\Z'
MYSQL_OLD = r'(?i)\A[0-9a-f]{16}\Z'
POSTGRES = r'(?i)\Amd5[0-9a-f]{32}\Z'
MSSQL = r'(?i)\A0x0100[0-9a-f]{8}[0-9a-f]{40}\Z'
MSSQL_OLD = r'(?i)\A0x0100[0-9a-f]{8}[0-9a-f]{80}\Z'
ORACLE = r'(?i)\As:[0-9a-f]{60}\Z'
ORACLE_OLD = r'(?i)\A[01-9a-f]{16}\Z'
MD5_GENERIC = r'(?i)\A[0-9a-f]{32}\Z'
SHA1_GENERIC = r'(?i)\A[0-9a-f]{40}\Z'
CRYPT_GENERIC = r'(?i)\A[./0-9A-Za-z]{13}\Z'

# Reference: http://www.zytrax.com/tech/web/mobile_ids.html
class MOBILES:
BLACKBERRY = "RIM Blackberry 9800 Torch;Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en-US) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.246 Mobile Safari/534.1+"
GALAXY = "Samsung Galaxy S;Mozilla/5.0 (Linux; U; Android 2.2; en-US; SGH-T959D Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1"
HP = "HP iPAQ 6365;Mozilla/4.0 (compatible; MSIE 4.01; Windows CE; PPC; 240x320; HP iPAQ h6300)"
HTC = "HTC Evo;Mozilla/5.0 (Linux; U; Android 2.2; en-us; Sprint APA9292KT Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1"
IPHONE = "Apple iPhone 4;Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/531.22.7"
NEXUS = "Google Nexus One;Mozilla/5.0 (Linux; U; Android 2.2; en-US; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1"
NOKIA = "Nokia N97;Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/10.0.012; Profile/MIDP-2.1 Configuration/CLDC-1.1; en-us) AppleWebKit/525 (KHTML, like Gecko) WicKed/7.1.12344"

class HTTPHEADER:
ACCEPT_ENCODING = "Accept-Encoding"
AUTHORIZATION = "Authorization"
CONNECTION = "Connection"
CONTENT_ENCODING = "Content-Encoding"
CONTENT_LENGTH = "Content-Length"
CONTENT_RANGE = "Content-Range"
CONTENT_TYPE = "Content-Type"
COOKIE = "Cookie"
PROXY_AUTHORIZATION = "Proxy-authorization"
RANGE = "Range"
REFERER = "Referer"
USER_AGENT = "User-Agent"

class EXPECTED:
BOOL = "bool"
INT = "int"

class PAYLOAD:
SQLINJECTION = {

@@ -134,10 +134,10 @@ class PAYLOAD:
}

class METHOD:
COMPARISON = "comparison"
GREP = "grep"
TIME = "time"
UNION = "union"

class TECHNIQUE:
BOOLEAN = 1
@@ -163,7 +163,7 @@ def __feedTargetsDict(reqFile, addedTargetUrls):
getPostReq = False

for request in reqResList:
url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
getPostReq = True

@@ -191,7 +191,7 @@ def __feedTargetsDict(reqFile, addedTargetUrls):
"""
Parses burp logs
"""
port = None
scheme = None

reqResList = content.split(BURP_SPLITTER)

@@ -202,7 +202,7 @@ def __feedTargetsDict(reqFile, addedTargetUrls):

if schemePort:
scheme = schemePort.group(1)
port = schemePort.group(2)

if not re.search ("^[\n]*(GET|POST).*?\sHTTP\/", request, re.I):
continue

@@ -211,13 +211,13 @@ def __feedTargetsDict(reqFile, addedTargetUrls):
continue

getPostReq = False
url = None
host = None
method = None
data = None
cookie = None
params = False
lines = request.split("\n")

for line in lines:
if len(line) == 0 or line == "\n":

@@ -283,9 +283,9 @@ def __feedTargetsDict(reqFile, addedTargetUrls):
scheme = "https"

if not url.startswith("http"):
url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
scheme = None
port = None

if not kb.targetUrls or url not in addedTargetUrls:
kb.targetUrls.add((url, method, urldecode(data), cookie))
@@ -343,14 +343,14 @@ def __setMultipleTargets():
__feedTargetsDict(os.path.join(conf.list, reqFile), addedTargetUrls)

else:
errMsg = "the specified list of targets is not a file "
errMsg += "nor a directory"
raise sqlmapFilePathException, errMsg

updatedTargetsCount = len(kb.targetUrls)

if updatedTargetsCount > initialTargetsCount:
infoMsg = "sqlmap parsed %d " % (updatedTargetsCount - initialTargetsCount)
infoMsg += "testable requests from the targets list"
logger.info(infoMsg)

@@ -371,7 +371,7 @@ def __setRequestFromFile():
logger.info(infoMsg)

if not os.path.isfile(conf.requestFile):
errMsg = "the specified HTTP request file "
errMsg += "does not exist"
raise sqlmapFilePathException, errMsg

@@ -414,14 +414,14 @@ def __setGoogleDorking():
matches = googleObj.search(conf.googleDork)

if not matches:
errMsg = "unable to find results for your "
errMsg += "Google dork expression"
raise sqlmapGenericException, errMsg

googleObj.getTargetUrls()

if kb.targetUrls:
logMsg = "sqlmap got %d results for your " % len(matches)
logMsg += "Google dork expression, "

if len(matches) == len(kb.targetUrls):

@@ -432,7 +432,7 @@ def __setGoogleDorking():
logMsg += "of them are testable targets"
logger.info(logMsg)
else:
errMsg = "sqlmap got %d results " % len(matches)
errMsg += "for your Google dork expression, but none of them "
errMsg += "have GET parameters to test for SQL injection"
raise sqlmapGenericException, errMsg

@@ -450,7 +450,7 @@ def __findPageForms():
response, _ = Request.queryPage(response=True)

if response is None or isinstance(response, basestring):
errMsg = "can't do form parsing as no valid response "
errMsg += "object found. please check previous log messages "
errMsg += "for connection issues"
raise sqlmapGenericException, errMsg

@@ -458,7 +458,7 @@ def __findPageForms():
try:
forms = ParseResponse(response, backwards_compat=False)
except ParseError:
errMsg = "badly formed HTML at the target url. can't parse forms"
raise sqlmapGenericException, errMsg

if forms:

@@ -478,7 +478,7 @@ def __findPageForms():
kb.targetUrls.add(target)
kb.formNames.append(target)
else:
errMsg = "there were no forms found at the given target url"
raise sqlmapGenericException, errMsg

def __setMetasploit():

@@ -491,7 +491,7 @@ def __setMetasploit():
msfEnvPathExists = False

if IS_WIN:
warnMsg = "some sqlmap takeover functionalities are not yet "
warnMsg += "supported on Windows. Please use Linux in a virtual "
warnMsg += "machine for out-of-band features."

@@ -503,7 +503,7 @@ def __setMetasploit():
isAdmin = runningAsAdmin()

if isAdmin is not True:
errMsg = "you need to run sqlmap as an administrator "
errMsg += "if you want to perform a SMB relay attack because "
errMsg += "it will need to listen on a user-specified SMB "
errMsg += "TCP port for incoming connection attempts"

@@ -513,7 +513,7 @@ def __setMetasploit():
condition = False

for path in [conf.msfPath, os.path.join(conf.msfPath, 'bin')]:
condition = os.path.exists(normalizePath(path))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfcli")))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfconsole")))
condition &= os.path.exists(normalizePath(os.path.join(path, "msfencode")))

@@ -524,13 +524,13 @@ def __setMetasploit():
break

if condition:
debugMsg = "provided Metasploit Framework 3 path "
debugMsg += "'%s' is valid" % conf.msfPath
logger.debug(debugMsg)

msfEnvPathExists = True
else:
warnMsg = "the provided Metasploit Framework 3 path "
warnMsg += "'%s' is not valid. The cause could " % conf.msfPath
warnMsg += "be that the path does not exists or that one "
warnMsg += "or more of the needed Metasploit executables "

@@ -538,12 +538,12 @@ def __setMetasploit():
warnMsg += "msfpayload do not exist"
logger.warn(warnMsg)
else:
warnMsg = "you did not provide the local path where Metasploit "
warnMsg += "Framework 3 is installed"
logger.warn(warnMsg)

if not msfEnvPathExists:
warnMsg = "sqlmap is going to look for Metasploit Framework 3 "
warnMsg += "installation into the environment paths"
logger.warn(warnMsg)

@@ -555,25 +555,25 @@ def __setMetasploit():
envPaths = envPaths.split(":")

for envPath in envPaths:
envPath = envPath.replace(";", "")
condition = os.path.exists(normalizePath(envPath))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfcli")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfconsole")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfencode")))
condition &= os.path.exists(normalizePath(os.path.join(envPath, "msfpayload")))

if condition:
infoMsg = "Metasploit Framework 3 has been found "
infoMsg += "installed in the '%s' path" % envPath
logger.info(infoMsg)

msfEnvPathExists = True
conf.msfPath = envPath

break

if not msfEnvPathExists:
errMsg = "unable to locate Metasploit Framework 3 installation. "
errMsg += "Get it from http://metasploit.com/framework/download/"
raise sqlmapFilePathException, errMsg

@@ -589,7 +589,7 @@ def __setWriteFile():
raise sqlmapFilePathException, errMsg

if not conf.dFile:
errMsg = "you did not provide the back-end DBMS absolute path "
errMsg += "where you want to write the local file '%s'" % conf.wFile
raise sqlmapMissingMandatoryOptionException, errMsg

@@ -714,11 +714,11 @@ def __setTamperingFunctions():
dirname, filename = os.path.split(tfile)
dirname = os.path.abspath(dirname)

infoMsg = "loading tamper script '%s'" % filename[:-3]
logger.info(infoMsg)

if not os.path.exists(os.path.join(dirname, '__init__.py')):
errMsg = "make sure that there is an empty file '__init__.py' "
errMsg += "inside of tamper scripts directory '%s'" % dirname
raise sqlmapGenericException, errMsg

@@ -738,7 +738,7 @@ def __setTamperingFunctions():
kb.tamperFunctions.append(function)

if check_priority and priority > last_priority:
message = "it seems that you might have mixed "
message += "the order of tamper scripts.\n"
message += "Do you want to auto resolve this? [Y/n/q]"
test = readInput(message, default="Y")

@@ -803,13 +803,13 @@ def __setHTTPProxy():
debugMsg = "setting the HTTP proxy to pass by all HTTP requests"
logger.debug(debugMsg)

__proxySplit = urlparse.urlsplit(conf.proxy)
__hostnamePort = __proxySplit[1].split(":")

__scheme = __proxySplit[0]
__hostname = __hostnamePort[0]
__port = None
__proxyString = ""

if len(__hostnamePort) == 2:
try:

@@ -825,7 +825,7 @@ def __setHTTPProxy():
pCredRegExp = re.search("^(.*?):(.*?)$", conf.pCred)

if not pCredRegExp:
errMsg = "Proxy authentication credentials "
errMsg += "value must be in format username:password"
raise sqlmapSyntaxException, errMsg

@@ -911,12 +911,12 @@ def __setHTTPAuthentication():
return

elif conf.aType and not conf.aCred:
errMsg = "you specified the HTTP authentication type, but "
errMsg += "did not provide the credentials"
raise sqlmapSyntaxException, errMsg

elif not conf.aType and conf.aCred:
errMsg = "you specified the HTTP authentication credentials, "
errMsg += "but did not provide the type"
raise sqlmapSyntaxException, errMsg

@@ -927,16 +927,16 @@ def __setHTTPAuthentication():
aTypeLower = conf.aType.lower()

if aTypeLower not in ( "basic", "digest", "ntlm" ):
errMsg = "HTTP authentication type value must be "
errMsg += "Basic, Digest or NTLM"
raise sqlmapSyntaxException, errMsg
elif aTypeLower in ( "basic", "digest" ):
regExp = "^(.*?):(.*?)$"
errMsg = "HTTP %s authentication credentials " % aTypeLower
errMsg += "value must be in format username:password"
elif aTypeLower == "ntlm":
regExp = "^(.*?)\\\(.*?):(.*?)$"
errMsg = "HTTP NTLM authentication credentials value must "
errMsg += "be in format DOMAIN\username:password"

aCredRegExp = re.search(regExp, conf.aCred)

@@ -960,7 +960,7 @@ def __setHTTPAuthentication():
try:
from ntlm import HTTPNtlmAuthHandler
except ImportError, _:
errMsg = "sqlmap requires Python NTLM third-party library "
errMsg += "in order to authenticate via NTLM, "
errMsg += "http://code.google.com/p/python-ntlm/"
raise sqlmapMissingDependence, errMsg

@@ -973,7 +973,7 @@ def __setHTTPAuthentication():
aCertRegExp = re.search("^(.+?),\s*(.+?)$", conf.aCert)

if not aCertRegExp:
errMsg = "HTTP authentication certificate option "
errMsg += "must be in format key_file,cert_file"
raise sqlmapSyntaxException, errMsg

@@ -983,7 +983,7 @@ def __setHTTPAuthentication():

for ifile in (key_file, cert_file):
if not os.path.exists(ifile):
errMsg = "File '%s' does not exist" % ifile
raise sqlmapSyntaxException, errMsg

authHandler = HTTPSCertAuthHandler(key_file, cert_file)

@@ -1091,14 +1091,14 @@ def __setHTTPUserAgent():

else:
if not kb.userAgents:
debugMsg = "loading random HTTP User-Agent header(s) from "
debugMsg += "file '%s'" % paths.USER_AGENTS
logger.debug(debugMsg)

try:
kb.userAgents = getFileItems(paths.USER_AGENTS)
except IOError:
warnMsg = "unable to read HTTP User-Agent header "
warnMsg += "file '%s'" % paths.USER_AGENTS
logger.warn(warnMsg)

@@ -1115,7 +1115,7 @@ def __setHTTPUserAgent():
userAgent = sanitizeStr(userAgent)
conf.httpHeaders.append((HTTPHEADER.USER_AGENT, userAgent))

logMsg = "fetched random HTTP User-Agent header from "
logMsg += "file '%s': %s" % (paths.USER_AGENTS, userAgent)
logger.info(logMsg)

@@ -1154,7 +1154,7 @@ def __setHTTPTimeout():
conf.timeout = float(conf.timeout)

if conf.timeout < 3.0:
warnMsg = "the minimum HTTP timeout is 3 seconds, sqlmap "
warnMsg += "will going to reset it"
logger.warn(warnMsg)

@@ -1234,7 +1234,7 @@ def __cleanupOptions():
conf.timeSec = 2 * TIME_DEFAULT_DELAY
kb.adjustTimeDelay = False

warnMsg = "increasing default value for "
warnMsg += "--time-sec to %d because " % conf.timeSec
warnMsg += "--tor switch was provided"
logger.warn(warnMsg)
@@ -1253,27 +1253,27 @@ def __setConfAttributes():
debugMsg = "initializing the configuration"
logger.debug(debugMsg)

conf.boundaries = []
conf.cj = None
conf.dbmsConnector = None
conf.dbmsHandler = None
conf.dumpPath = None
conf.httpHeaders = []
conf.hostname = None
conf.loggedToOut = None
conf.multipleTargets = False
conf.outputPath = None
conf.paramDict = {}
conf.parameters = {}
conf.path = None
conf.port = None
conf.redirectHandled = False
conf.scheme = None
conf.sessionFP = None
conf.start = True
conf.tests = []
conf.trafficFP = None
conf.wFileType = None

def __setKnowledgeBaseAttributes(flushAll=True):
"""

@@ -1284,95 +1284,95 @@ def __setKnowledgeBaseAttributes(flushAll=True):
debugMsg = "initializing the knowledge base"
logger.debug(debugMsg)

kb.absFilePaths = set()
kb.adjustTimeDelay = False
kb.arch = None
kb.authHeader = None
kb.bannerFp = advancedDict()

kb.brute = advancedDict({'tables':[], 'columns':[]})
kb.bruteMode = False

kb.cache = advancedDict()
kb.cache.content = {}
kb.cache.regex = {}
kb.cache.stdev = {}

kb.commonOutputs = None

kb.data = advancedDict()

# Active back-end DBMS fingerprint
kb.dbms = None
kb.dbmsVersion = [ UNKNOWN_DBMS_VERSION ]

kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
kb.dep = None
kb.docRoot = None
kb.dynamicMarkings = []
kb.endDetection = False
kb.httpErrorCodes = {}
kb.errorIsNone = True
kb.formNames = []
kb.headersCount = 0
kb.headersFp = {}
kb.hintValue = None
kb.htmlFp = []
kb.injection = injectionDict()
kb.injections = []

kb.locks = advancedDict()
kb.locks.cacheLock = threading.Lock()
kb.locks.logLock = threading.Lock()

kb.matchRatio = None
kb.nullConnection = None
kb.pageTemplate = None
kb.pageTemplates = dict()
kb.originalPage = None

# Back-end DBMS underlying operating system fingerprint via banner (-b)
# parsing
kb.os = None
kb.osVersion = None
kb.osSP = None

kb.pageEncoding = DEFAULT_PAGE_ENCODING
kb.pageStable = None
kb.partRun = None
kb.proxyAuthHeader = None
kb.queryCounter = 0
kb.redirectSetCookie = None
kb.responseTimes = []
kb.resumedQueries = {}
kb.retriesCount = 0
kb.singleLogFlags = set()
kb.skipOthersDbms = None
kb.suppressSession = False
kb.suppressResumeInfo = False
kb.technique = None
kb.testMode = False
kb.testQueryCount = 0
kb.threadContinue = True
kb.threadException = False
kb.threadData = {}
kb.xpCmdshellAvailable = False

kb.misc = advancedDict()
kb.misc.delimiter = randomStr(length=6, lowercase=True)
kb.misc.start = ":%s:" % randomStr(length=3, lowercase=True)
kb.misc.stop = ":%s:" % randomStr(length=3, lowercase=True)
kb.misc.space = ":%s:" % randomStr(length=1, lowercase=True)
kb.misc.dollar = ":%s:" % randomStr(length=1, lowercase=True)
kb.misc.forcedDbms = None

if flushAll:
kb.keywords = set(getFileItems(paths.SQL_KEYWORDS))
kb.tamperFunctions = []
kb.targetUrls = set()
kb.testedParams = set()
kb.userAgents = None
kb.wordlist = None

def __useWizardInterface():
"""
@@ -49,7 +49,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
if os.path.exists(imageOutputFile):
os.remove(imageOutputFile)

infoMsg = "profiling the execution into file %s" % profileOutputFile
logger.info(infoMsg)

# Start sqlmap main function and generate a raw profile file

@@ -107,8 +107,8 @@ class Replication:
self.connection.close()

# sqlite data types
NULL = DataType('NULL')
INTEGER = DataType('INTEGER')
REAL = DataType('REAL')
TEXT = DataType('TEXT')
BLOB = DataType('BLOB')

@@ -113,7 +113,7 @@ def setOs():
}
"""

infoMsg = ""
condition = (
not kb.resumedQueries
or ( kb.resumedQueries.has_key(conf.url) and

@@ -191,8 +191,8 @@ def resumeConfKb(expression, url, value):
logger.info(logMsg)

elif expression == "DBMS" and url == conf.url:
dbms = unSafeFormatString(value[:-1])
dbms = dbms.lower()
dbmsVersion = [UNKNOWN_DBMS_VERSION]

logMsg = "resuming back-end DBMS '%s' " % dbms

@@ -203,11 +203,11 @@ def resumeConfKb(expression, url, value):
dbmsRegExp = re.search("%s ([\d\.]+)" % firstRegExp, dbms)

if dbmsRegExp:
dbms = dbmsRegExp.group(1)
dbmsVersion = [ dbmsRegExp.group(2) ]

if conf.dbms and conf.dbms.lower() != dbms:
message = "you provided '%s' as back-end DBMS, " % conf.dbms
message += "but from a past scan information on the target URL "
message += "sqlmap assumes the back-end DBMS is %s. " % dbms
message += "Do you really want to force the back-end "

@@ -230,7 +230,7 @@ def resumeConfKb(expression, url, value):
logger.info(logMsg)

if conf.os and conf.os.lower() != os.lower():
message = "you provided '%s' as back-end DBMS operating " % conf.os
message += "system, but from a past scan information on the "
message += "target URL sqlmap assumes the back-end DBMS "
message += "operating system is %s. " % os
@@ -17,16 +17,16 @@ from lib.core.enums import PLACE
from lib.core.revision import getRevisionNumber

# sqlmap version and site
VERSION = "1.0-dev"
REVISION = getRevisionNumber()
VERSION_STRING = "sqlmap/%s (r%s)" % (VERSION, REVISION)
DESCRIPTION = "automatic SQL injection and database takeover tool"
SITE = "http://sqlmap.sourceforge.net"
ML = "sqlmap-users@lists.sourceforge.net"

# minimum distance of ratio from kb.matchRatio to result in True
DIFF_TOLERANCE = 0.05
CONSTANT_RATIO = 0.9

# lower and upper values for match ratio in case of stable page
LOWER_RATIO_BOUND = 0.02

@@ -37,9 +37,9 @@ logging.addLevelName(9, "PAYLOAD")
logging.addLevelName(8, "TRAFFIC OUT")
logging.addLevelName(7, "TRAFFIC IN")

LOGGER = logging.getLogger("sqlmapLog")
LOGGER_HANDLER = logging.StreamHandler(sys.stdout)
FORMATTER = logging.Formatter("[%(asctime)s] [%(levelname)s] %(message)s", "%H:%M:%S")

LOGGER_HANDLER.setFormatter(FORMATTER)
LOGGER.addHandler(LOGGER_HANDLER)

@@ -47,21 +47,21 @@ LOGGER.setLevel(logging.WARN)

# dump markers
DUMP_NEWLINE_MARKER = "__NEWLINE__"
DUMP_CR_MARKER = "__CARRIAGE_RETURN__"
DUMP_DEL_MARKER = "__DEL__"
DUMP_TAB_MARKER = "__TAB__"
DUMP_START_MARKER = "__START__"
DUMP_STOP_MARKER = "__STOP__"

URI_QUESTION_MARKER = "__QUESTION_MARK__"

PAYLOAD_DELIMITER = "\x00"
CHAR_INFERENCE_MARK = "%c"
PRINTABLE_CHAR_REGEX = r'[^\x00-\x1f\x7e-\xff]'

# dumping characters used in GROUP_CONCAT MySQL technique
CONCAT_ROW_DELIMITER = ','
CONCAT_VALUE_DELIMITER = '|'

# coefficient used for a time-based query delay checking (must be >= 7)
TIME_STDEV_COEFF = 10

@@ -124,20 +124,20 @@ DUMMY_USER_PREFIX = "__dummy__"
DEFAULT_PAGE_ENCODING = "iso-8859-1"

# System variables
IS_WIN = subprocess.mswindows
# The name of the operating system dependent module imported. The following
# names have currently been registered: 'posix', 'nt', 'mac', 'os2', 'ce',
# 'java', 'riscos'
PLATFORM = os.name
PYVERSION = sys.version.split()[0]

# Database management system specific variables
MSSQL_SYSTEM_DBS = ( "Northwind", "model", "msdb", "pubs", "tempdb" )
MYSQL_SYSTEM_DBS = ( "information_schema", "mysql" ) # Before MySQL 5.0 only "mysql"
PGSQL_SYSTEM_DBS = ( "information_schema", "pg_catalog", "pg_toast" )
ORACLE_SYSTEM_DBS = ( "SYSTEM", "SYSAUX" ) # These are TABLESPACE_NAME
SQLITE_SYSTEM_DBS = ( "sqlite_master", "sqlite_temp_master" )
ACCESS_SYSTEM_DBS = ( "MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage",\
"MSysAccessXML", "MSysModules", "MSysModules2" )
FIREBIRD_SYSTEM_DBS = ( "RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE",\
"RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS",\

@@ -145,21 +145,21 @@ FIREBIRD_SYSTEM_DBS = ( "RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_C
"RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS",\
"RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS",\
"RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS" )
MAXDB_SYSTEM_DBS = ( "SYSINFO", "DOMAIN" )
SYBASE_SYSTEM_DBS = ( "master", "model", "sybsystemdb", "sybsystemprocs" )

MSSQL_ALIASES = [ "microsoft sql server", "mssqlserver", "mssql", "ms" ]
MYSQL_ALIASES = [ "mysql", "my" ]
PGSQL_ALIASES = [ "postgresql", "postgres", "pgsql", "psql", "pg" ]
ORACLE_ALIASES = [ "oracle", "orcl", "ora", "or" ]
SQLITE_ALIASES = [ "sqlite", "sqlite3" ]
ACCESS_ALIASES = [ "access", "jet", "microsoft access", "msaccess" ]
FIREBIRD_ALIASES = [ "firebird", "mozilla firebird", "interbase", "ibase", "fb" ]
MAXDB_ALIASES = [ "maxdb", "sap maxdb", "sap db" ]
SYBASE_ALIASES = [ "sybase", "sybase sql server" ]

SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES
SUPPORTED_OS = ( "linux", "windows" )

DBMS_DICT = { DBMS.MSSQL: [MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"],
DBMS.MYSQL: [MYSQL_ALIASES, "python-mysqldb", "http://mysql-python.sourceforge.net/"],

@@ -172,17 +172,17 @@ DBMS_DICT = { DBMS.MSSQL: [MSSQL_ALIASES, "python-pymssql", "http://pymssql.sour
DBMS.SYBASE: [SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/"]
}

REFERER_ALIASES = ( "ref", "referer", "referrer" )
USER_AGENT_ALIASES = ( "ua", "useragent", "user-agent" )

FROM_TABLE = {
DBMS.ORACLE: " FROM DUAL",
DBMS.ACCESS: " FROM MSysObjects",
DBMS.FIREBIRD: " FROM RDB$DATABASE",
DBMS.MAXDB: " FROM VERSIONS"
}

SQL_STATEMENTS = {
"SQL SELECT statement": (
"select ",
"show ",

@@ -236,10 +236,10 @@ ERROR_PARSING_REGEXES = (
)

# Regular expression used for parsing charset info from meta html headers
META_CHARSET_REGEX = r'<meta http-equiv="?content-type"?[^>]+charset=(?P<result>[^">]+)'

# Regular expression used for parsing refresh info from meta html headers
META_REFRESH_REGEX = r'<meta http-equiv="?refresh"?[^>]+content="?[^">]+url=(?P<result>[^">]+)'

# Regular expression used for parsing empty fields in tested form data
EMPTY_FORM_FIELDS_REGEX = r'(?P<result>[^=]+=(&|\Z))'
@@ -51,7 +51,7 @@ def blockingWriteToFD(fd, data):
while True:
try:
data_length = len(data)
wrote_data = os.write(fd, data)
except (OSError, IOError), io:
if io.errno in (errno.EAGAIN, errno.EINTR):
continue

@@ -140,12 +140,12 @@ def __setRequestParams():
__testableParameters = True

if not conf.parameters:
errMsg = "you did not provide any GET, POST and Cookie "
errMsg += "parameter, neither an User-Agent or Referer header"
raise sqlmapGenericException, errMsg

elif not __testableParameters:
errMsg = "all testable parameters you provided are not present "
errMsg += "within the GET, POST and Cookie parameters"
raise sqlmapGenericException, errMsg

@@ -308,9 +308,9 @@ def initTargetEnv():
if conf.cj:
conf.cj.clear()

conf.paramDict = {}
conf.parameters = {}
conf.sessionFile = None

__setKnowledgeBaseAttributes(False)
__restoreCmdLineOptions()

@@ -164,8 +164,8 @@ def liveTest():

def initCase(switches=None):
paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp()
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
cmdLineOptions = cmdLineParser()
cmdLineOptions.liveTest = cmdLineOptions.smokeTest = False

@@ -181,8 +181,8 @@ def initCase(switches=None):
def cleanCase():
shutil.rmtree(paths.SQLMAP_OUTPUT_PATH, True)
paths.SQLMAP_OUTPUT_PATH = os.path.join(paths.SQLMAP_ROOT_PATH, "output")
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
conf.verbose = 1
__setVerbosity()
@@ -18,14 +18,14 @@ class ThreadData():
"""

def __init__(self):
self.disableStdOut = False
self.lastErrorPage = None
self.lastHTTPError = None
self.lastRedirectMsg = None
self.lastQueryDuration = 0
self.lastRequestUID = 0
self.seqMatcher = difflib.SequenceMatcher(None)
self.valueStack = []

def getCurrentThreadUID():
return hash(threading.currentThread())
@@ -73,7 +73,7 @@ XMLNS_ATTR = "xmlns:xsi"
SCHEME_NAME = "sqlmap.xsd"
SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation"
CHARACTERS_TO_ENCODE = range(32) + range(127, 256)
ENTITIES = {'"':'&quot;',"'":"&apos;"}

class XMLDump:
'''

@@ -83,7 +83,7 @@ class XMLDump:

def __init__(self):
self.__outputFile = None
self.__outputFP = None
self.__root = None
self.__doc = Document()

@@ -384,7 +384,7 @@ class XMLDump:
db = "All"
table = tableValues["__infos__"]["table"]

count = int(tableValues["__infos__"]["count"])
columns = tableValues.keys()
columns.sort(key=lambda x: x.lower())