First commit regarding #739

Miroslav Stampar
2018-06-19 16:08:38 +02:00
parent 53578bcb7c
commit 961d2b24d1
4 changed files with 217 additions and 210 deletions

lib/core/option.py

@@ -5,7 +5,6 @@ Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import binascii
 import cookielib
 import glob
 import inspect
@@ -47,6 +46,7 @@ from lib.core.common import getUnicode
 from lib.core.common import normalizePath
 from lib.core.common import ntToPosixSlashes
 from lib.core.common import openFile
+from lib.core.common import parseRequestFile
 from lib.core.common import parseTargetDirect
 from lib.core.common import parseTargetUrl
 from lib.core.common import paths
@@ -99,10 +99,7 @@ from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.log import FORMATTER
 from lib.core.optiondict import optDict
-from lib.core.settings import BURP_REQUEST_REGEX
-from lib.core.settings import BURP_XML_HISTORY_REGEX
 from lib.core.settings import CODECS_LIST_PAGE
-from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DBMS_ALIASES
 from lib.core.settings import DEFAULT_PAGE_ENCODING
@@ -119,7 +116,6 @@ from lib.core.settings import MAX_NUMBER_OF_THREADS
 from lib.core.settings import NULL
 from lib.core.settings import PARAMETER_SPLITTING_REGEX
 from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
-from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import SITE
 from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
 from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
@@ -131,7 +127,6 @@ from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import VERSION_STRING
-from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
 from lib.core.threads import setDaemon
 from lib.core.update import update
@@ -173,201 +168,6 @@ try:
 except NameError:
     WindowsError = None
 
-def _feedTargetsDict(reqFile, addedTargetUrls):
-    """
-    Parses web scarab and burp logs and adds results to the target URL list
-    """
-
-    def _parseWebScarabLog(content):
-        """
-        Parses web scarab logs (POST method not supported)
-        """
-
-        reqResList = content.split(WEBSCARAB_SPLITTER)
-
-        for request in reqResList:
-            url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
-            method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
-            cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
-
-            if not method or not url:
-                logger.debug("not a valid WebScarab log data")
-                continue
-
-            if method.upper() == HTTPMETHOD.POST:
-                warnMsg = "POST requests from WebScarab logs aren't supported "
-                warnMsg += "as their body content is stored in separate files. "
-                warnMsg += "Nevertheless you can use -r to load them individually."
-                logger.warning(warnMsg)
-                continue
-
-            if not(conf.scope and not re.search(conf.scope, url, re.I)):
-                if not kb.targets or url not in addedTargetUrls:
-                    kb.targets.add((url, method, None, cookie, None))
-                    addedTargetUrls.add(url)
-
-    def _parseBurpLog(content):
-        """
-        Parses burp logs
-        """
-
-        if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
-            if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-                reqResList = []
-
-                for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-                    port, request = match.groups()
-
-                    try:
-                        request = request.decode("base64")
-                    except binascii.Error:
-                        continue
-
-                    _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
-                    if _:
-                        host = _.group(0).strip()
-                        if not re.search(r":\d+\Z", host):
-                            request = request.replace(host, "%s:%d" % (host, int(port)))
-
-                    reqResList.append(request)
-            else:
-                reqResList = [content]
-        else:
-            reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
-
-        for match in reqResList:
-            request = match if isinstance(match, basestring) else match.group(0)
-            request = re.sub(r"\A[^\w]+", "", request)
-
-            schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
-
-            if schemePort:
-                scheme = schemePort.group(1)
-                port = schemePort.group(2)
-                request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
-            else:
-                scheme, port = None, None
-
-            if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
-                continue
-
-            if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
-                continue
-
-            getPostReq = False
-            url = None
-            host = None
-            method = None
-            data = None
-            cookie = None
-            params = False
-            newline = None
-            lines = request.split('\n')
-            headers = []
-
-            for index in xrange(len(lines)):
-                line = lines[index]
-
-                if not line.strip() and index == len(lines) - 1:
-                    break
-
-                newline = "\r\n" if line.endswith('\r') else '\n'
-                line = line.strip('\r')
-                match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
-
-                if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
-                    data = ""
-                    params = True
-
-                elif match:
-                    method = match.group(1)
-                    url = match.group(2)
-
-                    if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
-                        params = True
-
-                    getPostReq = True
-
-                # POST parameters
-                elif data is not None and params:
-                    data += "%s%s" % (line, newline)
-
-                # GET parameters
-                elif "?" in line and "=" in line and ": " not in line:
-                    params = True
-
-                # Headers
-                elif re.search(r"\A\S+:", line):
-                    key, value = line.split(":", 1)
-                    value = value.strip().replace("\r", "").replace("\n", "")
-
-                    # Cookie and Host headers
-                    if key.upper() == HTTP_HEADER.COOKIE.upper():
-                        cookie = value
-                    elif key.upper() == HTTP_HEADER.HOST.upper():
-                        if '://' in value:
-                            scheme, value = value.split('://')[:2]
-                        splitValue = value.split(":")
-                        host = splitValue[0]
-
-                        if len(splitValue) > 1:
-                            port = filterStringValue(splitValue[1], "[0-9]")
-
-                    # Avoid to add a static content length header to
-                    # headers and consider the following lines as
-                    # POSTed data
-                    if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
-                        params = True
-
-                    # Avoid proxy and connection type related headers
-                    elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-                        headers.append((getUnicode(key), getUnicode(value)))
-
-                    if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
-                        params = True
-
-            data = data.rstrip("\r\n") if data else data
-
-            if getPostReq and (params or cookie):
-                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
-                    port = "443"
-                elif not scheme and port == "443":
-                    scheme = "https"
-
-                if conf.forceSSL:
-                    scheme = "https"
-                    port = port or "443"
-
-                if not host:
-                    errMsg = "invalid format of a request file"
-                    raise SqlmapSyntaxException(errMsg)
-
-                if not url.startswith("http"):
-                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
-                    scheme = None
-                    port = None
-
-                if not(conf.scope and not re.search(conf.scope, url, re.I)):
-                    if not kb.targets or url not in addedTargetUrls:
-                        kb.targets.add((url, conf.method or method, data, cookie, tuple(headers)))
-                        addedTargetUrls.add(url)
-
-    checkFile(reqFile)
-
-    try:
-        with openFile(reqFile, "rb") as f:
-            content = f.read()
-    except (IOError, OSError, MemoryError), ex:
-        errMsg = "something went wrong while trying "
-        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
-        raise SqlmapSystemException(errMsg)
-
-    if conf.scope:
-        logger.info("using regular expression '%s' for filtering targets" % conf.scope)
-
-    _parseBurpLog(content)
-    _parseWebScarabLog(content)
-
-    if not addedTargetUrls:
-        errMsg = "unable to find usable request(s) "
-        errMsg += "in provided file ('%s')" % reqFile
-        raise SqlmapGenericException(errMsg)
-
 def _loadQueries():
     """
     Loads queries from 'xml/queries.xml' file.
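
Note: the new parseRequestFile itself (imported above, used below) lives in lib/core/common.py and is not shown in this view. Judging from the import and the call sites, it presumably repackages the removed Burp/WebScarab parsing as a generator that yields (url, method, data, cookie, headers) tuples instead of writing to kb.targets and addedTargetUrls by side effect. A minimal sketch of that assumed shape, not the actual commit content:

def parseRequestFile(reqFile):
    """
    Sketch: parses a Burp/WebScarab log file and yields target tuples
    """
    checkFile(reqFile)

    try:
        with openFile(reqFile, "rb") as f:
            content = f.read()
    except (IOError, OSError, MemoryError), ex:
        errMsg = "something went wrong while trying "
        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
        raise SqlmapSystemException(errMsg)

    # same parsing logic as the removed _parseBurpLog/_parseWebScarabLog,
    # reworked so each recognized request is yielded to the caller rather
    # than added to kb.targets directly
    for target in _parseBurpLog(content):
        yield target

    for target in _parseWebScarabLog(content):
        yield target
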
@@ -413,7 +213,7 @@ def _setMultipleTargets():
     """
 
     initialTargetsCount = len(kb.targets)
-    addedTargetUrls = set()
+    seen = set()
 
     if not conf.logFile:
         return
@@ -426,7 +226,11 @@ def _setMultipleTargets():
         raise SqlmapFilePathException(errMsg)
 
     if os.path.isfile(conf.logFile):
-        _feedTargetsDict(conf.logFile, addedTargetUrls)
+        for target in parseRequestFile(conf.logFile):
+            url = target[0]
+            if url not in seen:
+                kb.targets.add(target)
+                seen.add(url)
 
     elif os.path.isdir(conf.logFile):
         files = os.listdir(conf.logFile)
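
The same five-line loop recurs at each call site in this commit, deduplicating on the URL alone (target[0]), much as the old addedTargetUrls set did, so re-parsing the same file cannot add a target twice. A hypothetical helper, not part of this commit, that would collapse the repetition:

def _addParsedTargets(reqFile, seen):
    # hypothetical consolidation of the repeated call-site loop;
    # dedups on URL (first element of the target tuple) only
    for target in parseRequestFile(reqFile):
        url = target[0]

        if url not in seen:
            kb.targets.add(target)
            seen.add(url)
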
@@ -436,7 +240,11 @@ def _setMultipleTargets():
         for reqFile in files:
             if not re.search(r"([\d]+)\-request", reqFile):
                 continue
-            _feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)
+            for target in parseRequestFile(os.path.join(conf.logFile, reqFile)):
+                url = target[0]
+                if url not in seen:
+                    kb.targets.add(target)
+                    seen.add(url)
 
     else:
         errMsg = "the specified list of targets is not a file "
@@ -480,7 +288,7 @@ def _setRequestFromFile():
     if not conf.requestFile:
        return
 
-    addedTargetUrls = set()
+    seen = set()
 
    conf.requestFile = safeExpandUser(conf.requestFile)
@@ -492,7 +300,11 @@ def _setRequestFromFile():
     infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
     logger.info(infoMsg)
 
-    _feedTargetsDict(conf.requestFile, addedTargetUrls)
+    for target in parseRequestFile(conf.requestFile):
+        url = target[0]
+        if url not in seen:
+            kb.targets.add(target)
+            seen.add(url)
 
 def _setCrawler():
     if not conf.crawlDepth:
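
For reference, the two log formats handled by the relocated parser look roughly like this. Both samples are illustrative, inferred from the regexes in the removed code; the exact WebScarab record splitter and Burp '=' framing are assumptions.

WebScarab conversation entry (GET only; POST bodies are stored in separate files):

### Conversation : 1
URL: http://www.example.com/page.php?id=1
METHOD: GET
COOKIE: PHPSESSID=0123456789abcdef

Burp proxy log entry (the scheme/port line framed by '=' separators feeds the schemePort regex):

======================================================
http://www.example.com:80
======================================================
GET /page.php?id=1 HTTP/1.1
Host: www.example.com
Cookie: PHPSESSID=0123456789abcdef
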