Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2026-01-24 15:19:04 +00:00)

Commit: Some more PEPing (I hope that I haven't broken anything)
@@ -110,7 +110,9 @@ def forgeHeaders(items=None, base=None):
         kb.mergeCookies = readInput(message, default='Y', boolean=True)
 
     if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
-        _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x)
+        def _(value):
+            return re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), value)
+
         headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])
 
     if PLACE.COOKIE in conf.parameters:
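This hunk swaps an assigned lambda for a def, per PEP 8 (pycodestyle E731: "do not assign a lambda expression, use a def"); a def gets a real __name__ and shows up properly in tracebacks. A minimal runnable sketch of the same rewrite, with illustrative names that are not sqlmap's:

    import re

    # Before (flagged by E731): a lambda bound to a name
    # upper_words = lambda s: re.sub(r"\w+", lambda m: m.group(0).upper(), s)

    # After: an equivalent named def
    def upper_words(s):
        return re.sub(r"\w+", lambda m: m.group(0).upper(), s)

    print(upper_words("foo bar"))  # FOO BAR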
@@ -161,7 +163,7 @@ def checkCharEncoding(encoding, warn=True):
         return encoding
 
     # Reference: http://www.destructor.de/charsets/index.htm
-    translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}
+    translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}
 
     for delimiter in (';', ',', '('):
         if delimiter in encoding:
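The removed and added translate lines render identically in this view, so the actual difference was most likely whitespace-only and has been lost in extraction. For context, a minimal sketch of how such an alias table can normalize encoding names before a codecs lookup; normalize_encoding is a hypothetical helper, not sqlmap's checkCharEncoding:

    import codecs

    # A few entries from the alias table above: common misnomers -> real codecs
    translate = {"utf-8859-1": "utf8", "ansi": "ascii", "windows-31j": "cp932"}

    def normalize_encoding(name):
        name = name.strip().lower()
        name = translate.get(name, name)
        try:
            return codecs.lookup(name).name  # canonical codec name
        except LookupError:
            return None

    print(normalize_encoding("ANSI"))  # ascii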
@@ -187,8 +187,7 @@ class Connect(object):
 
         if not kb.dnsMode and conn:
             headers = conn.info()
-            if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
-                    or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
+            if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
                 retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
                 if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
                     warnMsg = "large compressed response detected. Disabling compression"
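This hunk joins a condition that was split with a trailing backslash onto one line. PEP 8 discourages backslash continuations because they break if anything, even invisible whitespace, follows the backslash; implicit continuation inside parentheses is the usual alternative. A toy illustration:

    encoding, content_type = "gzip", "application/octet-stream"

    # Fragile: explicit backslash continuation
    compressed = encoding in ("gzip", "deflate") \
        or "text" not in content_type

    # Robust: implicit continuation inside parentheses (or a single line)
    compressed = (encoding in ("gzip", "deflate")
                  or "text" not in content_type)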
@@ -241,27 +240,27 @@ class Connect(object):
         kb.requestCounter += 1
         threadData.lastRequestUID = kb.requestCounter
 
-        url = kwargs.get("url", None) or conf.url
-        get = kwargs.get("get", None)
-        post = kwargs.get("post", None)
-        method = kwargs.get("method", None)
-        cookie = kwargs.get("cookie", None)
-        ua = kwargs.get("ua", None) or conf.agent
-        referer = kwargs.get("referer", None) or conf.referer
-        host = kwargs.get("host", None) or conf.host
-        direct_ = kwargs.get("direct", False)
-        multipart = kwargs.get("multipart", None)
-        silent = kwargs.get("silent", False)
-        raise404 = kwargs.get("raise404", True)
-        timeout = kwargs.get("timeout", None) or conf.timeout
-        auxHeaders = kwargs.get("auxHeaders", None)
-        response = kwargs.get("response", False)
+        url = kwargs.get("url", None) or conf.url
+        get = kwargs.get("get", None)
+        post = kwargs.get("post", None)
+        method = kwargs.get("method", None)
+        cookie = kwargs.get("cookie", None)
+        ua = kwargs.get("ua", None) or conf.agent
+        referer = kwargs.get("referer", None) or conf.referer
+        host = kwargs.get("host", None) or conf.host
+        direct_ = kwargs.get("direct", False)
+        multipart = kwargs.get("multipart", None)
+        silent = kwargs.get("silent", False)
+        raise404 = kwargs.get("raise404", True)
+        timeout = kwargs.get("timeout", None) or conf.timeout
+        auxHeaders = kwargs.get("auxHeaders", None)
+        response = kwargs.get("response", False)
         ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
-        refreshing = kwargs.get("refreshing", False)
-        retrying = kwargs.get("retrying", False)
-        crawling = kwargs.get("crawling", False)
-        checking = kwargs.get("checking", False)
-        skipRead = kwargs.get("skipRead", False)
+        refreshing = kwargs.get("refreshing", False)
+        retrying = kwargs.get("retrying", False)
+        crawling = kwargs.get("crawling", False)
+        checking = kwargs.get("checking", False)
+        skipRead = kwargs.get("skipRead", False)
 
         if multipart:
             post = multipart
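The removed and added kwargs blocks read identically here as well. One plausible explanation, though the exact original spacing is not preserved by this rendering, is that the old block used vertically aligned assignments (pycodestyle E221: multiple spaces before operator) that the commit normalized to single spaces. A hedged sketch of that kind of change:

    def get_page(**kwargs):  # hypothetical miniature, not sqlmap's Connect.getPage
        # Aligned style, flagged by E221:
        # silent   = kwargs.get("silent",   False)
        # timeout  = kwargs.get("timeout",  None)

        # PEP 8 style: exactly one space around '='
        silent = kwargs.get("silent", False)
        timeout = kwargs.get("timeout", None)
        return silent, timeout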
@@ -1040,7 +1039,7 @@ class Connect(object):
                 name = safeVariableNaming(name)
             elif name in keywords:
                 name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
-            value = urldecode(value, convall=True, spaceplus=(item==post and kb.postSpaceToPlus))
+            value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
             variables[name] = value
 
         if cookie:
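The only change in this hunk is spacing around ==, per PEP 8 (pycodestyle E225: missing whitespace around operator). Note that the = of the spaceplus keyword argument itself correctly stays unspaced, since PEP 8 exempts keyword arguments:

    item = post = "id=1"
    spaceplus = (item==post)    # flagged by E225
    spaceplus = (item == post)  # PEP 8 style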
@@ -48,7 +48,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
 
         # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
        #               https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
-        if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) != False and hasattr(ssl, "SSLContext"):
+        if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and hasattr(ssl, "SSLContext"):
             for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
                 try:
                     sock = create_sock()
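Beyond satisfying pycodestyle E712 (comparison to False should use 'is not False'), identity comparison fits here because kb.tlsSNI.get(self.host) is effectively tri-state: None when the host has not been probed yet, then True or False once an SNI attempt has been recorded. A self-contained sketch of that pattern, with tls_sni as a stand-in dict rather than sqlmap's kb:

    tls_sni = {}  # host -> True (SNI worked) / False (SNI failed); absent = unknown

    host = "example.com"
    if tls_sni.get(host) is not False:
        # Runs when the result is unknown (None) or known-good (True);
        # only a recorded SNI failure (False) skips this branch.
        print("attempting TLS with SNI for", host)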
@@ -175,10 +175,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
     # forge the SQL limiting the query output one entry at a time
     # NOTE: we assume that only queries that get data from a table
     # can return multiple entries
-    if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \
-            not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not \
-            expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \
-            and not re.search(SQL_SCALAR_REGEX, expression, re.I):
+    if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I):
         expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression)
 
         if limitCond:
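This hunk removes three backslash continuations by collapsing the condition onto a single (long) line. PEP 8's own preference is implicit continuation inside parentheses; a runnable sketch of that alternative, where SQL_SCALAR_REGEX is a stand-in pattern rather than sqlmap's constant:

    import re

    SQL_SCALAR_REGEX = r"\bCOUNT\("  # illustrative only
    expression = "SELECT name FROM users"

    # No backslashes needed inside parentheses
    if (" FROM " in expression.upper()
            and not re.search(SQL_SCALAR_REGEX, expression, re.I)):
        print("query may return multiple rows")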
@@ -19,4 +19,3 @@ def getPageTemplate(payload, place):
         retVal = kb.pageTemplates[(payload, place)]
 
     return retVal
-