Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2026-01-19 12:59:02 +00:00)
Speed optimization(s)
@@ -2227,10 +2227,6 @@ def getUnicode(value, encoding=None, noneToNull=False):
     if noneToNull and value is None:
         return NULL
 
-    if isListLike(value):
-        value = list(getUnicode(_, encoding, noneToNull) for _ in value)
-        return value
-
     if isinstance(value, unicode):
         return value
     elif isinstance(value, basestring):
@@ -2242,6 +2238,9 @@ def getUnicode(value, encoding=None, noneToNull=False):
                     return unicode(value, UNICODE_ENCODING)
                 except:
                     value = value[:ex.start] + "".join(INVALID_UNICODE_CHAR_FORMAT % ord(_) for _ in value[ex.start:ex.end]) + value[ex.end:]
+    elif isListLike(value):
+        value = list(getUnicode(_, encoding, noneToNull) for _ in value)
+        return value
     else:
         try:
             return unicode(value)
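These two hunks move the list handling of getUnicode() below the string checks, so the most common input (a plain string) no longer pays for an isListLike() call on every invocation. A rough micro-benchmark of the idea, with a hypothetical isListLike() stand-in (not part of the commit; numbers vary by machine):

import timeit

def isListLike(value):  # hypothetical stand-in for sqlmap's helper
    return isinstance(value, (list, tuple, set))

def convert_list_first(value):
    # old order: the list check runs even for plain strings
    if isListLike(value):
        return [convert_list_first(_) for _ in value]
    elif isinstance(value, str):
        return value

def convert_str_first(value):
    # new order: strings short-circuit before any list handling
    if isinstance(value, str):
        return value
    elif isListLike(value):
        return [convert_str_first(_) for _ in value]

print(timeit.timeit(lambda: convert_list_first("foo"), number=1000000))
print(timeit.timeit(lambda: convert_str_first("foo"), number=1000000))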
@@ -2559,6 +2558,7 @@ def logHTTPTraffic(requestLogMsg, responseLogMsg):
 def getPageTemplate(payload, place): # Cross-linked function
     raise NotImplementedError
 
+@cachedmethod
 def getPublicTypeMembers(type_, onlyValues=False):
     """
     Useful for getting members from types (e.g. in enums)
@@ -2567,12 +2567,16 @@ def getPublicTypeMembers(type_, onlyValues=False):
     ['Linux', 'Windows']
     """
 
+    retVal = []
+
     for name, value in inspect.getmembers(type_):
         if not name.startswith('__'):
             if not onlyValues:
-                yield (name, value)
+                retVal.append((name, value))
             else:
-                yield value
+                retVal.append(value)
+
+    return retVal
 
 def enumValueToNameLookup(type_, value_):
     """
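getPublicTypeMembers() is converted from a generator into a function that builds and returns a list because it is now decorated with @cachedmethod (added in the previous hunk): a cached generator object would be exhausted after its first consumer, whereas a cached list can be handed out repeatedly. A minimal sketch of such a memoizing decorator, assuming a simple argument-keyed cache (the real one in lib/core/decorators may differ):

import functools

def cachedmethod(f, cache={}):
    # memoize f's return value per (function, args, kwargs) key;
    # the shared mutable default argument acts as the cache
    @functools.wraps(f)
    def _(*args, **kwargs):
        key = (f, args, frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return _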
@@ -3581,6 +3585,7 @@ def randomizeParameterValue(value):
 
     return retVal
 
+@cachedmethod
 def asciifyUrl(url, forceQuote=False):
     """
     Attempts to make a unicode URL usuable with ``urllib/urllib2``.
@@ -4075,8 +4080,11 @@ def getRequestHeader(request, name):
     """
 
     retVal = None
 
     if request and name:
-        retVal = max(value if name.upper() == key.upper() else None for key, value in request.header_items())
+        _ = name.upper()
+        retVal = max([value if _ == key.upper() else None for key, value in request.header_items()])
 
     return retVal
 
 def isNumber(value):
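Two micro-optimizations in one hunk: the invariant name.upper() is hoisted out of the comprehension (one method call instead of one per header), and max() is fed a list comprehension rather than a generator expression, which is slightly faster on CPython 2 for short sequences. A rough timing sketch with made-up header data (run it under Python 2, since comparing None against strings, as the original expression does, is only legal there):

import timeit

HEADERS = [("Content-Type", "text/html"), ("Set-Cookie", "id=1"), ("Server", "Apache")]

def old(name, headers=HEADERS):
    return max(value if name.upper() == key.upper() else None for key, value in headers)

def new(name, headers=HEADERS):
    _ = name.upper()  # computed once instead of once per header
    return max([value if _ == key.upper() else None for key, value in headers])

print(timeit.timeit(lambda: old("server"), number=100000))
print(timeit.timeit(lambda: new("server"), number=100000))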
@@ -1014,12 +1014,12 @@ def _setDNSCache():
     """
 
     def _getaddrinfo(*args, **kwargs):
-        if args in kb.cache:
-            return kb.cache[args]
+        if args in kb.cache.addrinfo:
+            return kb.cache.addrinfo[args]
 
         else:
-            kb.cache[args] = socket._getaddrinfo(*args, **kwargs)
-            return kb.cache[args]
+            kb.cache.addrinfo[args] = socket._getaddrinfo(*args, **kwargs)
+            return kb.cache.addrinfo[args]
 
     if not hasattr(socket, "_getaddrinfo"):
         socket._getaddrinfo = socket.getaddrinfo
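The getaddrinfo() cache now lives in its own dict (kb.cache.addrinfo) instead of the shared kb.cache. The same monkey-patching idea, shown standalone with a plain module-level dict (illustrative names; as in the hunk, only positional arguments form the cache key):

import socket

_addrinfo_cache = {}

if not hasattr(socket, "_getaddrinfo"):
    socket._getaddrinfo = socket.getaddrinfo  # keep a reference to the original

def _cached_getaddrinfo(*args, **kwargs):
    # resolve each (host, port, ...) tuple once, reuse it for later connections
    if args not in _addrinfo_cache:
        _addrinfo_cache[args] = socket._getaddrinfo(*args, **kwargs)
    return _addrinfo_cache[args]

socket.getaddrinfo = _cached_getaddrinfo

# first call hits DNS (requires network access), the second is served from the dict
socket.getaddrinfo("example.com", 80)
socket.getaddrinfo("example.com", 80)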
@@ -1841,7 +1841,10 @@ def _setKnowledgeBaseAttributes(flushAll=True):
     kb.bruteMode = False
 
     kb.cache = AttribDict()
+    kb.cache.addrinfo = {}
     kb.cache.content = {}
+    kb.cache.encoding = {}
+    kb.cache.parsedDbms = {}
     kb.cache.regex = {}
     kb.cache.stdev = {}
 
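kb.cache is an AttribDict, so every cache introduced by this commit gets its own attribute-addressable namespace (kb.cache.addrinfo, kb.cache.encoding, kb.cache.parsedDbms). A minimal sketch of a dict with attribute access, similar in spirit to sqlmap's AttribDict in lib/core/datatype (the real class has more behaviour):

class AttribDict(dict):
    # expose dict keys as attributes: d.foo reads/writes d["foo"]
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value

cache = AttribDict()
cache.addrinfo = {}
cache.encoding = {}
cache.parsedDbms = {}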
@@ -19,7 +19,7 @@ from lib.core.enums import OS
 from lib.core.revision import getRevisionNumber
 
 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.0.9.10"
+VERSION = "1.0.9.11"
 REVISION = getRevisionNumber()
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
@@ -59,6 +59,13 @@ def htmlParser(page):
 
     xmlfile = paths.ERRORS_XML
     handler = HTMLHandler(page)
+    key = hash(page)
+
+    if key in kb.cache.parsedDbms:
+        retVal = kb.cache.parsedDbms[key]
+        if retVal:
+            handler._markAsErrorPage()
+        return retVal
 
     parseXmlFile(xmlfile, handler)
 
@@ -68,6 +75,8 @@ def htmlParser(page):
     else:
         kb.lastParserStatus = None
 
+    kb.cache.parsedDbms[key] = handler.dbms
+
     # generic SQL warning/error messages
     if re.search(r"SQL (warning|error|syntax)", page, re.I):
         handler._markAsErrorPage()
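Together these two hunks memoize the DBMS fingerprint per distinct page body: the result is stored under hash(page) after parsing and returned immediately for the next identical page, skipping the XML-driven matching entirely. The generic pattern, shown standalone with illustrative names:

_parse_cache = {}

def _expensive_parse(page):
    # placeholder for the real work (errors.xml driven matching in sqlmap)
    return "MySQL" if "mysql" in page.lower() else None

def fingerprint(page):
    key = hash(page)  # cheap compared to re-parsing the page
    if key not in _parse_cache:
        _parse_cache[key] = _expensive_parse(page)
    return _parse_cache[key]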
@@ -26,6 +26,7 @@ from lib.core.common import singleTimeWarnMessage
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.decorators import cachedmethod
 from lib.core.enums import DBMS
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import PLACE
@@ -136,6 +137,7 @@ def parseResponse(page, headers):
     if page:
         htmlParser(page)
 
+@cachedmethod
 def checkCharEncoding(encoding, warn=True):
     """
     Checks encoding name, repairs common misspellings and adjusts to
@@ -230,7 +232,10 @@ def getHeuristicCharEncoding(page):
     Returns page encoding charset detected by usage of heuristics
     Reference: http://chardet.feedparser.org/docs/
     """
-    retVal = detect(page)["encoding"]
 
+    key = hash(page)
+    retVal = kb.cache.encoding.get(key) or detect(page)["encoding"]
+    kb.cache.encoding[key] = retVal
+
     if retVal:
         infoMsg = "heuristics detected web page charset '%s'" % retVal
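chardet's detect() scans the whole page, so its result is now cached per page hash as well. The same idiom standalone (chardet assumed installed; cache name illustrative). Note the subtlety of "get(key) or detect(...)": a falsy result (a failed detection returning None) is never served from the cache, so such pages are re-detected on every call:

from chardet import detect

_encoding_cache = {}

def heuristic_encoding(page):
    key = hash(page)
    encoding = _encoding_cache.get(key) or detect(page)["encoding"]
    _encoding_cache[key] = encoding
    return encoding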
@@ -403,7 +403,7 @@ class Connect(object):
                 responseHeaders = _(ws.getheaders())
                 responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]
 
-                requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+                requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
                 requestMsg += "\n%s" % requestHeaders
 
                 if post is not None:
@@ -422,7 +422,7 @@ class Connect(object):
             else:
                 req = urllib2.Request(url, post, headers)
 
-            requestHeaders += "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items())
+            requestHeaders += "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
 
             if not getRequestHeader(req, HTTP_HEADER.COOKIE) and conf.cj:
                 conf.cj._policy._now = conf.cj._now = int(time.time())
@@ -556,7 +556,7 @@ class Connect(object):
             responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
 
             if responseHeaders:
-                logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+                logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
 
             logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
 
@@ -691,7 +691,7 @@ class Connect(object):
         responseMsg += "[#%d] (%d %s):\n" % (threadData.lastRequestUID, code, status)
 
         if responseHeaders:
-            logHeaders = "\n".join("%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items())
+            logHeaders = "\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
 
         if not skipLogTraffic:
             logHTTPTraffic(requestMsg, "%s%s\n\n%s" % (responseMsg, logHeaders, (page or "")[:MAX_CONNECTION_CHUNK_SIZE]))
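The remaining hunks in this commit all make the same swap: the generator expression inside "\n".join(...) becomes a list comprehension. str.join() has to materialize a generator into a sequence internally anyway, so building the list up front skips the generator machinery and is measurably faster on CPython. A quick illustrative timing (made-up data; results depend on interpreter and input size):

import timeit

items = [("Content-Type", "text/html"), ("Server", "Apache")] * 20

gen = lambda: "\n".join("%s: %s" % (key, value) for key, value in items)
lst = lambda: "\n".join(["%s: %s" % (key, value) for key, value in items])

print(timeit.timeit(gen, number=100000))
print(timeit.timeit(lst, number=100000))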