hello big tables, this is sqlmap, sqlmap this is big tables

Miroslav Stampar
2011-07-24 09:19:33 +00:00
parent 82e1e61554
commit ec1bc0219c
8 changed files with 108 additions and 26 deletions

View File

@@ -23,7 +23,9 @@ import socket
import string
import struct
import sys
import tempfile
import time
import types
import urlparse
import unicodedata
@@ -205,16 +207,48 @@ class BigArray(list):
self.chunks = [[]]
self.cache = None
self.length = 0
self.filenames = set()
def append(self, value):
self.chunks[-1].append(value)
if len(self.chunks[-1]) >= BIGARRAY_CHUNK_LENGTH:
fp = tempfile.TemporaryFile()
pickle.dump(self.chunks[-1], fp)
filename = self._dump(self.chunks[-1])
del(self.chunks[-1][:])
self.chunks[-1] = fp
self.chunks[-1] = filename
self.chunks.append([])
def pop(self):
if len(self.chunks[-1]) < 1:
self.chunks.pop()
fp = open(self.chunks[-1], 'rb')
self.chunks[-1] = pickle.load(fp)
fp.close()
return self.chunks[-1].pop()
def index(self, value):
for index in xrange(len(self)):
if self[index] == value:
return index
        raise ValueError("%s is not in list" % value)
def _dump(self, value):
handle, filename = tempfile.mkstemp()
self.filenames.add(filename)
os.close(handle)
fp = open(filename, 'w+b')
pickle.dump(value, fp)
fp.close()
return filename
def _checkcache(self, index):
if (self.cache and self.cache[0] != index and self.cache[2]):
filename = self._dump(self.cache[1])
self.chunks[self.cache[0]] = filename
if not (self.cache and self.cache[0] == index):
fp = open(self.chunks[index], 'rb')
self.cache = [index, pickle.load(fp), False]
fp.close()
def __getitem__(self, y):
index = y / BIGARRAY_CHUNK_LENGTH
offset = y % BIGARRAY_CHUNK_LENGTH
@@ -222,14 +256,37 @@ class BigArray(list):
if isinstance(chunk, list):
return chunk[offset]
else:
if not (self.cache and self.cache[0] == index):
chunk.seek(0)
self.cache = (index, pickle.load(chunk))
self._checkcache(index)
return self.cache[1][offset]
def __setitem__(self, y, value):
index = y / BIGARRAY_CHUNK_LENGTH
offset = y % BIGARRAY_CHUNK_LENGTH
chunk = self.chunks[index]
if isinstance(chunk, list):
chunk[offset] = value
else:
self._checkcache(index)
self.cache[1][offset] = value
self.cache[2] = True # dirty flag
def __repr__(self):
return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())
def __iter__(self):
for i in xrange(len(self)):
yield self[i]
def __len__(self):
return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * BIGARRAY_CHUNK_LENGTH + len(self.chunks[-1])
def __del__(self):
for filename in self.filenames:
try:
os.remove(filename)
except OSError:
pass
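For orientation, here is a minimal, self-contained sketch of the same idea as the BigArray change above: a list-like container that pickles each full chunk to a temporary file and keeps only the newest chunk in memory. It is illustrative only (the class and constant names are made up, and the lazy read-back cache with its dirty flag, __setitem__ and pop() support from the real class are omitted):

# --- illustrative sketch, not part of the commit ---
import os
import pickle
import tempfile

CHUNK_LENGTH = 5000  # stand-in for BIGARRAY_CHUNK_LENGTH

class DiskBackedList(object):
    """Append-mostly list that spills full chunks to temporary files."""

    def __init__(self):
        self.chunks = [[]]        # the last element is the in-memory "hot" chunk
        self.filenames = set()    # temporary files to delete on garbage collection

    def append(self, value):
        self.chunks[-1].append(value)
        if len(self.chunks[-1]) >= CHUNK_LENGTH:
            # chunk is full: pickle it to disk and start a fresh in-memory chunk
            handle, filename = tempfile.mkstemp()
            os.close(handle)
            with open(filename, "wb") as fp:
                pickle.dump(self.chunks[-1], fp)
            self.filenames.add(filename)
            self.chunks[-1] = filename
            self.chunks.append([])

    def __getitem__(self, y):
        index, offset = divmod(y, CHUNK_LENGTH)
        chunk = self.chunks[index]
        if isinstance(chunk, list):          # still in memory
            return chunk[offset]
        with open(chunk, "rb") as fp:        # load the pickled chunk on demand
            return pickle.load(fp)[offset]

    def __len__(self):
        return (len(self.chunks) - 1) * CHUNK_LENGTH + len(self.chunks[-1])

    def __iter__(self):
        for i in range(len(self)):
            yield self[i]

    def __del__(self):
        for filename in self.filenames:
            try:
                os.remove(filename)
            except OSError:
                pass
# --- end of sketch ---

Usage is the same as for a plain list (data = DiskBackedList(); data.append(row); for row in data: ...), which is what lets parseUnionPage further down swap data = [] for data = BigArray() without any other changes.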
class DynamicContentItem:
"""
Represents line in content page with dynamic properties (candidate
@@ -561,6 +618,15 @@ class Backend:
def isOs(os):
return Backend.getOs() is not None and Backend.getOs().lower() == os.lower()
# Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
def cachedmethod(f, cache={}):
def g(*args, **kwargs):
key = ( f, tuple(args), frozenset(kwargs.items()) )
if key not in cache:
cache[key] = f(*args, **kwargs)
return cache[key]
return g
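The decorator above follows the linked ActiveState memoization recipe: the default cache={} argument is evaluated once, so all decorated functions share a single dict keyed by the function object plus its positional and keyword arguments. A small usage sketch (the decorated function is a made-up example, not sqlmap code; note that all arguments must be hashable for the key to work):

# --- illustrative sketch, not part of the commit ---
def cachedmethod(f, cache={}):
    def g(*args, **kwargs):
        key = (f, tuple(args), frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = f(*args, **kwargs)   # computed only on a cache miss
        return cache[key]
    return g

@cachedmethod
def normalize_alias(alias):
    print("computing %r" % alias)             # visible only on the first call
    return alias.strip().lower()

normalize_alias(" MySQL ")   # prints, returns 'mysql'
normalize_alias(" MySQL ")   # served from the cache, no print
# --- end of sketch ---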
def paramToDict(place, parameters=None):
"""
Split the parameters into names and values, check if these parameters
@@ -1266,7 +1332,7 @@ def parseUnionPage(output, expression, partial=False, condition=None, sort=True)
if output is None:
return None
data = []
data = BigArray()
outCond1 = ( output.startswith(kb.misc.start) and output.endswith(kb.misc.stop) )
outCond2 = ( output.startswith(DUMP_START_MARKER) and output.endswith(DUMP_STOP_MARKER) )
@@ -2204,6 +2270,7 @@ def isNumPosStrValue(value):
return value and isinstance(value, basestring) and value.isdigit() and value != "0"
@cachedmethod
def aliasToDbmsEnum(dbms):
"""
Returns major DBMS name from a given alias
@@ -2730,8 +2797,8 @@ def isNoneValue(value):
if len(value) == 1:
return isNoneValue(value[0])
else:
for i in xrange(len(value)):
if value[i] and value[i] != "None":
for item in value:
if item and item != "None":
return False
return True
elif isinstance(value, dict):
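For context, isNoneValue decides whether a value (or every element of a container) is effectively None; the hunk above only switches its list branch from index-based access to direct iteration. A simplified standalone sketch of that behaviour (the string and dict branches here are assumptions made for illustration, not copied from sqlmap):

# --- illustrative sketch, not part of the commit ---
def is_none_value(value):
    """True if value is None, the string "None", or a container of such values."""
    if isinstance(value, str):
        return value in ("", "None")
    elif isinstance(value, (list, tuple)):
        if len(value) == 1:
            return is_none_value(value[0])
        # mirrors the refactored loop: any truthy, non-"None" item means "not None"
        return all(not item or item == "None" for item in value)
    elif isinstance(value, dict):
        return not value          # assumption: an empty dict counts as "no value"
    else:
        return value is None

print(is_none_value([["None"]]))       # True
print(is_none_value(["None", "1"]))    # False
# --- end of sketch ---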

View File

@@ -24,6 +24,7 @@ from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.replication import Replication
from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
from lib.core.settings import UNICODE_ENCODING
class Dump:
@@ -37,9 +38,10 @@ class Dump:
self.__outputFile = None
self.__outputFP = None
def __write(self, data, n=True):
def __write(self, data, n=True, console=True):
text = "%s%s" % (data, "\n" if n else " ")
dataToStdout(text)
if console:
dataToStdout(text)
self.__outputFP.write(text)
self.__outputFP.flush()
@@ -407,7 +409,13 @@ class Dump:
if conf.replicate:
rtable.beginTransaction()
if count > TRIM_STDOUT_DUMP_SIZE:
warnMsg = "console output will be trimmed "
warnMsg += "due to the large table size"
logger.warning(warnMsg)
for i in range(count):
console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
field = 1
values = []
@@ -429,7 +437,7 @@ class Dump:
values.append(value)
maxlength = int(info["length"])
blank = " " * (maxlength - len(value))
self.__write("| %s%s" % (value, blank), n=False)
self.__write("| %s%s" % (value, blank), n=False, console=console)
if not conf.replicate:
if not conf.multipleTargets and field == fields:
@@ -442,7 +450,7 @@ class Dump:
if conf.replicate:
rtable.insert(values)
self.__write("|")
self.__write("|", console=console)
if not conf.multipleTargets and not conf.replicate:
dataToDumpFile(dumpFP, "\n")
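The changes above thread a console flag through Dump.__write() so that, for tables larger than TRIM_STDOUT_DUMP_SIZE, only the last rows are echoed to the terminal while every row still goes to the dump file. A minimal sketch of that gating (the writer callables are illustrative stand-ins, not sqlmap's API):

# --- illustrative sketch, not part of the commit ---
import sys

TRIM_STDOUT_DUMP_SIZE = 256   # mirrors the new settings constant

def dump_rows(rows, write_file):
    count = len(rows)
    if count > TRIM_STDOUT_DUMP_SIZE:
        sys.stdout.write("console output will be trimmed due to the large table size\n")
    for i, row in enumerate(rows):
        # only the last TRIM_STDOUT_DUMP_SIZE rows reach the console...
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        line = "| %s |\n" % " | ".join(str(v) for v in row)
        if console:
            sys.stdout.write(line)
        write_file(line)          # ...but every row is written to the dump file

# e.g. dump_rows([(i, i * i) for i in range(1000)], open("dump.txt", "w").write)
# prints the warning plus the last 256 rows; dump.txt contains all 1000
# --- end of sketch ---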

View File

@@ -31,7 +31,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
errMsg = "profiling requires third-party libraries (%s). " % getUnicode(e, UNICODE_ENCODING)
errMsg += "Quick steps:%s" % os.linesep
errMsg += "1) Install http://code.google.com/p/pydot/%s" % os.linesep
errMsg += "2) sudo apt-get install python-profiler graphviz"
errMsg += "2) sudo apt-get install python-pyparsing python-profiler graphviz"
logger.error(errMsg)
return

View File

@@ -383,5 +383,8 @@ IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,2,3,table_name FROM informat
# Used for status representation in dictionary attack phase
ROTATING_CHARS = ('\\', '|', '|', '/', '-')
# Chunk length used in BigArray object (only last one is held in memory)
BIGARRAY_CHUNK_LENGTH = 10000
# Chunk length (in items) used by BigArray objects (only last chunk and cached one are held in memory)
BIGARRAY_CHUNK_LENGTH = 5000
# Display only the last n table rows in console output
TRIM_STDOUT_DUMP_SIZE = 256
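As a rough worked example of what the two constants imply (the arithmetic is illustrative; real memory use depends on the pickled item sizes, and up to one extra cached chunk can also be resident):

# --- illustrative sketch, not part of the commit ---
BIGARRAY_CHUNK_LENGTH = 5000
TRIM_STDOUT_DUMP_SIZE = 256

def bigarray_split(total_items):
    """How many chunks end up pickled to temp files vs. items kept in memory."""
    spilled = total_items // BIGARRAY_CHUNK_LENGTH
    in_memory = total_items - spilled * BIGARRAY_CHUNK_LENGTH
    return spilled, in_memory

print(bigarray_split(1000000))                 # (200, 0): 200 temp files, empty hot chunk
print(min(1000000, TRIM_STDOUT_DUMP_SIZE))     # 256 rows echoed to the console
# --- end of sketch ---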