Minor patches (and one bug from ML)

Miroslav Stampar
2016-12-20 09:53:44 +01:00
parent edc6f47758
commit 17c556a63d
7 changed files with 37 additions and 13 deletions


@@ -3727,7 +3727,6 @@ def isAdminFromPrivileges(privileges):
# In Firebird there is no specific privilege that means
# that the user is DBA
# TODO: confirm
retVal |= (Backend.isDbms(DBMS.FIREBIRD) and all(_ in privileges for _ in ("SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCES", "EXECUTE")))
return retVal
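The Firebird branch above treats holding the full set of basic object privileges as the DBA marker; the all(...) expression is a compact subset check. A minimal illustration (the names required and sample_privileges below are made up for the example, not sqlmap identifiers, and the privilege set is not output from a real target):

# Subset check equivalent to the Firebird branch above (illustration only).
required = ("SELECT", "INSERT", "UPDATE", "DELETE", "REFERENCES", "EXECUTE")
sample_privileges = set(required) | set(("USAGE",))
assert all(_ in sample_privileges for _ in required)
assert set(required) <= sample_privileges   # the same test written with set operators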
@@ -3810,7 +3809,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
continue
# flag to know if we are dealing with the same target host
_ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (response.geturl(), url)))
_ = checkSameHost(response.geturl(), url)
if conf.scope:
if not re.search(conf.scope, url, re.I):
@@ -3833,6 +3832,18 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
return retVal
def checkSameHost(*urls):
"""
Returns True if all provided urls share the same host
>>> checkSameHost('http://www.target.com/page1.php?id=1', 'http://www.target.com/images/page2.php')
True
>>> checkSameHost('http://www.target.com/page1.php?id=1', 'http://www.target2.com/images/page2.php')
False
"""
return all(urlparse.urlparse(url or "").netloc.split(':')[0] == urlparse.urlparse(urls[0] or "").netloc.split(':')[0] for url in urls)
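Beyond being shorter, the new helper behaves differently from the replaced reduce(...) idiom once more than two URLs are compared, and it tolerates None values thanks to the url or "" guard; whether either of those is the mailing-list bug from the commit title is not stated in the diff. A small sketch of the difference, in Python 2 to match the codebase; both function names are illustrative re-implementations, not sqlmap code:

# Old idiom vs. new helper, side by side (illustrative only).
import urlparse

def reduce_same_host(*urls):
    # Folds pairwise equality: with three URLs this evaluates (a == b) == c,
    # i.e. a boolean compared against a hostname string.
    return reduce(lambda x, y: x == y,
                  map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], urls))

def all_same_host(*urls):
    # Compares every hostname against the first one, ignoring the port part.
    return all(urlparse.urlparse(url or "").netloc.split(':')[0] ==
               urlparse.urlparse(urls[0] or "").netloc.split(':')[0] for url in urls)

urls = ("http://www.target.com/a", "http://www.target.com:8080/b", "http://www.target.com/c")
assert all_same_host(*urls)          # same host for all three, port ignored
assert not reduce_same_host(*urls)   # (True == 'www.target.com') evaluates to False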
def getHostHeader(url):
"""
Returns proper Host header value for a given target URL
@@ -3902,6 +3913,13 @@ def evaluateCode(code, variables=None):
def serializeObject(object_):
"""
Serializes given object
>>> serializeObject([1, 2, 3, ('a', 'b')])
'gAJdcQEoSwFLAksDVQFhVQFihnECZS4='
>>> serializeObject(None)
'gAJOLg=='
>>> serializeObject('foobar')
'gAJVBmZvb2JhcnEBLg=='
"""
return base64pickle(object_)
@@ -3912,6 +3930,8 @@ def unserializeObject(value):
>>> unserializeObject(serializeObject([1, 2, 3])) == [1, 2, 3]
True
>>> unserializeObject('gAJVBmZvb2JhcnEBLg==')
'foobar'
"""
return base64unpickle(value) if value else None
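The new doctests pin exact base64 strings, which implies pickling with protocol 2 (the '\x80\x02' header behind the 'gAJ' prefix) followed by base64 encoding. A rough, assumed equivalent of the base64pickle()/base64unpickle() helpers, whose real definitions live elsewhere in lib/core/common.py and are not shown in this diff:

# Assumed behaviour of base64pickle()/base64unpickle(), reproduced with the
# standard library only (Python 2); the _sketch names are illustrative.
import base64
import pickle

def base64pickle_sketch(value):
    return base64.b64encode(pickle.dumps(value, 2))    # protocol 2 gives the 'gAJ...' prefix

def base64unpickle_sketch(value):
    return pickle.loads(base64.b64decode(value))

assert base64pickle_sketch('foobar') == 'gAJVBmZvb2JhcnEBLg=='
assert base64unpickle_sketch(base64pickle_sketch([1, 2, 3])) == [1, 2, 3]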
@@ -3958,6 +3978,8 @@ def decodeHexValue(value, raw=False):
>>> decodeHexValue('3132332031')
u'123 1'
>>> decodeHexValue(['0x31', '0x32'])
[u'1', u'2']
"""
retVal = value
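The added doctests exercise two behaviours: hex-decoding a string into unicode and mapping the same decoding over a list. A minimal sketch of just those two paths (decode_hex_sketch is an illustrative name, not the sqlmap implementation, which also handles the raw flag from its signature and further cases not shown here):

# Sketch of the two doctested behaviours above (Python 2).
import re

def decode_hex_sketch(value):
    if isinstance(value, list):
        return [decode_hex_sketch(_) for _ in value]      # decode each member
    if isinstance(value, basestring):
        hex_ = re.sub(r"(?i)\A0x", "", value)             # optional '0x' prefix
        if re.match(r"\A([0-9a-fA-F]{2})+\Z", hex_):
            return hex_.decode("hex").decode("utf8")      # e.g. '31' -> u'1'
    return value

assert decode_hex_sketch('3132332031') == u'123 1'
assert decode_hex_sketch(['0x31', '0x32']) == [u'1', u'2']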


@@ -19,7 +19,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.0.12.11"
VERSION = "1.0.12.12"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
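For reference, this is how the TYPE and VERSION_STRING expressions above resolve once VERSION is bumped; the describe() wrapper below is just an illustrative name around the exact same logic:

# Same TYPE / VERSION_STRING logic as above, wrapped for illustration.
def describe(version):
    type_ = "dev" if version.count('.') > 2 and version.split('.')[-1] != '0' else "stable"
    version_string = "sqlmap/%s#%s" % ('.'.join(version.split('.')[:-1])
                                       if version.count('.') > 2 and version.split('.')[-1] == '0'
                                       else version, type_)
    return type_, version_string

assert describe("1.0.12.12") == ("dev", "sqlmap/1.0.12.12#dev")    # non-zero monthly commit
assert describe("1.0.12.0") == ("stable", "sqlmap/1.0.12#stable")  # '.0' marks a stable release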


@@ -31,6 +31,7 @@ from extra.safe2bin.safe2bin import safecharencode
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
from lib.core.common import checkSameHost
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import evaluateCode
@@ -266,7 +267,7 @@ class Connect(object):
url = urlparse.urljoin(conf.url, url)
# flag to know if we are dealing with the same target host
target = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], [url, conf.url or ""]))
target = checkSameHost(url, conf.url)
if not retrying:
# Reset the number of connection retries


@@ -12,6 +12,7 @@ import urlparse
import tempfile
import time
from lib.core.common import checkSameHost
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import findPageForms
@@ -97,7 +98,7 @@ def crawl(target):
url = urlparse.urljoin(current, href)
# flag to know if we are dealing with the same target host
_ = reduce(lambda x, y: x == y, map(lambda x: urlparse.urlparse(x).netloc.split(':')[0], (url, target)))
_ = checkSameHost(url, target)
if conf.scope:
if not re.search(conf.scope, url, re.I):