God help us all with this Python3 non-sense

Miroslav Stampar
2019-03-27 13:33:46 +01:00
parent 2dbd0267a1
commit 2f53014685
23 changed files with 118 additions and 201 deletions

View File

@@ -61,7 +61,6 @@ from lib.core.defaults import defaults
from lib.core.dicts import DBMS_DICT
from lib.core.dicts import DEFAULT_DOC_ROOTS
from lib.core.dicts import DEPRECATED_OPTIONS
from lib.core.dicts import HTTP_RESPONSES
from lib.core.dicts import SQL_STATEMENTS
from lib.core.enums import ADJUST_TIME_DELAY
from lib.core.enums import CONTENT_STATUS
@@ -174,6 +173,7 @@ from thirdparty.colorama.initialise import init as coloramainit
from thirdparty.magic import magic
from thirdparty.odict import OrderedDict
from thirdparty.six.moves import configparser as _configparser
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import urllib as _urllib
from thirdparty.termcolor.termcolor import colored
@@ -3301,9 +3301,9 @@ def showHttpErrorCodes():
if kb.httpErrorCodes:
warnMsg = "HTTP error codes detected during run:\n"
warnMsg += ", ".join("%d (%s) - %d times" % (code, HTTP_RESPONSES[code] if code in HTTP_RESPONSES else '?', count) for code, count in kb.httpErrorCodes.items())
warnMsg += ", ".join("%d (%s) - %d times" % (code, _http_client.responses[code] if code in _http_client.responses else '?', count) for code, count in kb.httpErrorCodes.items())
logger.warn(warnMsg)
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != 500 and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
if any((str(_).startswith('4') or str(_).startswith('5')) and _ != _http_client.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
msg = "too many 4xx and/or 5xx HTTP error codes "
msg += "could mean that some kind of protection is involved (e.g. WAF)"
logger.debug(msg)
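
six.moves.http_client resolves to httplib on Python 2 and to http.client on Python 3, and both expose a responses dict mapping status codes to reason phrases, which is what makes the hand-rolled HTTP_RESPONSES table removable. A minimal sketch of the lookup pattern, using the standalone six package rather than sqlmap's bundled thirdparty.six, with an illustrative codes dict standing in for kb.httpErrorCodes:

from six.moves import http_client as _http_client

# illustrative counts; sqlmap tracks these in kb.httpErrorCodes
codes = {404: 3, 500: 1, 999: 2}

# same fallback-to-'?' idea as showHttpErrorCodes(): codes unknown to the
# standard table (e.g. a vendor-specific 999) simply get no reason phrase
summary = ", ".join(
    "%d (%s) - %d times" % (code, _http_client.responses.get(code, '?'), count)
    for code, count in codes.items()
)
print(summary)  # e.g. 404 (Not Found) - 3 times, 500 (Internal Server Error) - 1 times, 999 (?) - 2 times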

View File

@@ -330,47 +330,3 @@ PART_RUN_CONTENT_TYPES = {
"osCmd": CONTENT_TYPE.OS_CMD,
"regRead": CONTENT_TYPE.REG_READ
}
HTTP_RESPONSES = {
200: "OK",
201: "Created",
202: "Accepted",
203: "Non-Authoritative Information",
204: "No Content",
205: "Reset Content",
206: "Partial Content",
100: "Continue",
101: "Switching Protocols",
300: "Multiple Choices",
301: "Moved Permanently",
302: "Found",
303: "See Other",
304: "Not Modified",
305: "Use Proxy",
306: "(Unused)",
307: "Temporary Redirect",
400: "Bad Request",
401: "Unauthorized",
402: "Payment Required",
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
406: "Not Acceptable",
407: "Proxy Authentication Required",
408: "Request Timeout",
409: "Conflict",
410: "Gone",
411: "Length Required",
412: "Precondition Failed",
413: "Request Entity Too Large",
414: "Request-URI Too Long",
415: "Unsupported Media Type",
416: "Requested Range Not Satisfiable",
417: "Expectation Failed",
500: "Internal Server Error",
501: "Not Implemented",
502: "Bad Gateway",
503: "Service Unavailable",
504: "Gateway Timeout",
505: "HTTP Version Not Supported"
}

View File

@@ -5,9 +5,7 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import cookielib
import glob
import httplib
import inspect
import logging
import os
@@ -19,7 +17,6 @@ import tempfile
import threading
import time
import urllib2
import urlparse
import lib.controller.checks
import lib.core.common
@@ -153,14 +150,17 @@ from lib.utils.purge import purge
from thirdparty.keepalive import keepalive
from thirdparty.multipart import multipartpost
from thirdparty.oset.pyoset import oset
from thirdparty.six.moves import http_client as _http_client
from thirdparty.six.moves import http_cookiejar as _http_cookiejar
from thirdparty.six.moves import urllib as _urllib
from thirdparty.socks import socks
from xml.etree.ElementTree import ElementTree
authHandler = urllib2.BaseHandler()
authHandler = _urllib.request.BaseHandler()
chunkedHandler = ChunkedHandler()
httpsHandler = HTTPSHandler()
keepAliveHandler = keepalive.HTTPHandler()
proxyHandler = urllib2.ProxyHandler()
proxyHandler = _urllib.request.ProxyHandler()
redirectHandler = SmartRedirectHandler()
rangeHandler = HTTPRangeHandler()
multipartPostHandler = multipartpost.MultipartPostHandler()
@@ -1053,7 +1053,7 @@ def _setHTTPHandlers():
logger.debug(debugMsg)
try:
_ = urlparse.urlsplit(conf.proxy)
_ = _urllib.parse.urlsplit(conf.proxy)
except Exception as ex:
errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
raise SqlmapSyntaxException(errMsg)
@@ -1090,9 +1090,9 @@ def _setHTTPHandlers():
proxyHandler.proxies = {}
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if scheme == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, hostname, port, username=username, password=password)
socks.wrapmodule(urllib2)
socks.wrapmodule(_http_client)
else:
socks.unwrapmodule(urllib2)
socks.unwrapmodule(_http_client)
if conf.proxyCred:
# Reference: http://stackoverflow.com/questions/34079/how-to-specify-an-authenticated-proxy-for-a-python-http-connection
@@ -1112,12 +1112,12 @@ def _setHTTPHandlers():
if not conf.dropSetCookie:
if not conf.loadCookies:
conf.cj = cookielib.CookieJar()
conf.cj = _http_cookiejar.CookieJar()
else:
conf.cj = cookielib.MozillaCookieJar()
conf.cj = _http_cookiejar.MozillaCookieJar()
resetCookieJar(conf.cj)
handlers.append(urllib2.HTTPCookieProcessor(conf.cj))
handlers.append(_urllib.request.HTTPCookieProcessor(conf.cj))
# Reference: http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html
if conf.keepAlive:
@@ -1133,8 +1133,8 @@ def _setHTTPHandlers():
else:
handlers.append(keepAliveHandler)
opener = urllib2.build_opener(*handlers)
urllib2.install_opener(opener)
opener = _urllib.request.build_opener(*handlers)
_urllib.request.install_opener(opener)
def _setSafeVisit():
"""
@@ -1166,7 +1166,7 @@ def _setSafeVisit():
if value.endswith(":443"):
scheme = "https"
value = "%s://%s" % (scheme, value)
kb.safeReq.url = urlparse.urljoin(value, kb.safeReq.url)
kb.safeReq.url = _urllib.parse.urljoin(value, kb.safeReq.url)
else:
break
@@ -1289,7 +1289,7 @@ def _setHTTPAuthentication():
conf.authUsername = aCredRegExp.group(1)
conf.authPassword = aCredRegExp.group(2)
kb.passwordMgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
kb.passwordMgr = _urllib.request.HTTPPasswordMgrWithDefaultRealm()
_setAuthCred()
@@ -1297,7 +1297,7 @@ def _setHTTPAuthentication():
authHandler = SmartHTTPBasicAuthHandler(kb.passwordMgr)
elif authType == AUTH_TYPE.DIGEST:
authHandler = urllib2.HTTPDigestAuthHandler(kb.passwordMgr)
authHandler = _urllib.request.HTTPDigestAuthHandler(kb.passwordMgr)
elif authType == AUTH_TYPE.NTLM:
try:
@@ -1459,7 +1459,7 @@ def _setHostname():
if conf.url:
try:
conf.hostname = urlparse.urlsplit(conf.url).netloc.split(':')[0]
conf.hostname = _urllib.parse.urlsplit(conf.url).netloc.split(':')[0]
except ValueError as ex:
errMsg = "problem occurred while "
errMsg += "parsing an URL '%s' ('%s')" % (conf.url, getSafeExString(ex))
@@ -1783,8 +1783,8 @@ def _cleanupEnvironment():
Cleanup environment (e.g. from leftovers after --sqlmap-shell).
"""
if issubclass(urllib2.socket.socket, socks.socksocket):
socks.unwrapmodule(urllib2)
if issubclass(_http_client.socket.socket, socks.socksocket):
socks.unwrapmodule(_http_client)
if hasattr(socket, "_ready"):
socket._ready.clear()
@@ -2312,11 +2312,11 @@ def _setTorSocksProxySettings():
# SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5 if conf.torType == PROXY_TYPE.SOCKS5 else socks.PROXY_TYPE_SOCKS4, LOCALHOST, port)
socks.wrapmodule(urllib2)
socks.wrapmodule(_http_client)
def _setHttpChunked():
if conf.chunked and conf.data:
httplib.HTTPConnection._set_content_length = lambda self, a, b: None
_http_client.HTTPConnection._set_content_length = lambda self, a, b: None
def _checkWebSocket():
if conf.url and (conf.url.startswith("ws:/") or conf.url.startswith("wss:/")):
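
Across this file the direct urllib2 / cookielib references become their six.moves aliases, so the same handler and opener setup runs unchanged on Python 2 (urllib2, cookielib) and Python 3 (urllib.request, http.cookiejar). A minimal sketch of the pattern, using the standalone six package and a hypothetical local proxy rather than sqlmap's conf values:

from six.moves import http_cookiejar as _http_cookiejar
from six.moves import urllib as _urllib

# cookie jar and handlers resolve to cookielib/urllib2 on Python 2
# and to http.cookiejar/urllib.request on Python 3
cj = _http_cookiejar.CookieJar()
handlers = [
    _urllib.request.HTTPCookieProcessor(cj),
    _urllib.request.ProxyHandler({"http": "http://127.0.0.1:8080"}),  # hypothetical proxy
]

opener = _urllib.request.build_opener(*handlers)
_urllib.request.install_opener(opener)  # later urlopen() calls go through these handlers

Wrapping http_client with the SOCKS module (instead of urllib2) follows the same logic: on Python 3 urllib.request is built on http.client, and on Python 2 urllib2 is built on httplib, so patching the lower layer covers both stacks.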

View File

@@ -6,9 +6,9 @@ See the file 'LICENSE' for copying permission
"""
import codecs
import httplib
from lib.core.settings import IS_WIN
from thirdparty.six.moves import http_client as _http_client
def dirtyPatches():
"""
@@ -16,7 +16,7 @@ def dirtyPatches():
"""
# accept overly long result lines (e.g. SQLi results in HTTP header responses)
httplib._MAXLINE = 1 * 1024 * 1024
_http_client._MAXLINE = 1 * 1024 * 1024
# add support for inet_pton() on Windows OS
if IS_WIN:
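
_MAXLINE is a private attribute of both httplib and http.client that caps the length of a single status or header line (65536 bytes by default); lines exceeding it raise LineTooLong. Going through six.moves keeps this deliberately dirty monkey-patch working on either interpreter. A minimal standalone sketch of the same idea:

from six.moves import http_client as _http_client

# raise the per-line cap so overly long header lines (e.g. SQLi results
# reflected into response headers) do not abort response parsing;
# note this is a private attribute and may change between versions
_http_client._MAXLINE = 1 * 1024 * 1024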

View File

@@ -17,7 +17,7 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
from lib.core.enums import OS
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
VERSION = "1.3.3.56"
VERSION = "1.3.3.57"
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)

View File

@@ -12,7 +12,6 @@ import subprocess
import sys
import tempfile
import time
import urlparse
from lib.core.common import Backend
from lib.core.common import getSafeExString
@@ -74,6 +73,7 @@ from lib.core.settings import USER_AGENT_ALIASES
from lib.core.settings import XML_RECOGNITION_REGEX
from lib.utils.hashdb import HashDB
from thirdparty.odict import OrderedDict
from thirdparty.six.moves import urllib as _urllib
def _setRequestParams():
"""
@@ -276,7 +276,7 @@ def _setRequestParams():
if not kb.processUserMarks:
if place == PLACE.URI:
query = urlparse.urlsplit(value).query
query = _urllib.parse.urlsplit(value).query
if query:
parameters = conf.parameters[PLACE.GET] = query
paramDict = paramToDict(PLACE.GET, parameters)
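
The same aliasing applies on the parsing side: six.moves.urllib.parse points at urlparse on Python 2 and urllib.parse on Python 3, so query extraction needs no version check. A small sketch with a hypothetical URL:

from six.moves import urllib as _urllib

url = "http://www.target.com/search.php?q=test&cat=2"  # hypothetical URL
split = _urllib.parse.urlsplit(url)

print(split.query)                          # q=test&cat=2
print(_urllib.parse.parse_qs(split.query))  # e.g. {'q': ['test'], 'cat': ['2']}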

View File

@@ -13,6 +13,7 @@ import threading
import time
import traceback
from lib.core.compat import WichmannHill
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
@@ -57,7 +58,7 @@ class _ThreadData(threading.local):
self.lastRequestMsg = None
self.lastRequestUID = 0
self.lastRedirectURL = None
self.random = random.WichmannHill()
self.random = WichmannHill()
self.resumed = False
self.retriesCount = 0
self.seqMatcher = difflib.SequenceMatcher(None)
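
random.WichmannHill was removed in Python 3, which is why the per-thread generator is now imported from lib.core.compat instead of random. A minimal sketch of the classic three-seed Wichmann-Hill generator as a random.Random subclass; this is an illustrative stand-in, not sqlmap's actual compat implementation:

import random
import time

class WichmannHill(random.Random):
    """Classic Wichmann-Hill PRNG, useful for reproducible per-thread sequences."""

    def seed(self, a=None):
        if a is None:
            a = int(time.time() * 256)
        a = int(a)
        a, x = divmod(a, 30268)
        a, y = divmod(a, 30306)
        a, z = divmod(a, 30322)
        self._seed = x + 1, y + 1, z + 1

    def random(self):
        # three linear congruential streams with distinct moduli; the
        # combined fractional part is approximately uniform on [0, 1)
        x, y, z = self._seed
        x = (171 * x) % 30269
        y = (172 * y) % 30307
        z = (170 * z) % 30323
        self._seed = x, y, z
        return (x / 30269.0 + y / 30307.0 + z / 30323.0) % 1.0

rng = WichmannHill()
rng.seed(0xdeadbeef)
print(rng.random())  # identical seeds yield identical sequences on both Python 2 and 3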