mirror of https://github.com/sqlmapproject/sqlmap.git
synced 2025-12-07 13:11:29 +00:00
God help us all with this Python3 non-sense
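The whole commit applies a single Python 2/3 compatibility pattern: direct imports of the Python 2-only modules httplib, urllib, urllib2 and urlparse give way to the bundled six.moves aliases, which resolve to the matching stdlib module on either interpreter. A minimal standalone sketch of the pattern, assuming the PyPI six package (sqlmap vendors its own copy as thirdparty.six):

    # Sketch only: inside sqlmap these are "from thirdparty.six.moves import ..."
    from six.moves import http_client as _http_client  # httplib on 2, http.client on 3
    from six.moves import urllib as _urllib            # urllib/urllib2/urlparse on 2, urllib.* on 3

    req = _urllib.request.Request("http://example.com", None, {"User-Agent": "sketch"})
    try:
        text = _urllib.request.urlopen(req).read()
    except _urllib.error.HTTPError as ex:
        if ex.code == _http_client.UNAUTHORIZED:  # 401 constant, same name on both
            raise

This is why each file below gains the two "from thirdparty.six.moves import ..." lines while losing its "import httplib" / "import urllib2" counterparts.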

lib/utils/api.py

@@ -9,7 +9,6 @@ See the file 'LICENSE' for copying permission
 from __future__ import print_function
 
 import contextlib
-import httplib
 import logging
 import os
 import re
@@ -19,7 +18,6 @@ import sqlite3
 import sys
 import tempfile
 import time
-import urllib2
 
 from lib.core.common import dataToStdout
 from lib.core.common import getSafeExString
@@ -57,6 +55,8 @@ from thirdparty.bottle.bottle import request
 from thirdparty.bottle.bottle import response
 from thirdparty.bottle.bottle import run
 from thirdparty.bottle.bottle import server_names
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
 # Global data storage
 class DataStore(object):
@@ -716,8 +716,8 @@ def _client(url, options=None):
         if DataStore.username or DataStore.password:
             headers["Authorization"] = "Basic %s" % base64encode("%s:%s" % (DataStore.username or "", DataStore.password or ""))
 
-        req = urllib2.Request(url, data, headers)
-        response = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, data, headers)
+        response = _urllib.request.urlopen(req)
         text = response.read()
     except:
         if options:
@@ -746,7 +746,7 @@ def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, username=Non
     try:
         _client(addr)
     except Exception as ex:
-        if not isinstance(ex, urllib2.HTTPError) or ex.code == httplib.UNAUTHORIZED:
+        if not isinstance(ex, _urllib.error.HTTPError) or ex.code == _http_client.UNAUTHORIZED:
             errMsg = "There has been a problem while connecting to the "
             errMsg += "REST-JSON API server at '%s' " % addr
             errMsg += "(%s)" % ex

lib/utils/crawler.py

@@ -5,10 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
 import os
 import re
-import urlparse
 import tempfile
 import time
 
@@ -34,6 +32,8 @@ from lib.parse.sitemap import parseSitemap
 from lib.request.connect import Connect as Request
 from thirdparty.beautifulsoup.beautifulsoup import BeautifulSoup
 from thirdparty.oset.pyoset import oset
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 
 def crawl(target):
     try:
@@ -70,7 +70,7 @@ def crawl(target):
                 except SqlmapSyntaxException:
                     errMsg = "invalid URL detected. skipping '%s'" % current
                     logger.critical(errMsg)
-                except httplib.InvalidURL as ex:
+                except _http_client.InvalidURL as ex:
                     errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                     errMsg += "URL '%s'" % current
                     logger.critical(errMsg)
@@ -96,7 +96,7 @@ def crawl(target):
                         if href:
                             if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
                                 current = threadData.lastRedirectURL[1]
-                            url = urlparse.urljoin(current, href)
+                            url = _urllib.parse.urljoin(current, href)
 
                             # flag to know if we are dealing with the same target host
                             _ = checkSameHost(url, target)
@@ -135,7 +135,7 @@ def crawl(target):
         if readInput(message, default='N', boolean=True):
             found = True
             items = None
-            url = urlparse.urljoin(target, "/sitemap.xml")
+            url = _urllib.parse.urljoin(target, "/sitemap.xml")
             try:
                 items = parseSitemap(url)
             except SqlmapConnectionException as ex:

lib/utils/har.py

@@ -8,7 +8,6 @@ See the file 'LICENSE' for copying permission
 import base64
 import BaseHTTPServer
 import datetime
-import httplib
 import io
 import re
 import time
@@ -157,12 +156,12 @@ class Response:
         altered = status_line + "\r\n" + remain
         comment = first_line
 
-        response = httplib.HTTPResponse(FakeSocket(altered))
+        response = _http_client.HTTPResponse(FakeSocket(altered))
         response.begin()
 
         try:
             content = response.read(-1)
-        except httplib.IncompleteRead:
+        except _http_client.IncompleteRead:
             content = raw[raw.find("\r\n\r\n") + 4:].rstrip("\r\n")
 
         return cls(httpVersion="HTTP/1.1" if response.version == 11 else "HTTP/1.0",
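
The Response hunk above keeps har.py's trick of re-parsing a raw HTTP response: wrap the bytes in a socket-like object and hand it to HTTPResponse. A rough standalone sketch, with io.BytesIO standing in for sqlmap's own FakeSocket helper (an assumption; the real helper is defined elsewhere in har.py):

    import io
    from six.moves import http_client as _http_client

    class FakeSocket(object):
        """Socket stand-in: HTTPResponse only calls makefile() on it."""
        def __init__(self, data):
            self._file = io.BytesIO(data)

        def makefile(self, *args, **kwargs):
            return self._file

    raw = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi"
    response = _http_client.HTTPResponse(FakeSocket(raw))
    response.begin()         # parses the status line and headers
    print(response.version)  # 11, which the hunk's return maps to "HTTP/1.1"
    print(response.read(-1)) # b'hi'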

lib/utils/search.py

@@ -5,11 +5,8 @@ Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import httplib
 import re
 import socket
-import urllib
-import urllib2
 
 from lib.core.common import getSafeExString
 from lib.core.common import getUnicode
@@ -34,6 +31,8 @@ from lib.core.settings import GOOGLE_REGEX
 from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE
 from lib.core.settings import UNICODE_ENCODING
 from lib.request.basic import decodePage
+from thirdparty.six.moves import http_client as _http_client
+from thirdparty.six.moves import urllib as _urllib
 from thirdparty.socks import socks
 
 def _search(dork):
@@ -52,8 +51,8 @@ def _search(dork):
     headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
 
     try:
-        req = urllib2.Request("https://www.google.com/ncr", headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
+        conn = _urllib.request.urlopen(req)
     except Exception as ex:
         errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
         raise SqlmapConnectionException(errMsg)
@@ -67,11 +66,11 @@ def _search(dork):
     url += "&start=%d" % ((gpage - 1) * 100)
 
     try:
-        req = urllib2.Request(url, headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, headers=headers)
+        conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
-        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
+        requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
         logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
 
         page = conn.read()
@@ -88,7 +87,7 @@ def _search(dork):
         responseMsg += "%s\n%s\n" % (responseHeaders, page)
 
         logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
-    except urllib2.HTTPError as ex:
+    except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
         except Exception as _:
@@ -96,11 +95,11 @@ def _search(dork):
             warnMsg += "an error page information (%s)" % getSafeExString(_)
             logger.critical(warnMsg)
             return None
-    except (urllib2.URLError, httplib.error, socket.error, socket.timeout, socks.ProxyError):
+    except (_urllib.error.URLError, _http_client.error, socket.error, socket.timeout, socks.ProxyError):
         errMsg = "unable to connect to Google"
         raise SqlmapConnectionException(errMsg)
 
-    retVal = [urllib.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
+    retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
 
     if not retVal and "detected unusual traffic" in page:
         warnMsg = "Google has detected 'unusual' traffic from "
@@ -129,11 +128,11 @@ def _search(dork):
         regex = DUCKDUCKGO_REGEX
 
     try:
-        req = urllib2.Request(url, data=data, headers=headers)
-        conn = urllib2.urlopen(req)
+        req = _urllib.request.Request(url, data=data, headers=headers)
+        conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
-        requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str
+        requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
         logger.log(CUSTOM_LOGGING.TRAFFIC_OUT, requestMsg)
 
         page = conn.read()
@@ -150,7 +149,7 @@ def _search(dork):
         responseMsg += "%s\n%s\n" % (responseHeaders, page)
 
         logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)
-    except urllib2.HTTPError as ex:
+    except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
             page = decodePage(page, ex.headers.get("Content-Encoding"), ex.headers.get("Content-Type"))
@@ -163,7 +162,7 @@ def _search(dork):
         errMsg = "unable to connect"
         raise SqlmapConnectionException(errMsg)
 
-    retVal = [urllib.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
+    retVal = [_urllib.parse.unquote(match.group(1).replace("&amp;", "&")) for match in re.finditer(regex, page, re.I | re.S)]
 
     if not retVal and "issue with the Tor Exit Node you are currently using" in page:
         warnMsg = "DuckDuckGo has detected 'unusual' traffic from "
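
The crawler and search hunks also cover the URL helpers that moved in Python 3: urlparse.urljoin and urllib.unquote both now live in urllib.parse, which six.moves exposes uniformly as _urllib.parse. A quick standalone check, again assuming the PyPI six package:

    from six.moves import urllib as _urllib

    # Python 2: urlparse.urljoin / urllib.unquote
    # Python 3: urllib.parse.urljoin / urllib.parse.unquote
    print(_urllib.parse.urljoin("http://example.com/a/b", "/sitemap.xml"))
    # -> http://example.com/sitemap.xml
    print(_urllib.parse.unquote("sqlmap%20rocks"))
    # -> sqlmap rocks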